[subversion] - add upstream test suite fixes for APR hash change (r1293602, r1293811)

jorton jorton at fedoraproject.org
Tue Feb 28 13:20:46 UTC 2012


commit 7fbff7dd7a748897738a01ec12a1ed46df29f6fd
Author: Joe Orton <jorton at redhat.com>
Date:   Tue Feb 28 12:42:52 2012 +0000

    - add upstream test suite fixes for APR hash change (r1293602, r1293811)

 .gitignore                       |    4 +
 subversion-1.7.3-hashorder.patch |  885 ++++++++++++++++++++++++++++++++++++++
 subversion.spec                  |    7 +-
 3 files changed, 895 insertions(+), 1 deletions(-)
---
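Background for the patch that follows: a change to APR's default hash function means apr_hash_t iteration order is no longer fixed, so Subversion commands that walk hashes (diff, status --xml, svnlook tree, dump output, and so on) can emit the same lines in a different order from one run to the next. Tests that compared output line-by-line against a hard-coded ordering therefore started to fail. The upstream fixes (r1293602, r1293811) switch those tests to order-insensitive comparisons: svntest.verify.UnorderedOutput, sorted expected lists, and a structural dump-file comparison. As a rough illustration of the pattern only (a hypothetical sketch, not the svntest implementation):

    # Compare two lists of output lines as multisets, so a change in hash
    # iteration order on the producing side does not cause a spurious
    # test failure.  Hypothetical helper, loosely mirroring the idea behind
    # svntest.verify.UnorderedOutput.
    from collections import Counter

    def assert_unordered_equal(expected_lines, actual_lines):
        if Counter(expected_lines) != Counter(actual_lines):
            missing = list(Counter(expected_lines) - Counter(actual_lines))
            extra = list(Counter(actual_lines) - Counter(expected_lines))
            raise AssertionError("missing: %r unexpected: %r" % (missing, extra))

    # Same content, different order: this passes.
    assert_unordered_equal(['A    dir1\n', 'A    README.txt\n'],
                           ['A    README.txt\n', 'A    dir1\n'])
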
diff --git a/.gitignore b/.gitignore
index eacc26d..ced8619 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,3 +11,7 @@ subversion-1.6.12.tar.bz2
 /subversion-1.7.1.tar.bz2
 /subversion-1.7.2.tar.bz2
 /subversion-1.7.3.tar.bz2
+/subversion
+/subversion-1.?.?
+/*.asc
+/subversion-1.?.??
diff --git a/subversion-1.7.3-hashorder.patch b/subversion-1.7.3-hashorder.patch
new file mode 100644
index 0000000..27e4d46
--- /dev/null
+++ b/subversion-1.7.3-hashorder.patch
@@ -0,0 +1,885 @@
+
+http://svn.apache.org/viewvc?rev=1293602&view=rev
+http://svn.apache.org/viewvc?rev=1293811&view=rev
+
+--- subversion-1.7.3/subversion/bindings/swig/python/tests/trac/versioncontrol/tests/svn_fs.py.hashorder
++++ subversion-1.7.3/subversion/bindings/swig/python/tests/trac/versioncontrol/tests/svn_fs.py
+@@ -278,16 +278,27 @@ class SubversionRepositoryTestCase(unitt
+ 
+     def test_diff_dir_different_dirs(self):
+         diffs = self.repos.get_deltas('trunk', 1, 'branches/v1x', 12)
+-        self._cmp_diff((None, ('branches/v1x/dir1', 12),
+-                        (Node.DIRECTORY, Changeset.ADD)), diffs.next())
+-        self._cmp_diff((None, ('branches/v1x/dir1/dir2', 12),
+-                        (Node.DIRECTORY, Changeset.ADD)), diffs.next())
+-        self._cmp_diff((None, ('branches/v1x/dir1/dir3', 12),
+-                        (Node.DIRECTORY, Changeset.ADD)), diffs.next())
+-        self._cmp_diff((None, ('branches/v1x/README.txt', 12),
+-                        (Node.FILE, Changeset.ADD)), diffs.next())
+-        self._cmp_diff((None, ('branches/v1x/README2.txt', 12),
+-                        (Node.FILE, Changeset.ADD)), diffs.next())
++        expected = [
++          (None, ('branches/v1x/README.txt', 12),
++           (Node.FILE, Changeset.ADD)),
++          (None, ('branches/v1x/README2.txt', 12),
++           (Node.FILE, Changeset.ADD)),
++          (None, ('branches/v1x/dir1', 12),
++           (Node.DIRECTORY, Changeset.ADD)),
++          (None, ('branches/v1x/dir1/dir2', 12),
++           (Node.DIRECTORY, Changeset.ADD)),
++          (None, ('branches/v1x/dir1/dir3', 12),
++           (Node.DIRECTORY, Changeset.ADD)),
++        ]
++        actual = [diffs.next() for i in range(5)]
++        actual = sorted(actual, key=lambda diff: (diff[1].path, diff[1].rev))
++        # for e,a in zip(expected, actual):
++        #   t.write("%r\n" % (e,))
++        #   t.write("%r\n" % ((None, (a[1].path, a[1].rev), (a[2], a[3])),) )
++        #   t.write('\n')
++        self.assertEqual(len(expected), len(actual))
++        for e,a in zip(expected, actual):
++          self._cmp_diff(e,a)
+         self.assertRaises(StopIteration, diffs.next)
+ 
+     def test_diff_dir_no_change(self):
+--- subversion-1.7.3/subversion/tests/cmdline/diff_tests.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/diff_tests.py
+@@ -1165,8 +1165,10 @@ def diff_base_to_repos(sbox):
+     if not re_infoline.match(line):
+       list2.append(line)
+ 
+-  if list1 != list2:
+-    raise svntest.Failure
++  # Two files in diff may be in any order.
++  list1 = svntest.verify.UnorderedOutput(list1)
++
++  svntest.verify.compare_and_display_lines('', '', list1, list2)
+ 
+ 
+ #----------------------------------------------------------------------
+@@ -3590,6 +3592,9 @@ def diff_git_empty_files(sbox):
+   ] + make_git_diff_header(iota_path, "iota", "revision 2", "working copy",
+                            delete=True, text_changes=False)
+ 
++  # Two files in diff may be in any order.
++  expected_output = svntest.verify.UnorderedOutput(expected_output)
++
+   svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff',
+                                      '--git', wc_dir)
+ 
+@@ -3630,6 +3635,9 @@ def diff_git_with_props(sbox):
+                     make_diff_prop_header("iota") + \
+                     make_diff_prop_added("svn:keywords", "Id")
+ 
++  # Files in diff may be in any order.
++  expected_output = svntest.verify.UnorderedOutput(expected_output)
++
+   svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff',
+                                      '--git', wc_dir)
+ 
+--- subversion-1.7.3/subversion/tests/cmdline/lock_tests.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/lock_tests.py
+@@ -1172,6 +1172,8 @@ def repos_lock_with_info(sbox):
+ 
+ 
+ #----------------------------------------------------------------------
+@Issue(4126)
+@Skip(svntest.main.is_ra_type_dav_serf) # Issue 4126 unpredictable result
+ def unlock_already_unlocked_files(sbox):
+   "(un)lock set of files, one already (un)locked"
+ 
+--- subversion-1.7.3/subversion/tests/cmdline/patch_tests.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/patch_tests.py
+@@ -2822,12 +2822,13 @@ def patch_prop_offset(sbox):
+ 
+   os.chdir(wc_dir)
+ 
+-  expected_output = [
++  # Changing two properties so output order not well defined.
++  expected_output = svntest.verify.UnorderedOutput([
+     ' U        iota\n',
+     '>         applied hunk ## -6,6 +6,9 ## with offset -1 (prop1)\n',
+     '>         applied hunk ## -14,11 +17,8 ## with offset 4 (prop1)\n',
+     '>         applied hunk ## -5,6 +5,7 ## with offset -3 (prop2)\n',
+-  ]
++  ])
+ 
+   expected_disk = svntest.main.greek_state.copy()
+   expected_disk.tweak('iota', props = {'prop1' : prop1_content,
+--- subversion-1.7.3/subversion/tests/cmdline/stat_tests.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/stat_tests.py
+@@ -924,38 +924,13 @@ def status_in_xml(sbox):
+   else:
+     raise svntest.Failure
+ 
+-  template = ['<?xml version="1.0" encoding="UTF-8"?>\n',
+-              "<status>\n",
+-              "<target\n",
+-              "   path=\"%s\">\n" % (file_path),
+-              "<entry\n",
+-              "   path=\"%s\">\n" % (file_path),
+-              "<wc-status\n",
+-              "   props=\"none\"\n",
+-              "   item=\"modified\"\n",
+-              "   revision=\"1\">\n",
+-              "<commit\n",
+-              "   revision=\"1\">\n",
+-              "<author>%s</author>\n" % svntest.main.wc_author,
+-              time_str,
+-              "</commit>\n",
+-              "</wc-status>\n",
+-              "</entry>\n",
+-              "<against\n",
+-              "   revision=\"1\"/>\n",
+-              "</target>\n",
+-              "</status>\n",
+-             ]
++  expected_entries = {file_path : {'wcprops' : 'none',
++                                   'wcitem' : 'modified',
++                                   'wcrev' : '1',
++                                   'crev' : '1',
++                                   'author' : svntest.main.wc_author}}
+ 
+-  exit_code, output, error = svntest.actions.run_and_verify_svn(None, None, [],
+-                                                                'status',
+-                                                                file_path,
+-                                                                '--xml', '-u')
+-
+-  for i in range(0, len(output)):
+-    if output[i] != template[i]:
+-      print("ERROR: expected: %s actual: %s" % (template[i], output[i]))
+-      raise svntest.Failure
++  svntest.actions.run_and_verify_status_xml(expected_entries, file_path, '-u')
+ 
+   svntest.actions.run_and_verify_svn(None, None, [],
+                                      'cp', '-m', 'repo-to-repo copy',
+@@ -964,36 +939,12 @@ def status_in_xml(sbox):
+   
+   file_path = sbox.ospath('iota2')
+ 
+-  template = ['<?xml version="1.0" encoding="UTF-8"?>\n',
+-              "<status>\n",
+-              "<target\n",
+-              "   path=\"%s\">\n" % (file_path),
+-              "<entry\n",
+-              "   path=\"%s\">\n" % (file_path),
+-              "<wc-status\n",
+-              "   props=\"none\"\n",
+-              "   item=\"none\">\n",
+-              "</wc-status>\n",
+-              "<repos-status\n",
+-              "   props=\"none\"\n",
+-              "   item=\"added\">\n",
+-              "</repos-status>\n",
+-              "</entry>\n",
+-              "<against\n",
+-              "   revision=\"2\"/>\n",
+-              "</target>\n",
+-              "</status>\n",
+-             ]
++  expected_entries = {file_path : {'wcprops' : 'none',
++                                   'wcitem' : 'none',
++                                   'rprops' : 'none',
++                                   'ritem' : 'added'}}
+ 
+-  exit_code, output, error = svntest.actions.run_and_verify_svn(None, None, [],
+-                                                                'status',
+-                                                                file_path,
+-                                                                '--xml', '-u')
+-
+-  for i in range(0, len(output)):
+-    if output[i] != template[i]:
+-      print("ERROR: expected: %s actual: %s" % (template[i], output[i]))
+-      raise svntest.Failure
++  svntest.actions.run_and_verify_status_xml(expected_entries, file_path, '-u')
+ 
+ #----------------------------------------------------------------------
+ 
+@@ -1269,53 +1220,23 @@ def status_update_with_incoming_props(sb
+   else:
+     raise svntest.Failure
+ 
+-  xout = ['<?xml version="1.0" encoding="UTF-8"?>\n',
+-          "<status>\n",
+-          "<target\n",
+-          "   path=\"%s\">\n" % (wc_dir),
+-          "<entry\n",
+-          "   path=\"%s\">\n" % (A_path),
+-          "<wc-status\n",
+-          "   props=\"none\"\n",
+-          "   item=\"normal\"\n",
+-          "   revision=\"1\">\n",
+-          "<commit\n",
+-          "   revision=\"1\">\n",
+-          "<author>%s</author>\n" % svntest.main.wc_author,
+-          time_str,
+-          "</commit>\n",
+-          "</wc-status>\n",
+-          "<repos-status\n",
+-          "   props=\"modified\"\n",
+-          "   item=\"none\">\n",
+-          "</repos-status>\n",
+-          "</entry>\n",
+-          "<entry\n",
+-          "   path=\"%s\">\n" % (wc_dir),
+-          "<wc-status\n",
+-          "   props=\"none\"\n",
+-          "   item=\"normal\"\n",
+-          "   revision=\"1\">\n",
+-          "<commit\n",
+-          "   revision=\"1\">\n",
+-          "<author>%s</author>\n" % svntest.main.wc_author,
+-          time_str,
+-          "</commit>\n",
+-          "</wc-status>\n",
+-          "<repos-status\n",
+-          "   props=\"modified\"\n",
+-          "   item=\"none\">\n",
+-          "</repos-status>\n",
+-          "</entry>\n",
+-          "<against\n",
+-          "   revision=\"2\"/>\n",
+-          "</target>\n",
+-          "</status>\n",]
+-
+-  exit_code, output, error = svntest.actions.run_and_verify_svn(None, xout, [],
+-                                                                'status',
+-                                                                wc_dir,
+-                                                                '--xml', '-uN')
++  expected_entries ={wc_dir : {'wcprops' : 'none',
++                               'wcitem' : 'normal',
++                               'wcrev' : '1',
++                               'crev' : '1',
++                               'author' : svntest.main.wc_author,
++                               'rprops' : 'modified',
++                               'ritem' : 'none'},
++                     A_path : {'wcprops' : 'none',
++                               'wcitem' : 'normal',
++                               'wcrev' : '1',
++                               'crev' : '1',
++                               'author' : svntest.main.wc_author,
++                               'rprops' : 'modified',
++                               'ritem' : 'none'},
++                     }
++
++  svntest.actions.run_and_verify_status_xml(expected_entries, wc_dir, '-uN')
+ 
+ # more incoming prop updates.
+ def status_update_verbose_with_incoming_props(sbox):
+--- subversion-1.7.3/subversion/tests/cmdline/svnlook_tests.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/svnlook_tests.py
+@@ -117,35 +117,39 @@ def test_misc(sbox):
+   # the 'svnlook tree --full-paths' output if demanding the whole repository
+   treelist = run_svnlook('tree', repo_dir)
+   treelistfull = run_svnlook('tree', '--full-paths', repo_dir)
++
+   path = ''
+-  n = 0
++  treelistexpand = []
+   for entry in treelist:
+     len1 = len(entry)
+     len2 = len(entry.lstrip())
+-    path = path[0:2*(len1-len2)-1] + entry.strip()
+-    test = treelistfull[n].rstrip()
+-    if n != 0:
+-      test = "/" + test
+-    if not path == test:
+-      print("Unexpected result from tree with --full-paths:")
+-      print("  entry            : %s" % entry.rstrip())
+-      print("  with --full-paths: %s" % treelistfull[n].rstrip())
+-      raise svntest.Failure
+-    n = n + 1
++    path = path[0:2*(len1-len2)-1] + entry.strip() + '\n'
++    if path == '/\n':
++      treelistexpand.append(path)
++    else:
++      treelistexpand.append(path[1:])
++
++  treelistexpand = svntest.verify.UnorderedOutput(treelistexpand)
++  svntest.verify.compare_and_display_lines('Unexpected result from tree', '',
++                                           treelistexpand, treelistfull)
+ 
+   # check if the 'svnlook tree' output is the ending of
+   # the 'svnlook tree --full-paths' output if demanding
+   # any part of the repository
+-  n = 0
+   treelist = run_svnlook('tree', repo_dir, '/A/B')
+   treelistfull = run_svnlook('tree', '--full-paths', repo_dir, '/A/B')
++
++  path = ''
++  treelistexpand = []
+   for entry in treelist:
+-    if not treelistfull[n].endswith(entry.lstrip()):
+-      print("Unexpected result from tree with --full-paths:")
+-      print("  entry            : %s" % entry.rstrip())
+-      print("  with --full-paths: %s" % treelistfull[n].rstrip())
+-      raise svntest.Failure
+-    n = n + 1
++    len1 = len(entry)
++    len2 = len(entry.lstrip())
++    path = path[0:2*(len1-len2)] + entry.strip() + '\n'
++    treelistexpand.append('/A/' + path)
++
++  treelistexpand = svntest.verify.UnorderedOutput(treelistexpand)
++  svntest.verify.compare_and_display_lines('Unexpected result from tree', '',
++                                           treelistexpand, treelistfull)
+ 
+   treelist = run_svnlook('tree', repo_dir, '/')
+   if treelist[0] != '/\n':
+@@ -695,7 +699,7 @@ fp.close()"""
+                     #  internal property, not really expected
+                     '  svn:check-locks\n',
+                     '  bogus_rev_prop\n', '  svn:date\n']
+-  verify_logfile(logfilepath, expected_data)
++  verify_logfile(logfilepath, svntest.verify.UnorderedOutput(expected_data))
+ 
+ ########################################################################
+ # Run the tests
+--- subversion-1.7.3/subversion/tests/cmdline/svnrdump_tests.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/svnrdump_tests.py
+@@ -70,8 +70,31 @@ def build_repos(sbox):
+   # Create an empty repository.
+   svntest.main.create_repos(sbox.repo_dir)
+ 
++def compare_repos_dumps(svnrdump_sbox, svnadmin_dumpfile):
++  """Compare two dumpfiles, one created from SVNRDUMP_SBOX, and other given
++  by SVNADMIN_DUMPFILE.  The dumpfiles do not need to match linewise, as the
++  SVNADMIN_DUMPFILE contents will first be loaded into a repository and then
++  re-dumped to do the match, which should generate the same dumpfile as
++  dumping SVNRDUMP_SBOX."""
++
++  svnrdump_contents = svntest.actions.run_and_verify_dump(
++                                                    svnrdump_sbox.repo_dir)
++
++  svnadmin_sbox = svnrdump_sbox.clone_dependent()
++  svntest.main.safe_rmtree(svnadmin_sbox.repo_dir)
++  svntest.main.create_repos(svnadmin_sbox.repo_dir)
++
++  svntest.actions.run_and_verify_load(svnadmin_sbox.repo_dir, svnadmin_dumpfile)
++
++  svnadmin_contents = svntest.actions.run_and_verify_dump(
++                                                    svnadmin_sbox.repo_dir)
++
++  svntest.verify.compare_dump_files(
++    "Dump files", "DUMP", svnadmin_contents, svnrdump_contents)
++
+ def run_dump_test(sbox, dumpfile_name, expected_dumpfile_name = None,
+-                  subdir = None, bypass_prop_validation = False):
++                  subdir = None, bypass_prop_validation = False,
++                  ignore_base_checksums = False):
+   """Load a dumpfile using 'svnadmin load', dump it with 'svnrdump
+   dump' and check that the same dumpfile is produced or that
+   expected_dumpfile_name is produced if provided. Additionally, the
+@@ -107,12 +130,21 @@ def run_dump_test(sbox, dumpfile_name, e
+     svnadmin_dumpfile = open(os.path.join(svnrdump_tests_dir,
+                                           expected_dumpfile_name),
+                              'rb').readlines()
++    # Compare the output from stdout
++    if ignore_base_checksums:
++      svnadmin_dumpfile = [l for l in svnadmin_dumpfile
++                                    if not l.startswith('Text-delta-base-md5')]
++      svnrdump_dumpfile = [l for l in svnrdump_dumpfile
++                                    if not l.startswith('Text-delta-base-md5')]
++
+     svnadmin_dumpfile = svntest.verify.UnorderedOutput(svnadmin_dumpfile)
+ 
+-  # Compare the output from stdout
+-  svntest.verify.compare_and_display_lines(
+-    "Dump files", "DUMP", svnadmin_dumpfile, svnrdump_dumpfile,
+-    None, mismatched_headers_re)
++    svntest.verify.compare_and_display_lines(
++      "Dump files", "DUMP", svnadmin_dumpfile, svnrdump_dumpfile,
++      None, mismatched_headers_re)
++    
++  else:
++    compare_repos_dumps(sbox, svnadmin_dumpfile)
+ 
+ def run_load_test(sbox, dumpfile_name, expected_dumpfile_name = None,
+                   expect_deltas = True):
+@@ -155,9 +187,12 @@ def run_load_test(sbox, dumpfile_name, e
+                                           expected_dumpfile_name),
+                              'rb').readlines()
+ 
+-  # Compare the output from stdout
+-  svntest.verify.compare_and_display_lines(
+-    "Dump files", "DUMP", svnrdump_dumpfile, svnadmin_dumpfile)
++    # Compare the output from stdout
++    svntest.verify.compare_and_display_lines(
++      "Dump files", "DUMP", svnrdump_dumpfile, svnadmin_dumpfile)
++
++  else:
++    compare_repos_dumps(sbox, svnrdump_dumpfile)
+ 
+ ######################################################################
+ # Tests
+@@ -345,7 +380,7 @@ def copy_bad_line_endings2_dump(sbox):
+   "dump: non-LF line endings in svn:* props"
+   run_dump_test(sbox, "copy-bad-line-endings2.dump",
+                 expected_dumpfile_name="copy-bad-line-endings2.expected.dump",
+-                bypass_prop_validation=True)
++                bypass_prop_validation=True, ignore_base_checksums=True)
+ 
+ @Skip(svntest.main.is_ra_type_dav_serf)
+ def commit_a_copy_of_root_dump(sbox):
+--- subversion-1.7.3/subversion/tests/cmdline/svnsync_tests.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/svnsync_tests.py
+@@ -222,7 +222,7 @@ def verify_mirror(dest_sbox, exp_dump_fi
+   # Create a dump file from the mirror repository.
+   dest_dump = svntest.actions.run_and_verify_dump(dest_sbox.repo_dir)
+ 
+-  svntest.verify.compare_and_display_lines(
++  svntest.verify.compare_dump_files(
+     "Dump files", "DUMP", exp_dump_file_contents, dest_dump)
+ 
+ def run_test(sbox, dump_file_name, subdir=None, exp_dump_file_name=None,
+--- subversion-1.7.3/subversion/tests/cmdline/svntest/actions.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/svntest/actions.py
+@@ -556,7 +556,8 @@ class LogEntry:
+       self.revprops = revprops
+ 
+   def assert_changed_paths(self, changed_paths):
+-    """Not implemented, so just raises svntest.Failure.
++    """Assert that changed_paths is the same as this entry's changed_paths
++    Raises svntest.Failure if not.
+     """
+     raise Failure('NOT IMPLEMENTED')
+ 
+@@ -1079,13 +1080,21 @@ def run_and_verify_merge(dir, rev1, rev2
+   if dry_run and merge_diff_out != out_dry:
+     # Due to the way ra_serf works, it's possible that the dry-run and
+     # real merge operations did the same thing, but the output came in
+-    # a different order.  Let's see if maybe that's the case.
++    # a different order.  Let's see if maybe that's the case by comparing
++    # the outputs as unordered sets rather than as lists.
++    #
++    # This now happens for other RA layers with modern APR because the
++    # hash order now varies.
+     #
+-    # NOTE:  Would be nice to limit this dance to serf tests only, but...
+-    out_copy = merge_diff_out[:]
+-    out_dry_copy = out_dry[:]
+-    out_copy.sort()
+-    out_dry_copy.sort()
++    # The different orders of the real and dry-run merges may cause
++    # the "Merging rX through rY into" lines to be duplicated a
++    # different number of times in the two outputs.  The list-set
++    # conversion removes duplicates so these differences are ignored.
++    # It also removes "U some/path" duplicate lines.  Perhaps we
++    # should avoid that?
++    out_copy = set(merge_diff_out[:])
++    out_dry_copy = set(out_dry[:])
++
+     if out_copy != out_dry_copy:
+       print("=============================================================")
+       print("Merge outputs differ")
+@@ -1198,16 +1207,11 @@ def run_and_verify_patch(dir, patch_path
+     raise verify.SVNUnexpectedStderr
+ 
+   if dry_run and out != out_dry:
+-    print("=============================================================")
+-    print("Outputs differ")
+-    print("'svn patch --dry-run' output:")
+-    for x in out_dry:
+-      sys.stdout.write(x)
+-    print("'svn patch' output:")
+-    for x in out:
+-      sys.stdout.write(x)
+-    print("=============================================================")
+-    raise main.SVNUnmatchedError
++    # APR hash order means the output order can vary, assume everything is OK
++    # if only the order changes.
++    out_dry_expected = svntest.verify.UnorderedOutput(out)
++    verify.compare_and_display_lines('dry-run patch output not as expected',
++                                     '', out_dry_expected, out_dry)
+ 
+   def missing_skip(a, b):
+     print("=============================================================")
+@@ -1230,7 +1234,8 @@ def run_and_verify_patch(dir, patch_path
+ 
+   # when the expected output is a list, we want a line-by-line
+   # comparison to happen instead of a tree comparison
+-  if isinstance(output_tree, list):
++  if (isinstance(output_tree, list)
++      or isinstance(output_tree, verify.UnorderedOutput)):
+     verify.verify_outputs(None, out, err, output_tree, error_re_string)
+     output_tree = None
+ 
+@@ -1503,6 +1508,56 @@ def run_and_verify_unquiet_status(wc_dir
+     tree.dump_tree_script(actual, wc_dir_name + os.sep)
+     raise
+ 
++def run_and_verify_status_xml(expected_entries = [],
++                              *args):
++  """ Run 'status --xml' with arguments *ARGS.  If successful the output
++  is parsed into an XML document and will be verified by comparing against
++  EXPECTED_ENTRIES.
++  """
++
++  exit_code, output, errput = run_and_verify_svn(None, None, [],
++                                                 'status', '--xml', *args)
++
++  if len(errput) > 0:
++    raise Failure
++
++  doc = parseString(''.join(output))
++  entries = doc.getElementsByTagName('entry')
++
++  def getText(nodelist):
++    rc = []
++    for node in nodelist:
++        if node.nodeType == node.TEXT_NODE:
++            rc.append(node.data)
++    return ''.join(rc)
++
++  actual_entries = {}
++  for entry in entries:
++    wcstatus = entry.getElementsByTagName('wc-status')[0]
++    commit = entry.getElementsByTagName('commit')
++    author = entry.getElementsByTagName('author')
++    rstatus = entry.getElementsByTagName('repos-status')
++
++    actual_entry = {'wcprops' : wcstatus.getAttribute('props'),
++                    'wcitem' : wcstatus.getAttribute('item'),
++                    }
++    if wcstatus.hasAttribute('revision'):
++      actual_entry['wcrev'] = wcstatus.getAttribute('revision')
++    if (commit):
++      actual_entry['crev'] = commit[0].getAttribute('revision')
++    if (author):
++      actual_entry['author'] = getText(author[0].childNodes)
++    if (rstatus):
++      actual_entry['rprops'] = rstatus[0].getAttribute('props')
++      actual_entry['ritem'] = rstatus[0].getAttribute('item')
++
++    actual_entries[entry.getAttribute('path')] = actual_entry
++
++  if expected_entries != actual_entries:
++    raise Failure('\n' + '\n'.join(difflib.ndiff(
++          pprint.pformat(expected_entries).splitlines(),
++          pprint.pformat(actual_entries).splitlines())))
++
+ def run_and_verify_diff_summarize_xml(error_re_string = [],
+                                       expected_prefix = None,
+                                       expected_paths = [],
+--- subversion-1.7.3/subversion/tests/cmdline/svntest/verify.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/svntest/verify.py
+@@ -25,7 +25,8 @@
+ ######################################################################
+ 
+ import re, sys
+-from difflib import unified_diff
++from difflib import unified_diff, ndiff
++import pprint
+ 
+ import svntest
+ 
+@@ -68,6 +69,10 @@ class SVNIncorrectDatatype(SVNUnexpected
+   run_and_verify_* API"""
+   pass
+ 
++class SVNDumpParseError(svntest.Failure):
++  """Exception raised if parsing a dump file fails"""
++  pass
++
+ 
+ ######################################################################
+ # Comparison of expected vs. actual output
+@@ -397,3 +402,193 @@ def verify_exit_code(message, actual, ex
+     display_lines(message, "Exit Code",
+                   str(expected) + '\n', str(actual) + '\n')
+     raise raisable
++
++# A simple dump file parser.  While sufficient for the current
++# testsuite it doesn't cope with all valid dump files.
++class DumpParser:
++  def __init__(self, lines):
++    self.current = 0
++    self.lines = lines
++    self.parsed = {}
++
++  def parse_line(self, regex, required=True):
++    m = re.match(regex, self.lines[self.current])
++    if not m:
++      if required:
++        raise SVNDumpParseError("expected '%s' at line %d\n%s"
++                                % (regex, self.current,
++                                   self.lines[self.current]))
++      else:
++        return None
++    self.current += 1
++    return m.group(1)
++
++  def parse_blank(self, required=True):
++    if self.lines[self.current] != '\n':  # Works on Windows
++      if required:
++        raise SVNDumpParseError("expected blank at line %d\n%s"
++                                % (self.current, self.lines[self.current]))
++      else:
++        return False
++    self.current += 1
++    return True
++
++  def parse_format(self):
++    return self.parse_line('SVN-fs-dump-format-version: ([0-9]+)$')
++
++  def parse_uuid(self):
++    return self.parse_line('UUID: ([0-9a-z-]+)$')
++
++  def parse_revision(self):
++    return self.parse_line('Revision-number: ([0-9]+)$')
++
++  def parse_prop_length(self, required=True):
++    return self.parse_line('Prop-content-length: ([0-9]+)$', required)
++
++  def parse_content_length(self, required=True):
++    return self.parse_line('Content-length: ([0-9]+)$', required)
++
++  def parse_path(self):
++    path = self.parse_line('Node-path: (.+)$', required=False)
++    if not path and self.lines[self.current] == 'Node-path: \n':
++      self.current += 1
++      path = ''
++    return path
++
++  def parse_kind(self):
++    return self.parse_line('Node-kind: (.+)$', required=False)
++
++  def parse_action(self):
++    return self.parse_line('Node-action: ([0-9a-z-]+)$')
++
++  def parse_copyfrom_rev(self):
++    return self.parse_line('Node-copyfrom-rev: ([0-9]+)$', required=False)
++
++  def parse_copyfrom_path(self):
++    path = self.parse_line('Node-copyfrom-path: (.+)$', required=False)
++    if not path and self.lines[self.current] == 'Node-copyfrom-path: \n':
++      self.current += 1
++      path = ''
++    return path
++
++  def parse_copy_md5(self):
++    return self.parse_line('Text-copy-source-md5: ([0-9a-z]+)$', required=False)
++
++  def parse_copy_sha1(self):
++    return self.parse_line('Text-copy-source-sha1: ([0-9a-z]+)$', required=False)
++
++  def parse_text_md5(self):
++    return self.parse_line('Text-content-md5: ([0-9a-z]+)$', required=False)
++
++  def parse_text_sha1(self):
++    return self.parse_line('Text-content-sha1: ([0-9a-z]+)$', required=False)
++
++  def parse_text_length(self):
++    return self.parse_line('Text-content-length: ([0-9]+)$', required=False)
++
++  # One day we may need to parse individual property name/values into a map
++  def get_props(self):
++    props = []
++    while not re.match('PROPS-END$', self.lines[self.current]):
++      props.append(self.lines[self.current])
++      self.current += 1
++    self.current += 1
++    return props
++
++  def get_content(self, length):
++    content = ''
++    while len(content) < length:
++      content += self.lines[self.current]
++      self.current += 1
++    if len(content) == length + 1:
++      content = content[:-1]
++    elif len(content) != length:
++      raise SVNDumpParseError("content length expected %d actual %d at line %d"
++                              % (length, len(content), self.current))
++    return content
++
++  def parse_one_node(self):
++    node = {}
++    node['kind'] = self.parse_kind()
++    action = self.parse_action()
++    node['copyfrom_rev'] = self.parse_copyfrom_rev()
++    node['copyfrom_path'] = self.parse_copyfrom_path()
++    node['copy_md5'] = self.parse_copy_md5()
++    node['copy_sha1'] = self.parse_copy_sha1()
++    node['prop_length'] = self.parse_prop_length(required=False)
++    node['text_length'] = self.parse_text_length()
++    node['text_md5'] = self.parse_text_md5()
++    node['text_sha1'] = self.parse_text_sha1()
++    node['content_length'] = self.parse_content_length(required=False)
++    self.parse_blank()
++    if node['prop_length']:
++      node['props'] = self.get_props()
++    if node['text_length']:
++      node['content'] = self.get_content(int(node['text_length']))
++    # Hard to determine how may blanks is 'correct' (a delete that is
++    # followed by an add that is a replace and a copy has one fewer
++    # than expected but that can't be predicted until seeing the add)
++    # so allow arbitrary number
++    blanks = 0
++    while self.current < len(self.lines) and self.parse_blank(required=False):
++      blanks += 1
++    node['blanks'] = blanks
++    return action, node
++
++  def parse_all_nodes(self):
++    nodes = {}
++    while True:
++      if self.current >= len(self.lines):
++        break
++      path = self.parse_path()
++      if not path and not path is '':
++        break
++      if not nodes.get(path):
++        nodes[path] = {}
++      action, node = self.parse_one_node()
++      if nodes[path].get(action):
++        raise SVNDumpParseError("duplicate action '%s' for node '%s' at line %d"
++                                % (action, path, self.current))
++      nodes[path][action] = node
++    return nodes
++
++  def parse_one_revision(self):
++    revision = {}
++    number = self.parse_revision()
++    revision['prop_length'] = self.parse_prop_length()
++    revision['content_length'] = self.parse_content_length()
++    self.parse_blank()
++    revision['props'] = self.get_props()
++    self.parse_blank()
++    revision['nodes'] = self.parse_all_nodes()
++    return number, revision
++
++  def parse_all_revisions(self):
++    while self.current < len(self.lines):
++      number, revision = self.parse_one_revision()
++      if self.parsed.get(number):
++        raise SVNDumpParseError("duplicate revision %d at line %d"
++                                % (number, self.current))
++      self.parsed[number] = revision
++
++  def parse(self):
++    self.parsed['format'] = self.parse_format()
++    self.parse_blank()
++    self.parsed['uuid'] = self.parse_uuid()
++    self.parse_blank()
++    self.parse_all_revisions()
++    return self.parsed
++
++def compare_dump_files(message, label, expected, actual):
++  """Parse two dump files EXPECTED and ACTUAL, both of which are lists
++  of lines as returned by run_and_verify_dump, and check that the same
++  revisions, nodes, properties, etc. are present in both dumps.
++  """
++
++  parsed_expected = DumpParser(expected).parse()
++  parsed_actual = DumpParser(actual).parse()
++
++  if parsed_expected != parsed_actual:
++    raise svntest.Failure('\n' + '\n'.join(ndiff(
++          pprint.pformat(parsed_expected).splitlines(),
++          pprint.pformat(parsed_actual).splitlines())))
+--- subversion-1.7.3/subversion/tests/cmdline/switch_tests.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/switch_tests.py
+@@ -1532,33 +1532,29 @@ def mergeinfo_switch_elision(sbox):
+   beta_path     = os.path.join(wc_dir, "A", "B", "E", "beta")
+ 
+   # Make branches A/B_COPY_1 and A/B_COPY_2
+-  svntest.actions.run_and_verify_svn(
+-    None,
+-    ["A    " + os.path.join(wc_dir, "A", "B_COPY_1", "lambda") + "\n",
++  expected_stdout = verify.UnorderedOutput([
++     "A    " + os.path.join(wc_dir, "A", "B_COPY_1", "lambda") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY_1", "E") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY_1", "E", "alpha") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY_1", "E", "beta") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY_1", "F") + "\n",
+      "Checked out revision 1.\n",
+-     "A         " + B_COPY_1_path + "\n"],
+-    [],
+-    'copy',
+-    sbox.repo_url + "/A/B",
+-    B_COPY_1_path)
+-
+-  svntest.actions.run_and_verify_svn(
+-    None,
+-    ["A    " + os.path.join(wc_dir, "A", "B_COPY_2", "lambda") + "\n",
++     "A         " + B_COPY_1_path + "\n",
++    ])
++  svntest.actions.run_and_verify_svn(None, expected_stdout, [], 'copy',
++                                     sbox.repo_url + "/A/B", B_COPY_1_path)
++
++  expected_stdout = verify.UnorderedOutput([
++     "A    " + os.path.join(wc_dir, "A", "B_COPY_2", "lambda") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY_2", "E") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY_2", "E", "alpha") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY_2", "E", "beta") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY_2", "F") + "\n",
+      "Checked out revision 1.\n",
+-     "A         " + B_COPY_2_path + "\n"],
+-    [],
+-    'copy',
+-    sbox.repo_url + "/A/B",
+-    B_COPY_2_path)
++     "A         " + B_COPY_2_path + "\n",
++    ])
++  svntest.actions.run_and_verify_svn(None, expected_stdout, [], 'copy',
++                                     sbox.repo_url + "/A/B", B_COPY_2_path)
+ 
+   expected_output = svntest.wc.State(wc_dir, {
+     'A/B_COPY_1' : Item(verb='Adding'),
+--- subversion-1.7.3/subversion/tests/cmdline/update_tests.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/update_tests.py
+@@ -1189,6 +1189,7 @@ def another_hudson_problem(sbox):
+ 
+   # Sigh, I can't get run_and_verify_update to work (but not because
+   # of issue 919 as far as I can tell)
++  expected_output = svntest.verify.UnorderedOutput(expected_output)
+   svntest.actions.run_and_verify_svn(None,
+                                      expected_output, [],
+                                      'up', G_path)
+@@ -3142,19 +3143,17 @@ def mergeinfo_update_elision(sbox):
+   lambda_path = os.path.join(wc_dir, "A", "B", "lambda")
+ 
+   # Make a branch A/B_COPY
+-  svntest.actions.run_and_verify_svn(
+-    None,
+-    ["A    " + os.path.join(wc_dir, "A", "B_COPY", "lambda") + "\n",
++  expected_stdout =  verify.UnorderedOutput([
++     "A    " + os.path.join(wc_dir, "A", "B_COPY", "lambda") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY", "E") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY", "E", "alpha") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY", "E", "beta") + "\n",
+      "A    " + os.path.join(wc_dir, "A", "B_COPY", "F") + "\n",
+      "Checked out revision 1.\n",
+-     "A         " + B_COPY_path + "\n"],
+-    [],
+-    'copy',
+-    sbox.repo_url + "/A/B",
+-    B_COPY_path)
++     "A         " + B_COPY_path + "\n",
++    ])
++  svntest.actions.run_and_verify_svn(None, expected_stdout, [], 'copy',
++                                     sbox.repo_url + "/A/B", B_COPY_path)
+ 
+   expected_output = wc.State(wc_dir, {'A/B_COPY' : Item(verb='Adding')})
+ 
+--- subversion-1.7.3/subversion/tests/cmdline/upgrade_tests.py.hashorder
++++ subversion-1.7.3/subversion/tests/cmdline/upgrade_tests.py
+@@ -1079,7 +1079,7 @@ def upgrade_with_missing_subdir(sbox):
+   svntest.main.safe_rmtree(sbox.ospath('A/B'))
+ 
+   # Now upgrade the working copy and expect a missing subdir
+-  expected_output = [
++  expected_output = svntest.verify.UnorderedOutput([
+     "Upgraded '%s'\n" % sbox.wc_dir,
+     "Upgraded '%s'\n" % sbox.ospath('A'),
+     "Skipped '%s'\n" % sbox.ospath('A/B'),
+@@ -1087,7 +1087,7 @@ def upgrade_with_missing_subdir(sbox):
+     "Upgraded '%s'\n" % sbox.ospath('A/D'),
+     "Upgraded '%s'\n" % sbox.ospath('A/D/G'),
+     "Upgraded '%s'\n" % sbox.ospath('A/D/H'),
+-  ]
++  ])
+   svntest.actions.run_and_verify_svn(None, expected_output, [],
+                                      'upgrade', sbox.wc_dir)
+ 
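The bulk of the new verify.py code above is a small dump-file parser plus compare_dump_files(), so that svnadmin and svnrdump dumps are compared by their parsed revisions, nodes and properties rather than line by line, where node ordering again depends on hash order. A rough, hypothetical sketch of that idea (not the upstream DumpParser):

    # Group "Key: value" headers under their Node-path and compare the
    # resulting dicts; dict comparison ignores the order in which the nodes
    # appeared in the two dump files.
    import re

    def node_headers(dump_lines):
        nodes, current = {}, None
        for line in dump_lines:
            m = re.match(r'([A-Za-z][A-Za-z-]*): (.*)\n?$', line)
            if not m:
                current = None   # blank line or property data ends the block
                continue
            key, value = m.groups()
            if key == 'Node-path':
                current = nodes.setdefault(value, {})
            elif current is not None:
                current[key] = value
        return nodes

    def compare_dumps_structurally(expected_lines, actual_lines):
        if node_headers(expected_lines) != node_headers(actual_lines):
            raise AssertionError('dump files differ in node content')
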
diff --git a/subversion.spec b/subversion.spec
index cc8bee1..9ca9c8a 100644
--- a/subversion.spec
+++ b/subversion.spec
@@ -16,7 +16,7 @@
 Summary: A Modern Concurrent Version Control System
 Name: subversion
 Version: 1.7.3
-Release: 1%{?dist}
+Release: 2%{?dist}
 License: ASL 2.0
 Group: Development/Tools
 URL: http://subversion.apache.org/
@@ -30,6 +30,7 @@ Patch1: subversion-1.7.0-rpath.patch
 Patch2: subversion-1.7.0-pie.patch
 Patch3: subversion-1.7.0-kwallet.patch
 Patch4: subversion-1.7.2-ruby19.patch
+Patch5: subversion-1.7.3-hashorder.patch
 BuildRequires: autoconf, libtool, python, python-devel, texinfo, which
 BuildRequires: db4-devel >= 4.1.25, swig >= 1.3.24, gettext
 BuildRequires: apr-devel >= 1.3.0, apr-util-devel >= 1.3.0
@@ -159,6 +160,7 @@ This package includes the Ruby bindings to the Subversion libraries.
 %patch2 -p1 -b .pie
 %patch3 -p1 -b .kwallet
 %patch4 -p1 -b .ruby
+%patch5 -p1 -b .hashorder
 
 mv tools/client-side/bash_completion .
 
@@ -404,6 +406,9 @@ fi
 %endif
 
 %changelog
+* Tue Feb 28 2012 Joe Orton <jorton at redhat.com> - 1.7.3-2
+- add upstream test suite fixes for APR hash change (r1293602, r1293811)
+
 * Mon Feb 13 2012 Joe Orton <jorton at redhat.com> - 1.7.3-1
 - update to 1.7.3
 - ship, enable mod_dontdothat

