The package rpms/mongodb.git has added or updated architecture-specific content in its
spec file (ExclusiveArch/ExcludeArch or %ifarch/%ifnarch) in commit(s):
https://src.fedoraproject.org/cgit/rpms/mongodb.git/commit/?id=930d3b784589afb1b182f702ecf7963ed85e609a
Change:
+%ifarch ppc64le
Thanks.
Full change:
============
commit 930d3b784589afb1b182f702ecf7963ed85e609a
Author: Marek Skalický <mskalick(a)redhat.com>
Date: Tue Apr 24 14:57:11 2018 +0200
Rebase patches to new mongodb version
diff --git a/mongodb.spec b/mongodb.spec
index 05cfcaa..cc2bb17 100644
--- a/mongodb.spec
+++ b/mongodb.spec
@@ -195,7 +195,9 @@ the MongoDB sources.
%setup -q -n %{MONGO_DISTNAME}
%patch1 -p1
%patch2 -p1
+%ifarch ppc64le
%patch3 -p1
+%endif
# Patch only Fedora specific architectures
%ifnarch %{upstream_arches}
%patch0 -p1
diff --git a/ppc64le-disable-altivec.patch b/ppc64le-disable-altivec.patch
index 85f7d5c..4202141 100644
--- a/ppc64le-disable-altivec.patch
+++ b/ppc64le-disable-altivec.patch
@@ -1,13 +1,13 @@
-diff --git a/src/mongo/db/fts/unicode/byte_vector.h
b/src/mongo/db/fts/unicode/byte_vector.h
-index 2404e15dfd..d361a857ff 100644
---- a/src/mongo/db/fts/unicode/byte_vector.h
-+++ b/src/mongo/db/fts/unicode/byte_vector.h
-@@ -33,8 +33,6 @@
- // TODO replace this with #if BOOST_HW_SIMD_X86 >= BOOST_HW_SIMD_X86_SSE2_VERSION in
boost 1.60
- #if defined(_M_AMD64) || defined(__amd64__)
- #include "mongo/db/fts/unicode/byte_vector_sse2.h"
--#elif defined(__powerpc64__)
--#include "mongo/db/fts/unicode/byte_vector_altivec.h"
- #else // Other platforms go above here.
- #undef MONGO_HAVE_FAST_BYTE_VECTOR
- #endif
+diff --git a/src/mongo/db/fts/unicode/byte_vector_altivec.h
b/src/mongo/db/fts/unicode/byte_vector_altivec.h
+index f7a07ac074..74a6441c00 100644
+--- a/src/mongo/db/fts/unicode/byte_vector_altivec.h
++++ b/src/mongo/db/fts/unicode/byte_vector_altivec.h
+@@ -98,7 +98,7 @@ public:
+ // big endian by comparison.
+ const Native bits = {120, 112, 104, 96, 88, 80, 72, 64, 56, 48, 40, 32, 24, 16,
8, 0};
+
+- return vec_extract(vec_vbpermq(_data, bits), 0);
++ return vec_extract(vec_vbpermq(_data, bits), 1);
+ }
+
+ /**
diff --git a/python3-buildscripts-tests.patch b/python3-buildscripts-tests.patch
index f2d9c79..5bf9f72 100644
--- a/python3-buildscripts-tests.patch
+++ b/python3-buildscripts-tests.patch
@@ -1,5 +1,5 @@
diff --git a/SConstruct b/SConstruct
-index f949f65..78eab69 100644
+index f949f65475..78eab698b9 100644
--- a/SConstruct
+++ b/SConstruct
@@ -383,7 +383,7 @@ win_version_min_choices = {
@@ -47,216 +47,8 @@ index f949f65..78eab69 100644
blacklist_options=["-fsanitize-blacklist=%s" % blackfile
for blackfile in blackfiles
if os.stat(blackfile.path).st_size != 0]
-diff --git a/buildscripts/aggregate_tracefiles.py b/buildscripts/aggregate_tracefiles.py
-index 8f1db78..0b4bfd2 100644
---- a/buildscripts/aggregate_tracefiles.py
-+++ b/buildscripts/aggregate_tracefiles.py
-@@ -16,7 +16,7 @@ def aggregate(inputs, output):
-
- args += ['-o', output]
-
-- print ' '.join(args)
-+ print(' '.join(args))
-
- return subprocess.call(args)
-
-diff --git a/buildscripts/buildlogger.py b/buildscripts/buildlogger.py
-index 163c10a..fd2c095 100644
---- a/buildscripts/buildlogger.py
-+++ b/buildscripts/buildlogger.py
-@@ -45,8 +45,8 @@ import subprocess
- import sys
- import time
- import traceback
--import urllib2
--import utils
-+import urllib.request, urllib.error, urllib.parse
-+from . import utils
-
- # suppress deprecation warnings that happen when
- # we import the 'buildbot.tac' file below
-@@ -82,7 +82,7 @@ for path in possible_paths:
- if os.path.isfile(credentials_path):
- credentials = {}
- try:
-- execfile(credentials_path, credentials, credentials)
-+ exec(compile(open(credentials_path).read(), credentials_path,
'exec'), credentials, credentials)
- username = credentials.get('slavename',
credentials.get('username'))
- password = credentials.get('passwd',
credentials.get('password'))
- break
-@@ -94,14 +94,14 @@ URL_ROOT = os.environ.get('BUILDLOGGER_URL',
'http://buildlogs.mongodb.org/')
- TIMEOUT_SECONDS = 10
- socket.setdefaulttimeout(TIMEOUT_SECONDS)
-
--auth_handler = urllib2.HTTPBasicAuthHandler()
-+auth_handler = urllib.request.HTTPBasicAuthHandler()
- auth_handler.add_password(
- realm='buildlogs',
- uri=URL_ROOT,
- user=username,
- passwd=password)
-
--url_opener = urllib2.build_opener(auth_handler, urllib2.HTTPErrorProcessor())
-+url_opener = urllib.request.build_opener(auth_handler, urllib2.HTTPErrorProcessor())
-
- def url(endpoint):
- if not endpoint.endswith('/'):
-@@ -115,10 +115,10 @@ def post(endpoint, data, headers=None):
- headers = headers or {}
- headers.update({'Content-Type': 'application/json; charset=utf-8'})
-
-- req = urllib2.Request(url=url(endpoint), data=data, headers=headers)
-+ req = urllib.request.Request(url=url(endpoint), data=data, headers=headers)
- try:
- response = url_opener.open(req)
-- except urllib2.URLError:
-+ except urllib.error.URLError:
- import traceback
- traceback.print_exc(file=sys.stderr)
- sys.stderr.flush()
-@@ -145,7 +145,7 @@ def traceback_to_stderr(func):
- def wrapper(*args, **kwargs):
- try:
- return func(*args, **kwargs)
-- except urllib2.HTTPError, err:
-+ except urllib.error.HTTPError as err:
- sys.stderr.write('error: HTTP code %d\n----\n' % err.code)
- if hasattr(err, 'hdrs'):
- for k, v in err.hdrs.items():
-diff --git a/buildscripts/burn_in_tests.py b/buildscripts/burn_in_tests.py
-index a78c905..2255e97 100644
---- a/buildscripts/burn_in_tests.py
-+++ b/buildscripts/burn_in_tests.py
-@@ -16,7 +16,7 @@ import re
- import requests
- import shlex
- import sys
--import urlparse
-+import urllib.parse
- import yaml
-
-
-@@ -114,7 +114,7 @@ def find_last_activated_task(revisions, variant, branch_name):
- evg_cfg = read_evg_config()
- if evg_cfg is not None and "api_server_host" in evg_cfg:
- api_server = "{url.scheme}://{url.netloc}".format(
-- url=urlparse.urlparse(evg_cfg["api_server_host"]))
-+ url=urllib.parse.urlparse(evg_cfg["api_server_host"]))
- else:
- api_server = API_SERVER_DEFAULT
-
-@@ -161,7 +161,7 @@ def find_changed_tests(branch_name, base_commit, max_revisions,
buildvariant, ch
- # commit among 'revs_to_check' that's been activated in
Evergreen. We handle this by
- # only considering tests changed in the current commit.
- last_activated = "HEAD"
-- print "Comparing current branch against", last_activated
-+ print("Comparing current branch against", last_activated)
- revisions = callo(["git", "rev-list", base_commit +
"..." + last_activated]).splitlines()
- base_commit = last_activated
- else:
-@@ -169,10 +169,10 @@ def find_changed_tests(branch_name, base_commit, max_revisions,
buildvariant, ch
-
- revision_count = len(revisions)
- if revision_count > max_revisions:
-- print "There are too many revisions included (%d)." % revision_count,
\
-+ print("There are too many revisions included (%d)." % revision_count,
\
- "This is likely because your base branch is not " + branch_name
+ ".", \
- "You can allow us to review more than 25 revisions by using", \
-- "the --maxRevisions option."
-+ "the --maxRevisions option.")
- return changed_tests
-
- changed_files = callo(["git", "diff", "--name-only",
base_commit]).splitlines()
-@@ -279,7 +279,7 @@ def create_task_list(evergreen_conf, buildvariant, suites,
exclude_tasks):
-
- evg_buildvariant = evergreen_conf.get_variant(buildvariant)
- if not evg_buildvariant:
-- print "Buildvariant", buildvariant, "not found in",
evergreen_conf.path
-+ print("Buildvariant", buildvariant, "not found in",
evergreen_conf.path)
- sys.exit(1)
-
- # Find all the buildvariant task's resmoke_args.
-@@ -366,9 +366,9 @@ def main():
- evergreen_conf = evergreen.EvergreenProjectConfig(values.evergreen_file)
-
- if values.buildvariant is None:
-- print "Option buildVariant must be specified to find changed
tests.\n", \
-+ print("Option buildVariant must be specified to find changed
tests.\n", \
- "Select from the following: \n" \
-- "\t",
"\n\t".join(sorted(evergreen_conf.variant_names))
-+ "\t",
"\n\t".join(sorted(evergreen_conf.variant_names)))
- sys.exit(1)
-
- changed_tests = find_changed_tests(values.branch,
-@@ -380,7 +380,7 @@ def main():
- changed_tests = filter_tests(changed_tests, exclude_tests)
- # If there are no changed tests, exit cleanly.
- if not changed_tests:
-- print "No new or modified tests found."
-+ print("No new or modified tests found.")
- _write_report_file({}, values.test_list_outfile)
- sys.exit(0)
- suites = resmokelib.parser.get_suites(values, changed_tests)
-@@ -403,7 +403,7 @@ def main():
- try:
- subprocess.check_call(resmoke_cmd, shell=False)
- except subprocess.CalledProcessError as err:
-- print "Resmoke returned an error with task:", task
-+ print("Resmoke returned an error with task:", task)
- _save_report_data(test_results, values.report_file, task)
- _write_report_file(test_results, values.report_file)
- sys.exit(err.returncode)
-diff --git a/buildscripts/ciconfig/evergreen.py b/buildscripts/ciconfig/evergreen.py
-index 9a4f7a9..9cb6c3f 100644
---- a/buildscripts/ciconfig/evergreen.py
-+++ b/buildscripts/ciconfig/evergreen.py
-@@ -30,7 +30,7 @@ class EvergreenProjectConfig(object):
- @property
- def task_names(self):
- """The list of task names."""
-- return self._tasks_by_name.keys()
-+ return list(self._tasks_by_name.keys())
-
- def get_task(self, task_name):
- """Return the task with the given name as a Task
instance."""
-@@ -52,7 +52,7 @@ class EvergreenProjectConfig(object):
- @property
- def variant_names(self):
- """The list of build variant names."""
-- return self._variants_by_name.keys()
-+ return list(self._variants_by_name.keys())
-
- def get_variant(self, variant_name):
- """Return the variant with the given name as a Variant
instance."""
-diff --git a/buildscripts/ciconfig/tags.py b/buildscripts/ciconfig/tags.py
-index 418d0e3..bfdb25c 100644
---- a/buildscripts/ciconfig/tags.py
-+++ b/buildscripts/ciconfig/tags.py
-@@ -12,7 +12,7 @@ import yaml
-
- # Setup to preserve order in yaml.dump, see
https://stackoverflow.com/a/8661021
- def _represent_dict_order(self, data):
-- return self.represent_mapping("tag:yaml.org,2002:map", data.items())
-+ return self.represent_mapping("tag:yaml.org,2002:map",
list(data.items()))
-
- yaml.add_representer(collections.OrderedDict, _represent_dict_order)
- # End setup
-@@ -55,11 +55,11 @@ class TagsConfig(object):
-
- def get_test_kinds(self):
- """List the test kinds."""
-- return self._conf.keys()
-+ return list(self._conf.keys())
-
- def get_test_patterns(self, test_kind):
- """List the test patterns under
'test_kind'."""
-- return getdefault(self._conf, test_kind, {}).keys()
-+ return list(getdefault(self._conf, test_kind, {}).keys())
-
- def get_tags(self, test_kind, test_pattern):
- """List the tags under 'test_kind' and
'test_pattern'."""
diff --git a/buildscripts/clang_format.py b/buildscripts/clang_format.py
-index cf9884d..c0f4140 100755
+index cf9884d8b1..c0f4140e59 100755
--- a/buildscripts/clang_format.py
+++ b/buildscripts/clang_format.py
@@ -20,7 +20,7 @@ import sys
@@ -291,21 +83,8 @@ index cf9884d..c0f4140 100755
else:
repo.commit(["--all", "--amend",
"--no-edit"])
-diff --git a/buildscripts/cleanbb.py b/buildscripts/cleanbb.py
-index b599dc8..485109c 100644
---- a/buildscripts/cleanbb.py
-+++ b/buildscripts/cleanbb.py
-@@ -79,7 +79,7 @@ def tryToRemove(path):
-
- def cleanup( root , nokill ):
- if nokill:
-- print "nokill requested, not killing anybody"
-+ print("nokill requested, not killing anybody")
- else:
- if killprocs( root=root ) > 0:
- time.sleep(3)
diff --git a/buildscripts/cpplint.py b/buildscripts/cpplint.py
-index c6aa51b..ed9cdb3 100755
+index 6979cbcd4e..bc9ff038fd 100755
--- a/buildscripts/cpplint.py
+++ b/buildscripts/cpplint.py
@@ -835,7 +835,7 @@ class _CppLintState(object):
@@ -326,7 +105,7 @@ index c6aa51b..ed9cdb3 100755
char = line[i]
if char in '([{':
# Found start of parenthesized expression, push to expression stack
-@@ -1687,7 +1687,7 @@ def CheckForCopyright(filename, lines, error):
+@@ -1681,7 +1681,7 @@ def CheckForCopyright(filename, lines, error):
# We'll say it should occur by line 10. Don't forget there's a
# dummy line at the front.
@@ -335,7 +114,7 @@ index c6aa51b..ed9cdb3 100755
if re.search(r'Copyright', lines[line], re.I): break
else: # means no copyright line was found
error(filename, 0, 'legal/copyright', 5,
-@@ -1838,7 +1838,7 @@ def CheckForBadCharacters(filename, lines, error):
+@@ -1832,7 +1832,7 @@ def CheckForBadCharacters(filename, lines, error):
error: The function to call with any errors found.
"""
for linenum, line in enumerate(lines):
@@ -344,7 +123,7 @@ index c6aa51b..ed9cdb3 100755
error(filename, linenum, 'readability/utf8', 5,
'Line contains invalid UTF-8 (or Unicode replacement character).')
if '\0' in line:
-@@ -2884,7 +2884,7 @@ def CheckForFunctionLengths(filename, clean_lines, linenum,
+@@ -2878,7 +2878,7 @@ def CheckForFunctionLengths(filename, clean_lines, linenum,
if starting_func:
body_found = False
@@ -353,7 +132,7 @@ index c6aa51b..ed9cdb3 100755
start_line = lines[start_linenum]
joined_line += ' ' + start_line.lstrip()
if Search(r'(;|})', start_line): # Declarations and trivial functions
-@@ -3361,7 +3361,7 @@ def CheckBracesSpacing(filename, clean_lines, linenum, error):
+@@ -3355,7 +3355,7 @@ def CheckBracesSpacing(filename, clean_lines, linenum, error):
trailing_text = ''
if endpos > -1:
trailing_text = endline[endpos:]
@@ -362,7 +141,7 @@ index c6aa51b..ed9cdb3 100755
min(endlinenum + 3, clean_lines.NumLines() - 1)):
trailing_text += clean_lines.elided[offset]
if not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text):
-@@ -3530,7 +3530,7 @@ def IsRValueType(clean_lines, nesting_state, linenum, column):
+@@ -3524,7 +3524,7 @@ def IsRValueType(clean_lines, nesting_state, linenum, column):
# Look for the previous 'for(' in the previous lines.
before_text = match_symbol.group(1)
@@ -371,7 +150,7 @@ index c6aa51b..ed9cdb3 100755
before_text = clean_lines.elided[i] + before_text
if Search(r'for\s*\([^{};]*$', before_text):
# This is the condition inside a for-loop
-@@ -3657,12 +3657,12 @@ def IsRValueAllowed(clean_lines, linenum):
+@@ -3651,12 +3651,12 @@ def IsRValueAllowed(clean_lines, linenum):
True if line is within the region where RValue references are allowed.
"""
# Allow region marked by PUSH/POP macros
@@ -386,7 +165,7 @@ index c6aa51b..ed9cdb3 100755
line = clean_lines.elided[j]
if Match(r'GOOGLE_ALLOW_RVALUE_REFERENCES_(?:PUSH|POP)', line):
return line.endswith('POP')
-@@ -4142,7 +4142,7 @@ def CheckCheck(filename, clean_lines, linenum, error):
+@@ -4136,7 +4136,7 @@ def CheckCheck(filename, clean_lines, linenum, error):
expression = lines[linenum][start_pos + 1:end_pos - 1]
else:
expression = lines[linenum][start_pos + 1:]
@@ -395,7 +174,7 @@ index c6aa51b..ed9cdb3 100755
expression += lines[i]
expression += last_line[0:end_pos - 1]
-@@ -4270,7 +4270,7 @@ def GetLineWidth(line):
+@@ -4264,7 +4264,7 @@ def GetLineWidth(line):
The width of the line in column positions, accounting for Unicode
combining characters and wide characters.
"""
@@ -404,7 +183,7 @@ index c6aa51b..ed9cdb3 100755
width = 0
for uc in unicodedata.normalize('NFC', line):
if unicodedata.east_asian_width(uc) in ('W', 'F'):
-@@ -4623,7 +4623,7 @@ def _GetTextInside(text, start_pattern):
+@@ -4617,7 +4617,7 @@ def _GetTextInside(text, start_pattern):
# Give opening punctuations to get the matching close-punctuations.
matching_punctuation = {'(': ')', '{': '}',
'[': ']'}
@@ -413,7 +192,7 @@ index c6aa51b..ed9cdb3 100755
# Find the position to start extracting text.
match = re.search(start_pattern, text, re.M)
-@@ -4949,7 +4949,7 @@ def IsDerivedFunction(clean_lines, linenum):
+@@ -4943,7 +4943,7 @@ def IsDerivedFunction(clean_lines, linenum):
virt-specifier.
"""
# Scan back a few lines for start of current function
@@ -422,7 +201,7 @@ index c6aa51b..ed9cdb3 100755
match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i])
if match:
# Look for "override" after the matching closing parenthesis
-@@ -4970,7 +4970,7 @@ def IsInitializerList(clean_lines, linenum):
+@@ -4964,7 +4964,7 @@ def IsInitializerList(clean_lines, linenum):
True if current line appears to be inside constructor initializer
list, False otherwise.
"""
@@ -431,7 +210,7 @@ index c6aa51b..ed9cdb3 100755
line = clean_lines.elided[i]
if i == linenum:
remove_function_body = Match(r'^(.*)\{\s*$', line)
-@@ -5066,7 +5066,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
+@@ -5060,7 +5060,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
# Found the matching < on an earlier line, collect all
# pieces up to current line.
line = ''
@@ -440,7 +219,7 @@ index c6aa51b..ed9cdb3 100755
line += clean_lines.elided[i].strip()
# Check for non-const references in function parameters. A single '&'
may
-@@ -5090,7 +5090,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
+@@ -5084,7 +5084,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
# appear inside the second set of parentheses on the current line as
# opposed to the first set.
if linenum > 0:
@@ -449,7 +228,7 @@ index c6aa51b..ed9cdb3 100755
previous_line = clean_lines.elided[i]
if not Search(r'[),]\s*$', previous_line):
break
-@@ -5121,7 +5121,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
+@@ -5115,7 +5115,7 @@ def CheckForNonConstReference(filename, clean_lines, linenum,
# Don't see a whitelisted function on this line. Actually we
# didn't see any function name on this line, so this is likely a
# multi-line parameter list. Try a bit harder to catch this case.
@@ -458,7 +237,7 @@ index c6aa51b..ed9cdb3 100755
if (linenum > i and
Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
return
-@@ -5283,7 +5283,7 @@ def CheckCStyleCast(filename, clean_lines, linenum, cast_type,
pattern, error):
+@@ -5277,7 +5277,7 @@ def CheckCStyleCast(filename, clean_lines, linenum, cast_type,
pattern, error):
# Try expanding current context to see if we one level of
# parentheses inside a macro.
if linenum > 0:
@@ -467,7 +246,7 @@ index c6aa51b..ed9cdb3 100755
context = clean_lines.elided[i] + context
if Match(r'.*\b[_A-Z][_A-Z0-9]*\s*\((?:\([^()]*\)|[^()])*$', context):
return False
-@@ -5540,7 +5540,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state,
error,
+@@ -5534,7 +5534,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state,
error,
required = {} # A map of header name to linenumber and the template entity.
# Example of required: { '<functional>': (1219,
'less<>') }
@@ -476,7 +255,7 @@ index c6aa51b..ed9cdb3 100755
line = clean_lines.elided[linenum]
if not line or line[0] == '#':
continue
-@@ -5589,7 +5589,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state,
error,
+@@ -5583,7 +5583,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state,
error,
# include_dict is modified during iteration, so we iterate over a copy of
# the keys.
@@ -485,7 +264,7 @@ index c6aa51b..ed9cdb3 100755
for header in header_keys:
(same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
fullpath = common_path + header
-@@ -5684,7 +5684,7 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error):
+@@ -5678,7 +5678,7 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error):
end_col = -1
end_line = -1
start_col = len(virtual.group(1))
@@ -494,7 +273,7 @@ index c6aa51b..ed9cdb3 100755
line = clean_lines.elided[start_line][start_col:]
parameter_list = Match(r'^([^(]*)\(', line)
if parameter_list:
-@@ -5699,7 +5699,7 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error):
+@@ -5693,7 +5693,7 @@ def CheckRedundantVirtual(filename, clean_lines, linenum, error):
# Look for "override" or "final" after the parameter list
# (possibly on the next few lines).
@@ -503,7 +282,7 @@ index c6aa51b..ed9cdb3 100755
line = clean_lines.elided[i][end_col:]
match = Search(r'\b(override|final)\b', line)
if match:
-@@ -5926,7 +5926,7 @@ def ProcessFileData(filename, file_extension, lines, error,
+@@ -5920,7 +5920,7 @@ def ProcessFileData(filename, file_extension, lines, error,
RemoveMultiLineComments(filename, lines, error)
clean_lines = CleansedLines(lines)
@@ -513,7 +292,7 @@ index c6aa51b..ed9cdb3 100755
include_state, function_state, nesting_state, error,
extra_check_functions)
diff --git a/buildscripts/errorcodes.py b/buildscripts/errorcodes.py
-index cc46789..7351e6a 100755
+index cc46789907..7351e6a12e 100755
--- a/buildscripts/errorcodes.py
+++ b/buildscripts/errorcodes.py
@@ -5,13 +5,16 @@
@@ -606,7 +385,7 @@ index cc46789..7351e6a 100755
diff --git a/buildscripts/eslint.py b/buildscripts/eslint.py
-index c1ab04f..d5c6aef 100755
+index c1ab04fbab..d5c6aef4fc 100755
--- a/buildscripts/eslint.py
+++ b/buildscripts/eslint.py
@@ -18,7 +18,7 @@ import sys
@@ -627,92 +406,8 @@ index c1ab04f..d5c6aef 100755
eslint_distfile = ESLINT_SOURCE_TAR_BASE.substitute(platform=platform, arch=arch)
extract_eslint(temp_tar_file, eslint_distfile)
-diff --git a/buildscripts/gdb/mongo_lock.py b/buildscripts/gdb/mongo_lock.py
-index 98dc66d..9538c29 100644
---- a/buildscripts/gdb/mongo_lock.py
-+++ b/buildscripts/gdb/mongo_lock.py
-@@ -231,8 +231,8 @@ def find_mutex_holder(graph, thread_dict, show):
- mutex_waiter_lwpid))
- if graph:
- graph.add_edge(Thread(mutex_waiter_id, mutex_waiter_lwpid),
-- Lock(long(mutex_value), "Mutex"))
-- graph.add_edge(Lock(long(mutex_value), "Mutex"),
Thread(mutex_holder_id, mutex_holder))
-+ Lock(int(mutex_value), "Mutex"))
-+ graph.add_edge(Lock(int(mutex_value), "Mutex"),
Thread(mutex_holder_id, mutex_holder))
-
-
- def find_lock_manager_holders(graph, thread_dict, show):
-@@ -262,8 +262,8 @@ def find_lock_manager_holders(graph, thread_dict, show):
- lock_head, lock_request["mode"], lock_thread_id,
lock_thread_lwpid) +
- " waited on by thread 0x{:x} (LWP
{})".format(thread_dict[lwpid], lwpid))
- if graph:
-- graph.add_edge(Thread(thread_dict[lwpid], lwpid), Lock(long(lock_head),
"MongoDB lock"))
-- graph.add_edge(Lock(long(lock_head), "MongoDB lock"),
-+ graph.add_edge(Thread(thread_dict[lwpid], lwpid), Lock(int(lock_head),
"MongoDB lock"))
-+ graph.add_edge(Lock(int(lock_head), "MongoDB lock"),
- Thread(lock_thread_id, lock_thread_lwpid))
- lock_request_ptr = lock_request["next"]
-
-diff --git a/buildscripts/hang_analyzer.py b/buildscripts/hang_analyzer.py
-index d554e67..0cca026 100755
---- a/buildscripts/hang_analyzer.py
-+++ b/buildscripts/hang_analyzer.py
-@@ -12,7 +12,7 @@ A prototype hang analyzer for Evergreen integration to help investigate
test tim
- Supports Linux, MacOS X, Solaris, and Windows.
- """
-
--import StringIO
-+import io
- import csv
- import glob
- import itertools
-@@ -177,7 +177,7 @@ class WindowsProcessList(object):
-
- ret = callo([ps, "/FO", "CSV"], logger)
-
-- b = StringIO.StringIO(ret)
-+ b = io.StringIO(ret)
- csvReader = csv.reader(b)
-
- p = [[int(row[1]), row[0]] for row in csvReader if row[1] != "PID"]
-@@ -270,7 +270,7 @@ class DarwinProcessList(object):
-
- ret = callo([ps, "-axco", "pid,comm"], logger)
-
-- b = StringIO.StringIO(ret)
-+ b = io.StringIO(ret)
- csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE,
skipinitialspace=True)
-
- p = [[int(row[0]), row[1]] for row in csvReader if row[0] != "PID"]
-@@ -411,7 +411,7 @@ class LinuxProcessList(object):
-
- ret = callo([ps, "-eo", "pid,args"], logger)
-
-- b = StringIO.StringIO(ret)
-+ b = io.StringIO(ret)
- csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE,
skipinitialspace=True)
-
- p = [[int(row[0]), os.path.split(row[1])[1]] for row in csvReader if row[0] !=
"PID"]
-@@ -433,7 +433,7 @@ class SolarisProcessList(object):
-
- ret = callo([ps, "-eo", "pid,args"], logger)
-
-- b = StringIO.StringIO(ret)
-+ b = io.StringIO(ret)
- csvReader = csv.reader(b, delimiter=' ', quoting=csv.QUOTE_NONE,
skipinitialspace=True)
-
- p = [[int(row[0]), os.path.split(row[1])[1]] for row in csvReader if row[0] !=
"PID"]
-@@ -545,7 +545,7 @@ def signal_process(logger, pid, signalnum):
-
- logger.info("Waiting for process to report")
- time.sleep(5)
-- except OSError, e:
-+ except OSError as e:
- logger.error("Hit OS error trying to signal process: %s" % str(e))
-
- except AttributeError:
diff --git a/buildscripts/idl/idl/binder.py b/buildscripts/idl/idl/binder.py
-index 354acca..9612e39 100644
+index 354acca974..9612e39305 100644
--- a/buildscripts/idl/idl/binder.py
+++ b/buildscripts/idl/idl/binder.py
@@ -608,7 +608,7 @@ def _validate_enum_int(ctxt, idl_enum):
@@ -725,7 +420,7 @@ index 354acca..9612e39 100644
if valid_int != int_values_set:
ctxt.add_enum_non_continuous_range_error(idl_enum, idl_enum.name)
diff --git a/buildscripts/idl/idl/bson.py b/buildscripts/idl/idl/bson.py
-index 214b67a..b84421d 100644
+index 214b67a7bf..b84421d657 100644
--- a/buildscripts/idl/idl/bson.py
+++ b/buildscripts/idl/idl/bson.py
@@ -141,7 +141,7 @@ def cpp_bson_type_name(name):
@@ -738,7 +433,7 @@ index 214b67a..b84421d 100644
def is_valid_bindata_subtype(name):
diff --git a/buildscripts/idl/idl/cpp_types.py b/buildscripts/idl/idl/cpp_types.py
-index aafcf87..e989664 100644
+index aafcf87224..e989664eee 100644
--- a/buildscripts/idl/idl/cpp_types.py
+++ b/buildscripts/idl/idl/cpp_types.py
@@ -28,6 +28,7 @@ from . import writer
@@ -776,7 +471,7 @@ index aafcf87..e989664 100644
# type: (ast.Field) -> None
"""Construct a BsonCppTypeBase."""
diff --git a/buildscripts/idl/idl/enum_types.py b/buildscripts/idl/idl/enum_types.py
-index 3caed6f..f17c926 100644
+index 3caed6f67d..f17c926748 100644
--- a/buildscripts/idl/idl/enum_types.py
+++ b/buildscripts/idl/idl/enum_types.py
@@ -29,11 +29,11 @@ from . import common
@@ -813,7 +508,7 @@ index 3caed6f..f17c926 100644
# type: (Union[syntax.Enum,ast.Enum]) -> None
super(_EnumTypeString, self).__init__(idl_enum)
diff --git a/buildscripts/idl/idl/generator.py b/buildscripts/idl/idl/generator.py
-index 9f591ec..8f97abf 100644
+index 9f591eccc4..8f97abf28c 100644
--- a/buildscripts/idl/idl/generator.py
+++ b/buildscripts/idl/idl/generator.py
@@ -33,6 +33,7 @@ from . import enum_types
@@ -860,7 +555,7 @@ index 9f591ec..8f97abf 100644
def generate_code(spec, output_base_dir, header_file_name, source_file_name):
diff --git a/buildscripts/idl/idl/parser.py b/buildscripts/idl/idl/parser.py
-index fd0af9b..ac36a3a 100644
+index fd0af9b144..ac36a3a280 100644
--- a/buildscripts/idl/idl/parser.py
+++ b/buildscripts/idl/idl/parser.py
@@ -30,6 +30,7 @@ from . import common
@@ -885,7 +580,7 @@ index fd0af9b..ac36a3a 100644
# type: () -> None
"""Construct a ImportResolver."""
diff --git a/buildscripts/idl/idl/struct_types.py b/buildscripts/idl/idl/struct_types.py
-index 9e2a950..cd46e7c 100644
+index 9e2a9504ac..cd46e7c0ff 100644
--- a/buildscripts/idl/idl/struct_types.py
+++ b/buildscripts/idl/idl/struct_types.py
@@ -23,6 +23,7 @@ from . import ast
@@ -910,7 +605,7 @@ index 9e2a950..cd46e7c 100644
def get_constructor_method(self):
# type: () -> MethodInfo
diff --git a/buildscripts/idl/idl/syntax.py b/buildscripts/idl/idl/syntax.py
-index 056d2e9..ff9a395 100644
+index 056d2e9dc3..ff9a3953db 100644
--- a/buildscripts/idl/idl/syntax.py
+++ b/buildscripts/idl/idl/syntax.py
@@ -82,7 +82,7 @@ def _item_and_type(dic):
@@ -923,7 +618,7 @@ index 056d2e9..ff9a395 100644
class SymbolTable(object):
diff --git a/buildscripts/idl/tests/test_binder.py
b/buildscripts/idl/tests/test_binder.py
-index 5502b69..b0f4ba4 100644
+index 5502b69d36..b0f4ba4269 100644
--- a/buildscripts/idl/tests/test_binder.py
+++ b/buildscripts/idl/tests/test_binder.py
@@ -72,7 +72,7 @@ class TestBinder(testcase.IDLTestcase):
@@ -936,7 +631,7 @@ index 5502b69..b0f4ba4 100644
def test_type_positive(self):
diff --git a/buildscripts/lint.py b/buildscripts/lint.py
-index d4061a9..b1ca5b6 100644
+index d4061a9b04..b1ca5b6169 100644
--- a/buildscripts/lint.py
+++ b/buildscripts/lint.py
@@ -2,8 +2,8 @@
@@ -951,7 +646,7 @@ index d4061a9..b1ca5b6 100644
class CheckForConfigH:
def __init__(self):
diff --git a/buildscripts/linter/base.py b/buildscripts/linter/base.py
-index ae78d52..7988876 100644
+index ae78d52066..7988876c98 100644
--- a/buildscripts/linter/base.py
+++ b/buildscripts/linter/base.py
@@ -5,12 +5,11 @@ from __future__ import print_function
@@ -970,7 +665,7 @@ index ae78d52..7988876 100644
# type: (str, str) -> None
"""
diff --git a/buildscripts/linter/git.py b/buildscripts/linter/git.py
-index edde6d0..4680e2f 100644
+index edde6d0a49..4680e2f5fd 100644
--- a/buildscripts/linter/git.py
+++ b/buildscripts/linter/git.py
@@ -175,7 +175,7 @@ def get_files_to_check_from_patch(patches, filter_function):
@@ -983,7 +678,7 @@ index edde6d0..4680e2f 100644
candidates = [check.match(line).group(1) for line in lines if check.match(line)]
diff --git a/buildscripts/linter/parallel.py b/buildscripts/linter/parallel.py
-index 0648bfb..361da0c 100644
+index 0648bfb16e..361da0c559 100644
--- a/buildscripts/linter/parallel.py
+++ b/buildscripts/linter/parallel.py
@@ -2,7 +2,12 @@
@@ -1018,258 +713,8 @@ index 0648bfb..361da0c 100644
# if the queue is empty, exit the worker thread
pp_event.set()
return
-diff --git a/buildscripts/make_archive.py b/buildscripts/make_archive.py
-index 2671fa5..91e2bf0 100755
---- a/buildscripts/make_archive.py
-+++ b/buildscripts/make_archive.py
-@@ -95,14 +95,14 @@ def make_tar_archive(opts):
- enclosing_file_directory = os.path.dirname(temp_file_location)
- if not os.path.exists(enclosing_file_directory):
- os.makedirs(enclosing_file_directory)
-- print "copying %s => %s" % (input_filename, temp_file_location)
-+ print("copying %s => %s" % (input_filename, temp_file_location))
- if os.path.isdir(input_filename):
- shutil.copytree(input_filename, temp_file_location)
- else:
- shutil.copy2(input_filename, temp_file_location)
- tar_command.append(preferred_filename)
-
-- print " ".join(tar_command)
-+ print(" ".join(tar_command))
- # execute the full tar command
- run_directory = os.path.join(os.getcwd(), enclosing_archive_directory)
- proc = Popen(tar_command, stdout=PIPE, stderr=STDOUT, bufsize=0, cwd=run_directory)
-@@ -165,7 +165,7 @@ def parse_options(args):
- opts.transformations = [
- xform.replace(os.path.altsep or os.path.sep, os.path.sep).split('=',
1)
- for xform in opts.transformations]
-- except Exception, e:
-+ except Exception as e:
- parser.error(e)
-
- return opts
-diff --git a/buildscripts/make_vcxproj.py b/buildscripts/make_vcxproj.py
-index e4c2d7a..693352f 100644
---- a/buildscripts/make_vcxproj.py
-+++ b/buildscripts/make_vcxproj.py
-@@ -241,12 +241,12 @@ class ProjFileGenerator(object):
-
- def main():
- if len(sys.argv) != 2:
-- print r"Usage: python buildscripts\make_vcxproj.py FILE_NAME"
-+ print(r"Usage: python buildscripts\make_vcxproj.py FILE_NAME")
- return
-
- with ProjFileGenerator(sys.argv[1]) as projfile:
-- with open("compile_commands.json", "rb") as sjh:
-- contents = sjh.read().decode('utf-8')
-+ with open("compile_commands.json", "r") as sjh:
-+ contents = sjh.read()
- commands = json.loads(contents)
-
- for command in commands:
-diff --git a/buildscripts/mongosymb.py b/buildscripts/mongosymb.py
-index 4da0535..b0aba4d 100755
---- a/buildscripts/mongosymb.py
-+++ b/buildscripts/mongosymb.py
-@@ -36,7 +36,7 @@ def symbolize_frames(trace_doc, dbg_path_resolver,
symbolizer_path=None, dsym_hi
- """Makes a map from binary load address to description of library
from the somap, which is
- a list of dictionaries describing individual loaded libraries.
- """
-- return { so_entry["b"] : so_entry for so_entry in somap_list if
so_entry.has_key("b") }
-+ return { so_entry["b"] : so_entry for so_entry in somap_list if
"b" in so_entry }
-
- base_addr_map =
make_base_addr_map(trace_doc["processInfo"]["somap"])
-
-@@ -50,7 +50,7 @@ def symbolize_frames(trace_doc, dbg_path_resolver,
symbolizer_path=None, dsym_hi
- addr_base = frame["b"]
- else:
- addr_base = soinfo.get("vmaddr", "0")
-- addr = long(addr_base, 16) + long(frame["o"], 16)
-+ addr = int(addr_base, 16) + int(frame["o"], 16)
- # addr currently points to the return address which is the one *after* the call.
x86 is
- # variable length so going backwards is difficult. However llvm-symbolizer seems
to do the
- # right thing if we just subtract 1 byte here. This has the downside of also
adjusting the
-diff --git a/buildscripts/msitrim.py b/buildscripts/msitrim.py
-index 45ca8d4..b5781ed 100644
---- a/buildscripts/msitrim.py
-+++ b/buildscripts/msitrim.py
-@@ -29,7 +29,7 @@ def exec_update(query, column, value):
- view.Close()
-
-
--print "Trimming MSI"
-+print("Trimming MSI")
-
- db = msilib.OpenDatabase(args.file.name, msilib.MSIDBOPEN_DIRECT)
-
-diff --git a/buildscripts/packager-enterprise.py b/buildscripts/packager-enterprise.py
-index 8629be1..202a676 100755
---- a/buildscripts/packager-enterprise.py
-+++ b/buildscripts/packager-enterprise.py
-@@ -30,7 +30,7 @@ import argparse
- import errno
- import getopt
- from glob import glob
--import packager
-+from . import packager
- import os
- import re
- import shutil
-@@ -39,7 +39,7 @@ import subprocess
- import sys
- import tempfile
- import time
--import urlparse
-+import urllib.parse
-
- # The MongoDB names for the architectures we support.
- ARCH_CHOICES=["x86_64", "ppc64le", "s390x",
"arm64"]
-@@ -153,7 +153,7 @@ def main(argv):
- if prefix is None:
- prefix=tempfile.mkdtemp()
-
-- print "Working in directory %s" % prefix
-+ print("Working in directory %s" % prefix)
-
- os.chdir(prefix)
- try:
-@@ -210,7 +210,7 @@ def unpack_binaries_into(build_os, arch, spec, where):
- os.rename("%s/%s" % (release_dir, releasefile), releasefile)
- os.rmdir(release_dir)
- except Exception:
-- exc=sys.exc_value
-+ exc=sys.exc_info()[1]
- os.chdir(rootdir)
- raise exc
- os.chdir(rootdir)
-@@ -226,7 +226,7 @@ def make_package(distro, build_os, arch, spec, srcdir):
- # directory, so the debian directory is needed in all cases (and
- # innocuous in the debianoids' sdirs).
- for pkgdir in ["debian", "rpm"]:
-- print "Copying packaging files from %s to %s" % ("%s/%s" %
(srcdir, pkgdir), sdir)
-+ print("Copying packaging files from %s to %s" % ("%s/%s" %
(srcdir, pkgdir), sdir))
- # FIXME: sh-dash-cee is bad. See if tarfile can do this.
- packager.sysassert(["sh", "-c", "(cd \"%s\"
&& git archive %s %s/ ) | (cd \"%s\" && tar xvf -)" %
(srcdir, spec.metadata_gitspec(), pkgdir, sdir)])
- # Splat the binaries and snmp files under sdir. The "build" stages of
the
-@@ -304,7 +304,7 @@ def move_repos_into_place(src, dst):
- os.mkdir(dname)
- break
- except OSError:
-- exc=sys.exc_value
-+ exc=sys.exc_info()[1]
- if exc.errno == errno.EEXIST:
- pass
- else:
-@@ -324,7 +324,7 @@ def move_repos_into_place(src, dst):
- os.symlink(dname, tmpnam)
- break
- except OSError: # as exc: # Python >2.5
-- exc=sys.exc_value
-+ exc=sys.exc_info()[1]
- if exc.errno == errno.EEXIST:
- pass
- else:
-@@ -342,7 +342,7 @@ def move_repos_into_place(src, dst):
- os.symlink(os.readlink(dst), oldnam)
- break
- except OSError: # as exc: # Python >2.5
-- exc=sys.exc_value
-+ exc=sys.exc_info()[1]
- if exc.errno == errno.EEXIST:
- pass
- else:
-diff --git a/buildscripts/packager.py b/buildscripts/packager.py
-index c821da9..19d34b3 100755
---- a/buildscripts/packager.py
-+++ b/buildscripts/packager.py
-@@ -343,7 +343,7 @@ def main(argv):
- prefix = args.prefix
- if prefix is None:
- prefix = tempfile.mkdtemp()
-- print "Working in directory %s" % prefix
-+ print("Working in directory %s" % prefix)
-
- os.chdir(prefix)
- try:
-@@ -382,14 +382,14 @@ def crossproduct(*seqs):
-
- def sysassert(argv):
- """Run argv and assert that it exited with status
0."""
-- print "In %s, running %s" % (os.getcwd(), " ".join(argv))
-+ print("In %s, running %s" % (os.getcwd(), " ".join(argv)))
- sys.stdout.flush()
- sys.stderr.flush()
- assert(subprocess.Popen(argv).wait()==0)
-
- def backtick(argv):
- """Run argv and return its output string."""
-- print "In %s, running %s" % (os.getcwd(), " ".join(argv))
-+ print("In %s, running %s" % (os.getcwd(), " ".join(argv)))
- sys.stdout.flush()
- sys.stderr.flush()
- return subprocess.Popen(argv, stdout=subprocess.PIPE).communicate()[0]
-@@ -421,11 +421,11 @@ def unpack_binaries_into(build_os, arch, spec, where):
- sysassert(["tar", "xvzf",
rootdir+"/"+tarfile(build_os, arch, spec)])
- release_dir = glob('mongodb-linux-*')[0]
- for releasefile in "bin", "GNU-AGPL-3.0",
"README", "THIRD-PARTY-NOTICES", "MPL-2":
-- print "moving file: %s/%s" % (release_dir, releasefile)
-+ print("moving file: %s/%s" % (release_dir, releasefile))
- os.rename("%s/%s" % (release_dir, releasefile), releasefile)
- os.rmdir(release_dir)
- except Exception:
-- exc=sys.exc_value
-+ exc=sys.exc_info()[1]
- os.chdir(rootdir)
- raise exc
- os.chdir(rootdir)
-@@ -441,7 +441,7 @@ def make_package(distro, build_os, arch, spec, srcdir):
- # directory, so the debian directory is needed in all cases (and
- # innocuous in the debianoids' sdirs).
- for pkgdir in ["debian", "rpm"]:
-- print "Copying packaging files from %s to %s" % ("%s/%s" %
(srcdir, pkgdir), sdir)
-+ print("Copying packaging files from %s to %s" % ("%s/%s" %
(srcdir, pkgdir), sdir))
- # FIXME: sh-dash-cee is bad. See if tarfile can do this.
- sysassert(["sh", "-c", "(cd \"%s\" &&
git archive %s %s/ ) | (cd \"%s\" && tar xvf -)" % (srcdir,
spec.metadata_gitspec(), pkgdir, sdir)])
- # Splat the binaries under sdir. The "build" stages of the
-@@ -574,7 +574,7 @@ def move_repos_into_place(src, dst):
- os.mkdir(dname)
- break
- except OSError:
-- exc=sys.exc_value
-+ exc=sys.exc_info()[1]
- if exc.errno == errno.EEXIST:
- pass
- else:
-@@ -594,7 +594,7 @@ def move_repos_into_place(src, dst):
- os.symlink(dname, tmpnam)
- break
- except OSError: # as exc: # Python >2.5
-- exc=sys.exc_value
-+ exc=sys.exc_info()[1]
- if exc.errno == errno.EEXIST:
- pass
- else:
-@@ -612,7 +612,7 @@ def move_repos_into_place(src, dst):
- os.symlink(os.readlink(dst), oldnam)
- break
- except OSError: # as exc: # Python >2.5
-- exc=sys.exc_value
-+ exc=sys.exc_info()[1]
- if exc.errno == errno.EEXIST:
- pass
- else:
-@@ -765,7 +765,7 @@ def ensure_dir(filename):
- try:
- os.makedirs(dirpart)
- except OSError: # as exc: # Python >2.5
-- exc=sys.exc_value
-+ exc=sys.exc_info()[1]
- if exc.errno == errno.EEXIST:
- pass
- else:
diff --git a/buildscripts/resmokeconfig/loggers/__init__.py
b/buildscripts/resmokeconfig/loggers/__init__.py
-index 6511d49..454f675 100644
+index 6511d49636..454f675ca2 100644
--- a/buildscripts/resmokeconfig/loggers/__init__.py
+++ b/buildscripts/resmokeconfig/loggers/__init__.py
@@ -21,7 +21,7 @@ def _get_named_loggers():
@@ -1282,7 +727,7 @@ index 6511d49..454f675 100644
(short_name, ext) = os.path.splitext(filename)
if ext in (".yml", ".yaml"):
diff --git a/buildscripts/resmokeconfig/suites/__init__.py
b/buildscripts/resmokeconfig/suites/__init__.py
-index e075dd2..2ca2187 100644
+index e075dd22e0..2ca2187e6e 100644
--- a/buildscripts/resmokeconfig/suites/__init__.py
+++ b/buildscripts/resmokeconfig/suites/__init__.py
@@ -21,7 +21,7 @@ def _get_named_suites():
@@ -1295,7 +740,7 @@ index e075dd2..2ca2187 100644
(short_name, ext) = os.path.splitext(filename)
if ext in (".yml", ".yaml"):
diff --git a/buildscripts/resmokelib/config.py b/buildscripts/resmokelib/config.py
-index 1dcd7d7..8a15651 100644
+index 1dcd7d7793..8a15651b4a 100644
--- a/buildscripts/resmokelib/config.py
+++ b/buildscripts/resmokelib/config.py
@@ -58,7 +58,7 @@ DEFAULTS = {
@@ -1308,7 +753,7 @@ index 1dcd7d7..8a15651 100644
"shellReadMode": None,
"shellWriteMode": None,
diff --git a/buildscripts/resmokelib/core/process.py
b/buildscripts/resmokelib/core/process.py
-index 03fb849..e70f90a 100644
+index 03fb849616..e70f90abb4 100644
--- a/buildscripts/resmokelib/core/process.py
+++ b/buildscripts/resmokelib/core/process.py
@@ -196,8 +196,8 @@ class Process(object):
@@ -1323,10 +768,10 @@ index 03fb849..e70f90a 100644
# Adapted from implementation of Popen.terminate() in subprocess.py of
Python 2.7
# because earlier versions do not catch exceptions.
diff --git a/buildscripts/resmokelib/logging/buildlogger.py
b/buildscripts/resmokelib/logging/buildlogger.py
-index a577d64..d5405bd 100644
+index 01bba20202..044855cad0 100644
--- a/buildscripts/resmokelib/logging/buildlogger.py
+++ b/buildscripts/resmokelib/logging/buildlogger.py
-@@ -205,7 +205,7 @@ class BuildloggerServer(object):
+@@ -267,7 +267,7 @@ class BuildloggerServer(object):
def __init__(self):
tmp_globals = {}
self.config = {}
@@ -1336,7 +781,7 @@ index a577d64..d5405bd 100644
# Rename "slavename" to "username" if present.
if "slavename" in self.config and "username" not in
self.config:
diff --git a/buildscripts/resmokelib/selector.py b/buildscripts/resmokelib/selector.py
-index 3fff181..4395c3b 100644
+index 3fff181c18..4395c3b2cf 100644
--- a/buildscripts/resmokelib/selector.py
+++ b/buildscripts/resmokelib/selector.py
@@ -66,7 +66,7 @@ class TestFileExplorer(object):
@@ -1358,7 +803,7 @@ index 3fff181..4395c3b 100644
if key == "$allOf":
return _AllOfExpression(_make_expression_list(value))
diff --git a/buildscripts/resmokelib/testing/executor.py
b/buildscripts/resmokelib/testing/executor.py
-index cc66556..db8d385 100644
+index 9574111904..d92209a7b1 100644
--- a/buildscripts/resmokelib/testing/executor.py
+++ b/buildscripts/resmokelib/testing/executor.py
@@ -64,7 +64,7 @@ class TestSuiteExecutor(object):
@@ -1380,7 +825,7 @@ index cc66556..db8d385 100644
return queue
diff --git a/buildscripts/resmokelib/testing/fixtures/interface.py
b/buildscripts/resmokelib/testing/fixtures/interface.py
-index 6dffa24..dfeeb13 100644
+index 6dffa24e43..dfeeb1326d 100644
--- a/buildscripts/resmokelib/testing/fixtures/interface.py
+++ b/buildscripts/resmokelib/testing/fixtures/interface.py
@@ -4,6 +4,7 @@ Interface of the different fixtures for executing JSTests against.
@@ -1407,10 +852,10 @@ index 6dffa24..dfeeb13 100644
# is defined for all subclasses of Fixture.
REGISTERED_NAME = "Fixture"
diff --git a/buildscripts/resmokelib/testing/fixtures/replicaset.py
b/buildscripts/resmokelib/testing/fixtures/replicaset.py
-index 025ce25..678ef45 100644
+index 5274ffb4c4..49675bd5e9 100644
--- a/buildscripts/resmokelib/testing/fixtures/replicaset.py
+++ b/buildscripts/resmokelib/testing/fixtures/replicaset.py
-@@ -75,7 +75,7 @@ class ReplicaSetFixture(interface.ReplFixture):
+@@ -87,7 +87,7 @@ class ReplicaSetFixture(interface.ReplFixture):
self.replset_name = self.mongod_options.get("replSet",
"rs")
if not self.nodes:
@@ -1420,7 +865,7 @@ index 025ce25..678ef45 100644
self.nodes.append(node)
diff --git a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
-index 4f90d16..eb96b8b 100644
+index 06e84c213b..47ba407a51 100644
--- a/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
+++ b/buildscripts/resmokelib/testing/fixtures/shardedcluster.py
@@ -85,7 +85,7 @@ class ShardedClusterFixture(interface.Fixture):
@@ -1433,11 +878,11 @@ index 4f90d16..eb96b8b 100644
shard = self._new_standalone_shard(i)
elif isinstance(self.num_rs_nodes_per_shard, int):
diff --git a/buildscripts/resmokelib/testing/hooks/interface.py
b/buildscripts/resmokelib/testing/hooks/interface.py
-index 6ca4ae7..a225a1b 100644
+index cd5200764f..958cc92192 100644
--- a/buildscripts/resmokelib/testing/hooks/interface.py
+++ b/buildscripts/resmokelib/testing/hooks/interface.py
@@ -7,6 +7,7 @@ from __future__ import absolute_import
- from ... import logging
+ from ...logging import loggers
from ...utils import registry
+import six
@@ -1460,7 +905,7 @@ index 6ca4ae7..a225a1b 100644
@staticmethod
diff --git a/buildscripts/resmokelib/testing/suite.py
b/buildscripts/resmokelib/testing/suite.py
-index 132a2d7..07262d1 100644
+index 132a2d70d9..07262d194a 100644
--- a/buildscripts/resmokelib/testing/suite.py
+++ b/buildscripts/resmokelib/testing/suite.py
@@ -262,7 +262,7 @@ class Suite(object):
@@ -1473,7 +918,7 @@ index 132a2d7..07262d1 100644
bulleter_sb = []
summary = self._summarize_report(
diff --git a/buildscripts/resmokelib/testing/summary.py
b/buildscripts/resmokelib/testing/summary.py
-index bb44472..54da218 100644
+index bb44472caa..54da2181d5 100644
--- a/buildscripts/resmokelib/testing/summary.py
+++ b/buildscripts/resmokelib/testing/summary.py
@@ -17,6 +17,6 @@ def combine(summary1, summary2):
@@ -1485,7 +930,7 @@ index bb44472..54da218 100644
args.append(summary1[i] + summary2[i])
return Summary._make(args)
diff --git a/buildscripts/resmokelib/testing/testcases/interface.py
b/buildscripts/resmokelib/testing/testcases/interface.py
-index be7f14a..f736bd5 100644
+index be7f14afd5..f736bd5c36 100644
--- a/buildscripts/resmokelib/testing/testcases/interface.py
+++ b/buildscripts/resmokelib/testing/testcases/interface.py
@@ -7,6 +7,7 @@ from __future__ import absolute_import
@@ -1525,7 +970,7 @@ index be7f14a..f736bd5 100644
# When the TestCase is created by the TestSuiteExecutor (through a call to
make_test_case())
diff --git a/buildscripts/resmokelib/testing/testcases/jstest.py
b/buildscripts/resmokelib/testing/testcases/jstest.py
-index adb2828..528a974 100644
+index adb28285f0..528a9747a9 100644
--- a/buildscripts/resmokelib/testing/testcases/jstest.py
+++ b/buildscripts/resmokelib/testing/testcases/jstest.py
@@ -235,7 +235,7 @@ class JSTestCase(interface.TestCase):
@@ -1538,7 +983,7 @@ index adb2828..528a974 100644
test_case = self._create_test_case_for_thread(logger, thread_id)
test_cases.append(test_case)
diff --git a/buildscripts/resmokelib/utils/__init__.py
b/buildscripts/resmokelib/utils/__init__.py
-index fa782f3..3ce73c9 100644
+index fa782f3430..3ce73c9b64 100644
--- a/buildscripts/resmokelib/utils/__init__.py
+++ b/buildscripts/resmokelib/utils/__init__.py
@@ -43,14 +43,14 @@ def is_string_list(lst):
@@ -1559,7 +1004,7 @@ index fa782f3..3ce73c9 100644
def is_js_file(filename):
diff --git a/buildscripts/resmokelib/utils/globstar.py
b/buildscripts/resmokelib/utils/globstar.py
-index 644ebfe..52100d7 100644
+index 644ebfe3e3..52100d7d9d 100644
--- a/buildscripts/resmokelib/utils/globstar.py
+++ b/buildscripts/resmokelib/utils/globstar.py
@@ -145,7 +145,7 @@ def _list_dir(pathname):
@@ -1572,7 +1017,7 @@ index 644ebfe..52100d7 100644
except StopIteration:
return None # 'pathname' directory does not exist
diff --git a/buildscripts/resmokelib/utils/jscomment.py
b/buildscripts/resmokelib/utils/jscomment.py
-index 18da788..a393c43 100644
+index 18da788582..a393c43723 100644
--- a/buildscripts/resmokelib/utils/jscomment.py
+++ b/buildscripts/resmokelib/utils/jscomment.py
@@ -39,7 +39,7 @@ def get_tags(pathname):
@@ -1585,7 +1030,7 @@ index 18da788..a393c43 100644
return tags
except yaml.YAMLError as err:
diff --git a/buildscripts/resmokelib/utils/queue.py
b/buildscripts/resmokelib/utils/queue.py
-index 80da5e2..41d23d5 100644
+index 80da5e2cc6..41d23d54bf 100644
--- a/buildscripts/resmokelib/utils/queue.py
+++ b/buildscripts/resmokelib/utils/queue.py
@@ -9,15 +9,20 @@ See
https://bugs.python.org/issue1167930 for more details.
@@ -1612,488 +1057,8 @@ index 80da5e2..41d23d5 100644
"""
A multi-producer, multi-consumer queue.
"""
-diff --git a/buildscripts/setup_multiversion_mongodb.py
b/buildscripts/setup_multiversion_mongodb.py
-index f3cb804..86bea8f 100755
---- a/buildscripts/setup_multiversion_mongodb.py
-+++ b/buildscripts/setup_multiversion_mongodb.py
-@@ -189,7 +189,7 @@ class MultiVersionDownloader(object):
-
- urls = []
- requested_version_parts = get_version_parts(version)
-- for link_version, link_url in self.links.iteritems():
-+ for link_version, link_url in self.links.items():
- link_version_parts = get_version_parts(link_version)
- if link_version_parts[:len(requested_version_parts)] ==
requested_version_parts:
- # The 'link_version' is a candidate for the requested
'version' if
-@@ -220,11 +220,11 @@ class MultiVersionDownloader(object):
- else:
- print("Falling back to generic architecture.")
-
-- urls.sort(key=lambda (version, _): get_version_parts(version,
for_sorting=True))
-+ urls.sort(key=lambda version__: get_version_parts(version__[0],
for_sorting=True))
- full_version = urls[-1][0]
- url = urls[-1][1]
- extract_dir = url.split("/")[-1][:-4]
-- file_suffix = os.path.splitext(urlparse.urlparse(url).path)[1]
-+ file_suffix = os.path.splitext(urllib.parse.urlparse(url).path)[1]
-
- # Only download if we don't already have the directory.
- # Note, we cannot detect if 'latest' has already been downloaded, as the
name
-diff --git a/buildscripts/smoke.py b/buildscripts/smoke.py
-index 4fdb01d..eda604e 100755
---- a/buildscripts/smoke.py
-+++ b/buildscripts/smoke.py
-@@ -34,7 +34,7 @@
- # jobs on the same host at once. So something's gotta change.
-
- from datetime import datetime
--from itertools import izip
-+
- import glob
- import logging
- from optparse import OptionParser
-@@ -55,11 +55,11 @@ from pymongo import MongoClient
- from pymongo.errors import OperationFailure
- from pymongo import ReadPreference
-
--import cleanbb
--import utils
-+from . import cleanbb
-+from . import utils
-
- try:
-- import cPickle as pickle
-+ import pickle as pickle
- except ImportError:
- import pickle
-
-@@ -133,17 +133,17 @@ class NullMongod(object):
-
-
- def dump_stacks(signal, frame):
-- print "======================================"
-- print "DUMPING STACKS due to SIGUSR1 signal"
-- print "======================================"
-+ print("======================================")
-+ print("DUMPING STACKS due to SIGUSR1 signal")
-+ print("======================================")
- threads = threading.enumerate();
-
-- print "Total Threads: " + str(len(threads))
-+ print("Total Threads: " + str(len(threads)))
-
- for id, stack in sys._current_frames().items():
-- print "Thread %d" % (id)
-- print "".join(traceback.format_stack(stack))
-- print "======================================"
-+ print("Thread %d" % (id))
-+ print("".join(traceback.format_stack(stack)))
-+ print("======================================")
-
-
- def buildlogger(cmd, is_global=False):
-@@ -196,8 +196,8 @@ class mongod(NullMongod):
- try:
- self.check_mongo_port(int(port))
- return True
-- except Exception,e:
-- print >> sys.stderr, e
-+ except Exception as e:
-+ print(e, file=sys.stderr)
- return False
-
- def did_mongod_start(self, port=mongod_port, timeout=300):
-@@ -207,14 +207,14 @@ class mongod(NullMongod):
- if is_up:
- return True
- timeout = timeout - 1
-- print >> sys.stderr, "timeout starting mongod"
-+ print("timeout starting mongod", file=sys.stderr)
- return False
-
- def start(self):
- global mongod_port
- global mongod
- if self.proc:
-- print >> sys.stderr, "probable bug: self.proc already set in
start()"
-+ print("probable bug: self.proc already set in start()",
file=sys.stderr)
- return
- self.ensure_test_dirs()
- dir_name = smoke_db_prefix + "/data/db/sconsTests/"
-@@ -270,7 +270,7 @@ class mongod(NullMongod):
- '--sslAllowConnectionsWithoutCertificates']
- if self.kwargs.get('rlp_path'):
- argv += ['--basisTechRootDirectory',
self.kwargs.get('rlp_path')]
-- print "running " + " ".join(argv)
-+ print("running " + " ".join(argv))
- self.proc = self._start(buildlogger(argv, is_global=True))
-
- # If the mongod process is spawned under buildlogger.py, then the first line of
output
-@@ -352,7 +352,7 @@ class mongod(NullMongod):
-
- def stop(self):
- if not self.proc:
-- print >> sys.stderr, "probable bug: self.proc unset in
stop()"
-+ print("probable bug: self.proc unset in stop()", file=sys.stderr)
- return
- try:
- if os.sys.platform == "win32" and self.job_object is not None:
-@@ -407,9 +407,9 @@ class mongod(NullMongod):
- self.proc.terminate()
- else:
- os.kill(self.proc.pid, 15)
-- except Exception, e:
-- print >> sys.stderr, "error shutting down mongod"
-- print >> sys.stderr, e
-+ except Exception as e:
-+ print("error shutting down mongod", file=sys.stderr)
-+ print(e, file=sys.stderr)
- self.proc.wait()
-
- if self._stdout_pipe is not None:
-@@ -424,12 +424,12 @@ class mongod(NullMongod):
- # anyway.
- retcode = self.proc.returncode
- if os.sys.platform != "win32" and retcode != 0:
-- raise(Exception('mongod process exited with non-zero code %d' %
retcode))
-+ raise Exception
-
- def wait_for_repl(self):
-- print "Awaiting replicated (w:2, wtimeout:5min) insert (port:" +
str(self.port) + ")"
-+ print("Awaiting replicated (w:2, wtimeout:5min) insert (port:" +
str(self.port) + ")")
- MongoClient(port=self.port).testing.smokeWait.insert({}, w=2,
wtimeout=5*60*1000)
-- print "Replicated write completed -- done wait_for_repl"
-+ print("Replicated write completed -- done wait_for_repl")
-
- class Bug(Exception):
- def __str__(self):
-@@ -457,7 +457,7 @@ class TestServerFailure(TestFailure):
- def check_db_hashes(master, slave):
- # Need to pause a bit so a slave might catch up...
- if not slave.slave:
-- raise(Bug("slave instance doesn't have slave attribute set"))
-+ raise Bug
-
- master.wait_for_repl()
-
-@@ -469,7 +469,7 @@ def check_db_hashes(master, slave):
-
- global lost_in_slave, lost_in_master, screwy_in_slave, replicated_collections
-
-- replicated_collections += master.dict.keys()
-+ replicated_collections += list(master.dict.keys())
-
- for coll in replicated_collections:
- if coll not in slave.dict and coll not in lost_in_slave:
-@@ -489,13 +489,13 @@ def check_db_hashes(master, slave):
- sDocs = list(sTestDB[coll].find().sort("_id", 1))
- mDiffDocs = list()
- sDiffDocs = list()
-- for left, right in izip(mDocs, sDocs):
-+ for left, right in zip(mDocs, sDocs):
- if left != right:
- mDiffDocs.append(left)
- sDiffDocs.append(right)
-
- stats["docs"] = {'master': mDiffDocs, 'slave':
sDiffDocs }
-- except Exception, e:
-+ except Exception as e:
- stats["error-docs"] = e;
-
- screwy_in_slave[coll] = stats
-@@ -506,7 +506,7 @@ def check_db_hashes(master, slave):
- mOplog = mTestDB.connection.local[oplog];
- oplog_entries = list(mOplog.find({"$or":
[{"ns":mTestDB[coll].full_name}, \
-
{"op":"c"}]}).sort("$natural", 1))
-- print "oplog for %s" % mTestDB[coll].full_name
-+ print("oplog for %s" % mTestDB[coll].full_name)
- for doc in oplog_entries:
- pprint.pprint(doc, width=200)
-
-@@ -745,7 +745,7 @@ def runTest(test, result):
-
- is_mongod_still_up = test_mongod.is_mongod_up(mongod_port)
- if start_mongod and not is_mongod_still_up:
-- print "mongod is not running after test"
-+ print("mongod is not running after test")
- result["mongod_running_at_end"] = is_mongod_still_up;
- raise TestServerFailure(path)
-
-@@ -754,7 +754,7 @@ def runTest(test, result):
- if r != 0:
- raise TestExitFailure(path, r)
-
-- print ""
-+ print("")
-
- def run_tests(tests):
- # FIXME: some suites of tests start their own mongod, so don't
-@@ -825,7 +825,7 @@ def run_tests(tests):
- result = primary.admin.command("ismaster");
- ismaster = result["ismaster"]
- if not ismaster:
-- print "waiting for primary to be available ..."
-+ print("waiting for primary to be available ...")
- time.sleep(.2)
-
- secondaryUp = False
-@@ -835,7 +835,7 @@ def run_tests(tests):
- result = sConn.admin.command("ismaster");
- secondaryUp = result["secondary"]
- if not secondaryUp:
-- print "waiting for secondary to be available ..."
-+ print("waiting for secondary to be available ...")
- time.sleep(.2)
-
- if small_oplog or small_oplog_rs:
-@@ -857,7 +857,7 @@ def run_tests(tests):
- if skipTest(test_path):
- test_result["status"] = "skip"
-
-- print "skipping " + test_path
-+ print("skipping " + test_path)
- else:
- fails.append(test)
- runTest(test, test_result)
-@@ -897,20 +897,20 @@ def run_tests(tests):
- use_ssl=use_ssl)
- master.start()
-
-- except TestFailure, f:
-+ except TestFailure as f:
- test_result["end"] = time.time()
- test_result["elapsed"] = test_result["end"] -
test_result["start"]
- test_result["error"] = str(f)
- test_result["status"] = "fail"
- test_report["results"].append( test_result )
- try:
-- print f
-+ print(f)
- # Record the failing test and re-raise.
- losers[f.path] = f.status
- raise f
-- except TestServerFailure, f:
-+ except TestServerFailure as f:
- return 2
-- except TestFailure, f:
-+ except TestFailure as f:
- if not continue_on_failure:
- return 1
- if isinstance(slave, mongod):
-@@ -925,51 +925,51 @@ def run_tests(tests):
- def check_and_report_replication_dbhashes():
- def missing(lst, src, dst):
- if lst:
-- print """The following collections were present in the %s but
not the %s
--at the end of testing:""" % (src, dst)
-+ print("""The following collections were present in the %s but
not the %s
-+at the end of testing:""" % (src, dst))
- for db in lst:
-- print db
-+ print(db)
-
- missing(lost_in_slave, "master", "slave")
- missing(lost_in_master, "slave", "master")
- if screwy_in_slave:
-- print """The following collections have different hashes in the
master and slave:"""
-+ print("""The following collections have different hashes in the
master and slave:""")
- for coll in screwy_in_slave.keys():
- stats = screwy_in_slave[coll]
- # Counts are "approx" because they are collected after the dbhash
runs and may not
- # reflect the states of the collections that were hashed. If the hashes
differ, one
- # possibility is that a test exited with writes still in-flight.
-- print "collection: %s\t (master/slave) hashes: %s/%s counts (approx):
%i/%i" % (coll, stats['hashes']['master'],
stats['hashes']['slave'], stats['counts']['master'],
stats['counts']['slave'])
-+ print("collection: %s\t (master/slave) hashes: %s/%s counts (approx):
%i/%i" % (coll, stats['hashes']['master'],
stats['hashes']['slave'], stats['counts']['master'],
stats['counts']['slave']))
- if "docs" in stats:
- if (("master" in stats["docs"] and
len(stats["docs"]["master"]) == 0) and
- ("slave" in stats["docs"] and
len(stats["docs"]["slave"]) == 0)):
-- print "All docs matched!"
-+ print("All docs matched!")
- else:
-- print "Different Docs"
-- print "Master docs:"
-+ print("Different Docs")
-+ print("Master docs:")
- pprint.pprint(stats["docs"]["master"],
indent=2)
-- print "Slave docs:"
-+ print("Slave docs:")
- pprint.pprint(stats["docs"]["slave"], indent=2)
- if "error-docs" in stats:
-- print "Error getting docs to diff:"
-+ print("Error getting docs to diff:")
- pprint.pprint(stats["error-docs"])
- return True
-
- if (small_oplog or small_oplog_rs) and not (lost_in_master or lost_in_slave or
screwy_in_slave):
-- print "replication ok for %d collections" %
(len(replicated_collections))
-+ print("replication ok for %d collections" %
(len(replicated_collections)))
-
- return False
-
-
- def report():
-- print "%d tests succeeded" % len(winners)
-+ print("%d tests succeeded" % len(winners))
- num_missed = len(tests) - (len(winners) + len(losers.keys()))
- if num_missed:
-- print "%d tests didn't get run" % num_missed
-+ print("%d tests didn't get run" % num_missed)
- if losers:
-- print "The following tests failed (with exit code):"
-+ print("The following tests failed (with exit code):")
- for loser in losers:
-- print "%s\t%d" % (loser, losers[loser])
-+ print("%s\t%d" % (loser, losers[loser]))
-
- test_result = { "start": time.time() }
- if check_and_report_replication_dbhashes():
-@@ -981,7 +981,7 @@ def report():
- test_report["results"].append( test_result )
-
- if report_file:
-- f = open( report_file, "wb" )
-+ f = open( report_file, "w" )
- f.write( json.dumps( test_report ) )
- f.close()
-
-@@ -1244,7 +1244,7 @@ def run_old_fails():
- return # This counts as passing so we will run all tests
-
- if ('version' not in state or state['version'] != file_version()):
-- print "warning: old version of failfile.smoke detected. skipping recent
fails"
-+ print("warning: old version of failfile.smoke detected. skipping recent
fails")
- clear_failfile()
- return
-
-@@ -1308,7 +1308,7 @@ def main():
- try:
- signal.signal(signal.SIGUSR1, dump_stacks)
- except AttributeError:
-- print "Cannot catch signals on Windows"
-+ print("Cannot catch signals on Windows")
-
- parser = OptionParser(usage="usage: smoke.py [OPTIONS] ARGS*")
- parser.add_option('--mode', dest='mode', default='suite',
-@@ -1450,19 +1450,19 @@ def main():
-
- if options.ignore_files != None :
- ignore_patt = re.compile( options.ignore_files )
-- print "Ignoring files with pattern: ", ignore_patt
-+ print("Ignoring files with pattern: ", ignore_patt)
-
- def ignore_test( test ):
- if ignore_patt.search( test[0] ) != None:
-- print "Ignoring test ", test[0]
-+ print("Ignoring test ", test[0])
- return False
- else:
- return True
-
-- tests = filter( ignore_test, tests )
-+ tests = list(filter( ignore_test, tests ))
-
- if not tests:
-- print "warning: no tests specified"
-+ print("warning: no tests specified")
- return
-
- if options.with_cleanbb:
-@@ -1480,7 +1480,7 @@ def main():
- test_report["failures"] = len(losers.keys())
- test_report["mongod_running_at_end"] =
mongod().is_mongod_up(mongod_port)
- if report_file:
-- f = open( report_file, "wb" )
-+ f = open( report_file, "w" )
- f.write( json.dumps( test_report, indent=4, separators=(',', ':
')) )
- f.close()
-
-diff --git a/buildscripts/test_failures.py b/buildscripts/test_failures.py
-index 63f138a..a4db503 100755
---- a/buildscripts/test_failures.py
-+++ b/buildscripts/test_failures.py
-@@ -20,7 +20,7 @@ import time
- import warnings
-
- try:
-- from urlparse import urlparse
-+ from urllib.parse import urlparse
- except ImportError:
- from urllib.parse import urlparse
-
-@@ -31,7 +31,7 @@ import yaml
- LOGGER = logging.getLogger(__name__)
-
- if sys.version_info[0] == 2:
-- _STRING_TYPES = (basestring,)
-+ _STRING_TYPES = (str,)
- else:
- _STRING_TYPES = (str,)
-
-diff --git a/buildscripts/tests/resmokelib/test_selector.py
b/buildscripts/tests/resmokelib/test_selector.py
-index ae8b0a9..66eeb40 100644
---- a/buildscripts/tests/resmokelib/test_selector.py
-+++ b/buildscripts/tests/resmokelib/test_selector.py
-@@ -178,12 +178,12 @@ class TestTestList(unittest.TestCase):
-
- def test_roots_with_unmatching_glob(self):
- glob_roots = ["unknown/subdir1/*.js"]
-- with self.assertRaisesRegexp(ValueError, "Pattern does not match any files:
unknown/subdir1/\*.js"):
-+ with self.assertRaisesRegex(ValueError, "Pattern does not match any files:
unknown/subdir1/\*.js"):
- selector._TestList(self.test_file_explorer, glob_roots)
-
- def test_roots_unknown_file(self):
- roots = ["dir/subdir1/unknown"]
-- with self.assertRaisesRegexp(ValueError, "Unrecognized test file:
dir/subdir1/unknown"):
-+ with self.assertRaisesRegex(ValueError, "Unrecognized test file:
dir/subdir1/unknown"):
- selector._TestList(self.test_file_explorer, roots, tests_are_files=True)
-
- def test_include_files(self):
-@@ -208,7 +208,7 @@ class TestTestList(unittest.TestCase):
- def test_exclude_files_no_match(self):
- roots = ["dir/subdir1/*.js", "dir/subdir2/test21.*"]
- test_list = selector._TestList(self.test_file_explorer, roots)
-- with self.assertRaisesRegexp(ValueError, "Unrecognized test file:
.*$"):
-+ with self.assertRaisesRegex(ValueError, "Unrecognized test file:
.*$"):
- test_list.exclude_files(["dir/subdir2/test26.js"])
-
- def test_exclude_files_glob(self):
-@@ -420,7 +420,7 @@ class TestFilterTests(unittest.TestCase):
-
- def test_jstest_unknown_file(self):
- config = {"roots": ["dir/subdir1/*.js",
"dir/subdir1/unknown"]}
-- with self.assertRaisesRegexp(ValueError, "Unrecognized test file:
dir/subdir1/unknown"):
-+ with self.assertRaisesRegex(ValueError, "Unrecognized test file:
dir/subdir1/unknown"):
- selector.filter_tests("js_test", config, self.test_file_explorer)
-
- def test_json_schema_exclude_files(self):
-diff --git a/buildscripts/tests/test_aws_ec2.py b/buildscripts/tests/test_aws_ec2.py
-index 40f4333..2de7d1f 100755
---- a/buildscripts/tests/test_aws_ec2.py
-+++ b/buildscripts/tests/test_aws_ec2.py
-@@ -169,7 +169,7 @@ class AwsEc2ControlStatus(AwsEc2TestCase):
-
- code, ret = self.aws_ec2.control_instance(mode="status",
image_id="bad_id")
- self.assertNotEqual(0, code, ret)
-- self.assertRegexpMatches(ret, "Invalid", ret)
-+ self.assertRegex(ret, "Invalid", ret)
-
-
- class AwsEc2ControlStart(AwsEc2TestCase):
-diff --git a/buildscripts/update_test_lifecycle.py
b/buildscripts/update_test_lifecycle.py
-index 52bf204..90ab279 100755
---- a/buildscripts/update_test_lifecycle.py
-+++ b/buildscripts/update_test_lifecycle.py
-@@ -39,7 +39,7 @@ from buildscripts.ciconfig import tags as ci_tags
- LOGGER = logging.getLogger(__name__)
-
- if sys.version_info[0] == 2:
-- _NUMBER_TYPES = (int, long, float)
-+ _NUMBER_TYPES = (int, int, float)
- else:
- _NUMBER_TYPES = (int, float)
-
diff --git a/buildscripts/utils.py b/buildscripts/utils.py
-index 69a7892..93c697a 100644
+index 69a78921ca..93c697adca 100644
--- a/buildscripts/utils.py
+++ b/buildscripts/utils.py
@@ -122,7 +122,7 @@ def getprocesslist():
@@ -2142,7 +1107,7 @@ index 69a7892..93c697a 100644
codecs.register_error('repr', replace_with_repr)
diff --git a/site_scons/libdeps.py b/site_scons/libdeps.py
-index 632ed29..1641c3b 100644
+index 632ed29be5..1641c3ba90 100644
--- a/site_scons/libdeps.py
+++ b/site_scons/libdeps.py
@@ -122,7 +122,7 @@ def __get_libdeps(node):
@@ -2173,7 +1138,7 @@ index 632ed29..1641c3b 100644
else:
result.append(d)
diff --git a/site_scons/mongo/__init__.py b/site_scons/mongo/__init__.py
-index 510bd7b..f774780 100644
+index 510bd7bcc2..f77478092b 100644
--- a/site_scons/mongo/__init__.py
+++ b/site_scons/mongo/__init__.py
@@ -5,4 +5,4 @@
@@ -2183,7 +1148,7 @@ index 510bd7b..f774780 100644
- print "%s failed: %s" % (bf.node, bf.errstr)
+ print("%s failed: %s" % (bf.node, bf.errstr))
diff --git a/site_scons/mongo/generators.py b/site_scons/mongo/generators.py
-index c07e86a..5958e69 100644
+index c07e86a4d1..5958e6923b 100644
--- a/site_scons/mongo/generators.py
+++ b/site_scons/mongo/generators.py
@@ -1,6 +1,6 @@
@@ -2204,7 +1169,7 @@ index c07e86a..5958e69 100644
hasher.update(option)
hasher.update(str(env.GetOption(option)))
diff --git a/site_scons/site_tools/dagger/__init__.py
b/site_scons/site_tools/dagger/__init__.py
-index f05228c..f10b402 100644
+index f05228cfe4..f10b4027e1 100644
--- a/site_scons/site_tools/dagger/__init__.py
+++ b/site_scons/site_tools/dagger/__init__.py
@@ -5,7 +5,7 @@ import logging
@@ -2217,7 +1182,7 @@ index f05228c..f10b402 100644
def generate(env, **kwargs):
"""The entry point for our tool. However, the builder for
diff --git a/site_scons/site_tools/dagger/dagger.py
b/site_scons/site_tools/dagger/dagger.py
-index 1eeefe1..03e7603 100644
+index 1eeefe1ea3..03e7603d29 100644
--- a/site_scons/site_tools/dagger/dagger.py
+++ b/site_scons/site_tools/dagger/dagger.py
@@ -40,8 +40,8 @@ import sys
@@ -2241,7 +1206,7 @@ index 1eeefe1..03e7603 100644
# target is given as a list of target SCons nodes - this builder is only responsible
for
diff --git a/site_scons/site_tools/dagger/graph.py
b/site_scons/site_tools/dagger/graph.py
-index 5ebe6f4..379d524 100644
+index 5ebe6f4506..379d5245e6 100644
--- a/site_scons/site_tools/dagger/graph.py
+++ b/site_scons/site_tools/dagger/graph.py
@@ -4,11 +4,13 @@ import abc
@@ -2317,7 +1282,7 @@ index 5ebe6f4..379d524 100644
if isinstance(v, list):
setattr(self, k, set(v))
diff --git a/site_scons/site_tools/dagger/graph_consts.py
b/site_scons/site_tools/dagger/graph_consts.py
-index 81fe86d..a922a4f 100644
+index 81fe86d75c..a922a4f3f6 100644
--- a/site_scons/site_tools/dagger/graph_consts.py
+++ b/site_scons/site_tools/dagger/graph_consts.py
@@ -17,8 +17,8 @@ NODE_SYM = 2
@@ -2332,7 +1297,7 @@ index 81fe86d..a922a4f 100644
"""Error/query codes"""
diff --git a/site_scons/site_tools/dagger/graph_test.py
b/site_scons/site_tools/dagger/graph_test.py
-index bc84f58..6c0168c 100644
+index bc84f5868c..6c0168cf97 100644
--- a/site_scons/site_tools/dagger/graph_test.py
+++ b/site_scons/site_tools/dagger/graph_test.py
@@ -5,8 +5,8 @@ from JSON
@@ -2424,7 +1389,7 @@ index bc84f58..6c0168c 100644
if __name__ == '__main__':
diff --git a/site_scons/site_tools/distsrc.py b/site_scons/site_tools/distsrc.py
-index 861f5d9..d2dff0b 100644
+index 861f5d9e2e..d2dff0b612 100644
--- a/site_scons/site_tools/distsrc.py
+++ b/site_scons/site_tools/distsrc.py
@@ -20,7 +20,7 @@ import shutil
@@ -2491,7 +1456,7 @@ index 861f5d9..d2dff0b 100644
git_cmd = "\"%s\" archive --format %s --output %s --prefix
${MONGO_DIST_SRC_PREFIX} HEAD" % (
diff --git a/site_scons/site_tools/icecream.py b/site_scons/site_tools/icecream.py
-index 9838b63..fdf0c26 100644
+index 9838b63349..fdf0c26030 100644
--- a/site_scons/site_tools/icecream.py
+++ b/site_scons/site_tools/icecream.py
@@ -99,7 +99,7 @@ def generate(env):
@@ -2504,7 +1469,7 @@ index 9838b63..fdf0c26 100644
continue
base = emitterdict[suffix]
diff --git a/site_scons/site_tools/idl_tool.py b/site_scons/site_tools/idl_tool.py
-index 78bedfa..628f345 100755
+index 78bedfaa74..628f345361 100755
--- a/site_scons/site_tools/idl_tool.py
+++ b/site_scons/site_tools/idl_tool.py
@@ -47,7 +47,7 @@ def idl_scanner(node, env, path):
@@ -2517,7 +1482,7 @@ index 78bedfa..628f345 100755
nodes_deps_list.extend(env.Glob('#buildscripts/idl/idl/*.py'))
diff --git a/site_scons/site_tools/jstoh.py b/site_scons/site_tools/jstoh.py
-index dc90b32..567958a 100644
+index dc90b324b2..567958a50f 100644
--- a/site_scons/site_tools/jstoh.py
+++ b/site_scons/site_tools/jstoh.py
@@ -1,3 +1,5 @@
@@ -2547,7 +1512,7 @@ index dc90b32..567958a 100644
jsToHeader(sys.argv[1], sys.argv[2:])
diff --git a/site_scons/site_tools/mongo_integrationtest.py
b/site_scons/site_tools/mongo_integrationtest.py
-index ff9a5f4..fccbbeb 100644
+index ff9a5f451b..fccbbebb47 100644
--- a/site_scons/site_tools/mongo_integrationtest.py
+++ b/site_scons/site_tools/mongo_integrationtest.py
@@ -12,10 +12,10 @@ def register_integration_test(env, test):
@@ -2564,7 +1529,7 @@ index ff9a5f4..fccbbeb 100644
finally:
ofile.close()
diff --git a/site_scons/site_tools/mongo_unittest.py
b/site_scons/site_tools/mongo_unittest.py
-index ec99ab2..a4185a6 100644
+index ec99ab2d45..a4185a6b41 100644
--- a/site_scons/site_tools/mongo_unittest.py
+++ b/site_scons/site_tools/mongo_unittest.py
@@ -11,10 +11,10 @@ def register_unit_test(env, test):
@@ -2581,7 +1546,7 @@ index ec99ab2..a4185a6 100644
finally:
ofile.close()
diff --git a/site_scons/site_tools/split_dwarf.py b/site_scons/site_tools/split_dwarf.py
-index 95130c9..c02d786 100644
+index 95130c9e9a..c02d78619f 100644
--- a/site_scons/site_tools/split_dwarf.py
+++ b/site_scons/site_tools/split_dwarf.py
@@ -52,7 +52,7 @@ def generate(env):
@@ -2594,7 +1559,7 @@ index 95130c9..c02d786 100644
continue
base = emitterdict[suffix]
diff --git a/site_scons/site_tools/thin_archive.py
b/site_scons/site_tools/thin_archive.py
-index 511c0ef..0d8a83b 100644
+index 511c0ef6e5..0d8a83b83a 100644
--- a/site_scons/site_tools/thin_archive.py
+++ b/site_scons/site_tools/thin_archive.py
@@ -41,7 +41,7 @@ def exists(env):
@@ -2607,7 +1572,7 @@ index 511c0ef..0d8a83b 100644
return bool(isgnu)
diff --git a/site_scons/site_tools/xcode.py b/site_scons/site_tools/xcode.py
-index 9ec68c3..5ddebb2 100644
+index 9ec68c3547..5ddebb2e00 100644
--- a/site_scons/site_tools/xcode.py
+++ b/site_scons/site_tools/xcode.py
@@ -9,4 +9,4 @@ def generate(env):
@@ -2617,10 +1582,10 @@ index 9ec68c3..5ddebb2 100644
- print "NOTE: Xcode detected; propagating DEVELOPER_DIR from shell
environment to subcommands"
+ print("NOTE: Xcode detected; propagating DEVELOPER_DIR from shell
environment to subcommands")
diff --git a/src/mongo/SConscript b/src/mongo/SConscript
-index 426e57c..cc09ab4 100644
+index 5d6ec52d14..ccd588899c 100644
--- a/src/mongo/SConscript
+++ b/src/mongo/SConscript
-@@ -156,7 +156,7 @@ js_engine_ver = get_option("js-engine") if
get_option("server-js") == "on" else
+@@ -157,7 +157,7 @@ js_engine_ver = get_option("js-engine") if
get_option("server-js") == "on" else
# On windows, we need to escape the backslashes in the command-line
# so that windows paths look okay.
@@ -2629,7 +1594,7 @@ index 426e57c..cc09ab4 100644
if env.TargetOSIs('windows'):
cmd_line = cmd_line.replace('\\', r'\\')
-@@ -602,7 +602,7 @@ env.Append(MODULE_BANNERS = [distsrc.File('README'),
+@@ -603,7 +603,7 @@ env.Append(MODULE_BANNERS = [distsrc.File('README'),
distsrc.File('MPL-2')])
# If no module has introduced a file named LICENSE.txt, then inject the AGPL.
@@ -2638,7 +1603,7 @@ index 426e57c..cc09ab4 100644
env.Append(MODULE_BANNERS = [distsrc.File('GNU-AGPL-3.0')])
# All module banners get staged to the top level of the tarfile, so we
-@@ -621,7 +621,7 @@ module_banner_transforms = ["--transform
%s=$SERVER_DIST_BASENAME" % d for d in
+@@ -622,7 +622,7 @@ module_banner_transforms = ["--transform
%s=$SERVER_DIST_BASENAME" % d for d in
# Allow modules to map original file name directories to subdirectories
# within the archive (e.g. { "src/mongo/db/modules/enterprise/docs":
"snmp"})
archive_addition_transforms = []
@@ -2648,7 +1613,7 @@ index 426e57c..cc09ab4 100644
(full_dir, archive_dir))
diff --git a/src/mongo/base/generate_error_codes.py
b/src/mongo/base/generate_error_codes.py
-index 420ee96..b704767 100644
+index 420ee964ff..b704767a01 100644
--- a/src/mongo/base/generate_error_codes.py
+++ b/src/mongo/base/generate_error_codes.py
@@ -26,6 +26,8 @@
@@ -2670,7 +1635,7 @@ index 420ee96..b704767 100644
def die(message=None):
diff --git a/src/mongo/db/auth/generate_action_types.py
b/src/mongo/db/auth/generate_action_types.py
-index b712b29..39252ed 100755
+index b712b29666..39252ed293 100755
--- a/src/mongo/db/auth/generate_action_types.py
+++ b/src/mongo/db/auth/generate_action_types.py
@@ -227,7 +227,7 @@ def hasDuplicateActionTypes(actionTypes):
@@ -2692,7 +1657,7 @@ index b712b29..39252ed 100755
actionTypes = parseActionTypesFromFile(sys.argv[1])
diff --git a/src/mongo/db/fts/generate_stop_words.py
b/src/mongo/db/fts/generate_stop_words.py
-index e0dc801..e0aad76 100644
+index e0dc801ca9..e0aad760e8 100644
--- a/src/mongo/db/fts/generate_stop_words.py
+++ b/src/mongo/db/fts/generate_stop_words.py
@@ -7,7 +7,7 @@ def generate( header, source, language_files ):
@@ -2723,7 +1688,7 @@ index e0dc801..e0aad76 100644
out.write( ' };\n' )
out.write( ' const size_t wordcnt = sizeof(words) /
sizeof(words[0]);\n' )
diff --git a/src/mongo/db/fts/unicode/gen_diacritic_map.py
b/src/mongo/db/fts/unicode/gen_diacritic_map.py
-index 08cfa95..7c623af 100644
+index 08cfa95cda..7c623aff60 100644
--- a/src/mongo/db/fts/unicode/gen_diacritic_map.py
+++ b/src/mongo/db/fts/unicode/gen_diacritic_map.py
@@ -45,7 +45,7 @@ def add_diacritic_mapping(codepoint):
@@ -2736,7 +1701,7 @@ index 08cfa95..7c623af 100644
for i in range(len(d)):
if ord(d[i]) not in diacritics:
diff --git a/src/mongo/db/query/collation/generate_icu_init_cpp.py
b/src/mongo/db/query/collation/generate_icu_init_cpp.py
-index 8ae084a..7c576f6 100755
+index 8ae084aeec..7c576f6ffe 100755
--- a/src/mongo/db/query/collation/generate_icu_init_cpp.py
+++ b/src/mongo/db/query/collation/generate_icu_init_cpp.py
@@ -26,6 +26,9 @@
@@ -2760,25 +1725,3 @@ index 8ae084a..7c576f6 100755
cpp_file.write(source_template %
dict(decimal_encoded_data=decimal_encoded_data))
if __name__ == '__main__':
-diff --git a/src/third_party/mozjs-45/extract/js/src/builtin/embedjs.py
b/src/third_party/mozjs-45/extract/js/src/builtin/embedjs.py
-index ece905d..03cc961 100644
---- a/src/third_party/mozjs-45/extract/js/src/builtin/embedjs.py
-+++ b/src/third_party/mozjs-45/extract/js/src/builtin/embedjs.py
-@@ -36,7 +36,7 @@
- #
- # It uses the C preprocessor to process its inputs.
-
--from __future__ import with_statement
-+from __future__ import with_statement, unicode_literals
- import re, sys, os, subprocess
- import shlex
- import which
-@@ -107,7 +107,7 @@ def preprocess(cxx, preprocessorOption, source, args = []):
- tmpOut = 'self-hosting-preprocessed.pp';
- outputArg = shlex.split(preprocessorOption + tmpOut)
-
-- with open(tmpIn, 'wb') as input:
-+ with open(tmpIn, 'w') as input:
- input.write(source)
- print(' '.join(cxx + outputArg + args + [tmpIn]))
- result = subprocess.Popen(cxx + outputArg + args + [tmpIn]).wait()
diff --git a/use-system-mozjs-icu-asio.patch b/use-system-mozjs-icu-asio.patch
index d54768d..7049460 100644
--- a/use-system-mozjs-icu-asio.patch
+++ b/use-system-mozjs-icu-asio.patch
@@ -1,5 +1,5 @@
diff --git a/SConstruct b/SConstruct
-index fe7975b..3cf8a1b 100644
+index f949f65475..9008888d9f 100644
--- a/SConstruct
+++ b/SConstruct
@@ -156,7 +156,7 @@ add_option('wiredtiger',
@@ -23,7 +23,7 @@ index fe7975b..3cf8a1b 100644
add_option('use-system-intel_decimal128',
help='use system version of intel decimal128',
nargs=0,
-@@ -2846,20 +2851,20 @@ def doConfigure(myenv):
+@@ -2850,20 +2855,20 @@ def doConfigure(myenv):
if use_system_version_of_library("icu"):
conf.FindSysLibDep("icudata", ["icudata"])
@@ -52,7 +52,7 @@ index fe7975b..3cf8a1b 100644
conf.env.Append(
CPPDEFINES=[
"BOOST_SYSTEM_NO_DEPRECATED",
-@@ -3028,7 +3033,7 @@ def doConfigure(myenv):
+@@ -3032,7 +3037,7 @@ def doConfigure(myenv):
if conf.CheckExtendedAlignment(size):
conf.env.SetConfigHeaderDefine("MONGO_CONFIG_MAX_EXTENDED_ALIGNMENT", size)
break
@@ -61,7 +61,7 @@ index fe7975b..3cf8a1b 100644
conf.env['MONGO_HAVE_LIBMONGOC'] = conf.CheckLibWithHeader(
["mongoc-1.0"],
["mongoc.h"],
-@@ -3168,7 +3173,7 @@ Export("get_option")
+@@ -3172,7 +3177,7 @@ Export("get_option")
Export("has_option")
Export("use_system_version_of_library")
Export("serverJs")
@@ -71,7 +71,7 @@ index fe7975b..3cf8a1b 100644
Export("debugBuild optBuild")
Export("wiredtiger")
diff --git a/src/third_party/SConscript b/src/third_party/SConscript
-index 416ef7a..9a4f941 100644
+index 69c4a02a61..08c37aa89b 100644
--- a/src/third_party/SConscript
+++ b/src/third_party/SConscript
@@ -2,7 +2,7 @@