[thunderbird-lightning/el5] More patch syncing with EL5 TB

Orion Poplawski orion at fedoraproject.org
Sun Dec 15 18:20:59 UTC 2013


commit 8cea7e54458aa9142fdebe58859509f1651dadbd
Author: Orion Poplawski <orion at cora.nwra.com>
Date:   Sun Dec 15 11:21:05 2013 -0700

    More patch syncing with EL5 TB

 mozilla-py24.patch               |   15 -
 mozilla-python2.patch            |10798 ++++++++++++++++++++++++++++++++++++++
 thunderbird-lightning.spec       |   23 +-
 thunderbird-python.patch         |   12 -
 xulrunner-build-gcc41.patch      |  286 -
 xulrunner-missing-pysqlite.patch |   21 +-
 6 files changed, 10814 insertions(+), 341 deletions(-)
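
For readers skimming the diff below: nearly all of the new mozilla-python2.patch rewrites Python 2.5/2.6-only idioms in the Mozilla build scripts into forms that the Python 2.4 shipped with EL5 can run. with-statements become explicit open()/close() pairs, "except E as e" becomes "except E, e", and str.format() / print(..., file=...) calls become %-formatting and sys.stderr.write(). The sketch that follows is illustrative only (the function names are made up, not taken from the patch) and shows the general shape of those conversions.

    # Illustrative Python 2.4-compatible rewrites, mirroring the patterns
    # applied throughout mozilla-python2.patch.  Names are hypothetical.
    import sys
    import os
    import errno

    def write_text(path, data):
        # upstream: with open(path, 'w') as f: f.write(data)
        f = open(path, 'w')          # 'with' is not available on Python 2.4
        f.write(data)
        f.close()

    def warn(count):
        # upstream: print('saw {0} problems'.format(count), file=sys.stderr)
        sys.stderr.write('saw %d problems\n' % count)   # no print function / str.format on 2.4

    def remove_if_present(path):
        try:
            os.remove(path)
        except OSError, e:           # 'except E as e' requires Python 2.6
            if e.errno != errno.ENOENT:
                raise
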
---
diff --git a/mozilla-python2.patch b/mozilla-python2.patch
new file mode 100644
index 0000000..2d7e3e8
--- /dev/null
+++ b/mozilla-python2.patch
@@ -0,0 +1,10798 @@
+diff -up mozilla/build/appini_header.py.python2 mozilla/build/appini_header.py
+--- mozilla/build/appini_header.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/appini_header.py	2013-12-07 22:18:39.000000000 +0100
+@@ -25,8 +25,16 @@ def main(file):
+             flags.add('NS_XRE_ENABLE_CRASH_REPORTER')
+     except: pass
+     appdata = dict(("%s:%s" % (s, o), config.get(s, o)) for s in config.sections() for o in config.options(s))
+-    appdata['flags'] = ' | '.join(flags) if flags else '0'
+-    appdata['App:profile'] = '"%s"' % appdata['App:profile'] if 'App:profile' in appdata else 'NULL'
++    sys.stderr.write("%s" % appdata)
++    #sys.stderr.write("%s" % appdata['App:profile'])
++    appdata['flags'] = '0'
++    if flags:
++      appdata['flags'] = ' | '.join(flags)
++    if 'App:profile' in appdata:
++      appdata['App:profile'] = '"%s"' % appdata['App:profile'] 
++    else:
++      appdata['App:profile'] =  'NULL'
++    #appdata['App:profile'] = '"%s"' % appdata['App:profile'] if 'App:profile' in appdata else 'NULL'
+ 
+     print '''#include "nsXREAppData.h"
+              static const nsXREAppData sAppData = {
+diff -up mozilla/build/automation.py.in.python2 mozilla/build/automation.py.in
+--- mozilla/build/automation.py.in.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/automation.py.in	2013-12-07 22:18:39.000000000 +0100
+@@ -3,7 +3,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import with_statement
+ import codecs
+ from datetime import datetime, timedelta
+ import itertools
+diff -up mozilla/build/automationutils.py.python2 mozilla/build/automationutils.py
+--- mozilla/build/automationutils.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/automationutils.py	2013-12-07 22:18:39.000000000 +0100
+@@ -3,7 +3,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import with_statement
+ import glob, logging, os, platform, shutil, subprocess, sys, tempfile, urllib2, zipfile
+ import re
+ from urlparse import urlparse
+@@ -87,8 +86,9 @@ class ZipFileReader(object):
+       path = os.path.split(filename)[0]
+       if not os.path.isdir(path):
+         os.makedirs(path)
+-      with open(filename, "wb") as dest:
+-        dest.write(self._zipfile.read(name))
++      dest = open(filename, "wb")
++      dest.write(self._zipfile.read(name))
++      dest.close()
+ 
+   def namelist(self):
+     return self._zipfile.namelist()
+@@ -207,8 +207,9 @@ def dumpLeakLog(leakLogFile, filter = Fa
+   if not os.path.exists(leakLogFile):
+     return
+ 
+-  with open(leakLogFile, "r") as leaks:
+-    leakReport = leaks.read()
++  leaks = open(leakLogFile, "r")
++  leakReport = leaks.read()
++  leaks.close()
+ 
+   # Only |XPCOM_MEM_LEAK_LOG| reports can be actually filtered out.
+   # Only check whether an actual leak was reported.
+@@ -238,7 +239,8 @@ def processSingleLeakFile(leakLogFileNam
+   totalBytesLeaked = None
+   leakAnalysis = []
+   leakedObjectNames = []
+-  with open(leakLogFileName, "r") as leaks:
++  leaks = open(leakLogFileName, "r")
++  if 1:
+     for line in leaks:
+       if line.find("purposefully crash") > -1:
+         crashedOnPurpose = True
+@@ -266,6 +268,7 @@ def processSingleLeakFile(leakLogFileNam
+         leakedObjectNames.append(name)
+         leakAnalysis.append("TEST-INFO | leakcheck |%s leaked %d %s (%s bytes)"
+                             % (processString, numLeaked, name, bytesLeaked))
++  leaks.close()
+   log.info('\n'.join(leakAnalysis))
+ 
+   if totalBytesLeaked is None:
+diff -up mozilla/build/checksums.py.python2 mozilla/build/checksums.py
+--- mozilla/build/checksums.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/checksums.py	2013-12-07 22:18:39.000000000 +0100
+@@ -3,7 +3,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import with_statement
+ 
+ from optparse import OptionParser
+ import logging
+@@ -26,13 +25,15 @@ def digest_file(filename, digest, chunk_
+     if hashlib is not None:
+         logger.debug('Creating new %s object' % digest)
+         h = hashlib.new(digest)
+-        with open(filename, 'rb') as f:
++        f = open(filename, 'rb')
++        if 1:
+             while True:
+                 data = f.read(chunk_size)
+                 if not data:
+                     logger.debug('Finished reading in file')
+                     break
+                 h.update(data)
++        f.close()
+         hash = h.hexdigest()
+         logger.debug('Hash for %s is %s' % (filename, hash))
+         return hash
+@@ -65,7 +66,8 @@ def process_files(files, output_filename
+                      output_filename)
+     else:
+         logger.debug('Creating a new checksums file "%s"' % output_filename)
+-    with open(output_filename, 'w+') as output:
++    output = open(output_filename, 'w+')
++    if 1:
+         for file in files:
+             if os.path.isdir(file):
+                 logger.warn('%s is a directory, skipping' % file)
+@@ -84,6 +86,7 @@ def process_files(files, output_filename
+                     print >>output, '%s %s %s %s' % (hash, digest,
+                                                      os.path.getsize(file),
+                                                      short_file)
++    output.close()
+ 
+ def setup_logging(level=logging.DEBUG):
+     '''This function sets up the logging module using a speficiable logging
+diff -up mozilla/build/ConfigStatus.py.python2 mozilla/build/ConfigStatus.py
+--- mozilla/build/ConfigStatus.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/ConfigStatus.py	2013-12-07 22:18:39.000000000 +0100
+@@ -6,7 +6,6 @@
+ # drop-in replacement for autoconf 2.13's config.status, with features
+ # borrowed from autoconf > 2.5, and additional features.
+ 
+-from __future__ import print_function
+ 
+ import logging
+ import os
+@@ -118,11 +117,11 @@ def config_status(topobjdir = '.', topsr
+     log_manager.enable_unstructured()
+ 
+     if not options.files and not options.headers:
+-        print('Reticulating splines...', file=sys.stderr)
++        sys.stderr.write("Reticulating splines...\n\r")
+         summary = backend.consume(definitions)
+ 
+         for line in summary.summaries():
+-            print(line, file=sys.stderr)
++            sys.stderr.write(line+"\n\r")
+ 
+         files = [os.path.join(topobjdir, f) for f in files]
+         headers = [os.path.join(topobjdir, f) for f in headers]
+diff -up mozilla/build/link.py.python2 mozilla/build/link.py
+--- mozilla/build/link.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/link.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import with_statement
+ import os, subprocess, sys, threading, time
+ from win32 import procmem
+ 
+@@ -17,8 +16,9 @@ def measure_vsize_threadfunc(proc, outpu
+         maxvsize, vsize = procmem.get_vmsize(proc._handle)
+         time.sleep(0.5)
+     print "TinderboxPrint: linker max vsize: %d" % maxvsize
+-    with open(output_file, "w") as f:
+-        f.write("%d\n" % maxvsize)
++    f = open(output_file, "w")
++    f.write("%d\n" % maxvsize)
++    f.close()
+ 
+ def measure_link_vsize(output_file, args):
+     """
+diff -up mozilla/build/mach_bootstrap.py.python2 mozilla/build/mach_bootstrap.py
+--- mozilla/build/mach_bootstrap.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/mach_bootstrap.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ import os
+ import platform
+diff -up mozilla/build/pymake/mkformat.py.python2 mozilla/build/pymake/mkformat.py
+--- mozilla/build/pymake/mkformat.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/pymake/mkformat.py	2013-12-07 22:18:39.000000000 +0100
+@@ -6,8 +6,9 @@ import pymake.parser
+ filename = sys.argv[1]
+ source = None
+ 
+-with open(filename, 'rU') as fh:
+-    source = fh.read()
++fh = open(filename, 'rU')
++source = fh.read()
++fh.close()
+ 
+ statements = pymake.parser.parsestring(source, filename)
+ print statements.to_source()
+diff -up mozilla/build/pymake/tests/formattingtests.py.python2 mozilla/build/pymake/tests/formattingtests.py
+--- mozilla/build/pymake/tests/formattingtests.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/pymake/tests/formattingtests.py	2013-12-07 22:18:39.000000000 +0100
+@@ -253,8 +253,9 @@ class MakefileCorupusTest(TestBase):
+                 continue
+ 
+             source = None
+-            with open(makefile, 'rU') as fh:
+-                source = fh.read()
++            fh = open(makefile, 'rU')
++            source = fh.read()
++            fh.close()
+ 
+             try:
+                 yield (makefile, source, parsestring(source, makefile))
+diff -up mozilla/build/pymake/tests/pycmd.py.python2 mozilla/build/pymake/tests/pycmd.py
+--- mozilla/build/pymake/tests/pycmd.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/pymake/tests/pycmd.py	2013-12-07 22:18:39.000000000 +0100
+@@ -1,21 +1,24 @@
+ import os, sys, subprocess
+ 
+ def writetofile(args):
+-  with open(args[0], 'w') as f:
+-    f.write(' '.join(args[1:]))
++  f = open(args[0], 'w')
++  f.write(' '.join(args[1:]))
++  f.close()
+ 
+ def writeenvtofile(args):
+-  with open(args[0], 'w') as f:
+-    f.write(os.environ[args[1]])
++  f = open(args[0], 'w')
++  f.write(os.environ[args[1]])
++  f.close()
+ 
+ def writesubprocessenvtofile(args):
+-  with open(args[0], 'w') as f:
+-    p = subprocess.Popen([sys.executable, "-c",
+-                          "import os; print os.environ['%s']" % args[1]],
+-                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+-    stdout, stderr = p.communicate()
+-    assert p.returncode == 0
+-    f.write(stdout)
++  f = open(args[0], 'w')
++  p = subprocess.Popen([sys.executable, "-c",
++                        "import os; print os.environ['%s']" % args[1]],
++                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
++  stdout, stderr = p.communicate()
++  assert p.returncode == 0
++  f.write(stdout)
++  f.close()
+ 
+ def convertasplode(arg):
+   try:
+diff -up mozilla/build/pymake/tests/subdir/pymod.py.python2 mozilla/build/pymake/tests/subdir/pymod.py
+--- mozilla/build/pymake/tests/subdir/pymod.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/pymake/tests/subdir/pymod.py	2013-12-07 22:18:39.000000000 +0100
+@@ -1,5 +1,6 @@
+ import testmodule
+ 
+ def writetofile(args):
+-  with open(args[0], 'w') as f:
+-    f.write(' '.join(args[1:]))
++  f = open(args[0], 'w')
++  f.write(' '.join(args[1:]))
++  f.close()
+diff -up mozilla/build/unix/add_phony_targets.py.python2 mozilla/build/unix/add_phony_targets.py
+--- mozilla/build/unix/add_phony_targets.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/unix/add_phony_targets.py	2013-12-07 22:18:39.000000000 +0100
+@@ -24,8 +24,9 @@ def add_phony_targets(path):
+     phony_targets = deps - targets
+     if not phony_targets:
+         return
+-    with open(path, 'a') as f:
+-        f.writelines('%s:\n' % d for d in phony_targets)
++    f = open(path, 'a')
++    f.writelines('%s:\n' % d for d in phony_targets)
++    f.close()
+ 
+ 
+ if __name__ == '__main__':
+diff -up mozilla/build/unix/build-clang/tooltool.py.python2 mozilla/build/unix/build-clang/tooltool.py
+--- mozilla/build/unix/build-clang/tooltool.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/unix/build-clang/tooltool.py	2013-12-07 22:18:39.000000000 +0100
+@@ -89,8 +89,10 @@ class FileRecord(object):
+ 
+     def validate_digest(self):
+         if self.present():
+-            with open(self.filename, 'rb') as f:
+-                return self.digest == digest_file(f, self.algorithm)
++            f = open(self.filename, 'rb')
++            res = self.digest == digest_file(f, self.algorithm)
++            f.close()
++            return res
+         else:
+             log.debug("trying to validate digest on a missing file, %s', self.filename")
+             raise MissingFileException(filename=self.filename)
+@@ -268,9 +270,10 @@ def open_manifest(manifest_file):
+     """I know how to take a filename and load it into a Manifest object"""
+     if os.path.exists(manifest_file):
+         manifest = Manifest()
+-        with open(manifest_file) as f:
+-            manifest.load(f)
+-            log.debug("loaded manifest from file '%s'" % manifest_file)
++        f = open(manifest_file)
++        manifest.load(f)
++        log.debug("loaded manifest from file '%s'" % manifest_file)
++        f.close()
+         return manifest
+     else:
+         log.debug("tried to load absent file '%s' as manifest" % manifest_file)
+@@ -348,8 +351,9 @@ def add_files(manifest_file, algorithm, 
+             log.debug("added '%s' to manifest" % filename)
+         else:
+             all_files_added = False
+-    with open(manifest_file, 'wb') as output:
+-        new_manifest.dump(output, fmt='json')
++    output = open(manifest_file, 'wb')
++    new_manifest.dump(output, fmt='json')
++    output.close()
+     return all_files_added
+ 
+ 
+@@ -367,8 +371,9 @@ def fetch_file(base_url, file_record, ov
+             log.info("overwriting '%s' as requested" % file_record.filename)
+         else:
+             # All of the following is for a useful error message
+-            with open(file_record.filename, 'rb') as f:
+-                d = digest_file(f, file_record.algorithm)
++            f = open(file_record.filename, 'rb')
++            d = digest_file(f, file_record.algorithm)
++            f.close()
+             log.error("digest mismatch between manifest(%s...) and local file(%s...)" % \
+                     (file_record.digest[:8], d[:8]))
+             log.debug("full digests: manifest (%s) local file (%s)" % (file_record.digest, d))
+@@ -385,7 +390,8 @@ def fetch_file(base_url, file_record, ov
+     try:
+         f = urllib2.urlopen(url)
+         log.debug("opened %s for reading" % url)
+-        with open(file_record.filename, 'wb') as out:
++        out = open(file_record.filename, 'wb')
++        if 1:
+             k = True
+             size = 0
+             while k:
+@@ -401,7 +407,9 @@ def fetch_file(base_url, file_record, ov
+                             file_record.filename, file_record.size - size))
+                 return False
+             log.info("fetched %s" % file_record.filename)
+-    except (urllib2.URLError, urllib2.HTTPError) as e:
++        out.close()
++
++    except (urllib2.URLError, urllib2.HTTPError), e:
+         log.error("failed to fetch '%s': %s" % (file_record.filename, e),
+                   exc_info=True)
+         return False
+@@ -547,7 +555,7 @@ def main():
+             try:
+                 options[option] = cfg_file.get('general', option)
+                 log.debug("read '%s' as '%s' from cfg_file" % (option, options[option]))
+-            except (ConfigParser.NoSectionError, ConfigParser.NoOptionError) as e:
++            except (ConfigParser.NoSectionError, ConfigParser.NoOptionError), e:
+                 log.debug("%s in config file" % e, exc_info=True)
+ 
+     if not options.has_key('manifest'):
+diff -up mozilla/build/virtualenv/packages.txt.python2 mozilla/build/virtualenv/packages.txt
+--- mozilla/build/virtualenv/packages.txt.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/build/virtualenv/packages.txt	2013-12-07 22:18:39.000000000 +0100
+@@ -16,3 +16,4 @@ mozilla.pth:xpcom/typelib/xpt/tools
+ copy:build/buildconfig.py
+ packages.txt:testing/mozbase/packages.txt
+ objdir:build
++rhrebase.pth:rebase
+\ No newline at end of file
+diff -up mozilla/build/virtualenv/populate_virtualenv.py.python2 mozilla/build/virtualenv/populate_virtualenv.py
+--- mozilla/build/virtualenv/populate_virtualenv.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/build/virtualenv/populate_virtualenv.py	2013-12-07 22:18:39.000000000 +0100
+@@ -5,7 +5,8 @@
+ # This file contains code for populating the virtualenv environment for
+ # Mozilla's build system. It is typically called as part of configure.
+ 
+-from __future__ import print_function, unicode_literals, with_statement
++#from __future__ import unicode_literals
++#from rhrebase import print24
+ 
+ import distutils.sysconfig
+ import os
+@@ -13,6 +14,61 @@ import shutil
+ import subprocess
+ import sys
+ 
++import os
++import sys
++
++# Creates os.path.relpath for Python 2.4
++
++if not hasattr(os, 'relpath'):
++    if os.path is sys.modules.get('ntpath'):
++        def relpath(path, start=os.path.curdir):
++            """Return a relative version of a path"""
++        
++            if not path:
++                raise ValueError("no path specified")
++            start_list = os.path.abspath(start).split(os.path.sep)
++            path_list = os.path.abspath(path).split(os.path.sep)
++            if start_list[0].lower() != path_list[0].lower():
++                unc_path, rest = os.path.splitunc(path)
++                unc_start, rest = os.path.splitunc(start)
++                if bool(unc_path) ^ bool(unc_start):
++                    raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
++                                                                        % (path, start))
++                else:
++                    raise ValueError("path is on drive %s, start on drive %s"
++                                                        % (path_list[0], start_list[0]))
++            # Work out how much of the filepath is shared by start and path.
++            for i in range(min(len(start_list), len(path_list))):
++                if start_list[i].lower() != path_list[i].lower():
++                    break
++            else:
++                i += 1
++        
++            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
++            if not rel_list:
++                return os.path.curdir
++            return os.path.join(*rel_list)
++    
++    else:
++        # default to posixpath definition
++        def relpath(path, start=os.path.curdir):
++            """Return a relative version of a path"""
++        
++            if not path:
++                raise ValueError("no path specified")
++        
++            start_list = os.path.abspath(start).split(os.path.sep)
++            path_list = os.path.abspath(path).split(os.path.sep)
++        
++            # Work out how much of the filepath is shared by start and path.
++            i = len(os.path.commonprefix([start_list, path_list]))
++        
++            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
++            if not rel_list:
++                return os.path.curdir
++            return os.path.join(*rel_list)
++        
++    os.path.relpath = relpath
+ 
+ # Minimum version of Python required to build.
+ MINIMUM_PYTHON_MAJOR = 2
+@@ -114,10 +170,10 @@ class VirtualenvManager(object):
+         env.pop('PYTHONDONTWRITEBYTECODE', None)
+ 
+         args = [sys.executable, self.virtualenv_script_path,
+-            '--system-site-packages', self.virtualenv_root]
+-
+-        result = subprocess.call(args, stdout=self.log_handle,
+-            stderr=subprocess.STDOUT, env=env)
++            '--system-site-packages', '--never-download', self.virtualenv_root]
++        result = subprocess.call(args, env=env)
++#        result = subprocess.call(args, stdout=self.log_handle,
++#            stderr=subprocess.STDOUT, env=env)
+ 
+         if result:
+             raise Exception('Error creating virtualenv.')
+@@ -125,9 +181,10 @@ class VirtualenvManager(object):
+         return self.virtualenv_root
+ 
+     def packages(self):
+-        with file(self.manifest_path, 'rU') as fh:
+-            packages = [line.rstrip().split(':')
++        fh = file(self.manifest_path, 'rU')
++        packages = [line.rstrip().split(':')
+                         for line in fh]
++        fh.close()
+         return packages
+ 
+     def populate(self):
+@@ -210,12 +267,14 @@ class VirtualenvManager(object):
+ 
+                 path = os.path.join(self.topsrcdir, package[1])
+ 
+-                with open(os.path.join(python_lib, package[0]), 'a') as f:
++                f = open(os.path.join(python_lib, package[0]), 'a')
+                     # This path is relative to the .pth file.  Using a
+                     # relative path allows the srcdir/objdir combination
+                     # to be moved around (as long as the paths relative to
+                     # each other remain the same).
+-                    f.write("%s\n" % os.path.relpath(path, python_lib))
++                f.write("%s\n" % os.path.relpath(path, python_lib))
++                #f.write("%s\n" % os.path.abspath(path))
++                f.close()
+ 
+                 return True
+ 
+@@ -225,16 +284,16 @@ class VirtualenvManager(object):
+                     return True
+                 except:
+                     print('Error processing command. Ignoring', \
+-                        'because optional. (%s)' % ':'.join(package),
+-                        file=self.log_handle)
++                        'because optional. (%s)' % ':'.join(package))
+                     return False
+ 
+             if package[0] == 'objdir':
+                 assert len(package) == 2
+                 path = os.path.join(self.topobjdir, package[1])
+ 
+-                with open(os.path.join(python_lib, 'objdir.pth'), 'a') as f:
+-                    f.write('%s\n' % path)
++                f = open(os.path.join(python_lib, 'objdir.pth'), 'a')
++                f.write('%s\n' % path)
++                f.close()
+ 
+                 return True
+ 
+@@ -299,7 +358,7 @@ class VirtualenvManager(object):
+         try:
+             output = subprocess.check_output(program, cwd=directory, stderr=subprocess.STDOUT)
+             print(output)
+-        except subprocess.CalledProcessError as e:
++        except subprocess.CalledProcessError, e:
+             if 'Python.h: No such file or directory' in e.output:
+                 print('WARNING: Python.h not found. Install Python development headers.')
+             else:
+diff -up mozilla/client.mk.python2 mozilla/client.mk
+--- mozilla/client.mk.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/client.mk	2013-12-07 22:18:39.000000000 +0100
+@@ -124,6 +124,7 @@ endif
+ # Automatically add -jN to make flags if not defined. N defaults to number of cores.
+ ifeq (,$(findstring -j,$(MOZ_MAKE_FLAGS)))
+   cores=$(shell $(PYTHON) -c 'import multiprocessing; print(multiprocessing.cpu_count())')
++  cores=1
+   MOZ_MAKE_FLAGS += -j$(cores)
+ endif
+ 
+diff -up mozilla/config/buildlist.py.python2 mozilla/config/buildlist.py
+--- mozilla/config/buildlist.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/buildlist.py	2013-12-07 22:18:39.000000000 +0100
+@@ -7,7 +7,6 @@ if the entry does not already exist.
+ 
+ Usage: buildlist.py <filename> <entry> [<entry> ...]
+ '''
+-from __future__ import print_function
+ 
+ import sys
+ import os
+@@ -28,7 +27,7 @@ def addEntriesToListFile(listFile, entri
+     f = open(listFile, 'a')
+     for e in entries:
+       if e not in existing:
+-        f.write("{0}\n".format(e))
++        f.write("%s\n" % (e))
+         existing.add(e)
+     f.close()
+   finally:
+@@ -36,7 +35,6 @@ def addEntriesToListFile(listFile, entri
+ 
+ if __name__ == '__main__':
+   if len(sys.argv) < 3:
+-    print("Usage: buildlist.py <list file> <entry> [<entry> ...]",
+-          file=sys.stderr)
++    sys.stderr.write("Usage: buildlist.py <list file> <entry> [<entry> ...]\n")
+     sys.exit(1)
+   addEntriesToListFile(sys.argv[1], sys.argv[2:])
+diff -up mozilla/config/check_source_count.py.python2 mozilla/config/check_source_count.py
+--- mozilla/config/check_source_count.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/check_source_count.py	2013-12-07 22:18:39.000000000 +0100
+@@ -9,7 +9,6 @@
+ #   not, an error message is printed, quoting ERROR_LOCATION, which should
+ #   probably be the filename and line number of the erroneous call to
+ #   check_source_count.py.
+-from __future__ import print_function
+ import sys
+ import os
+ import re
+diff -up mozilla/config/expandlibs_exec.py.python2 mozilla/config/expandlibs_exec.py
+--- mozilla/config/expandlibs_exec.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/expandlibs_exec.py	2013-12-07 22:18:39.000000000 +0100
+@@ -20,7 +20,6 @@ With the --symbol-order argument, follow
+ relevant linker options to change the order in which the linker puts the
+ symbols appear in the resulting binary. Only works for ELF targets.
+ '''
+-from __future__ import with_statement
+ import sys
+ import os
+ from expandlibs import ExpandArgs, relativize, isObject, ensureParentDir, ExpandLibsDeps
+@@ -172,8 +171,9 @@ class ExpandArgsMore(ExpandArgs):
+     def orderSymbols(self, order):
+         '''Given a file containing a list of symbols, adds the appropriate
+         argument to make the linker put the symbols in that order.'''
+-        with open(order) as file:
+-            sections = self._getOrderedSections([l.strip() for l in file.readlines() if l.strip()])
++        file_ = open(order)
++        sections = self._getOrderedSections([l.strip() for l in file_.readlines() if l.strip()])
++        file_.close()
+         split_sections = {}
+         linked_sections = [s[0] for s in SECTION_INSERT_BEFORE]
+         for s in sections:
+@@ -278,8 +278,9 @@ def print_command(out, args):
+     print >>out, "Executing: " + " ".join(args)
+     for tmp in [f for f in args.tmp if os.path.isfile(f)]:
+         print >>out, tmp + ":"
+-        with open(tmp) as file:
+-            print >>out, "".join(["    " + l for l in file.readlines()])
++        file_ = open(tmp)
++        print >>out, "".join(["    " + l for l in file_.readlines()])
++        file_.close()
+     out.flush()
+ 
+ def main():
+@@ -308,33 +309,38 @@ def main():
+             deps.pop(0)
+         # Remove command
+         deps.pop(0)
+-    with ExpandArgsMore(args) as args:
++    args2 = ExpandArgsMore(args)
++    args2.__enter__()
++    if 1:
+         if options.extract:
+-            args.extract()
++            args2.extract()
+         if options.symbol_order:
+-            args.orderSymbols(options.symbol_order)
++            args2.orderSymbols(options.symbol_order)
+         if options.uselist:
+-            args.makelist()
++            args2.makelist()
+ 
+         if options.verbose:
+-            print_command(sys.stderr, args)
+-        proc = subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
++            print_command(sys.stderr, args2)
++        proc = subprocess.Popen(args2, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
+         (stdout, stderr) = proc.communicate()
+         if proc.returncode and not options.verbose:
+-            print_command(sys.stderr, args)
++            print_command(sys.stderr, args2)
+         sys.stderr.write(stdout)
+         sys.stderr.flush()
+         if proc.returncode:
+             exit(proc.returncode)
++    args2.__exit__(0, 0, 0)
+     if not options.depend:
+         return
+     ensureParentDir(options.depend)
+-    with open(options.depend, 'w') as depfile:
++    depfile = open(options.depend, 'w')
++    if 1:
+         depfile.write("%s : %s\n" % (options.target, ' '.join(dep for dep in deps if os.path.isfile(dep) and dep != options.target)))
+ 
+         for dep in deps:
+             if os.path.isfile(dep) and dep != options.target:
+                 depfile.write("%s :\n" % dep)
++    depfile.close()
+ 
+ if __name__ == '__main__':
+     main()
+diff -up mozilla/config/expandlibs_gen.py.python2 mozilla/config/expandlibs_gen.py
+--- mozilla/config/expandlibs_gen.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/expandlibs_gen.py	2013-12-07 22:18:39.000000000 +0100
+@@ -5,7 +5,6 @@
+ '''Given a list of object files and library names, prints a library
+ descriptor to standard output'''
+ 
+-from __future__ import with_statement
+ import sys
+ import os
+ import expandlibs_config as conf
+@@ -39,12 +38,15 @@ if __name__ == '__main__':
+         raise Exception("Missing option: -o")
+ 
+     ensureParentDir(options.output)
+-    with open(options.output, 'w') as outfile:
+-        print >>outfile, generate(args)
++    outfile = open(options.output, 'w')
++    print >>outfile, generate(args)
++    outfile.close()
+     if options.depend:
+         ensureParentDir(options.depend)
+-        with open(options.depend, 'w') as depfile:
++        depfile = open(options.depend, 'w')
++        if 1:
+             deps = ExpandLibsDeps(args)
+             depfile.write("%s : %s\n" % (options.output, ' '.join(deps)))
+             for dep in deps:
+                 depfile.write("%s :\n" % dep)
++        depfile.close()
+diff -up mozilla/config/expandlibs.py.python2 mozilla/config/expandlibs.py
+--- mozilla/config/expandlibs.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/expandlibs.py	2013-12-07 22:18:39.000000000 +0100
+@@ -26,7 +26,6 @@ ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} follo
+   descriptor contains. And for each of these LIBS, also apply the same
+   rules.
+ '''
+-from __future__ import with_statement
+ import sys, os, errno
+ import expandlibs_config as conf
+ 
+@@ -68,6 +67,12 @@ def isObject(path):
+     ends with OBJ_SUFFIX or .i_o'''
+     return os.path.splitext(path)[1] in [conf.OBJ_SUFFIX, '.i_o']
+ 
++def all(iterable):
++    for element in iterable:
++        if not element:
++            return False
++    return True
++
+ class LibDescriptor(dict):
+     KEYS = ['OBJS', 'LIBS']
+ 
+@@ -118,8 +123,9 @@ class ExpandArgs(list):
+     def _expand_desc(self, arg):
+         '''Internal function taking care of lib descriptor expansion only'''
+         if os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
+-            with open(arg + conf.LIBS_DESC_SUFFIX, 'r') as f:
+-                desc = LibDescriptor(f.readlines())
++            f = open(arg + conf.LIBS_DESC_SUFFIX, 'r')
++            desc = LibDescriptor(f.readlines())
++            f.close()
+             objs = [relativize(o) for o in desc['OBJS']]
+             for lib in desc['LIBS']:
+                 objs += self._expand(lib)
+diff -up mozilla/config/find_OOM_errors.py.python2 mozilla/config/find_OOM_errors.py
+--- mozilla/config/find_OOM_errors.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/find_OOM_errors.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # This Source Code Form is subject to the terms of the Mozilla Public
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+-from __future__ import print_function
+ 
+ usage = """%prog: A test for OOM conditions in the shell.
+ 
+@@ -65,7 +64,7 @@ def run(args, stdin=None):
+     stdout_worker.join()
+     stderr_worker.join()
+ 
+-  except KeyboardInterrupt as e:
++  except KeyboardInterrupt, e:
+     sys.exit(-1)
+ 
+   stdout, stderr = stdout_worker.all, stderr_worker.all
+diff -up mozilla/config/JarMaker.py.python2 mozilla/config/JarMaker.py
+--- mozilla/config/JarMaker.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/JarMaker.py	2013-12-08 08:43:23.000000000 +0100
+@@ -151,12 +151,12 @@ class JarMaker(object):
+                                   '..', 'chrome.manifest')
+ 
+     if self.useJarfileManifest:
+-      self.updateManifest(jarPath + '.manifest', chromebasepath.format(''),
++      self.updateManifest(jarPath + '.manifest', chromebasepath % (''),
+                           register)
+-      addEntriesToListFile(chromeManifest, ['manifest chrome/{0}.manifest'
+-                                            .format(os.path.basename(jarPath))])
++      addEntriesToListFile(chromeManifest, ['manifest chrome/%s.manifest'
++                                            % os.path.basename(jarPath)])
+     if self.useChromeManifest:
+-      self.updateManifest(chromeManifest, chromebasepath.format('chrome/'),
++      self.updateManifest(chromeManifest, chromebasepath % ('chrome/'),
+                           register)
+ 
+     # If requested, add a root chrome manifest entry (assumed to be in the parent directory
+@@ -260,7 +260,7 @@ class JarMaker(object):
+     # chromebasepath is used for chrome registration manifests
+     # {0} is getting replaced with chrome/ for chrome.manifest, and with
+     # an empty string for jarfile.manifest
+-    chromebasepath = '{0}' + os.path.basename(jarfile)
++    chromebasepath = '%s' + os.path.basename(jarfile)
+     if self.outputFormat == 'jar':
+       chromebasepath = 'jar:' + chromebasepath + '.jar!'
+     chromebasepath += '/'
+@@ -272,7 +272,7 @@ class JarMaker(object):
+       jarfilepath = jarfile + '.jar'
+       try:
+         os.makedirs(os.path.dirname(jarfilepath))
+-      except OSError as error:
++      except OSError, error:
+         if error.errno != errno.EEXIST:
+           raise
+       jf = ZipFile(jarfilepath, 'a', lock = True)
+@@ -345,8 +345,8 @@ class JarMaker(object):
+       if realsrc is None:
+         if jf is not None:
+           jf.close()
+-        raise RuntimeError('File "{0}" not found in {1}'
+-                           .format(src, ', '.join(src_base)))
++        raise RuntimeError('File "%s" not found in %s'
++                           % (src, ', '.join(src_base)))
+       if m.group('optPreprocess'):
+         outf = outHelper.getOutput(out)
+         inf = open(realsrc)
+@@ -402,7 +402,7 @@ class JarMaker(object):
+       # remove previous link or file
+       try:
+         os.remove(out)
+-      except OSError as e:
++      except OSError, e:
+         if e.errno != errno.ENOENT:
+           raise
+       return open(out, 'wb')
+@@ -412,7 +412,7 @@ class JarMaker(object):
+       if not os.path.isdir(outdir):
+         try:
+           os.makedirs(outdir)
+-        except OSError as error:
++        except OSError, error:
+           if error.errno != errno.EEXIST:
+             raise
+       return out
+@@ -426,10 +426,11 @@ class JarMaker(object):
+       # remove previous link or file
+       try:
+         os.remove(out)
+-      except OSError as e:
++      except OSError, e:
+         if e.errno != errno.ENOENT:
+           raise
+       if sys.platform != "win32":
++        print "simlink src, out, dst", src, out, dest
+         os.symlink(src, out)
+       else:
+         # On Win32, use ctypes to create a hardlink
+diff -up mozilla/config/make-stl-wrappers.py.python2 mozilla/config/make-stl-wrappers.py
+--- mozilla/config/make-stl-wrappers.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/make-stl-wrappers.py	2013-12-07 22:18:39.000000000 +0100
+@@ -1,7 +1,6 @@
+ # This Source Code Form is subject to the terms of the Mozilla Public
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+-from __future__ import print_function
+ import os, re, string, sys
+ 
+ def find_in_path(file, searchpath):
+@@ -48,9 +47,9 @@ def main(outdir, compiler, template_file
+ 
+ if __name__ == '__main__':
+     if 5 != len(sys.argv):
+-        print("""Usage:
++        sys.stderr.write("""Usage:
+   python {0} OUT_DIR ('msvc'|'gcc') TEMPLATE_FILE HEADER_LIST_FILE
+-""".format(sys.argv[0]), file=sys.stderr)
++""".format(sys.argv[0]))
+         sys.exit(1)
+ 
+     main(*sys.argv[1:])
+diff -up mozilla/config/nsinstall.py.python2 mozilla/config/nsinstall.py
+--- mozilla/config/nsinstall.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/nsinstall.py	2013-12-07 22:18:39.000000000 +0100
+@@ -9,7 +9,6 @@
+ # a full build environment set up.
+ # The basic limitation is, it doesn't even try to link and ignores
+ # all related options.
+-from __future__ import print_function
+ from optparse import OptionParser
+ import os
+ import os.path
+@@ -65,7 +64,7 @@ def _nsinstall_internal(argv):
+     dir = os.path.abspath(dir)
+     if os.path.exists(dir):
+       if not os.path.isdir(dir):
+-        print('nsinstall: {0} is not a directory'.format(dir), file=sys.stderr)
++        sys.stderr.write('nsinstall: {0} is not a directory'.format(dir)+"\n")
+         return 1
+       if mode:
+         os.chmod(dir, mode)
+@@ -76,7 +75,7 @@ def _nsinstall_internal(argv):
+         os.makedirs(dir, mode)
+       else:
+         os.makedirs(dir)
+-    except Exception as e:
++    except Exception, e:
+       # We might have hit EEXIST due to a race condition (see bug 463411) -- try again once
+       if try_again:
+         return maybe_create_dir(dir, mode, False)
+diff -up mozilla/config/Preprocessor.py.python2 mozilla/config/Preprocessor.py
+--- mozilla/config/Preprocessor.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/Preprocessor.py	2013-12-07 22:18:39.000000000 +0100
+@@ -78,9 +78,9 @@ class Preprocessor:
+   
+   def warnUnused(self, file):
+     if self.actionLevel == 0:
+-      sys.stderr.write('{0}: WARNING: no preprocessor directives found\n'.format(file))
++      sys.stderr.write('%s: WARNING: no preprocessor directives found\n' % file)
+     elif self.actionLevel == 1:
+-      sys.stderr.write('{0}: WARNING: no useful preprocessor directives found\n'.format(file))
++      sys.stderr.write('%s: WARNING: no useful preprocessor directives found\n' % file)
+     pass
+ 
+   def setLineEndings(self, aLE):
+@@ -97,8 +97,8 @@ class Preprocessor:
+     """
+     self.marker = aMarker
+     if aMarker:
+-      self.instruction = re.compile('{0}(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'
+-                                    .format(aMarker), 
++      self.instruction = re.compile('%s(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'
++                                    % (aMarker), 
+                                     re.U)
+       self.comment = re.compile(aMarker, re.U)
+     else:
+@@ -132,9 +132,9 @@ class Preprocessor:
+       self.writtenLines += 1
+       ln = self.context['LINE']
+       if self.writtenLines != ln:
+-        self.out.write('//@line {line} "{file}"{le}'.format(line=ln,
+-                                                            file=self.context['FILE'],
+-                                                            le=self.LE))
++        self.out.write('//@line %(line)s "%(file)s"%(le)s' % {"line" : ln,
++                                                            "file" : self.context['FILE'],
++                                                            "le" : self.LE})
+         self.writtenLines = ln
+     filteredLine = self.applyFilters(aLine)
+     if filteredLine != aLine:
+@@ -157,7 +157,7 @@ class Preprocessor:
+       if dir and not os.path.exists(dir):
+         try:
+           os.makedirs(dir)
+-        except OSError as error:
++        except OSError, error:
+           if error.errno != errno.EEXIST:
+             raise
+       self.out = open(options.output, 'w')
+diff -up mozilla/config/printprereleasesuffix.py.python2 mozilla/config/printprereleasesuffix.py
+--- mozilla/config/printprereleasesuffix.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/printprereleasesuffix.py	2013-12-07 22:18:39.000000000 +0100
+@@ -9,7 +9,6 @@
+ # 2.1a3pre > ""
+ # 3.2b4    > " 3.2 Beta 4"
+ # 3.2b4pre > ""
+-from __future__ import print_function
+ 
+ import sys
+ import re
+diff -up mozilla/config/tests/unit-expandlibs.py.python2 mozilla/config/tests/unit-expandlibs.py
+--- mozilla/config/tests/unit-expandlibs.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/tests/unit-expandlibs.py	2013-12-07 22:18:39.000000000 +0100
+@@ -161,10 +161,12 @@ class TestExpandInit(TestCaseWithTmpDir)
+         self.libx_files = [self.tmpfile('libx', Obj(f)) for f in ['g', 'h', 'i']]
+         self.liby_files = [self.tmpfile('liby', Obj(f)) for f in ['j', 'k', 'l']] + [self.tmpfile('liby', Lib('z'))]
+         self.touch(self.libx_files + self.liby_files)
+-        with open(self.tmpfile('libx', Lib('x') + config.LIBS_DESC_SUFFIX), 'w') as f:
+-            f.write(str(generate(self.libx_files)))
+-        with open(self.tmpfile('liby', Lib('y') + config.LIBS_DESC_SUFFIX), 'w') as f:
+-            f.write(str(generate(self.liby_files + [self.tmpfile('libx', Lib('x'))])))
++        f = open(self.tmpfile('libx', Lib('x') + config.LIBS_DESC_SUFFIX), 'w')
++        f.write(str(generate(self.libx_files)))
++        f.close()
++        f = open(self.tmpfile('liby', Lib('y') + config.LIBS_DESC_SUFFIX), 'w')
++        f.write(str(generate(self.liby_files + [self.tmpfile('libx', Lib('x'))])))
++        f.close()
+ 
+         # Create various objects and libraries 
+         self.arg_files = [self.tmpfile(f) for f in [Lib('a'), Obj('b'), Obj('c'), Lib('d'), Obj('e')]]
+@@ -233,14 +235,16 @@ class TestExpandArgsMore(TestExpandInit)
+                 self.assertNotEqual(args[3][0], '@')
+                 filename = args[3]
+                 content = ['INPUT("{0}")'.format(relativize(f)) for f in objs]
+-                with open(filename, 'r') as f:
+-                    self.assertEqual([l.strip() for l in f.readlines() if len(l.strip())], content)
++                f = open(filename, 'r')
++                self.assertEqual([l.strip() for l in f.readlines() if len(l.strip())], content)
++                f.close()
+             elif config.EXPAND_LIBS_LIST_STYLE == "list":
+                 self.assertEqual(args[3][0], '@')
+                 filename = args[3][1:]
+                 content = objs
+-                with open(filename, 'r') as f:
+-                    self.assertRelEqual([l.strip() for l in f.readlines() if len(l.strip())], content)
++                f = open(filename, 'r')
++                self.assertRelEqual([l.strip() for l in f.readlines() if len(l.strip())], content)
++                f.close()
+ 
+             tmp = args.tmp
+         # Check that all temporary files are properly removed
+diff -up mozilla/config/tests/unit-JarMaker.py.python2 mozilla/config/tests/unit-JarMaker.py
+--- mozilla/config/tests/unit-JarMaker.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/tests/unit-JarMaker.py	2013-12-07 22:18:39.000000000 +0100
+@@ -1,4 +1,3 @@
+-from __future__ import print_function
+ import unittest
+ 
+ import os, sys, os.path, time, inspect
+diff -up mozilla/config/tests/unit-mozunit.py.python2 mozilla/config/tests/unit-mozunit.py
+--- mozilla/config/tests/unit-mozunit.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/tests/unit-mozunit.py	2013-12-07 22:18:39.000000000 +0100
+@@ -22,8 +22,9 @@ class TestMozUnit(unittest.TestCase):
+             self.assertEqual(open('file2', 'r').read(), 'content2')
+ 
+             # Check that overwriting these files alters their content.
+-            with open('file1', 'w') as file:
+-                file.write('foo')
++            file = open('file1', 'w')
++            file.write('foo')
++            file.close()
+             self.assertEqual(open('file1', 'r').read(), 'foo')
+ 
+             # ... but not until the file is closed.
+@@ -34,16 +35,18 @@ class TestMozUnit(unittest.TestCase):
+             self.assertEqual(open('file2', 'r').read(), 'bar')
+ 
+             # Check that appending to a file does append
+-            with open('file1', 'a') as file:
+-                file.write('bar')
++            file = open('file1', 'a')
++            file.write('bar')
++            file.close()
+             self.assertEqual(open('file1', 'r').read(), 'foobar')
+ 
+             # Opening a non-existing file ought to fail.
+             self.assertRaises(IOError, open, 'file3', 'r')
+ 
+             # Check that writing a new file does create the file.
+-            with open('file3', 'w') as file:
+-                file.write('baz')
++            file = open('file3', 'w')
++            file.write('baz')
++            file.close()
+             self.assertEqual(open('file3', 'r').read(), 'baz')
+ 
+             # Check the content of the file created outside MockedOpen.
+@@ -51,15 +54,17 @@ class TestMozUnit(unittest.TestCase):
+ 
+             # Check that overwriting a file existing on the file system
+             # does modify its content.
+-            with open(path, 'w') as file:
+-                file.write('bazqux')
++            file = open(path, 'w')
++            file.write('bazqux')
++            file.close()
+             self.assertEqual(open(path, 'r').read(), 'bazqux')
+ 
+         with MockedOpen():
+             # Check that appending to a file existing on the file system
+             # does modify its content.
+-            with open(path, 'a') as file:
+-                file.write('bazqux')
++            file = open(path, 'a')
++            file.write('bazqux')
++            file.close()
+             self.assertEqual(open(path, 'r').read(), 'foobarbazqux')
+ 
+         # Check that the file was not actually modified on the file system.
+diff -up mozilla/config/tests/unit-Preprocessor.py.python2 mozilla/config/tests/unit-Preprocessor.py
+--- mozilla/config/tests/unit-Preprocessor.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/tests/unit-Preprocessor.py	2013-12-07 22:18:39.000000000 +0100
+@@ -9,8 +9,9 @@ from mozunit import main, MockedOpen
+ from Preprocessor import Preprocessor
+ 
+ def NamedIO(name, content):
+-  with open(name, 'w') as f:
+-    f.write(content)
++  f = open(name, 'w')
++  f.write(content)
++  f.close()
+   return name
+ 
+ class TestPreprocessor(unittest.TestCase):
+diff -up mozilla/config/utils.py.python2 mozilla/config/utils.py
+--- mozilla/config/utils.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/utils.py	2013-12-07 22:18:39.000000000 +0100
+@@ -24,7 +24,7 @@ class LockFile(object):
+       try:
+         os.remove(self.lockfile)
+         break
+-      except OSError as e:
++      except OSError, e:
+         if e.errno == errno.EACCES:
+           # another process probably has the file open, we'll retry.
+           # just a short sleep since we want to drop the lock ASAP
+@@ -44,7 +44,7 @@ def lockFile(lockfile, max_wait = 600):
+       fd = os.open(lockfile, os.O_EXCL | os.O_RDWR | os.O_CREAT)
+       # we created the lockfile, so we're the owner
+       break
+-    except OSError as e:
++    except OSError, e:
+       if (e.errno == errno.EEXIST or 
+           (sys.platform == "win32" and e.errno == errno.EACCES)):
+         pass
+@@ -57,7 +57,7 @@ def lockFile(lockfile, max_wait = 600):
+       # and read its contents to report the owner PID
+       f = open(lockfile, "r")
+       s = os.stat(lockfile)
+-    except EnvironmentError as e:
++    except EnvironmentError, e:
+       if e.errno == errno.ENOENT or e.errno == errno.EACCES:
+         # we didn't create the lockfile, so it did exist, but it's
+         # gone now. Just try again
+@@ -70,8 +70,8 @@ def lockFile(lockfile, max_wait = 600):
+     now = int(time.time())
+     if now - s[stat.ST_MTIME] > max_wait:
+       pid = f.readline().rstrip()
+-      sys.exit("{0} has been locked for more than "
+-               "{1} seconds (PID {2})".format(lockfile, max_wait, pid))
++      sys.exit("%s has been locked for more than "
++               "%d seconds (PID %s)" % (lockfile, max_wait, pid))
+   
+     # it's not been locked too long, wait a while and retry
+     f.close()
+@@ -81,7 +81,7 @@ def lockFile(lockfile, max_wait = 600):
+   # descriptor into a Python file object and record our PID in it
+   
+   f = os.fdopen(fd, "w")
+-  f.write("{0}\n".format(os.getpid()))
++  f.write("%d\n" % (os.getpid()))
+   f.close()
+   return LockFile(lockfile)
+ 
+diff -up mozilla/config/writemozinfo.py.python2 mozilla/config/writemozinfo.py
+--- mozilla/config/writemozinfo.py.python2	2013-12-06 16:44:50.000000000 +0100
++++ mozilla/config/writemozinfo.py	2013-12-07 22:18:39.000000000 +0100
+@@ -9,7 +9,6 @@
+ # configuration, such as the target OS and CPU.
+ #
+ # The output file is intended to be used as input to the mozinfo package.
+-from __future__ import print_function
+ import os
+ import re
+ import sys
+@@ -98,8 +97,9 @@ def write_json(file, env=None):
+     """
+     build_conf = build_dict(env=env)
+     if isinstance(file, basestring):
+-        with open(file, "w") as f:
+-            json.dump(build_conf, f)
++        f = open(file, "w")
++        json.dump(build_conf, f)
++        f.close()
+     else:
+         json.dump(build_conf, file)
+ 
+@@ -107,6 +107,6 @@ def write_json(file, env=None):
+ if __name__ == '__main__':
+     try:
+         write_json(sys.argv[1] if len(sys.argv) > 1 else sys.stdout)
+-    except Exception as e:
++    except Exception, e:
+         print(str(e), file=sys.stderr)
+         sys.exit(1)
+diff -up mozilla/dom/bindings/BindingGen.py.python2 mozilla/dom/bindings/BindingGen.py
+--- mozilla/dom/bindings/BindingGen.py.python2	2013-12-06 16:44:53.000000000 +0100
++++ mozilla/dom/bindings/BindingGen.py	2013-12-07 22:18:39.000000000 +0100
+@@ -6,6 +6,7 @@ import os
+ import cPickle
+ from Configuration import Configuration
+ from Codegen import CGBindingRoot, replaceFileIfChanged
++import rhrebase
+ 
+ def generate_binding_files(config, outputprefix, srcprefix, webidlfile):
+     """
+@@ -18,10 +19,11 @@ def generate_binding_files(config, outpu
+     replaceFileIfChanged(outputprefix + ".h", root.declare())
+     replaceFileIfChanged(outputprefix + ".cpp", root.define())
+ 
+-    with open(depsname, 'wb') as f:
+-        # Sort so that our output is stable
+-        f.write("\n".join(outputprefix + ": " + os.path.join(srcprefix, x) for
+-                          x in sorted(root.deps())))
++    f = open(depsname, 'wb')
++    # Sort so that our output is stable
++    f.write("\n".join(outputprefix + ": " + os.path.join(srcprefix, x) for
++                      x in sorted(root.deps())))
++    f.close()
+ 
+ def main():
+     # Parse arguments.
+@@ -50,7 +52,7 @@ def main():
+     allWebIDLFiles = readFile(args[2]).split()
+     changedDeps = readFile(args[3]).split()
+ 
+-    if all(f.endswith("Binding") or f == "ParserResults.pkl" for f in changedDeps):
++    if rhrebase.all(f.endswith("Binding") or f == "ParserResults.pkl" for f in changedDeps):
+         toRegenerate = filter(lambda f: f.endswith("Binding"), changedDeps)
+         if len(toRegenerate) == 0 and len(changedDeps) == 1:
+             # Work around build system bug 874923: if we get here that means
+diff -up mozilla/dom/bindings/Codegen.py.python2 mozilla/dom/bindings/Codegen.py
+--- mozilla/dom/bindings/Codegen.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/dom/bindings/Codegen.py	2013-12-07 22:18:39.000000000 +0100
+@@ -9,6 +9,7 @@ import os
+ import re
+ import string
+ import math
++import rhrebase
+ 
+ from WebIDL import BuiltinTypes, IDLBuiltinType, IDLNullValue, IDLSequenceType, IDLType
+ from Configuration import NoSuchDescriptorError, getTypesFromDescriptor, getTypesFromDictionary, getTypesFromCallback, Descriptor
+@@ -51,7 +52,7 @@ def toStringBool(arg):
+ def toBindingNamespace(arg):
+     return re.sub("((_workers)?$)", "Binding\\1", arg);
+ 
+-class CGThing():
++class CGThing:
+     """
+     Abstract base class for things that spit out code.
+     """
+@@ -74,6 +75,7 @@ class CGNativePropertyHooks(CGThing):
+     def __init__(self, descriptor, properties):
+         CGThing.__init__(self)
+         self.descriptor = descriptor
++        print "INIT1:", self.descriptor
+         self.properties = properties
+     def declare(self):
+         if self.descriptor.workers:
+@@ -106,9 +108,13 @@ class CGNativePropertyHooks(CGThing):
+             prototypeID += self.descriptor.name
+         else:
+             prototypeID += "_ID_Count"
++        print 'PROTOID1:', prototypeID, self.descriptor.name
+         parent = self.descriptor.interface.parent
+-        parentHooks = ("&" + toBindingNamespace(parent.identifier.name) + "::sNativePropertyHooks"
+-                       if parent else 'nullptr')
++        parentHooks = ()
++        if parent:          
++          parentHooks = ("&" + toBindingNamespace(parent.identifier.name) + "::sNativePropertyHooks")
++        else:
++          parentHooks = ('nullptr')
+ 
+         return CGWrapper(CGIndenter(CGList([CGGeneric(resolveOwnProperty),
+                                             CGGeneric(enumerateOwnProperties),
+@@ -124,7 +130,7 @@ class CGNativePropertyHooks(CGThing):
+                          post="\n};\n").define()
+ 
+ def NativePropertyHooks(descriptor):
+-    return "&sWorkerNativePropertyHooks" if descriptor.workers else "&sNativePropertyHooks"
++    return rhrebase.get_first_if_true("&sWorkerNativePropertyHooks", "&sNativePropertyHooks", descriptor.workers)
+ 
+ def DOMClass(descriptor):
+         protoList = ['prototypes::id::' + proto for proto in descriptor.prototypeChain]
+@@ -139,6 +145,7 @@ def DOMClass(descriptor):
+         else:
+             participant = "NS_CYCLE_COLLECTION_PARTICIPANT(%s)" % descriptor.nativeType
+         getParentObject = "GetParentObject<%s>::Get" % descriptor.nativeType
++        print 'PROTOLIST:', protoList
+         return """{
+   { %s },
+   %s,
+@@ -163,8 +170,8 @@ class CGDOMJSClass(CGThing):
+     def declare(self):
+         return "extern DOMJSClass Class;\n"
+     def define(self):
+-        traceHook = TRACE_HOOK_NAME if self.descriptor.customTrace else 'nullptr'
+-        callHook = LEGACYCALLER_HOOK_NAME if self.descriptor.operations["LegacyCaller"] else 'nullptr'
++        traceHook = rhrebase.get_first_if_true(TRACE_HOOK_NAME, 'nullptr', self.descriptor.customTrace)
++        callHook = rhrebase.get_first_if_true(LEGACYCALLER_HOOK_NAME, 'nullptr', self.descriptor.operations["LegacyCaller"])
+         classFlags = "JSCLASS_IS_DOMJSCLASS | JSCLASS_HAS_RESERVED_SLOTS(3)"
+         if self.descriptor.interface.getExtendedAttribute("NeedNewResolve"):
+             newResolveHook = "(JSResolveOp)" + NEWRESOLVE_HOOK_NAME
+@@ -194,7 +201,7 @@ DOMJSClass Class = {
+ };
+ """ % (self.descriptor.interface.identifier.name,
+        classFlags,
+-       ADDPROPERTY_HOOK_NAME if self.descriptor.concrete and not self.descriptor.workers and self.descriptor.wrapperCache else 'JS_PropertyStub',
++       rhrebase.get_first_if_true(ADDPROPERTY_HOOK_NAME, 'JS_PropertyStub', self.descriptor.concrete and not self.descriptor.workers and self.descriptor.wrapperCache),
+        newResolveHook, FINALIZE_HOOK_NAME, callHook, traceHook,
+        CGIndenter(CGGeneric(DOMClass(self.descriptor))).define())
+ 
+@@ -213,7 +220,7 @@ def PrototypeIDAndDepth(descriptor):
+ def UseHolderForUnforgeable(descriptor):
+     return (descriptor.concrete and
+             descriptor.proxy and
+-            any(m for m in descriptor.interface.members if m.isAttr() and m.isUnforgeable()))
++            rhrebase.any(m for m in descriptor.interface.members if m.isAttr() and m.isUnforgeable()))
+ 
+ def CallOnUnforgeableHolder(descriptor, code, isXrayCheck=None):
+     """
+@@ -477,9 +484,9 @@ class CGIfElseWrapper(CGList):
+ 
+ class CGTemplatedType(CGWrapper):
+     def __init__(self, templateName, child, isConst=False, isReference=False):
+-        const = "const " if isConst else ""
++        const = rhrebase.get_first_if_true("const ", "", isConst)
+         pre = "%s%s<" % (const, templateName)
+-        ref = "&" if isReference else ""
++        ref = rhrebase.get_first_if_true("&", "", isReference)
+         post = " >%s" % ref
+         CGWrapper.__init__(self, child, pre=pre, post=post)
+ 
+@@ -802,7 +809,7 @@ def UnionConversions(descriptors, dictio
+             CGWrapper(CGList(SortedDictValues(unionConversions), "\n"),
+                       post="\n\n"))
+ 
+-class Argument():
++class Argument:
+     """
+     A class for outputting the type and name of an argument
+     """
+@@ -855,7 +862,13 @@ class CGAbstractMethod(CGThing):
+         self.static = static
+         self.templateArgs = templateArgs
+     def _argstring(self, declare):
+-        return ', '.join([a.declare() if declare else a.define() for a in self.args])
++        arglist = []
++        for a in self.args:
++          if declare:
++             arglist.append(a.declare())
++          else:
++             arglist.append(a.define())
++        return ', '.join(arglist) #[a.declare() if declare else a.define() for a in self.args])
+     def _template(self):
+         if self.templateArgs is None:
+             return ''
+@@ -869,7 +882,7 @@ class CGAbstractMethod(CGThing):
+         if self.static:
+             decorators.append('static')
+         decorators.append(self.returnType)
+-        maybeNewline = " " if self.inline else "\n"
++        maybeNewline = rhrebase.get_first_if_true(" ", "\n", self.inline)
+         return ' '.join(decorators) + maybeNewline
+     def declare(self):
+         if self.inline:
+@@ -878,7 +891,7 @@ class CGAbstractMethod(CGThing):
+     def _define(self, fromDeclare=False):
+         return self.definition_prologue(fromDeclare) + "\n" + self.definition_body() + self.definition_epilogue()
+     def define(self):
+-        return "" if self.inline else self._define()
++        return rhrebase.get_first_if_true("", self._define(), self.inline)
+     def definition_prologue(self, fromDeclare):
+         return "%s%s%s(%s)\n{" % (self._template(), self._decorators(),
+                                   self.name, self._argstring(fromDeclare))
+@@ -1465,7 +1478,7 @@ class MethodDefiner(PropertyDefiner):
+                 self.regular.append(method)
+ 
+         # FIXME Check for an existing iterator on the interface first.
+-        if any(m.isGetter() and m.isIndexed() for m in methods):
++        if rhrebase.any(m.isGetter() and m.isIndexed() for m in methods):
+             self.regular.append({"name": 'iterator',
+                                  "methodInfo": False,
+                                  "nativeName": "JS_ArrayIterator",
+@@ -1558,7 +1571,7 @@ class AttrDefiner(PropertyDefiner):
+             return ""
+ 
+         def flags(attr):
+-            unforgeable = " | JSPROP_PERMANENT" if self.unforgeable else ""
++            unforgeable = rhrebase.get_first_if_true(" | JSPROP_PERMANENT", "", self.unforgeable)
+             return ("JSPROP_SHARED | JSPROP_ENUMERATE | JSPROP_NATIVE_ACCESSORS" +
+                     unforgeable)
+ 
+@@ -1567,8 +1580,7 @@ class AttrDefiner(PropertyDefiner):
+                 accessor = 'get_' + attr.identifier.name
+                 jitinfo = "nullptr"
+             else:
+-                accessor = ("genericLenientGetter" if attr.hasLenientThis()
+-                            else "genericGetter")
++                accessor = ( rhrebase.get_first_if_true("genericLenientGetter", "genericGetter", attr.hasLenientThis()) )
+                 jitinfo = "&%s_getterinfo" % attr.identifier.name
+             return "{ JS_CAST_NATIVE_TO(%s, JSPropertyOp), %s }" % \
+                    (accessor, jitinfo)
+@@ -1580,8 +1592,7 @@ class AttrDefiner(PropertyDefiner):
+                 accessor = 'set_' + attr.identifier.name
+                 jitinfo = "nullptr"
+             else:
+-                accessor = ("genericLenientSetter" if attr.hasLenientThis()
+-                            else "genericSetter")
++                accessor = ( rhrebase.get_first_if_true("genericLenientSetter", "genericSetter", attr.hasLenientThis()) )
+                 jitinfo = "&%s_setterinfo" % attr.identifier.name
+             return "{ JS_CAST_NATIVE_TO(%s, JSStrictPropertyOp), %s }" % \
+                    (accessor, jitinfo)
+@@ -1623,7 +1634,7 @@ class ConstDefiner(PropertyDefiner):
+             'ConstantSpec',
+             PropertyDefiner.getControllingCondition, specData, doIdArrays)
+ 
+-class PropertyArrays():
++class PropertyArrays:
+     def __init__(self, descriptor):
+         self.staticMethods = MethodDefiner(descriptor, "StaticMethods",
+                                            static=True)
+@@ -1641,9 +1652,9 @@ class PropertyArrays():
+                  "unforgeableAttrs", "consts" ]
+ 
+     def hasChromeOnly(self):
+-        return any(getattr(self, a).hasChromeOnly() for a in self.arrayNames())
++        return rhrebase.any(getattr(self, a).hasChromeOnly() for a in self.arrayNames())
+     def hasNonChromeOnly(self):
+-        return any(getattr(self, a).hasNonChromeOnly() for a in self.arrayNames())
++        return rhrebase.any(getattr(self, a).hasNonChromeOnly() for a in self.arrayNames())
+     def __str__(self):
+         define = ""
+         for array in self.arrayNames():
+@@ -1654,7 +1665,7 @@ class CGNativeProperties(CGList):
+     def __init__(self, descriptor, properties):
+         def generateNativeProperties(name, chrome):
+             def check(p):
+-                return p.hasChromeOnly() if chrome else p.hasNonChromeOnly()
++                return rhrebase.get_first_if_true(p.hasChromeOnly(), p.hasNonChromeOnly(), chrome)
+ 
+             nativeProps = []
+             for array in properties.arrayNames():
+@@ -1847,6 +1858,9 @@ if (!unforgeableHolder) {
+             chromeProperties = accessCheck + " ? &sChromeOnlyNativeProperties : nullptr"
+         else:
+             chromeProperties = "nullptr"
++        interface_id_name = "nullptr"
++        if needInterfaceObject:
++          interface_id_name =  '"' + self.descriptor.interface.identifier.name + '"'
+         call = ("dom::CreateInterfaceObjects(aCx, aGlobal, parentProto,\n"
+                 "                            %s, %s,\n"
+                 "                            constructorProto, %s, %s, %d, %s,\n"
+@@ -1862,7 +1876,7 @@ if (!unforgeableHolder) {
+             domClass,
+             properties,
+             chromeProperties,
+-            '"' + self.descriptor.interface.identifier.name + '"' if needInterfaceObject else "nullptr"))
++            interface_id_name))
+         if UseHolderForUnforgeable(self.descriptor):
+             assert needInterfacePrototypeObject
+             setUnforgeableHolder = CGGeneric(
+@@ -2301,11 +2315,11 @@ def numericValue(t, v):
+             return "mozilla::PositiveInfinity()"
+         if v == float("-inf"):
+             return "mozilla::NegativeInfinity()"
+-        if math.isnan(v):
++        if v != v:  # NaN check without math.isnan (not available in Python 2.4)
+             return "mozilla::UnspecifiedNaN()"
+     return "%s%s" % (v, numericSuffixes[t])
+ 
+-class CastableObjectUnwrapper():
++class CastableObjectUnwrapper:
+     """
+     A class for unwrapping an object named by the "source" argument
+     based on the passed-in descriptor and storing it in a variable
+@@ -2443,7 +2457,7 @@ ${codeOnFailure}
+ }
+ ${target} = tmp.forget();""").substitute(self.substitution)
+ 
+-class JSToNativeConversionInfo():
++class JSToNativeConversionInfo:
+     """
+     An object representing information about a JS-to-native conversion.
+     """
+@@ -2934,7 +2948,7 @@ for (uint32_t i = 0; i < length; ++i) {
+             else:
+                 templateBody = CGList([templateBody, object], "\n")
+ 
+-            if any([arrayObject, dateObject, callbackObject, dictionaryObject,
++            if rhrebase.any([arrayObject, dateObject, callbackObject, dictionaryObject,
+                     object]):
+                 templateBody.prepend(CGGeneric("JS::Rooted<JSObject*> argObj(cx, &${val}.toObject());"))
+             templateBody = CGIfWrapper(templateBody, "${val}.isObject()")
+@@ -3697,17 +3711,21 @@ def instantiateJSToNativeConversion(info
+ 
+     if checkForValue:
+         if dealWithOptional:
++            info_decl = ""
++            if info.declArgs:
++              info_decl = getArgsCGThing(info.declArgs).define()
+             declConstruct = CGIndenter(
+                 CGGeneric("%s.Construct(%s);" %
+                           (originalDeclName,
+-                           getArgsCGThing(info.declArgs).define() if
+-                           info.declArgs else "")))
++                           info_decl)))
+             if holderType is not None:
++                info_hld = ""
++                if info.holderArgs:
++                  info_hld = getArgsCGThing(info.holderArgs).define()
+                 holderConstruct = CGIndenter(
+                     CGGeneric("%s.construct(%s);" %
+                               (originalHolderName,
+-                               getArgsCGThing(info.holderArgs).define() if
+-                               info.holderArgs else "")))
++                               info_hld )))
+             else:
+                 holderConstruct = None
+         else:
+@@ -3743,7 +3761,7 @@ def convertConstIDLValueToJSVal(value):
+     if tag in [IDLType.Tags.int64, IDLType.Tags.uint64]:
+         return "DOUBLE_TO_JSVAL(%s)" % numericValue(tag, value.value)
+     if tag == IDLType.Tags.bool:
+-        return "JSVAL_TRUE" if value.value else "JSVAL_FALSE"
++        return rhrebase.get_first_if_true("JSVAL_TRUE", "JSVAL_FALSE", value.value)
+     if tag in [IDLType.Tags.float, IDLType.Tags.double]:
+         return "DOUBLE_TO_JSVAL(%s)" % (value.value)
+     raise TypeError("Const value of unhandled type: " + value.type)
+@@ -3809,7 +3827,7 @@ class CGArgumentConverter(CGThing):
+             isEnforceRange=self.argument.enforceRange,
+             isClamp=self.argument.clamp,
+             lenientFloatCode=self.lenientFloatCode,
+-            isMember="Variadic" if self.argument.variadic else False,
++            isMember=rhrebase.get_first_if_true("Variadic", False, self.argument.variadic),
+             allowTreatNonCallableAsNull=self.allowTreatNonCallableAsNull,
+             sourceDescription=self.argDescription)
+ 
+@@ -4225,7 +4243,7 @@ def typeNeedsCx(type, descriptorProvider
+     if type.isSequence() or type.isArray():
+         return typeNeedsCx(type.inner, descriptorProvider, retVal)
+     if type.isUnion():
+-        return any(typeNeedsCx(t, descriptorProvider) for t in
++        return rhrebase.any(typeNeedsCx(t, descriptorProvider) for t in
+                    type.unroll().flatMemberTypes)
+     if type.isDictionary():
+         return dictionaryNeedsCx(type.inner, descriptorProvider)
+@@ -4236,7 +4254,7 @@ def typeNeedsCx(type, descriptorProvider
+     return type.isAny() or type.isObject()
+ 
+ def dictionaryNeedsCx(dictionary, descriptorProvider):
+-    return (any(typeNeedsCx(m.type, descriptorProvider) for m in dictionary.members) or
++    return (rhrebase.any(typeNeedsCx(m.type, descriptorProvider) for m in dictionary.members) or
+         (dictionary.parent and dictionaryNeedsCx(dictionary.parent, descriptorProvider)))
+ 
+ # Whenever this is modified, please update CGNativeMember.getRetvalInfo as
+@@ -4352,7 +4370,7 @@ def needCx(returnType, arguments, extend
+            considerTypes):
+     return (considerTypes and
+             (typeNeedsCx(returnType, descriptorProvider, True) or
+-             any(typeNeedsCx(a.type, descriptorProvider) for a in arguments)) or
++             rhrebase.any(typeNeedsCx(a.type, descriptorProvider) for a in arguments)) or
+             'implicitJSContext' in extendedAttributes)
+ 
+ class CGCallGenerator(CGThing):
+@@ -4513,7 +4531,10 @@ def wrapTypeIntoCurrentCompartment(type,
+                 if memberWrap:
+                     memberWraps.append(memberWrap)
+             myDict = myDict.parent
+-        return CGList(memberWraps, "\n") if len(memberWraps) != 0 else None
++        mem_wraps = None
++        if len(memberWraps) != 0:
++          mem_wraps = CGList(memberWraps, "\n")
++        return mem_wraps
+ 
+     if type.isUnion():
+         raise TypeError("Can't handle wrapping of unions in constructor "
+@@ -4658,7 +4679,7 @@ if (global.Failed()) {
+                             "xpc::WrapperFactory::IsXrayWrapper(obj)"))
+ 
+         cgThings.append(CGCallGenerator(
+-                    self.getErrorReport() if self.isFallible() else None,
++                    rhrebase.get_first_if_true(self.getErrorReport(), None, self.isFallible()),
+                     self.getArguments(), argsPre, returnType,
+                     self.extendedAttributes, descriptor, nativeMethodName,
+                     static))
+@@ -5077,7 +5098,7 @@ class CGGetterCall(CGPerSignatureCall):
+                                     attr.isStatic(), descriptor, attr,
+                                     getter=True)
+ 
+-class FakeArgument():
++class FakeArgument:
+     """
+     A class that quacks like an IDLArgument.  This is used to make
+     setters look like method calls or for special operations.
+@@ -5091,7 +5112,7 @@ class FakeArgument():
+         self.treatUndefinedAs = interfaceMember.treatUndefinedAs
+         self.enforceRange = False
+         self.clamp = False
+-        class FakeIdentifier():
++        class FakeIdentifier:
+             def __init__(self):
+                 self.name = name
+         self.identifier = FakeIdentifier()
+@@ -5270,7 +5291,7 @@ class CGNewResolveHook(CGAbstractBinding
+     def generate_code(self):
+         return CGIndenter(CGGeneric("return self->DoNewResolve(cx, obj, id, flags, objp);"))
+ 
+-class CppKeywords():
++class CppKeywords:
+     """
+     A class for checking if method names declared in webidl
+     are not in conflict with C++ keywords.
+@@ -5480,8 +5501,8 @@ class CGSpecializedForwardingSetter(CGSp
+         attrName = self.attr.identifier.name
+         forwardToAttrName = self.attr.getExtendedAttribute("PutForwards")[0]
+         # JS_GetProperty and JS_SetProperty can only deal with ASCII
+-        assert all(ord(c) < 128 for c in attrName)
+-        assert all(ord(c) < 128 for c in forwardToAttrName)
++        assert rhrebase.all(ord(c) < 128 for c in attrName)
++        assert rhrebase.all(ord(c) < 128 for c in forwardToAttrName)
+         return CGIndenter(CGGeneric("""JS::RootedValue v(cx);
+ if (!JS_GetProperty(cx, obj, "%s", v.address())) {
+   return false;
+@@ -5866,13 +5887,15 @@ return true;"""
+                                "  tryNext = false;\n",
+                            post="\n"
+                                 "}")
+-
++    htype = None
++    if conversionInfo.holderType:
++      htype = conversionInfo.holderType.define()
+     return {
+                 "name": name,
+                 "structType": structType,
+                 "externalType": externalType,
+                 "setter": CGIndenter(setter).define(),
+-                "holderType": conversionInfo.holderType.define() if conversionInfo.holderType else None
++                "holderType": htype
+                 }
+ 
+ def mapTemplate(template, templateVarArray):
+@@ -6158,8 +6181,11 @@ class ClassMethod(ClassItem):
+         return self.body
+ 
+     def declare(self, cgClass):
+-        templateClause = 'template <%s>\n' % ', '.join(self.templateArgs) \
+-                         if self.bodyInHeader and self.templateArgs else ''
++        
++        templateClause = ''
++        if self.bodyInHeader and self.templateArgs:
++           templateClause = 'template <%s>\n' % ', '.join(self.templateArgs)
++        print "TEMPLATECLAUSE", templateClause
+         args = ', '.join([a.declare() for a in self.args])
+         if self.bodyInHeader:
+             body = CGIndenter(CGGeneric(self.getBody())).define()
+@@ -6175,8 +6201,8 @@ class ClassMethod(ClassItem):
+                 'decorators': self.getDecorators(True),
+                 'returnType': self.returnType,
+                 'name': self.name,
+-                'const': ' const' if self.const else '',
+-                'override': ' MOZ_OVERRIDE' if self.override else '',
++                'const': rhrebase.get_first_if_true(' const', '', self.const),
++                'override': rhrebase.get_first_if_true(' MOZ_OVERRIDE', '', self.override),
+                 'args': args,
+                 'body': body
+                 })
+@@ -6212,7 +6238,7 @@ ${body}
+                   'className': cgClass.getNameString(),
+                   'name': self.name,
+                   'args': args,
+-                  'const': ' const' if self.const else '',
++                  'const': rhrebase.get_first_if_true(' const', '', self.const),
+                   'body': body })
+ 
+ class ClassUsingDeclaration(ClassItem):
+@@ -6410,7 +6436,7 @@ class ClassMember(ClassItem):
+         ClassItem.__init__(self, name, visibility)
+ 
+     def declare(self, cgClass):
+-        return '%s%s %s;\n' % ('static ' if self.static else '', self.type,
++        return '%s%s %s;\n' % ( rhrebase.get_first_if_true('static ', '', self.static), self.type,
+                                self.name)
+ 
+     def define(self, cgClass):
+@@ -6449,7 +6475,7 @@ class ClassEnum(ClassItem):
+             else:
+                 entry = '%s = %s' % (self.entries[i], self.values[i])
+             entries.append(entry)
+-        name = '' if not self.name else ' ' + self.name
++        name = rhrebase.get_first_if_true('', ' ' + (self.name or ''), not self.name)
+         return 'enum%s\n{\n  %s\n};\n' % (name, ',\n  '.join(entries))
+ 
+     def define(self, cgClass):
+@@ -6472,7 +6498,7 @@ class CGClass(CGThing):
+         self.constructors = constructors
+         # We store our single destructor in a list, since all of our
+         # code wants lists of members.
+-        self.destructors = [destructor] if destructor else []
++        self.destructors = rhrebase.get_first_if_true([destructor], [], destructor)
+         self.methods = methods
+         self.typedefs = typedefs
+         self.enums = enums
+@@ -6481,7 +6507,7 @@ class CGClass(CGThing):
+         self.isStruct = isStruct
+         self.disallowCopyConstruction = disallowCopyConstruction
+         self.indent = indent
+-        self.defaultVisibility ='public' if isStruct else 'private'
++        self.defaultVisibility = rhrebase.get_first_if_true('public', 'private', isStruct)
+         self.decorators = decorators
+         self.extradeclarations = extradeclarations
+         self.extradefinitions = extradefinitions
+@@ -6502,7 +6528,7 @@ class CGClass(CGThing):
+             result = result + self.indent + 'template <%s>\n' \
+                      % ','.join([str(a) for a in templateArgs])
+ 
+-        type = 'struct' if self.isStruct else 'class'
++        type = rhrebase.get_first_if_true('struct', 'class', self.isStruct)
+ 
+         if self.templateSpecialization:
+             specialization = \
+@@ -6675,7 +6701,7 @@ class CGClassForwardDeclare(CGThing):
+         self.name = name
+         self.isStruct = isStruct
+     def declare(self):
+-        type = 'struct' if self.isStruct else 'class'
++        type = rhrebase.get_first_if_true('struct', 'class', self.isStruct)
+         return '%s %s;\n' % (type, self.name)
+     def define(self):
+         # Header only
+@@ -7501,7 +7527,7 @@ class CGDOMJSProxyHandler(CGClass):
+                          methods=methods)
+ 
+ def stripTrailingWhitespace(text):
+-    tail = '\n' if text.endswith('\n') else ''
++    tail = rhrebase.get_first_if_true('\n', '', text.endswith('\n'))
+     lines = text.splitlines()
+     for i in range(len(lines)):
+         lines[i] = lines[i].rstrip()
+@@ -7946,7 +7972,7 @@ class CGDictionary(CGThing):
+ 
+     @staticmethod
+     def makeDictionaryName(dictionary, workers):
+-        suffix = "Workers" if workers else ""
++        suffix = rhrebase.get_first_if_true("Workers", "", workers)
+         return dictionary.identifier.name + suffix
+ 
+     def makeClassName(self, dictionary):
+@@ -8363,13 +8389,13 @@ class CGBindingRoot(CGThing):
+                                             skipGen=False)
+         def descriptorRequiresPreferences(desc):
+             iface = desc.interface
+-            return any(m.getExtendedAttribute("Pref") for m in iface.members + [iface]);
+-        requiresPreferences = any(descriptorRequiresPreferences(d) for d in descriptors)
+-        hasOwnedDescriptors = any(d.nativeOwnership == 'owned' for d in descriptors)
+-        requiresContentUtils = any(d.interface.hasInterfaceObject() for d in descriptors)
++            return rhrebase.any(m.getExtendedAttribute("Pref") for m in iface.members + [iface]);
++        requiresPreferences = rhrebase.any(descriptorRequiresPreferences(d) for d in descriptors)
++        hasOwnedDescriptors = rhrebase.any(d.nativeOwnership == 'owned' for d in descriptors)
++        requiresContentUtils = rhrebase.any(d.interface.hasInterfaceObject() for d in descriptors)
+         def descriptorHasChromeOnlyMembers(desc):
+-            return any(isChromeOnly(a) for a in desc.interface.members)
+-        hasChromeOnlyMembers = any(descriptorHasChromeOnlyMembers(d) for d in descriptors)
++            return rhrebase.any(isChromeOnly(a) for a in desc.interface.members)
++        hasChromeOnlyMembers = rhrebase.any(descriptorHasChromeOnlyMembers(d) for d in descriptors)
+         # XXXkhuey ugly hack but this is going away soon.
+         isEventTarget = webIDLFile.endswith("EventTarget.webidl")
+         hasWorkerStuff = len(config.getDescriptors(webIDLFile=webIDLFile,
+@@ -8480,14 +8506,13 @@ class CGBindingRoot(CGThing):
+                           # Have to include nsDOMQS.h to get fast arg unwrapping
+                           # for old-binding things with castability.
+                           'nsDOMQS.h'
+-                          ] + (['WorkerPrivate.h',
+-                                'nsThreadUtils.h'] if hasWorkerStuff else [])
+-                            + (['mozilla/Preferences.h'] if requiresPreferences else [])
+-                            + (['mozilla/dom/NonRefcountedDOMObject.h'] if hasOwnedDescriptors else [])
+-                            + (['nsContentUtils.h'] if requiresContentUtils else [])
+-                            + (['nsCxPusher.h'] if mainDictionaries else [])
+-                            + (['AccessCheck.h'] if hasChromeOnlyMembers else [])
+-                            + (['xpcprivate.h'] if isEventTarget else []),
++                          ] + (rhrebase.get_first_if_true(['WorkerPrivate.h','nsThreadUtils.h'], [], hasWorkerStuff))
++                            + (rhrebase.get_first_if_true(['mozilla/Preferences.h'], [], requiresPreferences))
++                            + (rhrebase.get_first_if_true(['mozilla/dom/NonRefcountedDOMObject.h'], [], hasOwnedDescriptors))
++                            + (rhrebase.get_first_if_true(['nsContentUtils.h'], [], requiresContentUtils))
++                            + (rhrebase.get_first_if_true(['nsCxPusher.h'], [], mainDictionaries))
++                            + (rhrebase.get_first_if_true(['AccessCheck.h'], [], hasChromeOnlyMembers))
++                            + (rhrebase.get_first_if_true(['xpcprivate.h'], [], isEventTarget)),
+                          curr,
+                          config,
+                          jsImplemented)
+@@ -8529,7 +8554,7 @@ class CGNativeMember(ClassMethod):
+         self.passCxAsNeeded = passCxAsNeeded
+         self.jsObjectsArePtr = jsObjectsArePtr
+         self.variadicIsSequence = variadicIsSequence
+-        breakAfterSelf = "\n" if breakAfter else ""
++        breakAfterSelf = rhrebase.get_first_if_true("\n", "", breakAfter)
+         ClassMethod.__init__(self, name,
+                              self.getReturnType(signature[0], False),
+                              self.getArgs(signature[0], signature[1]),
+@@ -8832,7 +8857,7 @@ class CGNativeMember(ClassMethod):
+             decl = CGTemplatedType("Nullable", decl)
+             ref = True
+         if isMember == "Variadic":
+-            arrayType = "Sequence" if self.variadicIsSequence else "nsTArray"
++            arrayType = rhrebase.get_first_if_true("Sequence", "nsTArray", self.variadicIsSequence)
+             decl = CGTemplatedType(arrayType, decl)
+             ref = True
+         elif optional:
+@@ -8848,7 +8873,7 @@ class CGNativeMember(ClassMethod):
+         """
+         (decl, ref) = self.getArgType(arg.type,
+                                       arg.optional and not arg.defaultValue,
+-                                      "Variadic" if arg.variadic else False)
++                                      rhrebase.get_first_if_true("Variadic", False, arg.variadic))
+         if ref:
+             decl = CGWrapper(decl, pre="const ", post="&")
+ 
+@@ -8976,7 +9001,7 @@ class CGBindingImplClass(CGClass):
+         if descriptor.supportsIndexedProperties():
+             # But we don't need it if we already have an infallible
+             # "length" attribute, which we often do.
+-            haveLengthAttr = any(
++            haveLengthAttr = rhrebase.any(
+                 m for m in iface.members if m.isAttr() and
+                 CGSpecializedGetter.makeNativeName(descriptor, m) == "Length")
+             if not haveLengthAttr:
+@@ -9493,7 +9518,7 @@ class CGCallbackInterface(CGCallback):
+         CGCallback.__init__(self, iface, descriptor, "CallbackInterface",
+                             methods, getters=getters, setters=setters)
+ 
+-class FakeMember():
++class FakeMember:
+     def __init__(self):
+         self.treatUndefinedAs = self.treatNullAs = "Default"
+     def isStatic(self):
+@@ -9532,7 +9557,7 @@ class CallbackMember(CGNativeMember):
+         self.needThisHandling = needThisHandling
+         # If needThisHandling, we generate ourselves as private and the caller
+         # will handle generating public versions that handle the "this" stuff.
+-        visibility = "private" if needThisHandling else "public"
++        visibility = rhrebase.get_first_if_true("private", "public", needThisHandling)
+         # We don't care, for callback codegen, whether our original member was
+         # a method or attribute or whatnot.  Just always pass FakeMember()
+         # here.
+@@ -9652,7 +9677,7 @@ class CallbackMember(CGNativeMember):
+             arg.type, self.descriptorProvider,
+             {
+                 'result' : result,
+-                'successCode' : "continue;" if arg.variadic else "break;",
++                'successCode' : rhrebase.get_first_if_true("continue;", "break;", arg.variadic),
+                 'jsvalRef' : "argv.handleAt(%s)" % jsvalIndex,
+                 'jsvalHandle' : "argv.handleAt(%s)" % jsvalIndex,
+                 # XXXbz we don't have anything better to use for 'obj',
+@@ -9717,7 +9742,7 @@ class CallbackMember(CGNativeMember):
+ 
+     @staticmethod
+     def ensureASCIIName(idlObject):
+-        type = "attribute" if idlObject.isAttr() else "operation"
++        type = rhrebase.get_first_if_true("attribute", "operation", idlObject.isAttr())
+         if re.match("[^\x20-\x7E]", idlObject.identifier.name):
+             raise SyntaxError('Callback %s name "%s" contains non-ASCII '
+                               "characters.  We can't handle that.  %s" %
+@@ -9880,7 +9905,7 @@ class CGJSImplInitOperation(CallbackOper
+         CallbackOperationBase.__init__(self, (BuiltinTypes[IDLBuiltinType.Types.void], sig[1]),
+                                        "__init", "__Init", descriptor, False)
+ 
+-class GlobalGenRoots():
++class GlobalGenRoots:
+     """
+     Roots for global codegen.
+ 
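
The Codegen.py hunks above, and the Configuration.py and WebIDL.py hunks below, replace Python 2.5+ conditional expressions and the built-in any()/all() functions (also new in 2.5) with calls into a compatibility module named rhrebase.  That module is imported further down but its source does not appear in this section, so the following sketch only illustrates the semantics implied by the call sites; the function names come from the patch, the bodies are assumed.

    # rhrebase: assumed Python 2.4 compatibility helpers (sketch only; the
    # module shipped with the patch may differ in detail).

    def get_first_if_true(first, second, condition):
        # Stand-in for the 2.5+ expression "first if condition else second".
        # Unlike the inline form, both branches are evaluated eagerly.
        if condition:
            return first
        return second

    def any(iterable):
        # Python 2.4 has no built-in any().
        for element in iterable:
            if element:
                return True
        return False

    def all(iterable):
        # Python 2.4 has no built-in all().
        for element in iterable:
            if not element:
                return False
        return True
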
+diff -up mozilla/dom/bindings/Configuration.py.python2 mozilla/dom/bindings/Configuration.py
+--- mozilla/dom/bindings/Configuration.py.python2	2013-12-06 16:44:53.000000000 +0100
++++ mozilla/dom/bindings/Configuration.py	2013-12-07 22:18:39.000000000 +0100
+@@ -4,9 +4,15 @@
+ 
+ from WebIDL import IDLInterface, IDLExternalInterface
+ import os
++import rhrebase
+ 
+ autogenerated_comment = "/* THIS FILE IS AUTOGENERATED - DO NOT EDIT */\n"
+ 
++def dump(o):
++  for a in dir(o):
++    if hasattr(o, a):
++      print "obj.%s = %s" % (attr, getattr(o, a))
++
+ class Configuration:
+     """
+     Represents global configuration state based on IDL parse data and
+@@ -56,7 +62,7 @@ class Configuration:
+         # Mark the descriptors for which the nativeType corresponds to exactly
+         # one interface.
+         for descriptor in self.descriptors:
+-            descriptor.unsharedImplementation = all(
++            descriptor.unsharedImplementation = rhrebase.all(
+                 d.nativeType != descriptor.nativeType or d == descriptor
+                 for d in self.descriptors)
+ 
+@@ -375,7 +381,7 @@ class Descriptor(DescriptorProvider):
+                                desc.get('wrapperCache', True))))
+ 
+         def make_name(name):
+-            return name + "_workers" if self.workers else name
++            return rhrebase.get_first_if_true(name + "_workers", name, self.workers)
+         self.name = make_name(interface.identifier.name)
+ 
+         # self.extendedAttributes is a dict of dicts, keyed on
+@@ -458,10 +464,10 @@ class Descriptor(DescriptorProvider):
+ 
+         assert member.isAttr()
+         assert bool(getter) != bool(setter)
+-        key = 'getterOnly' if getter else 'setterOnly'
++        key = rhrebase.get_first_if_true('getterOnly', 'setterOnly', getter)
+         attrs = self.extendedAttributes['all'].get(name, []) + self.extendedAttributes[key].get(name, [])
+         if throws is None:
+-            throwsAttr = "GetterThrows" if getter else "SetterThrows"
++            throwsAttr = rhrebase.get_first_if_true("GetterThrows", "SetterThrows", getter)
+             throws = member.getExtendedAttribute(throwsAttr)
+         maybeAppendInfallibleToAttrs(attrs, throws)
+         return attrs
+diff -up mozilla/dom/bindings/GlobalGen.py.python2 mozilla/dom/bindings/GlobalGen.py
+--- mozilla/dom/bindings/GlobalGen.py.python2	2013-12-06 16:44:53.000000000 +0100
++++ mozilla/dom/bindings/GlobalGen.py	2013-12-07 22:18:39.000000000 +0100
+@@ -8,8 +8,41 @@
+ import os
+ import WebIDL
+ import cPickle
++import mypickle
+ from Configuration import Configuration
+ from Codegen import GlobalGenRoots, replaceFileIfChanged
++import struct
++def pickle_save_float_fix(self, obj, pack=struct.pack):
++  import sys
++  
++  sys.stderr.write("%s\n" % obj)
++  if obj == float('inf'):
++    self.write(pickle.FLOAT + repr(float('9999999999999.0')) + '\n')
++  else:
++    if obj == float('nan'):
++      self.write(pickle.FLOAT + repr(float('8080808080808.0')) + '\n')
++    else:
++      if self.bin:
++        self.write(pickle.BINFLOAT + pack('>d', obj))
++      else:
++        self.write(pickle.FLOAT + repr(obj) + '\n')
++import types
++#setattr(pickle.Pickler, 'save_float', types.MethodType(pickle_save_float_fix, pickle.Pickler))
++#print "PICKLE INIT", pickle.Pickler.save_float, pickle_save_float_fix
++
++def dump(o): 
++  for a in dir(o):
++    if hasattr(o, a): 
++      obj = getattr(o, a)
++      print "obj.%s = %s" % (a, obj)
++      if type(obj) in [list, tuple, dict]:        
++        print "START OF [LIST/DIR/dict]", a
++        count = 0
++        for i in obj:
++          print count
++          dump(i)
++          count += 1
++        print "END OF", a
+ 
+ def generate_file(config, name, action):
+ 
+@@ -60,7 +93,15 @@ def main():
+ 
+     # Write the configuration out to a pickle.
+     resultsFile = open('ParserResults.pkl', 'wb')
+-    cPickle.dump(config, resultsFile, -1)
++    print config, dir(config)
++    #import pprint
++    #pprint.pprint(config)
++    #dump(config)
++    print mypickle, sys.path
++    #print "PICKLE INIT", pickle.Pickler.save_float, pickle_save_float_fix
++    #setattr(pickle.Pickler, 'save_float', types.MethodType(pickle_save_float_fix, pickle.Pickler))
++    #print "PICKLE INIT", pickle.Pickler.save_float, pickle_save_float_fix
++    mypickle.dump(config, resultsFile, -1)
+     resultsFile.close()
+ 
+     # Generate the prototype list.
+diff -up mozilla/dom/bindings/parser/WebIDL.py.python2 mozilla/dom/bindings/parser/WebIDL.py
+--- mozilla/dom/bindings/parser/WebIDL.py.python2	2013-12-06 16:44:53.000000000 +0100
++++ mozilla/dom/bindings/parser/WebIDL.py	2013-12-07 22:18:39.000000000 +0100
+@@ -9,7 +9,11 @@ import re
+ import os
+ import traceback
+ import math
+-
++import sys
++print sys.path
++import rhrebase
++from rhrebase import (any, all)
++print rhrebase
+ # Machinery
+ 
+ def parseInt(literal):
+@@ -67,9 +71,12 @@ class WebIDLError(Exception):
+         self.warning = warning
+ 
+     def __str__(self):
++        _locations = ""
++        if len(self.locations) != 0:
++           _locations = ", "
+         return "%s: %s%s%s" % (self.warning and 'warning' or 'error',
+                                  self.message,
+-                                 ", " if len(self.locations) != 0 else "",
++                                 _locations,
+                                  "\n".join(self.locations))
+ 
+ class Location(object):
+@@ -78,7 +85,10 @@ class Location(object):
+         self._lineno = lineno
+         self._lexpos = lexpos
+         self._lexdata = lexer.lexdata
+-        self._file = filename if filename else "<unknown>"
++        if filename:
++           self._file = filename 
++        else:
++           self._file = "<unknown>"
+ 
+     def __eq__(self, other):
+         return self._lexpos == other._lexpos and \
+@@ -497,9 +507,12 @@ class IDLExternalInterface(IDLObjectWith
+     def _getDependentObjects(self):
+         return set()
+ 
++import traceback
++
+ class IDLInterface(IDLObjectWithScope):
+     def __init__(self, location, parentScope, name, parent, members,
+                  isPartial):
++        #traceback.print_stack()
+         assert isinstance(parentScope, IDLScope)
+         assert isinstance(name, IDLUnresolvedIdentifier)
+         assert not isPartial or not parent
+@@ -530,6 +543,7 @@ class IDLInterface(IDLObjectWithScope):
+             self.setNonPartial(location, parent, members)
+         else:
+             # Just remember our members for now
++            print "1 setting members in init", members
+             self.members = members
+ 
+     def __str__(self):
+@@ -554,6 +568,7 @@ class IDLInterface(IDLObjectWithScope):
+         # Might be a ctor, which isn't in self.members
+         if newObject in self.members:
+             self.members.remove(newObject)
++            print "2 resolveIdentifierConflict", self.members, newObject.identifier, newObject.location
+         return retval
+ 
+     def finish(self, scope):
+@@ -568,7 +583,9 @@ class IDLInterface(IDLObjectWithScope):
+                               [self.location])
+ 
+         assert not self.parent or isinstance(self.parent, IDLIdentifierPlaceholder)
+-        parent = self.parent.finish(scope) if self.parent else None
++        parent = None
++        if self.parent:
++          parent = self.parent.finish(scope)
+         if parent and isinstance(parent, IDLExternalInterface):
+             raise WebIDLError("%s inherits from %s which does not have "
+                               "a definition" %
+@@ -578,8 +595,12 @@ class IDLInterface(IDLObjectWithScope):
+         assert not parent or isinstance(parent, IDLInterface)
+ 
+         self.parent = parent
+-
+-        assert iter(self.members)
++        
++        print "finish, self.members", self.members, dir(self.members)
++        # print members
++        for i in self.members:
++          print "MEMBER", i.identifier, i.location 
++        #assert iter(self.members) lets ignore this
+ 
+         if self.parent:
+             self.parent.finish(scope)
+@@ -625,10 +646,13 @@ class IDLInterface(IDLObjectWithScope):
+ 
+         # resolve() will modify self.members, so we need to iterate
+         # over a copy of the member list here.
++        print "resolve members"
+         for member in list(self.members):
++            print "member to resolve", member.identifier, member.location
+             member.resolve(self)
+ 
+         for member in self.members:
++            print "member to finish", member.identifier, member.location
+             member.finish(scope)
+ 
+         ctor = self.ctor()
+@@ -659,6 +683,9 @@ class IDLInterface(IDLObjectWithScope):
+                             "Multiple definitions of %s on %s coming from 'implements' statements" %
+                             (member.identifier.name, self),
+                             [additionalMember.location, member.location])
++            print "Adding following members: "
++            for i in additionalMembers:
++              print i.location, i.identifier
+             self.members.extend(additionalMembers)
+             iface.interfacesImplementingSelf.add(self)
+ 
+@@ -696,6 +723,9 @@ class IDLInterface(IDLObjectWithScope):
+                 # attributes of ancestor interfaces, with their corresponding
+                 # getters, on our interface, but that gets pretty complicated
+                 # and seems unnecessary.
++                print unforgeableAttr
++                print "Adding unforgetableAttr"
++                print unforgeableAttr.identifier, unforgeableAttr.location
+                 self.members.append(unforgeableAttr)
+ 
+         # Ensure that there's at most one of each {named,indexed}
+@@ -862,7 +892,9 @@ class IDLInterface(IDLObjectWithScope):
+                     raise WebIDLError("NamedConstructor must either take an identifier or take a named argument list",
+                                       [attr.location])
+ 
+-                args = attr.args() if attr.hasArgs() else []
++                args = []
++                if attr.hasArgs():
++                  args = attr.args()
+ 
+                 retType = IDLWrapperType(self.location, self)
+                 
+@@ -925,7 +957,10 @@ class IDLInterface(IDLObjectWithScope):
+                                   [attr.location])
+ 
+             attrlist = attr.listValue()
+-            self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True
++            
++            self._extendedAttrDict[identifier] = True
++            if len(attrlist):
++               self._extendedAttrDict[identifier] = attrlist 
+ 
+     def addImplementedInterface(self, implementedInterface):
+         assert(isinstance(implementedInterface, IDLInterface))
+@@ -996,7 +1031,11 @@ class IDLInterface(IDLObjectWithScope):
+         assert not self.parent
+         self.parent = parent
+         # Put the new members at the beginning
++        print "adding members from NonPartial"
++        for i in members:
++          print i.identifier, i.location
+         self.members = members + self.members
++        print self.members
+ 
+     def getJSImplementation(self):
+         classId = self.getExtendedAttribute("JSImplementation")
+@@ -1036,6 +1075,10 @@ class IDLDictionary(IDLObjectWithScope):
+ 
+         self.parent = parent
+         self._finished = False
++        print "dict, adding members"
++        for i in members:
++          print i, i.identifier, i.location
++
+         self.members = list(members)
+ 
+         IDLObjectWithScope.__init__(self, location, parentScope, name)
+@@ -1066,6 +1109,8 @@ class IDLDictionary(IDLObjectWithScope):
+             self.parent.finish(scope)
+ 
+         for member in self.members:
++            print "resolving member:", member.identifier, member.location
++
+             member.resolve(self)
+             if not member.isComplete():
+                 member.complete(scope)
+@@ -2405,7 +2450,9 @@ class IDLInterfaceMember(IDLObjectWithId
+         for attr in attrs:
+             self.handleExtendedAttribute(attr)
+             attrlist = attr.listValue()
+-            self._extendedAttrDict[attr.identifier()] = attrlist if len(attrlist) else True
++            self._extendedAttrDict[attr.identifier()] = True
++            if len(attrlist):
++               self._extendedAttrDict[attr.identifier()] = attrlist
+ 
+     def handleExtendedAttribute(self, attr):
+         pass
+@@ -3517,7 +3564,10 @@ class Parser(Tokenizer):
+         """
+             InterfaceMembers : ExtendedAttributeList InterfaceMember InterfaceMembers
+         """
+-        p[0] = [p[2]] if p[2] else []
++        if p[2]:
++           p[0] = [p[2]]
++        else:
++           p[0] = []
+ 
+         assert not p[1] or p[2]
+         p[2].addExtendedAttributes(p[1])
+@@ -3798,11 +3848,11 @@ class Parser(Tokenizer):
+         # by the parser, so we can assert here.
+         assert not stringifier or len(qualifiers) == 1
+ 
+-        getter = True if IDLMethod.Special.Getter in p[1] else False
+-        setter = True if IDLMethod.Special.Setter in p[1] else False
+-        creator = True if IDLMethod.Special.Creator in p[1] else False
+-        deleter = True if IDLMethod.Special.Deleter in p[1] else False
+-        legacycaller = True if IDLMethod.Special.LegacyCaller in p[1] else False
++        getter = rhrebase.get_first_if_true(True, False, IDLMethod.Special.Getter in p[1])
++        setter = rhrebase.get_first_if_true(True, False, IDLMethod.Special.Setter in p[1])
++        creator = rhrebase.get_first_if_true(True, False, IDLMethod.Special.Creator in p[1])
++        deleter = rhrebase.get_first_if_true(True, False, IDLMethod.Special.Deleter in p[1])
++        legacycaller = rhrebase.get_first_if_true(True, False, IDLMethod.Special.LegacyCaller in p[1])
+ 
+         if getter or deleter:
+             if setter or creator:
+@@ -3818,7 +3868,7 @@ class Parser(Tokenizer):
+         if getter or deleter:
+             if len(arguments) != 1:
+                 raise WebIDLError("%s has wrong number of arguments" %
+-                                  ("getter" if getter else "deleter"),
++                                  ( rhrebase.get_first_if_true("getter", "deleter", getter) ),
+                                   [self.getLocation(p, 2)])
+             argType = arguments[0].type
+             if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]:
+@@ -3827,12 +3877,12 @@ class Parser(Tokenizer):
+                 specialType = IDLMethod.NamedOrIndexed.Indexed
+             else:
+                 raise WebIDLError("%s has wrong argument type (must be DOMString or UnsignedLong)" %
+-                                  ("getter" if getter else "deleter"),
++                                  ( rhrebase.get_first_if_true("getter", "deleter", getter) ),
+                                   [arguments[0].location])
+             if arguments[0].optional or arguments[0].variadic:
+                 raise WebIDLError("%s cannot have %s argument" %
+-                                  ("getter" if getter else "deleter",
+-                                   "optional" if arguments[0].optional else "variadic"),
++                                  ( rhrebase.get_first_if_true("getter", "deleter", getter) ,
++                                   rhrebase.get_first_if_true("optional", "variadic", arguments[0].optional) ),
+                                    [arguments[0].location])
+         if getter:
+             if returnType.isVoid():
+@@ -3841,7 +3891,7 @@ class Parser(Tokenizer):
+         if setter or creator:
+             if len(arguments) != 2:
+                 raise WebIDLError("%s has wrong number of arguments" %
+-                                  ("setter" if setter else "creator"),
++                                  ( rhrebase.get_first_if_true("setter", "creator", setter) ),
+                                   [self.getLocation(p, 2)])
+             argType = arguments[0].type
+             if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]:
+@@ -3850,17 +3900,17 @@ class Parser(Tokenizer):
+                 specialType = IDLMethod.NamedOrIndexed.Indexed
+             else:
+                 raise WebIDLError("%s has wrong argument type (must be DOMString or UnsignedLong)" %
+-                                  ("setter" if setter else "creator"),
++                                  ( rhrebase.get_first_if_true("setter", "creator", setter) ),
+                                   [arguments[0].location])
+             if arguments[0].optional or arguments[0].variadic:
+                 raise WebIDLError("%s cannot have %s argument" %
+-                                  ("setter" if setter else "creator",
+-                                   "optional" if arguments[0].optional else "variadic"),
++                                  ( rhrebase.get_first_if_true("setter", "creator", setter) ,
++                                   rhrebase.get_first_if_true("optional", "variadic", arguments[0].optional) ),
+                                   [arguments[0].location])
+             if arguments[1].optional or arguments[1].variadic:
+                 raise WebIDLError("%s cannot have %s argument" %
+-                                  ("setter" if setter else "creator",
+-                                   "optional" if arguments[1].optional else "variadic"),
++                                  ( rhrebase.get_first_if_true("setter", "creator", setter) ,
++                                   rhrebase.get_first_if_true("optional", "variadic", arguments[1].optional) ),
+                                   [arguments[1].location])
+ 
+         if stringifier:
+@@ -3884,7 +3934,7 @@ class Parser(Tokenizer):
+                 raise WebIDLError("Cannot have a non-optional argument following an optional argument",
+                                   [argument.location])
+             inOptionalArguments = argument.optional
+-            variadicArgument = argument if argument.variadic else None
++            variadicArgument = rhrebase.get_first_if_true(argument, None, argument.variadic)
+ 
+         # identifier might be None.  This is only permitted for special methods.
+         if not identifier:
+@@ -3894,15 +3944,19 @@ class Parser(Tokenizer):
+                                   [self.getLocation(p, 2)])
+ 
+             location = BuiltinLocation("<auto-generated-identifier>")
+-            identifier = IDLUnresolvedIdentifier(location, "__%s%s%s%s%s%s%s" %
+-                ("named" if specialType == IDLMethod.NamedOrIndexed.Named else \
+-                 "indexed" if specialType == IDLMethod.NamedOrIndexed.Indexed else "",
+-                 "getter" if getter else "",
+-                 "setter" if setter else "",
+-                 "deleter" if deleter else "",
+-                 "creator" if creator else "",
+-                 "legacycaller" if legacycaller else "",
+-                 "stringifier" if stringifier else ""), allowDoubleUnderscore=True)
++            _named_or_indexed_or_none = ""
++            if specialType == IDLMethod.NamedOrIndexed.Named:
++               _named_or_indexed_or_none = "named"
++            elif specialType == IDLMethod.NamedOrIndexed.Indexed:
++               _named_or_indexed_or_none = "indexed"
++            identifier = IDLUnresolvedIdentifier(location, "__%s%s%s%s%s%s%s" %     
++                (_named_or_indexed_or_none,
++                 rhrebase.get_first_if_true("getter", "", getter),
++                 rhrebase.get_first_if_true("setter", "", setter),
++                 rhrebase.get_first_if_true("deleter", "", deleter),
++                 rhrebase.get_first_if_true("creator", "", creator),
++                 rhrebase.get_first_if_true("legacycaller", "", legacycaller),
++                 rhrebase.get_first_if_true("stringifier", "", stringifier)), allowDoubleUnderscore=True)
+ 
+         method = IDLMethod(self.getLocation(p, 2), identifier, returnType, arguments,
+                            static=static, getter=getter, setter=setter, creator=creator,
+@@ -4008,7 +4062,10 @@ class Parser(Tokenizer):
+         """
+             ArgumentList : Argument Arguments
+         """
+-        p[0] = [p[1]] if p[1] else []
++        if p[1]:
++           p[0] = [p[1]]
++        else:
++           p[0] = []
+         p[0].extend(p[2])
+ 
+     def p_ArgumentListEmpty(self, p):
+@@ -4021,7 +4078,10 @@ class Parser(Tokenizer):
+         """
+             Arguments : COMMA Argument Arguments
+         """
+-        p[0] = [p[2]] if p[2] else []
++        if p[2]:
++           p[0] = [p[2]]
++        else:
++           p[0] = []
+         p[0].extend(p[3])
+ 
+     def p_ArgumentsEmpty(self, p):
+@@ -4151,7 +4211,10 @@ class Parser(Tokenizer):
+         """
+             ExtendedAttributes : COMMA ExtendedAttribute ExtendedAttributes
+         """
+-        p[0] = [p[2]] if p[2] else []
++        if p[2]:
++           p[0] = [p[2]]
++        else:
++           p[0] = []
+         p[0].extend(p[3])
+ 
+     def p_ExtendedAttributesEmpty(self, p):
+diff -up mozilla/dom/browser-element/mochitest/createNewTest.py.python2 mozilla/dom/browser-element/mochitest/createNewTest.py
+--- mozilla/dom/browser-element/mochitest/createNewTest.py.python2	2013-12-06 16:44:53.000000000 +0100
++++ mozilla/dom/browser-element/mochitest/createNewTest.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ 
+ This script requires Python 2.7."""
+ 
+-from __future__ import print_function
+ 
+ import sys
+ import os
+@@ -60,8 +59,9 @@ def add_to_makefile(filenames):
+ 
+     """
+     lines_to_write = [''] + ['\t\t%s \\' % n for n in filenames]
+-    with open('Makefile.in', 'a') as f:
+-        f.write('\n'.join(lines_to_write))
++    f = open('Makefile.in', 'a')
++    f.write('\n'.join(lines_to_write))
++    f.close()
+ 
+     if 'EDITOR' not in os.environ or not os.environ['EDITOR']:
+         print_fill("""\
+@@ -70,14 +70,15 @@ def add_to_makefile(filenames):
+         return
+ 
+     # Count the number of lines in Makefile.in.
+-    with open('Makefile.in', 'r') as f:
+-        num_lines = len(f.readlines())
++    f = open('Makefile.in', 'r')
++    num_lines = len(f.readlines())
++    f.close()
+ 
+     try:
+         subprocess.call([os.environ['EDITOR'],
+                          '+%d' % (num_lines - len(lines_to_write) + 2),
+                          'Makefile.in'])
+-    except Exception as e:
++    except Exception, e:
+         print_fill("Error opening $EDITOR: %s." % str(e))
+         print()
+         print_fill("""\
+@@ -102,8 +103,9 @@ def main(test_name, bug_number):
+             # fchmod doesn't work on Windows.
+             pass
+ 
+-        with os.fdopen(fd, 'w') as file:
+-            file.write(format(template))
++        file_ = os.fdopen(fd, 'w')
++        file_.write(format(template))
++        file_.close()
+ 
+     create_file('browserElement_{test}.js', js_template)
+     create_file('test_browserElement_inproc_{test}.html', html_template)
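
The dom/imptests scripts below receive the same Python 2.4 conversion as createNewTest.py above: the from __future__ imports are dropped, "except X as e" reverts to the old "except X, e" form, and with-statements are unrolled into explicit open()/close() pairs.  The sketch below illustrates that last rewrite, mirroring the add_to_makefile() change above; the payload line is illustrative only.

    lines_to_write = ['', '\t\texample_test.js \\']
    # Python 2.5+ form being removed:
    #     with open('Makefile.in', 'a') as f:
    #         f.write('\n'.join(lines_to_write))
    # Python 2.4-compatible form used by the patch.  Note that, unlike the
    # with-block, nothing closes the handle if write() raises.
    f = open('Makefile.in', 'a')
    f.write('\n'.join(lines_to_write))
    f.close()
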
+diff -up mozilla/dom/imptests/importTestsuite.py.python2 mozilla/dom/imptests/importTestsuite.py
+--- mozilla/dom/imptests/importTestsuite.py.python2	2013-12-06 16:44:54.000000000 +0100
++++ mozilla/dom/imptests/importTestsuite.py	2013-12-07 22:18:39.000000000 +0100
+@@ -10,7 +10,6 @@ Note: removes both source and destinatio
+       use with outstanding changes in either directory.
+ """
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ import os
+ import shutil
+@@ -50,14 +49,16 @@ def getData(confFile):
+     iden = ""
+     directories = []
+     try:
+-        with open(confFile, 'r') as fp:
+-            first = True
++        fp = open(confFile, 'r')
++        first = True
++        if 1:
+             for line in fp:
+                 if first:
+                     vcs, url, iden = line.strip().split("|")
+                     first = False
+                 else:
+                     directories.append(line.strip())
++        fp.close()
+     finally:
+         return vcs, url, iden, directories
+ 
+@@ -120,11 +121,12 @@ def printMozbuildFile(dest, directories)
+     """
+     print("Creating mozbuild...")
+     path = dest + ".mozbuild"
+-    with open(path, 'w') as fh:
+-        normalized = [makeDestPath(dest, d["path"]) for d in directories]
+-        result = writeBuildFiles.substMozbuild("importTestsuite.py",
++    fh = open(path, 'w')
++    normalized = [makeDestPath(dest, d["path"]) for d in directories]
++    result = writeBuildFiles.substMozbuild("importTestsuite.py",
+             normalized)
+-        fh.write(result)
++    fh.write(result)
++    fh.close()
+ 
+     subprocess.check_call(["hg", "add", path])
+ 
+@@ -138,19 +140,22 @@ def printBuildFiles(dest, directories):
+         files = ["test_%s" % (mochitest, ) for mochitest in d["mochitests"]]
+         files.extend(d["supportfiles"])
+ 
+-        with open(path + "/Makefile.in", "w") as fh:
+-            result = writeBuildFiles.substMakefile("importTestsuite.py", files)
+-            fh.write(result)
++        fh = open(path + "/Makefile.in", "w")
++        result = writeBuildFiles.substMakefile("importTestsuite.py", files)
++        fh.write(result)
++        fh.close()
+ 
+-        with open(path + "/moz.build", "w") as fh:
+-            result = writeBuildFiles.substMozbuild("importTestsuite.py", [])
+-            fh.write(result)
++        fh = open(path + "/moz.build", "w")
++        result = writeBuildFiles.substMozbuild("importTestsuite.py", [])
++        fh.write(result)
++        fh.close()
+ 
+         if d["reftests"]:
+-            with open(path + "/reftest.list", "w") as fh:
+-                result = writeBuildFiles.substReftestList("importTestsuite.py",
++            fh = open(path + "/reftest.list", "w")
++            result = writeBuildFiles.substReftestList("importTestsuite.py",
+                     d["reftests"])
+-                fh.write(result)
++            fh.write(result)
++            fh.close()
+ 
+ 
+ def hgadd(dest, directories):
+@@ -189,7 +194,7 @@ def importRepo(confFile):
+         hgadd(dest, directories)
+         print("Removing %s again..." % hgdest)
+         subprocess.check_call(["rm", "-rf", hgdest])
+-    except subprocess.CalledProcessError as e:
++    except subprocess.CalledProcessError, e:
+         print(e.returncode)
+     finally:
+         print("Done")
+diff -up mozilla/dom/imptests/parseFailures.py.python2 mozilla/dom/imptests/parseFailures.py
+--- mozilla/dom/imptests/parseFailures.py.python2	2013-12-06 16:44:54.000000000 +0100
++++ mozilla/dom/imptests/parseFailures.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ import collections
+ import json
+@@ -59,15 +58,17 @@ def writeFiles(files):
+         pathmap.setdefault(dirp, []).append(leaf)
+ 
+     for k, v in pathmap.items():
+-        with open(k + '/Makefile.in', 'wb') as fh:
+-            result = writeBuildFiles.substMakefile('parseFailures.py', v)
+-            result = result.encode('utf-8')
+-            fh.write(result)
+-
+-        with open(k + '/moz.build', 'wb') as fh:
+-            result = writeBuildFiles.substMozbuild('parseFailures.py', [])
+-            result = result.encode('utf-8')
+-            fh.write(result)
++        fh = open(k + '/Makefile.in', 'wb')
++        result = writeBuildFiles.substMakefile('parseFailures.py', v)
++        result = result.encode('utf-8')
++        fh.write(result)
++        fh.close()
++
++        fh = open(k + '/moz.build', 'wb')
++        result = writeBuildFiles.substMozbuild('parseFailures.py', [])
++        result = result.encode('utf-8')
++        fh.write(result)
++        fh.close()
+ 
+ def main(logPath):
+     fp = open(logPath, 'rb')
+diff -up mozilla/dom/imptests/updateTestharness.py.python2 mozilla/dom/imptests/updateTestharness.py
+--- mozilla/dom/imptests/updateTestharness.py.python2	2013-12-06 16:44:54.000000000 +0100
++++ mozilla/dom/imptests/updateTestharness.py	2013-12-07 22:18:39.000000000 +0100
+@@ -3,7 +3,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ import subprocess
+ 
+diff -up mozilla/dom/imptests/writeBuildFiles.py.python2 mozilla/dom/imptests/writeBuildFiles.py
+--- mozilla/dom/imptests/writeBuildFiles.py.python2	2013-12-06 16:44:54.000000000 +0100
++++ mozilla/dom/imptests/writeBuildFiles.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ import string
+ 
+diff -up mozilla/gfx/thebes/genTables.py.python2 mozilla/gfx/thebes/genTables.py
+--- mozilla/gfx/thebes/genTables.py.python2	2013-12-06 16:44:58.000000000 +0100
++++ mozilla/gfx/thebes/genTables.py	2013-12-07 22:18:39.000000000 +0100
+@@ -1,12 +1,15 @@
+ #!/usr/bin/python
++import rhrebase
+ 
+ def table_generator(f):
+     return ",\n".join([", ".join(["0x%2.2x" % h for h in [f(i) for i in range(r,r+16)]]) for r in range(0, 65536, 16)])
+-
+-with open("PremultiplyTables.h", "w") as f:
++f = open("PremultiplyTables.h", "w")
++if 1:
+   f.write("const uint8_t gfxUtils::sPremultiplyTable[256*256] = {\n");
+   f.write(table_generator(lambda i: ((i / 256) * (i % 256) + 254) / 255) + "\n")
+   f.write("};\n");
+   f.write("const uint8_t gfxUtils::sUnpremultiplyTable[256*256] = {\n");
+-  f.write(table_generator(lambda i: (i % 256) * 255 / ((i / 256) if (i / 256) > 0 else 255) % 256) + "\n")
++  f.write(table_generator(lambda i: (i % 256) * 255 / rhrebase.get_first_if_true( (i / 256), 255, (i / 256) > 0) % 256) + "\n")
++
+   f.write("};\n");
++f.close()
+diff -up mozilla/ipc/ipdl/ipdl/lower.py.python2 mozilla/ipc/ipdl/ipdl/lower.py
+--- mozilla/ipc/ipdl/ipdl/lower.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/ipc/ipdl/ipdl/lower.py	2013-12-07 22:18:39.000000000 +0100
+@@ -4422,9 +4422,12 @@ class _GenerateProtocolActorCode(ipdl.as
+                     c = c.other       # see above
+                 tmpvar = ExprVar('tmp')
+                 ct = c.bareType()
++                init_val = None
++                if ct.ptr:
++                  init_val = c.defaultValue()
+                 readcase.addstmts([
+                     StmtDecl(Decl(ct, tmpvar.name),
+-                      init=c.defaultValue() if ct.ptr else None),
++                      init=init_val),
+                     StmtExpr(ExprAssn(ExprDeref(var), tmpvar)),
+                     StmtReturn(self.read(
+                         c.ipdltype,
+@@ -5120,7 +5123,12 @@ class _GenerateProtocolActorCode(ipdl.as
+             action = ExprVar('Trigger::Recv')
+         else: assert 0 and 'unknown combo %s/%s'% (self.side, direction)
+ 
+-        msgid = md.pqMsgId() if not reply else md.pqReplyId()
++        msgid = None
++        if not reply:
++            msgid = md.pqMsgId()
++        else:
++            msgid = md.pqReplyId()
++
+         ifbad = StmtIf(ExprNot(
+             ExprCall(
+                 ExprVar(self.protocol.name +'::Transition'),
+diff -up mozilla/js/src/build/ConfigStatus.py.python2 mozilla/js/src/build/ConfigStatus.py
+--- mozilla/js/src/build/ConfigStatus.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/build/ConfigStatus.py	2013-12-07 22:18:39.000000000 +0100
+@@ -6,7 +6,6 @@
+ # drop-in replacement for autoconf 2.13's config.status, with features
+ # borrowed from autoconf > 2.5, and additional features.
+ 
+-from __future__ import print_function
+ 
+ import logging
+ import os
+@@ -118,11 +117,11 @@ def config_status(topobjdir = '.', topsr
+     log_manager.enable_unstructured()
+ 
+     if not options.files and not options.headers:
+-        print('Reticulating splines...', file=sys.stderr)
++        sys.stderr.write("Reticulating splines...\n\r")
+         summary = backend.consume(definitions)
+ 
+         for line in summary.summaries():
+-            print(line, file=sys.stderr)
++            sys.stderr.write(line + "\n")
+ 
+         files = [os.path.join(topobjdir, f) for f in files]
+         headers = [os.path.join(topobjdir, f) for f in headers]
+diff -up mozilla/js/src/build/unix/add_phony_targets.py.python2 mozilla/js/src/build/unix/add_phony_targets.py
+--- mozilla/js/src/build/unix/add_phony_targets.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/build/unix/add_phony_targets.py	2013-12-07 22:18:39.000000000 +0100
+@@ -24,8 +24,9 @@ def add_phony_targets(path):
+     phony_targets = deps - targets
+     if not phony_targets:
+         return
+-    with open(path, 'a') as f:
+-        f.writelines('%s:\n' % d for d in phony_targets)
++    f = open(path, 'a')
++    f.writelines('%s:\n' % d for d in phony_targets)
++    f.close()
+ 
+ 
+ if __name__ == '__main__':
+diff -up mozilla/js/src/builtin/embedjs.py.python2 mozilla/js/src/builtin/embedjs.py
+--- mozilla/js/src/builtin/embedjs.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/builtin/embedjs.py	2013-12-07 22:18:39.000000000 +0100
+@@ -36,7 +36,6 @@
+ #
+ # It uses the C preprocessor to process its inputs.
+ 
+-from __future__ import with_statement
+ import re, sys, os, fileinput, subprocess
+ import shlex
+ from optparse import OptionParser
+@@ -81,8 +80,9 @@ def embed(cpp, msgs, sources, c_out, js_
+   # Clang seems to complain and not output anything if the extension of the
+   # input is not something it recognizes, so just fake a .h here.
+   tmp = 'selfhosted.js.h'
+-  with open(tmp, 'wb') as output:
+-    output.write('\n'.join([msgs] + ['#include "%(s)s"' % { 's': source } for source in sources]))
++  output = open(tmp, 'wb')
++  output.write('\n'.join([msgs] + ['#include "%(s)s"' % { 's': source } for source in sources]))
++  output.close()
+   cmdline = cpp + ['-D%(k)s=%(v)s' % { 'k': k, 'v': env[k] } for k in env] + [tmp]
+   p = subprocess.Popen(cmdline, stdout=subprocess.PIPE)
+   processed = ''
+@@ -90,9 +90,11 @@ def embed(cpp, msgs, sources, c_out, js_
+     if not line.startswith('#'):
+       processed += line
+   os.remove(tmp)
+-  with open(js_out, 'w') as output:
+-    output.write(processed)
+-  with open(c_out, 'w') as output:
++  output = open(js_out, 'w')
++  output.write(processed)
++  output.close()
++  output = open(c_out, 'w')
++  if 1:
+     if 'USE_ZLIB' in env:
+       import zlib
+       compressed = zlib.compress(processed)
+@@ -113,19 +115,21 @@ def embed(cpp, msgs, sources, c_out, js_
+           'compressed_total_length': 0,
+           'raw_total_length': len(processed)
+       })
++  output.close()
+ 
+ def process_msgs(cpp, msgs):
+   # Clang seems to complain and not output anything if the extension of the
+   # input is not something it recognizes, so just fake a .h here.
+   tmp = 'selfhosted.msg.h'
+-  with open(tmp, 'wb') as output:
+-    output.write("""\
++  output = open(tmp, 'wb')
++  output.write("""\
+ #define hash #
+ #define id(x) x
+ #define hashify(x) id(hash)x
+ #define MSG_DEF(name, id, argc, ex, msg) hashify(define) name id
+ #include "%(msgs)s"
+ """ % { 'msgs': msgs })
++  output.close()
+   p = subprocess.Popen(cpp + [tmp], stdout=subprocess.PIPE)
+   processed = p.communicate()[0]
+   os.remove(tmp)
+diff -up mozilla/js/src/config/check_source_count.py.python2 mozilla/js/src/config/check_source_count.py
+--- mozilla/js/src/config/check_source_count.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/config/check_source_count.py	2013-12-07 22:18:39.000000000 +0100
+@@ -9,7 +9,6 @@
+ #   not, an error message is printed, quoting ERROR_LOCATION, which should
+ #   probably be the filename and line number of the erroneous call to
+ #   check_source_count.py.
+-from __future__ import print_function
+ import sys
+ import os
+ import re
+diff -up mozilla/js/src/config/check-sync-exceptions.python2 mozilla/js/src/config/check-sync-exceptions
+--- mozilla/js/src/config/check-sync-exceptions.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/config/check-sync-exceptions	2013-12-07 22:18:39.000000000 +0100
+@@ -21,6 +21,7 @@ system_wrappers_js
+ #*#
+ *.orig
+ *.rej
++*.python2
+ 
+ # Ignore "compiled" python files
+ *.pyc
+diff -up mozilla/js/src/config/expandlibs_exec.py.python2 mozilla/js/src/config/expandlibs_exec.py
+--- mozilla/js/src/config/expandlibs_exec.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/config/expandlibs_exec.py	2013-12-07 22:18:39.000000000 +0100
+@@ -20,7 +20,6 @@ With the --symbol-order argument, follow
+ relevant linker options to change the order in which the linker puts the
+ symbols appear in the resulting binary. Only works for ELF targets.
+ '''
+-from __future__ import with_statement
+ import sys
+ import os
+ from expandlibs import ExpandArgs, relativize, isObject, ensureParentDir, ExpandLibsDeps
+@@ -172,8 +171,9 @@ class ExpandArgsMore(ExpandArgs):
+     def orderSymbols(self, order):
+         '''Given a file containing a list of symbols, adds the appropriate
+         argument to make the linker put the symbols in that order.'''
+-        with open(order) as file:
+-            sections = self._getOrderedSections([l.strip() for l in file.readlines() if l.strip()])
++        file_ = open(order)
++        sections = self._getOrderedSections([l.strip() for l in file_.readlines() if l.strip()])
++        file_.close()
+         split_sections = {}
+         linked_sections = [s[0] for s in SECTION_INSERT_BEFORE]
+         for s in sections:
+@@ -278,8 +278,9 @@ def print_command(out, args):
+     print >>out, "Executing: " + " ".join(args)
+     for tmp in [f for f in args.tmp if os.path.isfile(f)]:
+         print >>out, tmp + ":"
+-        with open(tmp) as file:
+-            print >>out, "".join(["    " + l for l in file.readlines()])
++        file_ = open(tmp)
++        print >>out, "".join(["    " + l for l in file_.readlines()])
++        file_.close()
+     out.flush()
+ 
+ def main():
+@@ -308,33 +309,38 @@ def main():
+             deps.pop(0)
+         # Remove command
+         deps.pop(0)
+-    with ExpandArgsMore(args) as args:
++    args2 = ExpandArgsMore(args)
++    args2.__enter__()
++    if 1:
+         if options.extract:
+-            args.extract()
++            args2.extract()
+         if options.symbol_order:
+-            args.orderSymbols(options.symbol_order)
++            args2.orderSymbols(options.symbol_order)
+         if options.uselist:
+-            args.makelist()
++            args2.makelist()
+ 
+         if options.verbose:
+-            print_command(sys.stderr, args)
+-        proc = subprocess.Popen(args, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
++            print_command(sys.stderr, args2)
++        proc = subprocess.Popen(args2, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
+         (stdout, stderr) = proc.communicate()
+         if proc.returncode and not options.verbose:
+-            print_command(sys.stderr, args)
++            print_command(sys.stderr, args2)
+         sys.stderr.write(stdout)
+         sys.stderr.flush()
+         if proc.returncode:
+             exit(proc.returncode)
++    args2.__exit__(0, 0, 0)
+     if not options.depend:
+         return
+     ensureParentDir(options.depend)
+-    with open(options.depend, 'w') as depfile:
++    depfile = open(options.depend, 'w')
++    if 1:
+         depfile.write("%s : %s\n" % (options.target, ' '.join(dep for dep in deps if os.path.isfile(dep) and dep != options.target)))
+ 
+         for dep in deps:
+             if os.path.isfile(dep) and dep != options.target:
+                 depfile.write("%s :\n" % dep)
++    depfile.close()
+ 
+ if __name__ == '__main__':
+     main()
+diff -up mozilla/js/src/config/expandlibs_gen.py.python2 mozilla/js/src/config/expandlibs_gen.py
+--- mozilla/js/src/config/expandlibs_gen.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/config/expandlibs_gen.py	2013-12-07 22:18:39.000000000 +0100
+@@ -5,7 +5,6 @@
+ '''Given a list of object files and library names, prints a library
+ descriptor to standard output'''
+ 
+-from __future__ import with_statement
+ import sys
+ import os
+ import expandlibs_config as conf
+@@ -39,12 +38,15 @@ if __name__ == '__main__':
+         raise Exception("Missing option: -o")
+ 
+     ensureParentDir(options.output)
+-    with open(options.output, 'w') as outfile:
+-        print >>outfile, generate(args)
++    outfile = open(options.output, 'w')
++    print >>outfile, generate(args)
++    outfile.close()
+     if options.depend:
+         ensureParentDir(options.depend)
+-        with open(options.depend, 'w') as depfile:
++        depfile = open(options.depend, 'w')
++        if 1:
+             deps = ExpandLibsDeps(args)
+             depfile.write("%s : %s\n" % (options.output, ' '.join(deps)))
+             for dep in deps:
+                 depfile.write("%s :\n" % dep)
++        depfile.close()
+diff -up mozilla/js/src/config/expandlibs.py.python2 mozilla/js/src/config/expandlibs.py
+--- mozilla/js/src/config/expandlibs.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/config/expandlibs.py	2013-12-07 22:18:39.000000000 +0100
+@@ -26,7 +26,6 @@ ${LIB_PREFIX}${ROOT}.${LIB_SUFFIX} follo
+   descriptor contains. And for each of these LIBS, also apply the same
+   rules.
+ '''
+-from __future__ import with_statement
+ import sys, os, errno
+ import expandlibs_config as conf
+ 
+@@ -68,6 +67,12 @@ def isObject(path):
+     ends with OBJ_SUFFIX or .i_o'''
+     return os.path.splitext(path)[1] in [conf.OBJ_SUFFIX, '.i_o']
+ 
++def all(iterable):
++    for element in iterable:
++        if not element:
++            return False
++    return True
++
+ class LibDescriptor(dict):
+     KEYS = ['OBJS', 'LIBS']
+ 
+@@ -118,8 +123,9 @@ class ExpandArgs(list):
+     def _expand_desc(self, arg):
+         '''Internal function taking care of lib descriptor expansion only'''
+         if os.path.exists(arg + conf.LIBS_DESC_SUFFIX):
+-            with open(arg + conf.LIBS_DESC_SUFFIX, 'r') as f:
+-                desc = LibDescriptor(f.readlines())
++            f = open(arg + conf.LIBS_DESC_SUFFIX, 'r')
++            desc = LibDescriptor(f.readlines())
++            f.close()
+             objs = [relativize(o) for o in desc['OBJS']]
+             for lib in desc['LIBS']:
+                 objs += self._expand(lib)
+diff -up mozilla/js/src/config/find_OOM_errors.py.python2 mozilla/js/src/config/find_OOM_errors.py
+--- mozilla/js/src/config/find_OOM_errors.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/config/find_OOM_errors.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # This Source Code Form is subject to the terms of the Mozilla Public
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+-from __future__ import print_function
+ 
+ usage = """%prog: A test for OOM conditions in the shell.
+ 
+@@ -65,7 +64,7 @@ def run(args, stdin=None):
+     stdout_worker.join()
+     stderr_worker.join()
+ 
+-  except KeyboardInterrupt as e:
++  except KeyboardInterrupt, e:
+     sys.exit(-1)
+ 
+   stdout, stderr = stdout_worker.all, stderr_worker.all
+diff -up mozilla/js/src/config/nsinstall.py.python2 mozilla/js/src/config/nsinstall.py
+--- mozilla/js/src/config/nsinstall.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/config/nsinstall.py	2013-12-07 22:18:39.000000000 +0100
+@@ -9,7 +9,6 @@
+ # a full build environment set up.
+ # The basic limitation is, it doesn't even try to link and ignores
+ # all related options.
+-from __future__ import print_function
+ from optparse import OptionParser
+ import os
+ import os.path
+@@ -65,7 +64,7 @@ def _nsinstall_internal(argv):
+     dir = os.path.abspath(dir)
+     if os.path.exists(dir):
+       if not os.path.isdir(dir):
+-        print('nsinstall: {0} is not a directory'.format(dir), file=sys.stderr)
++        sys.stderr.write('nsinstall: %s is not a directory\n' % dir)
+         return 1
+       if mode:
+         os.chmod(dir, mode)
+@@ -76,7 +75,7 @@ def _nsinstall_internal(argv):
+         os.makedirs(dir, mode)
+       else:
+         os.makedirs(dir)
+-    except Exception as e:
++    except Exception, e:
+       # We might have hit EEXIST due to a race condition (see bug 463411) -- try again once
+       if try_again:
+         return maybe_create_dir(dir, mode, False)
+diff -up mozilla/js/src/config/Preprocessor.py.python2 mozilla/js/src/config/Preprocessor.py
+--- mozilla/js/src/config/Preprocessor.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/config/Preprocessor.py	2013-12-07 22:18:39.000000000 +0100
+@@ -78,9 +78,9 @@ class Preprocessor:
+   
+   def warnUnused(self, file):
+     if self.actionLevel == 0:
+-      sys.stderr.write('{0}: WARNING: no preprocessor directives found\n'.format(file))
++      sys.stderr.write('%s: WARNING: no preprocessor directives found\n' % file)
+     elif self.actionLevel == 1:
+-      sys.stderr.write('{0}: WARNING: no useful preprocessor directives found\n'.format(file))
++      sys.stderr.write('%s: WARNING: no useful preprocessor directives found\n' % file)
+     pass
+ 
+   def setLineEndings(self, aLE):
+@@ -97,8 +97,8 @@ class Preprocessor:
+     """
+     self.marker = aMarker
+     if aMarker:
+-      self.instruction = re.compile('{0}(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'
+-                                    .format(aMarker), 
++      self.instruction = re.compile('%s(?P<cmd>[a-z]+)(?:\s(?P<args>.*))?$'
++                                    % (aMarker), 
+                                     re.U)
+       self.comment = re.compile(aMarker, re.U)
+     else:
+@@ -132,9 +132,9 @@ class Preprocessor:
+       self.writtenLines += 1
+       ln = self.context['LINE']
+       if self.writtenLines != ln:
+-        self.out.write('//@line {line} "{file}"{le}'.format(line=ln,
+-                                                            file=self.context['FILE'],
+-                                                            le=self.LE))
++        self.out.write('//@line %(line)s "%(file)s"%(le)s' % {"line" : ln,
++                                                            "file" : self.context['FILE'],
++                                                            "le" : self.LE})
+         self.writtenLines = ln
+     filteredLine = self.applyFilters(aLine)
+     if filteredLine != aLine:
+@@ -157,7 +157,7 @@ class Preprocessor:
+       if dir and not os.path.exists(dir):
+         try:
+           os.makedirs(dir)
+-        except OSError as error:
++        except OSError, error:
+           if error.errno != errno.EEXIST:
+             raise
+       self.out = open(options.output, 'w')
+diff -up mozilla/js/src/devtools/rootAnalysis/analyze.py.python2 mozilla/js/src/devtools/rootAnalysis/analyze.py
+--- mozilla/js/src/devtools/rootAnalysis/analyze.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/devtools/rootAnalysis/analyze.py	2013-12-07 22:18:39.000000000 +0100
+@@ -59,10 +59,11 @@ def generate_hazards(config, outfilename
+     if final_status:
+         raise subprocess.CalledProcessError(final_status, 'analyzeRoots.js')
+ 
+-    with open(outfilename, 'w') as output:
+-        command = ['cat'] + [ 'rootingHazards.%s' % (i+1,) for i in range(config['jobs']) ]
+-        print(' '.join(command) + ' > ' + outfilename)
+-        subprocess.call(command, stdout=output)
++    output = open(outfilename, 'w')
++    command = ['cat'] + [ 'rootingHazards.%s' % (i+1,) for i in range(config['jobs']) ]
++    print(' '.join(command) + ' > ' + outfilename)
++    subprocess.call(command, stdout=output)
++    output.close()
+ 
+ JOBS = { 'dbs':
+              (('%(CWD)s/run_complete',
+@@ -111,8 +112,9 @@ def run_job(name, config):
+         command = fill(command, config)
+         print(' '.join(command))
+         temp = '%s.tmp' % name
+-        with open(temp, 'w') as output:
+-            subprocess.check_call(command, stdout=output, env=env(config))
++        output = open(temp, 'w')
++        subprocess.check_call(command, stdout=output, env=env(config))
++        output.close()
+         if outfilename is not None:
+             os.rename(temp, outfilename)
+ 
+diff -up mozilla/js/src/gdb/run-tests.py.python2 mozilla/js/src/gdb/run-tests.py
+--- mozilla/js/src/gdb/run-tests.py.python2	2013-12-06 16:45:01.000000000 +0100
++++ mozilla/js/src/gdb/run-tests.py	2013-12-07 22:18:39.000000000 +0100
+@@ -110,10 +110,11 @@ class Summary(object):
+ 
+             if OPTIONS.worklist:
+                 try:
+-                    with open(OPTIONS.worklist) as out:
+-                        for test in self.failures:
+-                            out.write(test.name + '\n')
+-                except IOError as err:
++                    out = open(OPTIONS.worklist)
++                    for test in self.failures:
++                        out.write(test.name + '\n')
++                    out.close()
++                except IOError, err:
+                     sys.stderr.write("Error writing worklist file '%s': %s"
+                                      % (OPTIONS.worklist, err))
+                     sys.exit(1)
+@@ -123,7 +124,7 @@ class Summary(object):
+                     with open(OPTIONS.write_failures) as out:
+                         for test in self.failures:
+                             test.show(out)
+-                except IOError as err:
++                except IOError, err:
+                     sys.stderr.write("Error writing worklist file '%s': %s"
+                                      % (OPTIONS.write_failures, err))
+                     sys.exit(1)
+@@ -302,7 +303,7 @@ def main(argv):
+             with open(OPTIONS.read_tests) as f:
+                 for line in f:
+                     test_set.update(os.path.join(test_dir, line.strip('\n')))
+-        except IOError as err:
++        except IOError, err:
+             sys.stderr.write("Error trying to read test file '%s': %s\n"
+                              % (OPTIONS.read_tests, err))
+             sys.exit(1)
+@@ -329,7 +330,7 @@ def main(argv):
+     # directory tree.
+     try:
+         build_test_exec(OPTIONS.builddir)
+-    except subprocess.CalledProcessError as err:
++    except subprocess.CalledProcessError, err:
+         sys.stderr.write("Error building test executable: %s\n" % (err,))
+         sys.exit(1)
+ 
+@@ -338,7 +339,7 @@ def main(argv):
+         summary.start()
+         run_tests(test_list, summary)
+         summary.finish()
+-    except OSError as err:
++    except OSError, err:
+         sys.stderr.write("Error running tests: %s\n" % (err,))
+         sys.exit(1)
+ 
+diff -up mozilla/js/src/tests/lib/jittests.py.python2 mozilla/js/src/tests/lib/jittests.py
+--- mozilla/js/src/tests/lib/jittests.py.python2	2013-12-06 16:45:05.000000000 +0100
++++ mozilla/js/src/tests/lib/jittests.py	2013-12-07 22:18:39.000000000 +0100
+@@ -6,7 +6,6 @@
+ 
+ # jit_test.py -- Python harness for JavaScript trace tests.
+ 
+-from __future__ import print_function
+ import os, sys, tempfile, traceback, time
+ import subprocess
+ from subprocess import Popen, PIPE
+@@ -403,7 +402,7 @@ def run_tests_parallel(tests, prefix, op
+ 
+         # Return what the result process has returned to us
+         return result_process_return_queue.get()
+-    except (Exception, KeyboardInterrupt) as e:
++    except (Exception, KeyboardInterrupt), e:
+         # Print the exception if it's not an interrupt,
+         # might point to a bug or other faulty condition
+         if not isinstance(e,KeyboardInterrupt):
+diff -up mozilla/js/src/tests/lib/manifest.py.python2 mozilla/js/src/tests/lib/manifest.py
+--- mozilla/js/src/tests/lib/manifest.py.python2	2013-12-06 16:45:05.000000000 +0100
++++ mozilla/js/src/tests/lib/manifest.py	2013-12-07 22:18:39.000000000 +0100
+@@ -286,7 +286,8 @@ def _parse_external_manifest(filename, r
+     """
+     entries = []
+ 
+-    with open(filename, 'r') as fp:
++    fp = open(filename, 'r')
++    if 1:
+         manifest_re = re.compile(r'^\s*(.*)\s+(include|script)\s+(\S+)$')
+         for line in fp:
+             line, _, comment = line.partition('#')
+@@ -308,6 +309,7 @@ def _parse_external_manifest(filename, r
+                 path = path[:-len('jstests.list')]
+ 
+             entries.append({'path': path, 'terms': matches.group(1), 'comment': comment.strip()})
++    fp.close()
+ 
+     # if one directory name is a prefix of another, we want the shorter one first
+     entries.sort(key=lambda x: x["path"])
+diff -up mozilla/js/src/vm/make_unicode.py.python2 mozilla/js/src/vm/make_unicode.py
+--- mozilla/js/src/vm/make_unicode.py.python2	2013-12-06 16:45:06.000000000 +0100
++++ mozilla/js/src/vm/make_unicode.py	2013-12-07 22:18:39.000000000 +0100
+@@ -18,7 +18,6 @@
+ #    You should have received a copy of the GNU General Public License
+ #    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ 
+-from __future__ import print_function
+ import csv
+ import sys
+ 
+diff -up mozilla/js/xpconnect/src/event_impl_gen.py.python2 mozilla/js/xpconnect/src/event_impl_gen.py
+--- mozilla/js/xpconnect/src/event_impl_gen.py.python2	2013-12-06 16:45:06.000000000 +0100
++++ mozilla/js/xpconnect/src/event_impl_gen.py	2013-12-07 22:18:39.000000000 +0100
+@@ -511,13 +511,13 @@ def toWebIDLType(attribute, inType=False
+     if attribute.type == "nsIVariant":
+         return "any";
+     if attribute.type == "nsISupports":
+-        return "%s%s" % (attribute.type, "" if onlyInterface else "?")
++        return "%s%s" % (attribute.type, rhrebase.get_first_if_true("", "?", onlyInterface) )
+     if attribute.type.count("nsIDOM"):
+-        return "%s%s" % (attribute.type[6:], "" if onlyInterface else "?")
++        return "%s%s" % (attribute.type[6:], rhrebase.get_first_if_true("", "?", onlyInterface) )
+     if attribute.type.count("nsI"):
+-        return "%s%s" % (attribute.type[3:], "" if onlyInterface else "?")
++        return "%s%s" % (attribute.type[3:], rhrebase.get_first_if_true("", "?", onlyInterface) )
+     if attribute.realtype.nativeType('in').endswith('*') or attribute.realtype.nativeType('in').count("nsAString"):
+-        return "%s%s" % (attribute.type, "" if onlyInterface else "?")
++        return "%s%s" % (attribute.type, rhrebase.get_first_if_true("", "?", onlyInterface) )
+     return attribute.type
+ 
+ def write_webidl(eventname, iface, fd, conf, idl):
+diff -up mozilla/layout/tools/reftest/mach_commands.py.python2 mozilla/layout/tools/reftest/mach_commands.py
+--- mozilla/layout/tools/reftest/mach_commands.py.python2	2013-12-06 16:45:12.000000000 +0100
++++ mozilla/layout/tools/reftest/mach_commands.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ import mozpack.path
+ import os
+diff -up mozilla/mach.python2 mozilla/mach
+--- mozilla/mach.python2	2013-12-06 16:45:12.000000000 +0100
++++ mozilla/mach	2013-12-07 22:18:39.000000000 +0100
+@@ -3,7 +3,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ import os
+ import sys
+diff -up mozilla/media/webrtc/trunk/build/escape_unicode.py.python2 mozilla/media/webrtc/trunk/build/escape_unicode.py
+--- mozilla/media/webrtc/trunk/build/escape_unicode.py.python2	2013-12-06 16:45:13.000000000 +0100
++++ mozilla/media/webrtc/trunk/build/escape_unicode.py	2013-12-07 22:18:39.000000000 +0100
+@@ -42,14 +42,15 @@ def main(argv):
+ 
+ def WriteEscapedFile(in_filename, out_filename):
+   input_data = codecs.open(in_filename, 'r', 'utf8').read()
+-  with codecs.open(out_filename, 'w', 'ascii') as out_file:
+-    for i, char in enumerate(input_data):
++  out_file = codecs.open(out_filename, 'w', 'ascii')
++  for i, char in enumerate(input_data):
+       if ord(char) > 127:
+         out_file.write(repr(char.encode('utf8'))[1:-1])
+         if input_data[i + 1:i + 2] in '0123456789abcdefABCDEF':
+           out_file.write('""')
+       else:
+         out_file.write(char.encode('ascii'))
++  out_file.close()
+ 
+ 
+ if __name__ == '__main__':
+diff -up mozilla/media/webrtc/trunk/tools/gyp/pylib/gyp/common.py.python2 mozilla/media/webrtc/trunk/tools/gyp/pylib/gyp/common.py
+--- mozilla/media/webrtc/trunk/tools/gyp/pylib/gyp/common.py.python2	2013-12-06 16:45:13.000000000 +0100
++++ mozilla/media/webrtc/trunk/tools/gyp/pylib/gyp/common.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # Use of this source code is governed by a BSD-style license that can be
+ # found in the LICENSE file.
+ 
+-from __future__ import with_statement
+ 
+ import errno
+ import filecmp
+@@ -392,13 +391,15 @@ def CopyTool(flavor, out_path):
+   # Slurp input file.
+   source_path = os.path.join(
+       os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix)
+-  with open(source_path) as source_file:
+-    source = source_file.readlines()
++  source_file = open(source_path)
++  source = source_file.readlines()
++  source_file.close()
+ 
+   # Add header and write it out.
+   tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
+-  with open(tool_path, 'w') as tool_file:
+-    tool_file.write(
++  tool_file = open(tool_path, 'w')
++  tool_file.write(
+         ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))
++  tool_file.close()
+ 
+   # Make file executable.
+diff -up mozilla/media/webrtc/trunk/tools/gyp/pylib/gyp/generator/mozmake.py.python2 mozilla/media/webrtc/trunk/tools/gyp/pylib/gyp/generator/mozmake.py
+--- mozilla/media/webrtc/trunk/tools/gyp/pylib/gyp/generator/mozmake.py.python2	2013-12-06 16:45:13.000000000 +0100
++++ mozilla/media/webrtc/trunk/tools/gyp/pylib/gyp/generator/mozmake.py	2013-12-07 22:18:39.000000000 +0100
+@@ -145,7 +145,8 @@ def WriteMakefile(filename, data, build_
+   #TODO: should compare with the existing file and not overwrite it if the
+   # contents are the same!
+   ensure_directory_exists(filename)
+-  with open(filename, "w") as f:
++  f = open(filename, "w")
++  if 1:
+     f.write(COMMON_HEADER % {'buildfile': build_file,
+                              'depth': depth,
+                              'topsrcdir': topsrcdir,
+@@ -155,12 +156,14 @@ def WriteMakefile(filename, data, build_
+     f.write(COMMON_FOOTER % {'common_mk_path': common_mk_path})
+     if extra_data:
+       f.write(extra_data)
++  f.close()
+ 
+ def WriteCommonMk(path, build_files, scriptname, commandline):
+-  with open(path, "w") as f:
+-    f.write(COMMON_MK % {'input_gypfiles': ' '.join(build_files),
++  f = open(path, "w")
++  f.write(COMMON_MK % {'input_gypfiles': ' '.join(build_files),
+                          'generator': scriptname,
+                          'commandline': ' '.join(commandline)})
++  f.close()
+ 
+ def striplib(name):
+   "Strip lib prefixes from library names."
+diff -up mozilla/media/webrtc/trunk/tools/gyp/test/library/gyptest-shared-obj-install-path.py.python2 mozilla/media/webrtc/trunk/tools/gyp/test/library/gyptest-shared-obj-install-path.py
+--- mozilla/media/webrtc/trunk/tools/gyp/test/library/gyptest-shared-obj-install-path.py.python2	2013-12-06 16:45:14.000000000 +0100
++++ mozilla/media/webrtc/trunk/tools/gyp/test/library/gyptest-shared-obj-install-path.py	2013-12-07 22:18:39.000000000 +0100
+@@ -10,7 +10,6 @@ their install location rather than by th
+ """
+ 
+ # Python 2.5 needs this for the with statement.
+-from __future__ import with_statement
+ 
+ import os
+ import TestGyp
+@@ -28,14 +27,16 @@ if test.format=='android':
+ else:
+   makefile_path = 'relocate/src/Makefile'
+ 
+-with open(makefile_path) as makefile:
+-  make_contents = makefile.read()
++makefile = open(makefile_path)
++make_contents = makefile.read()
++makefile.close()
+ 
+ # If we remove the code to generate lib1, Make should still be able
+ # to build lib2 since lib1.so already exists.
+ make_contents = make_contents.replace('include lib1.target.mk', '')
+-with open(makefile_path, 'w') as makefile:
+-  makefile.write(make_contents)
++makefile = open(makefile_path, 'w')
++makefile.write(make_contents)
++makefile.close()
+ 
+ test.build('shared_dependency.gyp', test.ALL, chdir='relocate/src')
+ 
+diff -up mozilla/media/webrtc/trunk/tools/gyp/test/lib/TestCmd.py.python2 mozilla/media/webrtc/trunk/tools/gyp/test/lib/TestCmd.py
+--- mozilla/media/webrtc/trunk/tools/gyp/test/lib/TestCmd.py.python2	2013-12-06 16:45:14.000000000 +0100
++++ mozilla/media/webrtc/trunk/tools/gyp/test/lib/TestCmd.py	2013-12-07 22:18:39.000000000 +0100
+@@ -1127,8 +1127,9 @@ class TestCmd(object):
+         file = self.canonicalize(file)
+         if mode[0] != 'r':
+             raise ValueError, "mode must begin with 'r'"
+-        with open(file, mode) as f:
+-            result = f.read()
++        f = open(file, mode)
++        result = f.read()
++        f.close()
+         return result
+ 
+     def rmdir(self, dir):
+@@ -1587,8 +1588,9 @@ class TestCmd(object):
+         file = self.canonicalize(file)
+         if mode[0] != 'w':
+             raise ValueError, "mode must begin with 'w'"
+-        with open(file, mode) as f:
+-            f.write(content)
++        f = open(file, mode)
++        f.write(content)
++        f.close()
+ 
+ # Local Variables:
+ # tab-width:4
+diff -up mozilla/media/webrtc/trunk/tools/gyp/test/make/gyptest-noload.py.python2 mozilla/media/webrtc/trunk/tools/gyp/test/make/gyptest-noload.py
+--- mozilla/media/webrtc/trunk/tools/gyp/test/make/gyptest-noload.py.python2	2013-12-06 16:45:14.000000000 +0100
++++ mozilla/media/webrtc/trunk/tools/gyp/test/make/gyptest-noload.py	2013-12-07 22:18:39.000000000 +0100
+@@ -10,7 +10,6 @@ optional.
+ """
+ 
+ # Python 2.5 needs this for the with statement.
+-from __future__ import with_statement
+ 
+ import os
+ import TestGyp
+diff -up mozilla/memory/replace/dmd/check_test_output.py.python2 mozilla/memory/replace/dmd/check_test_output.py
+--- mozilla/memory/replace/dmd/check_test_output.py.python2	2013-12-06 16:45:15.000000000 +0100
++++ mozilla/memory/replace/dmd/check_test_output.py	2013-12-07 22:18:39.000000000 +0100
+@@ -14,7 +14,6 @@ paths in the test output are relative.
+ 
+ """
+ 
+-from __future__ import print_function
+ 
+ import os
+ import platform
+diff -up mozilla/mozglue/build/fixcrt.py.python2 mozilla/mozglue/build/fixcrt.py
+--- mozilla/mozglue/build/fixcrt.py.python2	2013-12-06 16:45:16.000000000 +0100
++++ mozilla/mozglue/build/fixcrt.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import with_statement
+ 
+ with open('crtdll.obj', 'rb') as infile:
+   data = infile.read()
+diff -up mozilla/python/blessings/blessings/__init__.py.python2 mozilla/python/blessings/blessings/__init__.py
+--- mozilla/python/blessings/blessings/__init__.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/blessings/blessings/__init__.py	2013-12-07 22:18:39.000000000 +0100
+@@ -72,10 +72,10 @@ class Terminal(object):
+         """
+         if stream is None:
+             stream = sys.__stdout__
++        stream_descriptor = None
+         try:
+-            stream_descriptor = (stream.fileno() if hasattr(stream, 'fileno')
+-                                                 and callable(stream.fileno)
+-                                 else None)
++            if hasattr(stream, 'fileno') and callable(stream.fileno):
++              stream_descriptor = stream.fileno()
+         except IOUnsupportedOperation:
+             stream_descriptor = None
+ 
+@@ -85,9 +85,10 @@ class Terminal(object):
+         # The desciptor to direct terminal initialization sequences to.
+         # sys.__stdout__ seems to always have a descriptor of 1, even if output
+         # is redirected.
+-        self._init_descriptor = (sys.__stdout__.fileno()
+-                                 if stream_descriptor is None
+-                                 else stream_descriptor)
++        if stream_descriptor is None:
++          self._init_descriptor = sys.__stdout__.fileno()
++        else:
++          self._init_descriptor = stream_descriptor
+         if self._does_styling:
+             # Make things like tigetstr() work. Explicit args make setupterm()
+             # work even when -s is passed to nosetests. Lean toward sending
+@@ -154,7 +155,9 @@ class Terminal(object):
+         Return values are always Unicode.
+ 
+         """
+-        resolution = self._resolve_formatter(attr) if self._does_styling else NullCallableString()
++        resolution = NullCallableString()
++        if self._does_styling:
++          resolution = self._resolve_formatter(attr)
+         setattr(self, attr, resolution)  # Cache capability codes.
+         return resolution
+ 
+@@ -256,7 +259,10 @@ class Terminal(object):
+         # access to it.
+         colors = tigetnum('colors')  # Returns -1 if no color support, -2 if no such cap.
+         #self.__dict__['colors'] = ret  # Cache it. It's not changing. (Doesn't work.)
+-        return colors if colors >= 0 else 0
++        if colors >= 0:
++          return colors
++        else:
++          return 0
+ 
+     def _resolve_formatter(self, attr):
+         """Resolve a sugary or plain capability name, color, or compound formatting function name into a callable capability."""
+@@ -297,11 +303,14 @@ class Terminal(object):
+         # yellow when a terminal supports setf/setb rather than setaf/setab?
+         # I'll be blasted if I can find any documentation. The following
+         # assumes it does.
+-        color_cap = (self._background_color if 'on_' in color else
+-                     self._foreground_color)
++        color_cap = self._foreground_color
++        if 'on_' in color:
++          color_cap = self._background_color
+         # curses constants go up to only 7, so add an offset to get at the
+         # bright colors at 8-15:
+-        offset = 8 if 'bright_' in color else 0
++        offset = 0
++        if 'bright_' in color:
++          offset = 8
+         base_color = color.rsplit('_', 1)[-1]
+         return self._formatting_string(
+             color_cap(getattr(curses, 'COLOR_' + base_color.upper()) + offset))
+@@ -353,8 +362,10 @@ class ParametrizingString(unicode):
+             # 3. However, appear to be a plain Unicode string otherwise so
+             # concats work.
+             parametrized = tparm(self.encode('utf-8'), *args).decode('utf-8')
+-            return (parametrized if self._normal is None else
+-                    FormattingString(parametrized, self._normal))
++            if self._normal is None:
++              return parametrized
++            else:
++              return FormattingString(parametrized, self._normal)
+         except curses.error:
+             # Catch "must call (at least) setupterm() first" errors, as when
+             # running simply `nosetests` (without progressive) on nose-
+diff -up mozilla/python/blessings/blessings/tests.py.python2 mozilla/python/blessings/blessings/tests.py
+--- mozilla/python/blessings/blessings/tests.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/blessings/blessings/tests.py	2013-12-07 22:18:39.000000000 +0100
+@@ -9,7 +9,6 @@ All we require from the host machine is 
+ xterm-256color exists.
+ 
+ """
+-from __future__ import with_statement  # Make 2.5-compatible
+ from curses import tigetstr, tparm
+ from functools import partial
+ from StringIO import StringIO
+diff -up mozilla/python/codegen/makeutils.py.python2 mozilla/python/codegen/makeutils.py
+--- mozilla/python/codegen/makeutils.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/codegen/makeutils.py	2013-12-07 22:18:39.000000000 +0100
+@@ -17,12 +17,12 @@ def writeMakeDependOutput(filename):
+     if dir and not os.path.exists(dir):
+         try:
+             os.makedirs(dir)
+-        except OSError as error:
++        except OSError, error:
+             if error.errno != errno.EEXIST:
+                 raise
+ 
+-    with open(filename, 'w') as f:
+-        if len(targets) > 0:
++    f = open(filename, 'w')
++    if len(targets) > 0:
+             f.write("%s:" % makeQuote(targets[0]))
+             for filename in dependencies:
+                 f.write(' \\\n\t\t%s' % makeQuote(filename))
+@@ -32,3 +32,4 @@ def writeMakeDependOutput(filename):
+             for filename in dependencies:
+                 f.write('%s:\n' % filename)
+ 
++    f.close()
+diff -up mozilla/python/mach/mach/base.py.python2 mozilla/python/mach/mach/base.py
+--- mozilla/python/mach/mach/base.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/base.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,9 +2,7 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+-
+-from collections import namedtuple
++from namedtuple import namedtuple
+ 
+ # Holds mach run-time state so it can easily be passed to command providers.
+ CommandContext = namedtuple('CommandContext', ['topdir', 'cwd',
+diff -up mozilla/python/mach/mach/commands/commandinfo.py.python2 mozilla/python/mach/mach/commands/commandinfo.py
+--- mozilla/python/mach/mach/commands/commandinfo.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/commands/commandinfo.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ from mach.decorators import (
+     CommandProvider,
+diff -up mozilla/python/mach/mach/commands/settings.py.python2 mozilla/python/mach/mach/commands/settings.py
+--- mozilla/python/mach/mach/commands/settings.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/commands/settings.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ from textwrap import TextWrapper
+ 
+diff -up mozilla/python/mach/mach/config.py.python2 mozilla/python/mach/mach/config.py
+--- mozilla/python/mach/mach/config.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/config.py	2013-12-07 22:18:39.000000000 +0100
+@@ -25,8 +25,6 @@ msgfmt binary to perform this conversion
+ can be done via the write_pot() of ConfigSettings.
+ """
+ 
+-from __future__ import unicode_literals
+-
+ import collections
+ import gettext
+ import os
+@@ -93,8 +91,10 @@ class BooleanType(ConfigType):
+ 
+     @staticmethod
+     def to_config(value):
+-        return 'true' if value else 'false'
+-
++        if value:
++          return 'true'
++        else:
++          return 'false'
+ 
+ class IntegerType(ConfigType):
+     @staticmethod
+@@ -227,7 +227,8 @@ class ConfigProvider(object):
+             raise Exception('Setting has already been registered: %s.%s' % (
+                 section, option))
+ 
+-        domain = domain if domain is not None else section
++        if domain is None:
++          domain = section
+ 
+         meta = {
+             'short': '%s.short' % option,
+@@ -246,7 +247,8 @@ class ConfigProvider(object):
+         cls.config_settings[section][option] = meta
+ 
+ 
+-class ConfigSettings(collections.Mapping):
++#class ConfigSettings(collections.Mapping):
++class ConfigSettings(dict):
+     """Interface for configuration settings.
+ 
+     This is the main interface to the configuration.
+@@ -292,7 +294,7 @@ class ConfigSettings(collections.Mapping
+     will result in exceptions being raised.
+     """
+ 
+-    class ConfigSection(collections.MutableMapping, object):
++    class ConfigSection(dict, object):
+         """Represents an individual config section."""
+         def __init__(self, config, name, settings):
+             object.__setattr__(self, '_config', config)
+diff -up mozilla/python/mach/mach/decorators.py.python2 mozilla/python/mach/mach/decorators.py
+--- mozilla/python/mach/mach/decorators.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/decorators.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,18 +2,16 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+-
+ import inspect
+ import types
+ 
+-from .base import (
++from base import (
+     MachError,
+     MethodHandler
+ )
+ 
+-from .config import ConfigProvider
+-from .registrar import Registrar
++from mach.config import ConfigProvider
++from mach.registrar import Registrar
+ 
+ 
+ def CommandProvider(cls):
+diff -up mozilla/python/mach/mach/dispatcher.py.python2 mozilla/python/mach/mach/dispatcher.py
+--- mozilla/python/mach/mach/dispatcher.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/dispatcher.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,14 +2,13 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ import argparse
+ import sys
+ 
+ from operator import itemgetter
+ 
+-from .base import (
++from base import (
+     NoCommandError,
+     UnknownCommandError,
+     UnrecognizedArgumentError,
+diff -up mozilla/python/mach/mach/logging.py.python2 mozilla/python/mach/mach/logging.py
+--- mozilla/python/mach/mach/logging.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/python/mach/mach/logging.py	2013-12-07 22:18:39.000000000 +0100
+@@ -6,22 +6,25 @@
+ # support for a structured logging framework built on top of Python's built-in
+ # logging framework.
+ 
+-from __future__ import absolute_import, unicode_literals
+-
+ try:
+     import blessings
+ except ImportError:
+     blessings = None
+ 
+-import json
+-import logging
+ import sys
++import simplejson as json
++#print sys.path
++import rhrebase
++#print dir(rhrebase)
++from rhrebase import import_non_local
++
++# hack to not import this file instead of system logging
++logging_sys = import_non_local('logging')
+ import time
+ 
+-class NullHandler(logging.Handler):
++class NullHandler(logging_sys.Handler):
+     def emit(self, record):
+         pass
+-logging.NullHandler = NullHandler
+ 
+ def format_seconds(total):
+     """Format number of seconds to MM:SS.DD form."""
+@@ -31,7 +34,7 @@ def format_seconds(total):
+     return '%2d:%05.2f' % (minutes, seconds)
+ 
+ 
+-class ConvertToStructuredFilter(logging.Filter):
++class ConvertToStructuredFilter(logging_sys.Filter):
+     """Filter that converts unstructured records into structured ones."""
+     def filter(self, record):
+         if hasattr(record, 'action') and hasattr(record, 'params'):
+@@ -44,7 +47,7 @@ class ConvertToStructuredFilter(logging.
+         return True
+ 
+ 
+-class StructuredJSONFormatter(logging.Formatter):
++class StructuredJSONFormatter(logging_sys.Formatter):
+     """Log formatter that writes a structured JSON entry."""
+ 
+     def format(self, record):
+@@ -54,7 +57,7 @@ class StructuredJSONFormatter(logging.Fo
+         return json.dumps([record.created, action, params])
+ 
+ 
+-class StructuredHumanFormatter(logging.Formatter):
++class StructuredHumanFormatter(logging_sys.Formatter):
+     """Log formatter that writes structured messages for humans.
+ 
+     It is important that this formatter never be added to a logger that
+@@ -134,16 +137,16 @@ class LoggingManager(object):
+         self.terminal_handler = None
+         self.terminal_formatter = None
+ 
+-        self.root_logger = logging.getLogger()
+-        self.root_logger.setLevel(logging.DEBUG)
++        self.root_logger = logging_sys.getLogger()
++        self.root_logger.setLevel(logging_sys.DEBUG)
+ 
+         # Installing NullHandler on the root logger ensures that *all* log
+         # messages have at least one handler. This prevents Python from
+         # complaining about "no handlers could be found for logger XXX."
+-        self.root_logger.addHandler(logging.NullHandler())
++        #self.root_logger.addHandler(NullHandler()) FIXME?
+ 
+-        self.mach_logger = logging.getLogger('mach')
+-        self.mach_logger.setLevel(logging.DEBUG)
++        self.mach_logger = logging_sys.getLogger('mach')
++        self.mach_logger.setLevel(logging_sys.DEBUG)
+ 
+         self.structured_filter = ConvertToStructuredFilter()
+ 
+@@ -170,9 +173,9 @@ class LoggingManager(object):
+         """Enable JSON logging on the specified file object."""
+ 
+         # Configure the consumer of structured messages.
+-        handler = logging.StreamHandler(fh)
++        handler = logging_sys.StreamHandler(fh)
+         handler.setFormatter(StructuredJSONFormatter())
+-        handler.setLevel(logging.DEBUG)
++        handler.setLevel(logging_sys.DEBUG)
+ 
+         # And hook it up.
+         for logger in self.structured_loggers:
+@@ -180,7 +183,7 @@ class LoggingManager(object):
+ 
+         self.json_handlers.append(handler)
+ 
+-    def add_terminal_logging(self, fh=sys.stdout, level=logging.INFO,
++    def add_terminal_logging(self, fh=sys.stdout, level=20,
+             write_interval=False):
+         """Enable logging to the terminal."""
+ 
+@@ -192,7 +195,7 @@ class LoggingManager(object):
+                 write_interval=write_interval)
+             formatter.set_terminal(self.terminal)
+ 
+-        handler = logging.StreamHandler(fh)
++        handler = logging_sys.StreamHandler(fh)
+         handler.setFormatter(formatter)
+         handler.setLevel(level)
+ 
+diff -up mozilla/python/mach/mach/main.py.python2 mozilla/python/mach/mach/main.py
+--- mozilla/python/mach/mach/main.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/main.py	2013-12-07 22:18:39.000000000 +0100
+@@ -5,7 +5,6 @@
+ # This module provides functionality for the command-line build tool
+ # (mach). It is packaged as a module because everything is a library.
+ 
+-from __future__ import absolute_import, unicode_literals
+ 
+ import argparse
+ import codecs
+@@ -17,7 +16,7 @@ import traceback
+ import uuid
+ import sys
+ 
+-from .base import (
++from base import (
+     CommandContext,
+     MachError,
+     NoCommandError,
+@@ -25,16 +24,16 @@ from .base import (
+     UnrecognizedArgumentError,
+ )
+ 
+-from .decorators import (
++from decorators import (
+     CommandArgument,
+     CommandProvider,
+     Command,
+ )
+ 
+-from .config import ConfigSettings
+-from .dispatcher import CommandAction
+-from .logging import LoggingManager
+-from .registrar import Registrar
++from config import ConfigSettings
++from dispatcher import CommandAction
++from logging import LoggingManager
++from registrar import Registrar
+ 
+ 
+ 
+@@ -233,7 +232,7 @@ To see more help for a specific command,
+             print('mach interrupted by signal or user action. Stopping.')
+             return 1
+ 
+-        except Exception as e:
++        except Exception, e:
+             # _run swallows exceptions in invoked handlers and converts them to
+             # a proper exit code. So, the only scenario where we should get an
+             # exception here is if _run itself raises. If _run raises, that's a
+@@ -270,10 +269,10 @@ To see more help for a specific command,
+         except NoCommandError:
+             print(NO_COMMAND_ERROR)
+             return 1
+-        except UnknownCommandError as e:
++        except UnknownCommandError, e:
+             print(UNKNOWN_COMMAND_ERROR % (e.verb, e.command))
+             return 1
+-        except UnrecognizedArgumentError as e:
++        except UnrecognizedArgumentError, e:
+             print(UNRECOGNIZED_ARGUMENT_ERROR % (e.command,
+                 ' '.join(e.arguments)))
+             return 1
+@@ -322,9 +321,9 @@ To see more help for a specific command,
+             assert isinstance(result, (int, long))
+ 
+             return result
+-        except KeyboardInterrupt as ki:
++        except KeyboardInterrupt, ki:
+             raise ki
+-        except Exception as e:
++        except Exception, e:
+             exc_type, exc_value, exc_tb = sys.exc_info()
+ 
+             # The first frame is us and is never used.
+diff -up mozilla/python/mach/mach/mixin/logging.py.python2 mozilla/python/mach/mach/mixin/logging.py
+--- mozilla/python/mach/mach/mixin/logging.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/mixin/logging.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,9 +2,8 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import absolute_import, unicode_literals
+-
+-import logging
++import rhrebase
++logging_ = rhrebase.import_non_local('logging')
+ 
+ 
+ class LoggingMixin(object):
+@@ -23,7 +22,7 @@ class LoggingMixin(object):
+         if name is None:
+             name = '.'.join([self.__module__, self.__class__.__name__])
+ 
+-        self._logger = logging.getLogger(name)
++        self._logger = logging_.getLogger(name)
+ 
+     def log(self, level, action, params, format_str):
+         """Log a structured log event.
+@@ -50,6 +49,5 @@ class LoggingMixin(object):
+         self.log(logging.DEBUG, 'login', {'username': 'johndoe'},
+             'User login: {username}')
+         """
+-        self._logger.log(level, format_str,
+-            extra={'action': action, 'params': params})
++        self._logger.log(level, format_str)
+ 
+diff -up mozilla/python/mach/mach/mixin/process.py.python2 mozilla/python/mach/mach/mixin/process.py
+--- mozilla/python/mach/mach/mixin/process.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/mixin/process.py	2013-12-07 22:18:39.000000000 +0100
+@@ -4,16 +4,16 @@
+ 
+ # This module provides mixins to perform process execution.
+ 
+-from __future__ import absolute_import, unicode_literals
+-
+-import logging
++import rhrebase
++logging_ = rhrebase.import_non_local('logging')
++#import logging
+ import os
+ import subprocess
+ import sys
+ 
+ from mozprocess.processhandler import ProcessHandlerMixin
+ 
+-from .logging import LoggingMixin
++from logging import LoggingMixin
+ 
+ 
+ # Perform detection of operating system environment. This is used by command
+@@ -41,7 +41,7 @@ class ProcessExecutionMixin(LoggingMixin
+     """Mix-in that provides process execution functionality."""
+ 
+     def run_process(self, args=None, cwd=None, append_env=None,
+-        explicit_env=None, log_name=None, log_level=logging.INFO,
++        explicit_env=None, log_name=None, log_level=logging_.INFO,
+         line_handler=None, require_unix_environment=False,
+         ensure_exit_code=0, ignore_children=False, pass_thru=False):
+         """Runs a single process to completion.
+@@ -75,7 +75,7 @@ class ProcessExecutionMixin(LoggingMixin
+         """
+         args = self._normalize_command(args, require_unix_environment)
+ 
+-        self.log(logging.INFO, 'new_process', {'args': args}, ' '.join(args))
++        self.log(logging_.INFO, 'new_process', {'args': args}, ' '.join(args))
+ 
+         def handleLine(line):
+             # Converts str to unicode on Python 2 and bytes to str on Python 3.
+@@ -99,7 +99,7 @@ class ProcessExecutionMixin(LoggingMixin
+             if append_env:
+                 use_env.update(append_env)
+ 
+-        self.log(logging.DEBUG, 'process', {'env': use_env}, 'Environment: {env}')
++        self.log(logging_.DEBUG, 'process', {'env': use_env}, 'Environment: {env}')
+ 
+         if pass_thru:
+             status = subprocess.call(args, cwd=cwd, env=use_env)
+diff -up mozilla/python/mach/mach/registrar.py.python2 mozilla/python/mach/mach/registrar.py
+--- mozilla/python/mach/mach/registrar.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/registrar.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,9 +2,7 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+-
+-from .base import MachError
++from mach.base import MachError
+ 
+ 
+ class MachRegistrar(object):
+diff -up mozilla/python/mach/mach/terminal.py.python2 mozilla/python/mach/mach/terminal.py
+--- mozilla/python/mach/mach/terminal.py.python2	2013-12-06 16:45:17.000000000 +0100
++++ mozilla/python/mach/mach/terminal.py	2013-12-07 22:18:39.000000000 +0100
+@@ -8,7 +8,6 @@ All the terminal interaction code is con
+ one place, away from code that is commonly looked at.
+ """
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ import logging
+ import sys
+diff -up mozilla/python/mozboot/bin/bootstrap.py.python2 mozilla/python/mozboot/bin/bootstrap.py
+--- mozilla/python/mozboot/bin/bootstrap.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozboot/bin/bootstrap.py	2013-12-07 22:18:39.000000000 +0100
+@@ -12,7 +12,6 @@
+ 
+ # If we add unicode_literals, optparse breaks on Python 2.6.1 (which is needed
+ # to support OS X 10.6).
+-from __future__ import print_function
+ 
+ import os
+ import shutil
+@@ -97,8 +96,9 @@ def ensure_environment(repo_url=None, re
+                 if not os.path.exists(destdir):
+                     os.makedirs(destdir)
+ 
+-                with open(destpath, 'wb') as fh:
+-                    fh.write(files[relpath])
++                fh = open(destpath, 'wb')
++                fh.write(files[relpath])
++                fh.close()
+ 
+             # This should always work.
+             sys.path.append(TEMPDIR)
+@@ -122,7 +122,7 @@ def main(args):
+     try:
+         try:
+             cls = ensure_environment(options.repo_url, options.repo_type)
+-        except Exception as e:
++        except Exception, e:
+             print('Could not load the bootstrap Python environment.\n')
+             print('This should never happen. Consider filing a bug.\n')
+             print('\n')
+diff -up mozilla/python/mozboot/mozboot/base.py.python2 mozilla/python/mozboot/mozboot/base.py
+--- mozilla/python/mozboot/mozboot/base.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozboot/mozboot/base.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ import os
+ import re
+diff -up mozilla/python/mozboot/mozboot/bootstrap.py.python2 mozilla/python/mozboot/mozboot/bootstrap.py
+--- mozilla/python/mozboot/mozboot/bootstrap.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozboot/mozboot/bootstrap.py	2013-12-07 22:18:39.000000000 +0100
+@@ -3,7 +3,6 @@
+ # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+ # If we add unicode_literals, Python 2.6.1 (required for OS X 10.6) breaks.
+-from __future__ import print_function
+ 
+ import platform
+ import sys
+diff -up mozilla/python/mozboot/mozboot/mach_commands.py.python2 mozilla/python/mozboot/mozboot/mach_commands.py
+--- mozilla/python/mozboot/mozboot/mach_commands.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozboot/mozboot/mach_commands.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this,
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ from mach.decorators import (
+     CommandArgument,
+diff -up mozilla/python/mozboot/mozboot/osx.py.python2 mozilla/python/mozboot/mozboot/osx.py
+--- mozilla/python/mozboot/mozboot/osx.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozboot/mozboot/osx.py	2013-12-07 22:18:39.000000000 +0100
+@@ -157,7 +157,7 @@ class OSXBootstrapper(BaseBootstrapper):
+         try:
+             output = self.check_output(['/usr/bin/xcrun', 'clang'],
+                 stderr=subprocess.STDOUT)
+-        except subprocess.CalledProcessError as e:
++        except subprocess.CalledProcessError, e:
+             if 'license' in e.output:
+                 xcodebuild = self.which('xcodebuild')
+                 subprocess.check_call([xcodebuild, '-license'])
+diff -up mozilla/python/mozbuild/dumbmake/dumbmake.py.python2 mozilla/python/mozbuild/dumbmake/dumbmake.py
+--- mozilla/python/mozbuild/dumbmake/dumbmake.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/dumbmake/dumbmake.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ from collections import OrderedDict
+ from itertools import groupby
+diff -up mozilla/python/mozbuild/mozbuild/backend/base.py.python2 mozilla/python/mozbuild/mozbuild/backend/base.py
+--- mozilla/python/mozbuild/mozbuild/backend/base.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/backend/base.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ from abc import (
+     ABCMeta,
+@@ -15,11 +14,11 @@ import time
+ 
+ from mach.mixin.logging import LoggingMixin
+ 
+-from ..frontend.data import (
++from frontend.data import (
+     ReaderSummary,
+     SandboxDerived,
+ )
+-from .configenvironment import ConfigEnvironment
++from configenvironment import ConfigEnvironment
+ 
+ 
+ class BackendConsumeSummary(object):
+@@ -57,13 +56,15 @@ class BackendConsumeSummary(object):
+ 
+     @property
+     def reader_summary(self):
+-        return 'Finished reading {0:d} moz.build files into {1:d} descriptors in {2:.2f}s'.format(
++        #return 'Finished reading {0:d} moz.build files into {1:d} descriptors in {2:.2f}s'.format(
++        return 'Finished reading %d moz.build files into %d descriptors in %s s' % (
+             self.mozbuild_count, self.object_count,
+             self.mozbuild_execution_time)
+ 
+     @property
+     def backend_summary(self):
+-        return 'Backend executed in {0:.2f}s'.format(self.backend_execution_time)
++        return 'Backend executed in %fs' % (self.backend_execution_time)
++        #return 'Backend executed in {0:.2f}s'.format(self.backend_execution_time)
+ 
+     def backend_detailed_summary(self):
+         """Backend summary to be supplied by BuildBackend implementations."""
+@@ -71,8 +72,11 @@ class BackendConsumeSummary(object):
+ 
+     @property
+     def total_summary(self):
+-        efficiency_value = self.cpu_time / self.wall_time if self.wall_time else 100
+-        return 'Total wall time: {0:.2f}s; CPU time: {1:.2f}s; Efficiency: {2:.0%}'.format(
++        efficiency_value = 100
++        if self.wall_time:
++          efficiency_value = self.cpu_time / self.wall_time
++        #return 'Total wall time: {0:.2f}s; CPU time: {1:.2f}s; Efficiency: {2:.0%}'.format(
++        return 'Total wall time: %fs; CPU time: %fs; Efficiency: %f' % (
+             self.wall_time, self.cpu_time, efficiency_value)
+ 
+     def summaries(self):
+@@ -189,8 +193,9 @@ class BuildBackend(LoggingMixin):
+         # Write out a file indicating when this backend was last generated.
+         age_file = os.path.join(self.environment.topobjdir,
+             'backend.%s.built' % self.__class__.__name__)
+-        with open(age_file, 'a'):
+-            os.utime(age_file, None)
++        f = open(age_file, 'a')
++        os.utime(age_file, None)
++        f.close()
+ 
+         finished_start = time.time()
+         self.consume_finished()
+diff -up mozilla/python/mozbuild/mozbuild/backend/configenvironment.py.python2 mozilla/python/mozbuild/mozbuild/backend/configenvironment.py
+--- mozilla/python/mozbuild/mozbuild/backend/configenvironment.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/backend/configenvironment.py	2013-12-07 22:18:39.000000000 +0100
+@@ -7,11 +7,68 @@ import os
+ import posixpath
+ import re
+ 
+-from os.path import relpath
++import os
++import sys
++
++# Creates os.path.relpath for Python 2.4
++
++if not hasattr(os, 'relpath'):
++    if os.path is sys.modules.get('ntpath'):
++        def relpath(path, start=os.path.curdir):
++            """Return a relative version of a path"""
++        
++            if not path:
++                raise ValueError("no path specified")
++            start_list = os.path.abspath(start).split(os.path.sep)
++            path_list = os.path.abspath(path).split(os.path.sep)
++            if start_list[0].lower() != path_list[0].lower():
++                unc_path, rest = os.path.splitunc(path)
++                unc_start, rest = os.path.splitunc(start)
++                if bool(unc_path) ^ bool(unc_start):
++                    raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
++                                                                        % (path, start))
++                else:
++                    raise ValueError("path is on drive %s, start on drive %s"
++                                                        % (path_list[0], start_list[0]))
++            # Work out how much of the filepath is shared by start and path.
++            for i in range(min(len(start_list), len(path_list))):
++                if start_list[i].lower() != path_list[i].lower():
++                    break
++            else:
++                i += 1
++        
++            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
++            if not rel_list:
++                return os.path.curdir
++            return os.path.join(*rel_list)
++    
++    else:
++        # default to posixpath definition
++        def relpath(path, start=os.path.curdir):
++            """Return a relative version of a path"""
++        
++            if not path:
++                raise ValueError("no path specified")
++        
++            start_list = os.path.abspath(start).split(os.path.sep)
++            path_list = os.path.abspath(path).split(os.path.sep)
++        
++            # Work out how much of the filepath is shared by start and path.
++            i = len(os.path.commonprefix([start_list, path_list]))
++        
++            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
++            if not rel_list:
++                return os.path.curdir
++            return os.path.join(*rel_list)
++        
++    os.path.relpath = relpath
++
++#from os.path import relpath
+ 
+ from Preprocessor import Preprocessor
+ 
+-from ..util import (
++#from ..util import (
++from util import (
+     ensureParentDir,
+     FileAvoidWrite,
+ )
+@@ -40,9 +97,10 @@ class BuildConfig(object):
+     def from_config_status(path):
+         """Create an instance from a config.status file."""
+ 
+-        with open(path, 'rt') as fh:
++        fh = open(path, 'rt')
++        if 1:
+             source = fh.read()
+-            code = compile(source, path, 'exec', dont_inherit=1)
++            code = compile(source, path, 'exec', 0, 1)
+             g = {
+                 '__builtins__': __builtins__,
+                 '__file__': path,
+@@ -55,6 +113,7 @@ class BuildConfig(object):
+             for name in l['__all__']:
+                 setattr(config, name, l[name])
+ 
++            fh.close()
+             return config
+ 
+ 
+@@ -202,7 +261,8 @@ class ConfigEnvironment(object):
+             "#undef UNKNOWN_NAME" is turned into "/* #undef UNKNOWN_NAME */"
+             Whitespaces are preserved.
+         '''
+-        with open(self.get_input(path), 'rU') as input:
++        input = open(self.get_input(path), 'rU')
++        if 1:
+             ensureParentDir(path)
+             output = FileAvoidWrite(path)
+             r = re.compile('^\s*#\s*(?P<cmd>[a-z]+)(?:\s+(?P<name>\S+)(?:\s+(?P<value>\S+))?)?', re.U)
+@@ -222,5 +282,6 @@ class ConfigEnvironment(object):
+                            l = '/* ' + l[:m.end('name')] + ' */' + l[m.end('name'):]
+ 
+                 output.write(l)
++            input.close()
+             return output.close()
+ 
+diff -up mozilla/python/mozbuild/mozbuild/backend/recursivemake.py.python2 mozilla/python/mozbuild/mozbuild/backend/recursivemake.py
+--- mozilla/python/mozbuild/mozbuild/backend/recursivemake.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/backend/recursivemake.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,15 +2,14 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ import errno
+ import logging
+ import os
+ import types
+ 
+-from .base import BuildBackend
+-from ..frontend.data import (
++from base import BuildBackend
++from mozbuild.frontend.data import (
+     ConfigFileSubstitution,
+     DirectoryTraversal,
+     SandboxDerived,
+@@ -19,19 +18,20 @@ from ..frontend.data import (
+     Program,
+     XpcshellManifests,
+ )
+-from ..util import FileAvoidWrite
++
++from util import FileAvoidWrite
+ 
+ 
+ STUB_MAKEFILE = '''
+ # THIS FILE WAS AUTOMATICALLY GENERATED. DO NOT MODIFY BY HAND.
+ 
+-DEPTH          := {depth}
+-topsrcdir      := {topsrc}
+-srcdir         := {src}
+-VPATH          := {src}
+-relativesrcdir := {relsrc}
++DEPTH          := %(depth)s
++topsrcdir      := %(topsrc)s
++srcdir         := %(src)s
++VPATH          := %(src)s
++relativesrcdir := %(relsrc)s
+ 
+-include {topsrc}/config/rules.mk
++include %(topsrc)s/config/rules.mk
+ '''.lstrip()
+ 
+ 
+@@ -114,7 +114,8 @@ class RecursiveMakeBackend(BuildBackend)
+         self.summary.unchanged_count = 0
+ 
+         def detailed(summary):
+-            return '{0:d} total backend files. {1:d} created; {2:d} updated; {3:d} unchanged'.format(
++            #return '{0:d} total backend files. {1:d} created; {2:d} updated; {3:d} unchanged'.format(
++            return '%d total backend files. %d created; %d updated; %d unchanged' % (
+                 summary.managed_count, summary.created_count,
+                 summary.updated_count, summary.unchanged_count)
+ 
+@@ -139,10 +140,8 @@ class RecursiveMakeBackend(BuildBackend)
+ 
+     def consume_object(self, obj):
+         """Write out build files necessary to build with recursive make."""
+-
+         if not isinstance(obj, SandboxDerived):
+             return
+-
+         backend_file = self._backend_files.get(obj.srcdir,
+             BackendMakeFile(obj.srcdir, obj.objdir, self.get_environment(obj)))
+ 
+@@ -180,7 +179,7 @@ class RecursiveMakeBackend(BuildBackend)
+             if not os.path.exists(bf.objdir):
+                 try:
+                     os.makedirs(bf.objdir)
+-                except OSError as error:
++                except OSError, error:
+                     if error.errno != errno.EEXIST:
+                         raise
+ 
+@@ -215,7 +214,7 @@ class RecursiveMakeBackend(BuildBackend)
+                 }
+ 
+                 aw = FileAvoidWrite(makefile)
+-                aw.write(STUB_MAKEFILE.format(**params))
++                aw.write(STUB_MAKEFILE % params)
+                 self._update_from_avoid_write(aw.close())
+                 self.summary.managed_count += 1
+ 
+diff -up mozilla/python/mozbuild/mozbuild/backend/util.py.python2 mozilla/python/mozbuild/mozbuild/backend/util.py
+--- mozilla/python/mozbuild/mozbuild/backend/util.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/backend/util.py	2013-12-07 22:18:39.000000000 +0100
+@@ -0,0 +1,377 @@
++# This Source Code Form is subject to the terms of the Mozilla Public
++# License, v. 2.0. If a copy of the MPL was not distributed with this file,
++# You can obtain one at http://mozilla.org/MPL/2.0/.
++
++# This file contains miscellaneous utility functions that don't belong anywhere
++# in particular.
++
++import copy
++import errno
++import sha
++import os
++import sys
++
++from StringIO import StringIO
++
++if sys.version_info[0] == 3:
++    str_type = str
++else:
++    str_type = basestring
++
++def hash_file(path):
++    """Hashes a file specified by the path given and returns the hex digest."""
++
++    # If the hashing function changes, this may invalidate lots of cached data.
++    # Don't change it lightly.
++    h = sha.sha()
++
++    fh = open(path, 'rb')
++    while True:
++            data = fh.read(8192)
++
++            if not len(data):
++                break
++
++            h.update(data)
++
++    fh.close()
++    return h.hexdigest()
++
++
++class ReadOnlyDict(dict):
++    """A read-only dictionary."""
++    def __init__(self, d):
++        dict.__init__(self, d)
++
++    def __setitem__(self, name, value):
++        raise Exception('Object does not support assignment.')
++
++
++class undefined_default(object):
++    """Represents an undefined argument value that isn't None."""
++
++
++undefined = undefined_default()
++
++
++class DefaultOnReadDict(dict):
++    """A dictionary that returns default values for missing keys on read."""
++
++    def __init__(self, d, defaults=None, global_default=undefined):
++        """Create an instance from an iterable with defaults.
++
++        The first argument is fed into the dict constructor.
++
++        defaults is a dict mapping keys to their default values.
++
++        global_default is the default value for *all* missing keys. If it isn't
++        specified, no default value for keys not in defaults will be used and
++        KeyError will be raised on access.
++        """
++        dict.__init__(self, d)
++
++        self._defaults = defaults or {}
++        self._global_default = global_default
++
++    def __getitem__(self, k):
++        try:
++            return dict.__getitem__(self, k)
++        except:
++            pass
++
++        if k in self._defaults:
++            dict.__setitem__(self, k, copy.deepcopy(self._defaults[k]))
++        elif self._global_default != undefined:
++            dict.__setitem__(self, k, copy.deepcopy(self._global_default))
++
++        return dict.__getitem__(self, k)
++
++
++class ReadOnlyDefaultDict(DefaultOnReadDict, ReadOnlyDict):
++    """A read-only dictionary that supports default values on retrieval."""
++    def __init__(self, d, defaults=None, global_default=undefined):
++        DefaultOnReadDict.__init__(self, d, defaults, global_default)
++
++
++def ensureParentDir(path):
++    """Ensures the directory parent to the given file exists."""
++    d = os.path.dirname(path)
++    if d and not os.path.exists(path):
++        try:
++            os.makedirs(d)
++        except OSError, error:
++            if error.errno != errno.EEXIST:
++                raise
++
++
++class FileAvoidWrite(StringIO):
++    """File-like object that buffers output and only writes if content changed.
++
++    We create an instance from an existing filename. New content is written to
++    it. When we close the file object, if the content in the in-memory buffer
++    differs from what is on disk, then we write out the new content. Otherwise,
++    the original file is untouched.
++    """
++    def __init__(self, filename):
++        StringIO.__init__(self)
++        self.filename = filename
++
++    def close(self):
++        """Stop accepting writes, compare file contents, and rewrite if needed.
++
++        Returns a tuple of bools indicating what action was performed:
++
++            (file existed, file updated)
++        """
++        buf = self.getvalue()
++        StringIO.close(self)
++        existed = False
++        try:
++            existing = open(self.filename, 'rU')
++            existed = True
++        except IOError:
++            pass
++        else:
++            try:
++                if existing.read() == buf:
++                    existing.close()
++                    return True, False
++                
++            #except IOError:
++            except ValueError:
++                pass
++
++        ensureParentDir(self.filename)
++        file_ = open(self.filename, 'w')
++        file_.write(buf)
++        file_.close()
++
++        return existed, True
++
++    def __enter__(self):
++        return self
++    def __exit__(self, type, value, traceback):
++        self.close()
++
++
++def resolve_target_to_make(topobjdir, target):
++    r'''
++    Resolve `target` (a target, directory, or file) to a make target.
++
++    `topobjdir` is the object directory; all make targets will be
++    rooted at or below the top-level Makefile in this directory.
++
++    Returns a pair `(reldir, target)` where `reldir` is a directory
++    relative to `topobjdir` containing a Makefile and `target` is a
++    make target (possibly `None`).
++
++    A directory resolves to the nearest directory at or above
++    containing a Makefile, and target `None`.
++
++    A regular (non-Makefile) file resolves to the nearest directory at
++    or above the file containing a Makefile, and an appropriate
++    target.
++
++    A Makefile resolves to the nearest parent strictly above the
++    Makefile containing a different Makefile, and an appropriate
++    target.
++    '''
++    if os.path.isabs(target):
++        print('Absolute paths for make targets are not allowed.')
++        return (None, None)
++
++    target = target.replace(os.sep, '/')
++
++    abs_target = os.path.join(topobjdir, target)
++
++    # For directories, run |make -C dir|. If the directory does not
++    # contain a Makefile, check parents until we find one. At worst,
++    # this will terminate at the root.
++    if os.path.isdir(abs_target):
++        current = abs_target
++
++        while True:
++            make_path = os.path.join(current, 'Makefile')
++            if os.path.exists(make_path):
++                return (current[len(topobjdir) + 1:], None)
++
++            current = os.path.dirname(current)
++
++    # If it's not in a directory, this is probably a top-level make
++    # target. Treat it as such.
++    if '/' not in target:
++        return (None, target)
++
++    # We have a relative path within the tree. We look for a Makefile
++    # as far into the path as possible. Then, we compute the make
++    # target as relative to that directory.
++    reldir = os.path.dirname(target)
++    target = os.path.basename(target)
++
++    while True:
++        make_path = os.path.join(topobjdir, reldir, 'Makefile')
++
++        # We append to target every iteration, so the check below
++        # happens exactly once.
++        if target != 'Makefile' and os.path.exists(make_path):
++            return (reldir, target)
++
++        target = os.path.join(os.path.basename(reldir), target)
++        reldir = os.path.dirname(reldir)
++
++
++class UnsortedError(Exception):
++    def __init__(self, srtd, original):
++        assert len(srtd) == len(original)
++
++        self.sorted = srtd
++        self.original = original
++
++        for i, orig in enumerate(original):
++            s = srtd[i]
++
++            if orig != s:
++                self.i = i
++                break
++
++    def __str__(self):
++        s = StringIO()
++
++        s.write('An attempt was made to add an unsorted sequence to a list. ')
++        s.write('The incoming list is unsorted starting at element %d. ' %
++            self.i)
++        s.write('We expected "%s" but got "%s"' % (
++            self.sorted[self.i], self.original[self.i]))
++
++        return s.getvalue()
++
++
++class StrictOrderingOnAppendList(list):
++    """A list specialized for moz.build environments.
++
++    We overload the assignment and append operations to require that incoming
++    elements be ordered. This enforces cleaner style in moz.build files.
++    """
++    @staticmethod
++    def ensure_sorted(l):
++        srtd = sorted(l)
++
++        if srtd != l:
++            raise UnsortedError(srtd, l)
++
++    def __init__(self, iterable=[]):
++        StrictOrderingOnAppendList.ensure_sorted(iterable)
++
++        list.__init__(self, iterable)
++
++    def extend(self, l):
++        if not isinstance(l, list):
++            raise ValueError('List can only be extended with other list instances.')
++
++        StrictOrderingOnAppendList.ensure_sorted(l)
++
++        return list.extend(self, l)
++
++    def __setslice__(self, i, j, sequence):
++        if not isinstance(sequence, list):
++            raise ValueError('List can only be sliced with other list instances.')
++
++        StrictOrderingOnAppendList.ensure_sorted(sequence)
++
++        return list.__setslice__(self, i, j, sequence)
++
++    def __add__(self, other):
++        if not isinstance(other, list):
++            raise ValueError('Only lists can be appended to lists.')
++
++        StrictOrderingOnAppendList.ensure_sorted(other)
++
++        # list.__add__ will return a new list. We "cast" it to our type.
++        return StrictOrderingOnAppendList(list.__add__(self, other))
++
++    def __iadd__(self, other):
++        if not isinstance(other, list):
++            raise ValueError('Only lists can be appended to lists.')
++
++        StrictOrderingOnAppendList.ensure_sorted(other)
++
++        list.__iadd__(self, other)
++
++        return self
++
++
++class MozbuildDeletionError(Exception):
++    pass
++
++class HierarchicalStringList(object):
++    """A hierarchy of lists of strings.
++
++    Each instance of this object contains a list of strings, which can be set or
++    appended to. A sub-level of the hierarchy is also an instance of this class
++    and can be added by appending to an attribute instead.
++
++    For example, the moz.build variable EXPORTS is an instance of this class. We
++    can do:
++
++    EXPORTS += ['foo.h']
++    EXPORTS.mozilla.dom += ['bar.h']
++
++    In this case, we have 3 instances (EXPORTS, EXPORTS.mozilla, and
++    EXPORTS.mozilla.dom), and the first and last each have one element in their
++    list.
++    """
++    __slots__ = ('_strings', '_children')
++
++    def __init__(self):
++        self._strings = StrictOrderingOnAppendList()
++        self._children = {}
++
++    def get_children(self):
++        return self._children
++
++    def get_strings(self):
++        return self._strings
++
++    def __setattr__(self, name, value):
++        if name in self.__slots__:
++            return object.__setattr__(self, name, value)
++
++        # __setattr__ can be called with a list when a simple assignment is
++        # used:
++        #
++        # EXPORTS.foo = ['file.h']
++        #
++        # In this case, we need to overwrite foo's current list of strings.
++        #
++        # However, __setattr__ is also called with a HierarchicalStringList
++        # to try to actually set the attribute. We want to ignore this case,
++        # since we don't actually create an attribute called 'foo', but just add
++        # it to our list of children (using _get_exportvariable()).
++        exports = self._get_exportvariable(name)
++        if not isinstance(value, HierarchicalStringList):
++            exports._check_list(value)
++            exports._strings = value
++
++    def __getattr__(self, name):
++        if name.startswith('__'):
++            return object.__getattr__(self, name)
++        return self._get_exportvariable(name)
++
++    def __delattr__(self, name):
++        raise MozbuildDeletionError('Unable to delete attributes for this object')
++
++    def __iadd__(self, other):
++        self._check_list(other)
++        self._strings += other
++        return self
++
++    def _get_exportvariable(self, name):
++        return self._children.setdefault(name, HierarchicalStringList())
++
++    def _check_list(self, value):
++        if not isinstance(value, list):
++            raise ValueError('Expected a list of strings, not %s' % type(value))
++        for v in value:
++            if not isinstance(v, str_type):
++                raise ValueError(
++                    'Expected a list of strings, not an element of %s' % type(v))
+diff -up mozilla/python/mozbuild/mozbuild/base.py.python2 mozilla/python/mozbuild/mozbuild/base.py
+--- mozilla/python/mozbuild/mozbuild/base.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/base.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,12 +2,10 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import print_function, unicode_literals
+-
+-import json
++import simplejson as json
+ import logging
+ import mozpack.path
+-import multiprocessing
++#import multiprocessing
+ import os
+ import subprocess
+ import sys
+@@ -18,9 +16,9 @@ from mach.mixin.process import ProcessEx
+ 
+ from mozfile.mozfile import rmtree
+ 
+-from .backend.configenvironment import ConfigEnvironment
+-from .config import BuildConfig
+-from .mozconfig import (
++from mozbuild.backend.configenvironment import ConfigEnvironment
++from mozbuild.config import BuildConfig
++from mozbuild.mozconfig import (
+     MozconfigFindException,
+     MozconfigLoadException,
+     MozconfigLoader,
+@@ -272,7 +270,11 @@ class MozbuildObject(ProcessExecutionMix
+ 
+         leaf = None
+ 
+-        leaf = (substs['MOZ_APP_NAME'] if what == 'app' else what) + substs['BIN_SUFFIX']
++        if what =='app':
++          leaf = substs['MOZ_APP_NAME'] + substs['BIN_SUFFIX']
++        else:
++          leaf = what + substs['BIN_SUFFIX']
++        #leaf = (substs['MOZ_APP_NAME'] if what == 'app' else what) + substs['BIN_SUFFIX']
+         path = os.path.join(stem, leaf)
+ 
+         if validate_exists and not os.path.exists(path):
+@@ -348,7 +350,8 @@ class MozbuildObject(ProcessExecutionMix
+             if num_jobs > 0:
+                 args.append('-j%d' % num_jobs)
+             else:
+-                args.append('-j%d' % multiprocessing.cpu_count())
++                #args.append('-j%d' % multiprocessing.cpu_count())
++                args.append('-j%d' % 2)
+ 
+         if ignore_errors:
+             args.append('-k')
+@@ -453,11 +456,11 @@ class MachCommandBase(MozbuildObject):
+         try:
+             self.mozconfig
+ 
+-        except MozconfigFindException as e:
++        except MozconfigFindException, e:
+             print(e.message)
+             sys.exit(1)
+ 
+-        except MozconfigLoadException as e:
++        except MozconfigLoadException, e:
+             print('Error loading mozconfig: ' + e.path)
+             print('')
+             print(e.message)
+@@ -478,7 +481,11 @@ class PathArgument(object):
+         self.arg = arg
+         self.topsrcdir = topsrcdir
+         self.topobjdir = topobjdir
+-        self.cwd = os.getcwd() if cwd is None else cwd
++        if cwd is None:
++          self.cwd = os.getcwd()
++        else:
++          self.cwd = cwd        
++        #self.cwd = os.getcwd() if cwd is None else cwd
+ 
+     def relpath(self):
+         """Return a path relative to the topsrcdir or topobjdir.
+diff -up mozilla/python/mozbuild/mozbuild/compilation/warnings.py.python2 mozilla/python/mozbuild/mozbuild/compilation/warnings.py
+--- mozilla/python/mozbuild/mozbuild/compilation/warnings.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/compilation/warnings.py	2013-12-07 22:18:39.000000000 +0100
+@@ -4,7 +4,6 @@
+ 
+ # This modules provides functionality for dealing with compiler warnings.
+ 
+-from __future__ import unicode_literals
+ 
+ import errno
+ import json
+@@ -261,19 +260,21 @@ class WarningsDatabase(object):
+ 
+     def load_from_file(self, filename):
+         """Load the database from a file."""
+-        with open(filename, 'rb') as fh:
+-            self.deserialize(fh)
++        fh = open(filename, 'rb')
++        self.deserialize(fh)
++        fh.close()
+ 
+     def save_to_file(self, filename):
+         """Save the database to a file."""
+         try:
+             # Ensure the directory exists
+             os.makedirs(os.path.dirname(filename))
+-        except OSError as e:
++        except OSError, e:
+             if e.errno != errno.EEXIST:
+                 raise
+-        with open(filename, 'wb') as fh:
+-            self.serialize(fh)
++        fh = open(filename, 'wb')
++        self.serialize(fh)
++        fh.close()
+ 
+ 
+ class WarningsCollector(object):
+diff -up mozilla/python/mozbuild/mozbuild/config.py.python2 mozilla/python/mozbuild/mozbuild/config.py
+--- mozilla/python/mozbuild/mozbuild/config.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/config.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,9 +2,8 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+-import multiprocessing
++#import multiprocessing
+ 
+ from mach.config import (
+     ConfigProvider,
+@@ -14,11 +13,13 @@ from mach.config import (
+ from mach.decorators import SettingsProvider
+ 
+ 
+-@SettingsProvider
++#@SettingsProvider
+ class BuildConfig(ConfigProvider):
+     """The configuration for mozbuild."""
+ 
+     def __init__(self, settings):
++        print "Does SettingsProvider work?"
++        SettingsProvider(self)
+         self.settings = settings
+ 
+     @classmethod
+@@ -28,6 +29,6 @@ class BuildConfig(ConfigProvider):
+                 **kwargs)
+ 
+         register('build', 'threads', PositiveIntegerType,
+-            default=multiprocessing.cpu_count())
++            default=2)
+ 
+ 
+diff -up mozilla/python/mozbuild/mozbuild/controller/building.py.python2 mozilla/python/mozbuild/mozbuild/controller/building.py
+--- mozilla/python/mozbuild/mozbuild/controller/building.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/controller/building.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ import getpass
+ import os
+diff -up mozilla/python/mozbuild/mozbuild/controller/clobber.py.python2 mozilla/python/mozbuild/mozbuild/controller/clobber.py
+--- mozilla/python/mozbuild/mozbuild/controller/clobber.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/controller/clobber.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,7 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import print_function
++from rhrebase import print24
+ 
+ r'''This module contains code for managing clobbering of the tree.'''
+ 
+@@ -77,9 +77,11 @@ class Clobberer(object):
+         This returns a list of lines describing why the clobber was required.
+         Each line is stripped of leading and trailing whitespace.
+         """
+-        with open(self.src_clobber, 'rt') as fh:
+-            lines = [l.strip() for l in fh.readlines()]
+-            return [l for l in lines if l and not l.startswith('#')]
++        fh = open(self.src_clobber, 'rt')
++        lines = [l.strip() for l in fh.readlines()]
++        mylist = [l for l in lines if l and not l.startswith('#')]
++        fh.close()
++        return mylist
+ 
+     def ensure_objdir_state(self):
+         """Ensure the CLOBBER file in the objdir exists.
+@@ -92,8 +94,9 @@ class Clobberer(object):
+ 
+         if not os.path.exists(self.obj_clobber):
+             # Simply touch the file.
+-            with open(self.obj_clobber, 'a'):
+-                pass
++            f = open(self.obj_clobber, 'a')
++            f.close()
++            pass
+ 
+     def maybe_do_clobber(self, cwd, allow_auto=False, fh=sys.stderr):
+         """Perform a clobber if it is required. Maybe.
+@@ -113,7 +116,7 @@ class Clobberer(object):
+         cwd = os.path.normpath(cwd)
+ 
+         if not self.clobber_needed():
+-            print('Clobber not needed.', file=fh)
++            print24('Clobber not needed.', file=fh)
+             self.ensure_objdir_state()
+             return False, False, None
+ 
+@@ -134,7 +137,7 @@ class Clobberer(object):
+             return True, False, self._message(
+                 'Cannot clobber while the shell is inside the object directory.')
+ 
+-        print('Automatically clobbering %s' % self.topobjdir, file=fh)
++        print24('Automatically clobbering %s' % self.topobjdir, file=fh)
+         try:
+             if cwd == self.topobjdir:
+                 for entry in os.listdir(self.topobjdir):
+@@ -149,9 +152,9 @@ class Clobberer(object):
+                 rmtree(self.topobjdir)
+ 
+             self.ensure_objdir_state()
+-            print('Successfully completed auto clobber.', file=fh)
++            print24('Successfully completed auto clobber.', file=fh)
+             return True, True, None
+-        except (IOError) as error:
++        except IOError,  error:
+             return True, False, self._message(
+                 'Error when automatically clobbering: ' + str(error))
+ 
+@@ -164,7 +167,7 @@ class Clobberer(object):
+ 
+ def main(args, env, cwd, fh=sys.stderr):
+     if len(args) != 2:
+-        print('Usage: clobber.py topsrcdir topobjdir', file=fh)
++        print24('Usage: clobber.py topsrcdir topobjdir', file=fh)
+         return 1
+ 
+     topsrcdir, topobjdir = args
+@@ -175,14 +178,16 @@ def main(args, env, cwd, fh=sys.stderr):
+     if not os.path.isabs(topobjdir):
+         topobjdir = os.path.abspath(topobjdir)
+ 
+-    auto = True if env.get('AUTOCLOBBER', False) else False
++    auto = False
++    if env.get('AUTOCLOBBER', False):
++      auto = True
+     clobber = Clobberer(topsrcdir, topobjdir)
+     required, performed, message = clobber.maybe_do_clobber(cwd, auto, fh)
+ 
+     if not required or performed:
+         return 0
+ 
+-    print(message, file=fh)
++    print24(message, file=fh)
+     return 1
+ 
+ 
+diff -up mozilla/python/mozbuild/mozbuild/frontend/data.py.python2 mozilla/python/mozbuild/mozbuild/frontend/data.py
+--- mozilla/python/mozbuild/mozbuild/frontend/data.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/frontend/data.py	2013-12-07 22:18:39.000000000 +0100
+@@ -15,7 +15,6 @@ contains the code for converting execute
+ structures.
+ """
+ 
+-from __future__ import unicode_literals
+ 
+ from ordereddict import OrderedDict
+ 
+diff -up mozilla/python/mozbuild/mozbuild/frontend/emitter.py.python2 mozilla/python/mozbuild/mozbuild/frontend/emitter.py
+--- mozilla/python/mozbuild/mozbuild/frontend/emitter.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/frontend/emitter.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,11 +2,10 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ import os
+ 
+-from .data import (
++from data import (
+     ConfigFileSubstitution,
+     DirectoryTraversal,
+     Exports,
+@@ -16,7 +15,7 @@ from .data import (
+     XpcshellManifests,
+ )
+ 
+-from .reader import MozbuildSandbox
++from reader import MozbuildSandbox
+ 
+ 
+ class TreeMetadataEmitter(object):
+diff -up mozilla/python/mozbuild/mozbuild/frontend/mach_commands.py.python2 mozilla/python/mozbuild/mozbuild/frontend/mach_commands.py
+--- mozilla/python/mozbuild/mozbuild/frontend/mach_commands.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/frontend/mach_commands.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ import textwrap
+ 
+diff -up mozilla/python/mozbuild/mozbuild/frontend/reader.py.python2 mozilla/python/mozbuild/mozbuild/frontend/reader.py
+--- mozilla/python/mozbuild/mozbuild/frontend/reader.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/frontend/reader.py	2013-12-07 22:18:39.000000000 +0100
+@@ -21,7 +21,6 @@ The BuildReader contains basic logic for
+ It does this by examining specific variables populated during execution.
+ """
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ import logging
+ import os
+@@ -29,8 +28,61 @@ import sys
+ import time
+ import traceback
+ import types
++import os
++import sys
+ 
+-from io import StringIO
++# Creates os.path.relpath for Python 2.4
++
++if not hasattr(os, 'relpath'):
++    if os.path is sys.modules.get('ntpath'):
++        def relpath(path, start=os.path.curdir):
++            """Return a relative version of a path"""
++        
++            if not path:
++                raise ValueError("no path specified")
++            start_list = os.path.abspath(start).split(os.path.sep)
++            path_list = os.path.abspath(path).split(os.path.sep)
++            if start_list[0].lower() != path_list[0].lower():
++                unc_path, rest = os.path.splitunc(path)
++                unc_start, rest = os.path.splitunc(start)
++                if bool(unc_path) ^ bool(unc_start):
++                    raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
++                                                                        % (path, start))
++                else:
++                    raise ValueError("path is on drive %s, start on drive %s"
++                                                        % (path_list[0], start_list[0]))
++            # Work out how much of the filepath is shared by start and path.
++            for i in range(min(len(start_list), len(path_list))):
++                if start_list[i].lower() != path_list[i].lower():
++                    break
++            else:
++                i += 1
++        
++            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
++            if not rel_list:
++                return os.path.curdir
++            return os.path.join(*rel_list)
++    
++    else:
++        # default to posixpath definition
++        def relpath(path, start=os.path.curdir):
++            """Return a relative version of a path"""
++        
++            if not path:
++                raise ValueError("no path specified")
++        
++            start_list = os.path.abspath(start).split(os.path.sep)
++            path_list = os.path.abspath(path).split(os.path.sep)
++        
++            # Work out how much of the filepath is shared by start and path.
++            i = len(os.path.commonprefix([start_list, path_list]))
++        
++            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
++            if not rel_list:
++                return os.path.curdir
++            return os.path.join(*rel_list)
++        
++    os.path.relpath = relpath
+ 
+ from mozbuild.util import (
+     ReadOnlyDefaultDict,
+@@ -39,21 +91,22 @@ from mozbuild.util import (
+ 
+ from mozbuild.backend.configenvironment import ConfigEnvironment
+ 
+-from .sandbox import (
++from sandbox import (
+     SandboxError,
+     SandboxExecutionError,
+     SandboxLoadError,
+     Sandbox,
+ )
+ 
+-from .sandbox_symbols import (
++from sandbox_symbols import (
+     FUNCTIONS,
+     VARIABLES,
+ )
+ 
+ 
+ if sys.version_info[0] == 2:
+-    text_type = unicode
++    #text_type = unicode FIXME?
++    text_type = str
+     type_type = types.TypeType
+ else:
+     text_type = str
+@@ -61,7 +114,8 @@ else:
+ 
+ 
+ def log(logger, level, action, params, formatter):
+-    logger.log(level, formatter, extra={'action': action, 'params': params})
++    #logger.log(level, formatter, extra={'action': action, 'params': params})
++    logger.log(level, formatter)
+ 
+ 
+ def is_read_allowed(path, config):
+@@ -125,7 +179,7 @@ class MozbuildSandbox(Sandbox):
+         topobjdir = os.path.abspath(config.topobjdir)
+         topsrcdir = config.topsrcdir
+         norm_topsrcdir = os.path.normpath(topsrcdir)
+-
++        print "starting"
+         if not path.startswith(norm_topsrcdir):
+             external_dirs = config.substs.get('EXTERNAL_SOURCE_DIR', '').split()
+             for external in external_dirs:
+@@ -160,13 +214,23 @@ class MozbuildSandbox(Sandbox):
+ 
+         relpath = os.path.relpath(path, topsrcdir).replace(os.sep, '/')
+         reldir = os.path.dirname(relpath)
+-
+-        with self._globals.allow_all_writes() as d:
+-            d['TOPSRCDIR'] = topsrcdir
++        print "Relpath(", "path", path,"topsrcdir", topsrcdir,"relpath",  relpath, "reldir", reldir
++        print "allow all writes", reldir, relpath
++        d = self._globals.allow_all_writes()
++        self._globals._allow_all_writes = True
++        #d.__open__()
++        if 1:
++            print d, dir(d)
++            try:
++              d['TOPSRCDIR'] = topsrcdir
++            except Exception, e:
++              print  e
++            print "set topdir"
+             d['TOPOBJDIR'] = topobjdir
+             d['RELATIVEDIR'] = reldir
+             d['SRCDIR'] = os.path.join(topsrcdir, reldir).replace(os.sep, '/').rstrip('/')
+             d['OBJDIR'] = os.path.join(topobjdir, reldir).replace(os.sep, '/').rstrip('/')
++            print "here"
+ 
+             # config.status does not yet use unicode. However, mozbuild expects
+             # unicode everywhere. So, decode binary into unicode as necessary.
+@@ -190,6 +254,7 @@ class MozbuildSandbox(Sandbox):
+             # Register functions.
+             for name, func in FUNCTIONS.items():
+                 d[name] = getattr(self, func[0])
++        self._globals._allow_all_writes = False
+ 
+     def exec_file(self, path, filesystem_absolute=False):
+         """Override exec_file to normalize paths and restrict file loading.
+@@ -230,7 +295,7 @@ class MozbuildSandbox(Sandbox):
+ 
+     def _add_tier_directory(self, tier, reldir, static=False):
+         """Register a tier directory with the build."""
+-        if isinstance(reldir, text_type):
++        if isinstance(reldir, str):
+             reldir = [reldir]
+ 
+         if not tier in self['TIERS']:
+@@ -239,9 +304,12 @@ class MozbuildSandbox(Sandbox):
+                 'static': [],
+             }
+ 
+-        key = 'static' if static else 'regular'
++        key = 'regular'
++        if static:
++          key = 'static'
+ 
+         for path in reldir:
++            print "path in list", path
+             if path in self['TIERS'][tier][key]:
+                 raise Exception('Directory has already been registered with '
+                     'tier: %s' % path)
+@@ -256,7 +324,7 @@ class MozbuildSandbox(Sandbox):
+ 
+     def _warning(self, message):
+         # FUTURE consider capturing warnings in a variable instead of printing.
+-        print('WARNING: %s' % message, file=sys.stderr)
++        sys.stderr.write('WARNING: %s\n\r' % message)
+ 
+     def _error(self, message):
+         raise SandboxCalledError(self._execution_stack, message)
+@@ -320,41 +388,42 @@ class BuildReaderError(Exception):
+             self.sandbox_called_error
+ 
+     def __str__(self):
+-        s = StringIO()
++        #s = StringIO()
++        s = ""
+ 
+         delim = '=' * 30
+-        s.write('%s\nERROR PROCESSING MOZBUILD FILE\n%s\n\n' % (delim, delim))
++        s += '%s\nERROR PROCESSING MOZBUILD FILE\n%s\n\n' % (delim, delim)
+ 
+-        s.write('The error occurred while processing the following file:\n')
+-        s.write('\n')
+-        s.write('    %s\n' % self.actual_file)
+-        s.write('\n')
++        s += 'The error occurred while processing the following file:\n'
++        s += '\n'
++        s += '    %s\n' % self.actual_file
++        s += '\n'
+ 
+         if self.actual_file != self.main_file and not self.sandbox_load:
+-            s.write('This file was included as part of processing:\n')
+-            s.write('\n')
+-            s.write('    %s\n' % self.main_file)
+-            s.write('\n')
++            s += 'This file was included as part of processing:\n'
++            s += '\n'
++            s += '    %s\n' % self.main_file
++            s += '\n'
+ 
+         if self.sandbox_error is not None:
+             self._print_sandbox_error(s)
+         elif self.validation_error is not None:
+-            s.write('The error occurred when validating the result of ')
+-            s.write('the execution. The reported error is:\n')
+-            s.write('\n')
+-            s.write('    %s\n' % self.validation_error.message)
+-            s.write('\n')
++            s += 'The error occurred when validating the result of '
++            s += 'the execution. The reported error is:\n'
++            s += '\n'
++            s += '    %s\n' % self.validation_error.message
++            s += '\n'
+         else:
+-            s.write('The error appears to be part of the %s ' % __name__)
+-            s.write('Python module itself! It is possible you have stumbled ')
+-            s.write('across a legitimate bug.\n')
+-            s.write('\n')
++            s += 'The error appears to be part of the %s ' % __name__
++            s += 'Python module itself! It is possible you have stumbled '
++            s += 'across a legitimate bug.\n'
++            s += '\n'
+ 
+             for l in traceback.format_exception(type(self.other), self.other,
+                 self.trace):
+-                s.write(unicode(l))
++                s += unicode(l)
+ 
+-        return s.getvalue()
++        return s
+ 
+     def _print_sandbox_error(self, s):
+         # Try to find the frame of the executed code.
+@@ -597,26 +666,26 @@ class BuildReader(object):
+                 filesystem_absolute=filesystem_absolute, descend=descend):
+                 yield s
+ 
+-        except BuildReaderError as bre:
++        except BuildReaderError, bre:
+             raise bre
+ 
+-        except SandboxCalledError as sce:
++        except SandboxCalledError, sce:
+             raise BuildReaderError(list(self._execution_stack),
+                 sys.exc_info()[2], sandbox_called_error=sce)
+ 
+-        except SandboxExecutionError as se:
++        except SandboxExecutionError, se:
+             raise BuildReaderError(list(self._execution_stack),
+                 sys.exc_info()[2], sandbox_exec_error=se)
+ 
+-        except SandboxLoadError as sle:
++        except SandboxLoadError, sle:
+             raise BuildReaderError(list(self._execution_stack),
+                 sys.exc_info()[2], sandbox_load_error=sle)
+ 
+-        except SandboxValidationError as ve:
++        except SandboxValidationError, ve:
+             raise BuildReaderError(list(self._execution_stack),
+                 sys.exc_info()[2], validation_error=ve)
+ 
+-        except Exception as e:
++        except Exception, e:
+             raise BuildReaderError(list(self._execution_stack),
+                 sys.exc_info()[2], other_error=e)
+ 
+@@ -631,11 +700,15 @@ class BuildReader(object):
+             return
+ 
+         self._read_files.add(path)
++        print "after read_files"
+ 
+         time_start = time.time()
+         sandbox = MozbuildSandbox(self.config, path)
++        print "sandbox created"
+         sandbox.exec_file(path, filesystem_absolute=filesystem_absolute)
++        print "sandbox executed"
+         sandbox.execution_time = time.time() - time_start
++        print "preyeld"
+         yield sandbox
+ 
+         # Traverse into referenced files.
+diff -up mozilla/python/mozbuild/mozbuild/frontend/sandbox.py.python2 mozilla/python/mozbuild/mozbuild/frontend/sandbox.py
+--- mozilla/python/mozbuild/mozbuild/frontend/sandbox.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/frontend/sandbox.py	2013-12-07 22:18:39.000000000 +0100
+@@ -20,13 +20,12 @@ KeyError are machine parseable. This mac
+ user-friendly error messages in the case of errors.
+ """
+ 
+-from __future__ import unicode_literals
+ 
+ import copy
+ import os
+ import sys
+ 
+-from contextlib import contextmanager
++#from contextlib import contextmanager
+ 
+ from mozbuild.util import (
+     ReadOnlyDefaultDict,
+@@ -111,6 +110,7 @@ class GlobalNamespace(dict):
+         # The variable isn't present yet. Fall back to VARIABLES.
+         default = self._allowed_variables.get(name, None)
+         if default is None:
++            print "in getitem"
+             self.last_name_error = KeyError('global_ns', 'get_unknown', name)
+             raise self.last_name_error
+ 
+@@ -119,6 +119,7 @@ class GlobalNamespace(dict):
+ 
+     def __setitem__(self, name, value):
+         if self._allow_all_writes:
++            print "write allowed"
+             dict.__setitem__(self, name, value)
+             return
+ 
+@@ -129,6 +130,7 @@ class GlobalNamespace(dict):
+ 
+         # Variable is unknown.
+         if stored_type is None:
++            print "set type1" 
+             self.last_name_error = KeyError('global_ns', 'set_unknown', name,
+                 value)
+             raise self.last_name_error
+@@ -139,7 +141,10 @@ class GlobalNamespace(dict):
+         # not be in the allowed set if the constructor function for the stored
+         # type does not accept an instance of that type.
+         if not isinstance(value, stored_type):
++            print value.__class__, stored_type, input_type
++            # FIXME if not isinstance(value, input_type):
+             if not isinstance(value, input_type):
++                print "set type2" 
+                 self.last_name_error = ValueError('global_ns', 'set_type', name,
+                     value, input_type)
+                 raise self.last_name_error
+@@ -148,7 +153,7 @@ class GlobalNamespace(dict):
+ 
+         dict.__setitem__(self, name, value)
+ 
+-    @contextmanager
++    #@contextmanager
+     def allow_all_writes(self):
+         """Allow any variable to be written to this instance.
+ 
+@@ -158,8 +163,9 @@ class GlobalNamespace(dict):
+         whitelisted mutations.
+         """
+         self._allow_all_writes = True
+-        yield self
+-        self._allow_all_writes = False
++        return self
++        #yield self
++        #self._allow_all_writes = False
+ 
+ 
+ class LocalNamespace(dict):
+@@ -282,12 +288,13 @@ class Sandbox(object):
+         source = None
+ 
+         try:
+-            with open(path, 'rt') as fd:
+-                source = fd.read()
+-        except Exception as e:
++            fd = open(path, 'rt')
++            source = fd.read()
++            fd.close()
++        except Exception, e:
+             raise SandboxLoadError(list(self._execution_stack),
+                 sys.exc_info()[2], read_error=path)
+-
++        print "exec source", path, source
+         self.exec_source(source, path)
+ 
+     def exec_source(self, source, path):
+@@ -315,10 +322,14 @@ class Sandbox(object):
+             # compile() inherits the __future__ from the module by default. We
+             # do want Unicode literals.
+             code = compile(source, path, 'exec')
++            print "exec"
+             exec(code, self._globals, self._locals)
+-        except SandboxError as e:
++            print "exec done"
++        except SandboxError, e:
++            print "sand EXCEPTION", e
+             raise e
+-        except NameError as e:
++        except NameError, e:
++            print "name", e
+             # A NameError is raised when a local or global could not be found.
+             # The original KeyError has been dropped by the interpreter.
+             # However, we should have it cached in our namespace instances!
+@@ -336,14 +347,15 @@ class Sandbox(object):
+             raise SandboxExecutionError(list(self._execution_stack),
+                 type(actual), actual, sys.exc_info()[2])
+ 
+-        except Exception as e:
++        except Exception, e:
++            print "exec source except", e, dir(e)
+             # Need to copy the stack otherwise we get a reference and that is
+             # mutated during the finally.
+             exc = sys.exc_info()
+             raise SandboxExecutionError(list(self._execution_stack), exc[0],
+                 exc[1], exc[2])
+-        finally:
+             self._execution_stack.pop()
++        self._execution_stack.pop()
+ 
+     # Dict interface proxies reads to global namespace.
+     def __len__(self):
+diff -up mozilla/python/mozbuild/mozbuild/frontend/sandbox_symbols.py.python2 mozilla/python/mozbuild/mozbuild/frontend/sandbox_symbols.py
+--- mozilla/python/mozbuild/mozbuild/frontend/sandbox_symbols.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/frontend/sandbox_symbols.py	2013-12-07 22:18:39.000000000 +0100
+@@ -15,7 +15,6 @@ If you are looking for the absolute auth
+ the Sandbox consists of, you've come to the right place.
+ """
+ 
+-from __future__ import unicode_literals
+ 
+ from ordereddict import OrderedDict
+ from mozbuild.util import (
+@@ -23,6 +22,7 @@ from mozbuild.util import (
+     StrictOrderingOnAppendList,
+ )
+ 
++str_type = str
+ 
+ def doc_to_paragraphs(doc):
+     """Take a documentation string and converts it to paragraphs.
+@@ -133,18 +133,18 @@ VARIABLES = {
+         likely go away.
+         """),
+ 
+-    'HOST_LIBRARY_NAME': (unicode, unicode, "",
++    'HOST_LIBRARY_NAME': (str_type, str_type, "",
+         """Name of target library generated when cross compiling.
+         """),
+ 
+-    'JS_MODULES_PATH': (unicode, unicode, "",
++    'JS_MODULES_PATH': (str_type, str_type, "",
+         """Path to install EXTRA_JS_MODULES.
+ 
+         EXTRA_JS_MODULES files are copied to this path, which defaults to
+         $(FINAL_TARGET)/modules if unspecified.
+         """),
+ 
+-    'LIBRARY_NAME': (unicode, unicode, "",
++    'LIBRARY_NAME': (str_type, str_type, "",
+         """The name of the library generated for a directory.
+ 
+         Example:
+@@ -246,7 +246,7 @@ VARIABLES = {
+         variables declared during configure.
+         """),
+ 
+-    'MODULE': (unicode, unicode, "",
++    'MODULE': (str_type, str_type, "",
+         """Module name.
+ 
+         Historically, this variable was used to describe where to install header
+@@ -271,7 +271,7 @@ VARIABLES = {
+         EXPORTS.mozilla.dom += ['bar.h']
+         """),
+ 
+-    'PROGRAM' : (unicode, unicode, "",
++    'PROGRAM' : (str_type, str_type, "",
+         """Compiled executable name.
+ 
+         If the configuration token 'BIN_SUFFIX' is set, its value will be
+@@ -295,7 +295,7 @@ VARIABLES = {
+         files.
+         """),
+ 
+-    'XPIDL_MODULE': (unicode, unicode, "",
++    'XPIDL_MODULE': (str_type, str_type, "",
+         """XPCOM Interface Definition Module Name.
+ 
+         This is the name of the .xpt file that is created by linking
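Dropping `from __future__ import unicode_literals` (a 2.6 feature) means string literals in these modules are plain `str` again, so the declared types in VARIABLES switch from `unicode` to a `str_type = str` alias; otherwise literal values would presumably stop matching the declared storage type. A small sketch of that check, assuming the tuple layout (storage type, input type, default, docstring) suggested by the entries above:

    str_type = str

    VARIABLES = {
        # (storage type, input type, default, docstring) -- layout assumed
        # from the surrounding entries, not taken from mozbuild itself.
        'LIBRARY_NAME': (str_type, str_type, "", "Name of the library."),
    }

    def check(name, value):
        storage_type = VARIABLES[name][0]
        if not isinstance(value, storage_type):
            raise ValueError('%s expects %s' % (name, storage_type))
        return value

    check('LIBRARY_NAME', 'xul')     # a plain str literal passes on Python 2.4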
+diff -up mozilla/python/mozbuild/mozbuild/mach_commands.py.python2 mozilla/python/mozbuild/mozbuild/mach_commands.py
+--- mozilla/python/mozbuild/mozbuild/mach_commands.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/mach_commands.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import print_function, unicode_literals
+ 
+ import logging
+ import operator
+@@ -271,9 +270,10 @@ class Build(MachCommandBase):
+         warnings_path = self._get_state_filename('warnings.json')
+         monitor = BuildMonitor(self.topobjdir, warnings_path)
+ 
+-        with BuildOutputManager(self.log_manager, monitor) as output:
+-            monitor.start()
+-
++        output = BuildOutputManager(self.log_manager, monitor)
++        output.__enter__()
++        monitor.start()
++        if 1:
+             if what:
+                 top_make = os.path.join(self.topobjdir, 'Makefile')
+                 if not os.path.exists(top_make):
+@@ -290,6 +290,7 @@ class Build(MachCommandBase):
+                         path_arg.relpath())
+ 
+                     if make_dir is None and make_target is None:
++                        output.__exit__()
+                         return 1
+ 
+                     target_pairs.append((make_dir, make_target))
+@@ -300,8 +301,9 @@ class Build(MachCommandBase):
+                                                    add_extra_dependencies)
+                     depfile = os.path.join(self.topsrcdir, 'build',
+                                            'dumbmake-dependencies')
+-                    with open(depfile) as f:
+-                        dm = dependency_map(f.readlines())
++                    f = open(depfile)
++                    dm = dependency_map(f.readlines())
++                    f.close()
+                     new_pairs = list(add_extra_dependencies(target_pairs, dm))
+                     self.log(logging.DEBUG, 'dumbmake',
+                              {'target_pairs': target_pairs,
+@@ -330,6 +332,7 @@ class Build(MachCommandBase):
+ 
+             monitor.finish()
+ 
++        output.__exit__()
+         high_finder, finder_percent = monitor.have_high_finder_usage()
+         if high_finder:
+             print(FINDER_SLOW_MESSAGE % finder_percent)
+@@ -382,7 +385,7 @@ class Build(MachCommandBase):
+         try:
+             self.remove_objdir()
+             return 0
+-        except WindowsError as e:
++        except WindowsError, e:
+             if e.winerror in (5, 32):
+                 self.log(logging.ERROR, 'file_access_error', {'error': e},
+                     "Could not clobber because a file was in use. If the "
+@@ -610,7 +613,7 @@ class RunProgram(MachCommandBase):
+     def run(self, params, remote, background):
+         try:
+             args = [self.get_binary_path('app')]
+-        except Exception as e:
++        except Exception, e:
+             print("It looks like your program isn't built.",
+                 "You can run |mach build| to build it.")
+             print(e)
+@@ -640,13 +643,13 @@ class DebugProgram(MachCommandBase):
+         import which
+         try:
+             debugger = which.which('gdb')
+-        except Exception as e:
++        except Exception, e:
+             print("You don't have gdb in your PATH")
+             print(e)
+             return 1
+         try:
+             args = [debugger, '--args', self.get_binary_path('app')]
+-        except Exception as e:
++        except Exception, e:
+             print("It looks like your program isn't built.",
+                 "You can run |mach build| to build it.")
+             print(e)
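Here the `with BuildOutputManager(...)` block is unrolled into explicit `__enter__()`/`__exit__()` calls plus an `if 1:` that preserves the original indentation. For reference, the context-manager protocol passes three values (exception type, value, traceback) to `__exit__` and guarantees it runs even when the body raises; a try/finally expansion that keeps both properties, shown on a stand-in class rather than BuildOutputManager itself:

    class Manager(object):                  # stand-in for any context manager
        def __enter__(self):
            return self
        def __exit__(self, exc_type, exc_value, tb):
            return False                    # do not swallow exceptions

    output = Manager()
    output.__enter__()
    try:
        pass                                # body that used to live in the with-block
    finally:
        # A fully faithful expansion would pass sys.exc_info() here when the
        # body raised; None/None/None matches the success path.
        output.__exit__(None, None, None)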
+diff -up mozilla/python/mozbuild/mozbuild/mozconfig.py.python2 mozilla/python/mozbuild/mozbuild/mozconfig.py
+--- mozilla/python/mozbuild/mozbuild/mozconfig.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/mozconfig.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,13 +2,47 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+-
+ import os
+ import re
+ import subprocess
+ 
+-from collections import defaultdict
++try:
++    from collections import defaultdict
++except:
++    class defaultdict(dict):
++        def __init__(self, default_factory=None, *a, **kw):
++            if (default_factory is not None and
++                not hasattr(default_factory, '__call__')):
++                raise TypeError('first argument must be callable')
++            dict.__init__(self, *a, **kw)
++            self.default_factory = default_factory
++        def __getitem__(self, key):
++            try:
++                return dict.__getitem__(self, key)
++            except KeyError:
++                return self.__missing__(key)
++        def __missing__(self, key):
++            if self.default_factory is None:
++                raise KeyError(key)
++            self[key] = value = self.default_factory()
++            return value
++        def __reduce__(self):
++            if self.default_factory is None:
++                args = tuple()
++            else:
++                args = self.default_factory,
++            return type(self), args, None, None, self.items()
++        def copy(self):
++            return self.__copy__()
++        def __copy__(self):
++            return type(self)(self.default_factory, self)
++        def __deepcopy__(self, memo):
++            import copy
++            return type(self)(self.default_factory,
++                              copy.deepcopy(self.items()))
++        def __repr__(self):
++            return 'defaultdict(%s, %s)' % (self.default_factory,
++                                            dict.__repr__(self))
+ from mach.mixin.process import ProcessExecutionMixin
+ 
+ 
+@@ -183,7 +217,7 @@ class MozconfigLoader(ProcessExecutionMi
+             # errors if execution fails.
+             output = subprocess.check_output(args, stderr=subprocess.STDOUT,
+                 cwd=self.topsrcdir, env=env)
+-        except subprocess.CalledProcessError as e:
++        except subprocess.CalledProcessError, e:
+             lines = e.output.splitlines()
+ 
+             # Output before actual execution shouldn't be relevant.
+@@ -362,7 +396,8 @@ class MozconfigLoader(ProcessExecutionMi
+                             current.append(value)
+                             continue
+                         else:
+-                            value = value[:-1] if has_quote else value
++                            if has_quote:
++                               value = value[:-1]
+ 
+                 assert name is not None
+ 
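collections.defaultdict only appeared in Python 2.5, hence the inline fallback class above, which follows the familiar pure-Python recipe. Either implementation is used the same way; a quick check that exercises the __missing__ path:

    from collections import defaultdict   # 2.5+; on 2.4 the fallback class above fills in

    counts = defaultdict(int)
    for word in ['a', 'b', 'a']:
        counts[word] += 1                  # missing keys come from __missing__ -> int() == 0
    assert counts == {'a': 2, 'b': 1}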
+diff -up mozilla/python/mozbuild/mozbuild/test/test_base.py.python2 mozilla/python/mozbuild/mozbuild/test/test_base.py
+--- mozilla/python/mozbuild/mozbuild/test/test_base.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/test/test_base.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ import os
+ import sys
+diff -up mozilla/python/mozbuild/mozbuild/util.py.python2 mozilla/python/mozbuild/mozbuild/util.py
+--- mozilla/python/mozbuild/mozbuild/util.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozbuild/util.py	2013-12-07 22:18:39.000000000 +0100
+@@ -5,11 +5,9 @@
+ # This file contains miscellaneous utility functions that don't belong anywhere
+ # in particular.
+ 
+-from __future__ import unicode_literals
+-
+ import copy
+ import errno
+-import hashlib
++import sha
+ import os
+ import sys
+ 
+@@ -25,10 +23,10 @@ def hash_file(path):
+ 
+     # If the hashing function changes, this may invalidate lots of cached data.
+     # Don't change it lightly.
+-    h = hashlib.sha1()
++    h = sha.sha()
+ 
+-    with open(path, 'rb') as fh:
+-        while True:
++    fh = open(path, 'rb')
++    while True:
+             data = fh.read(8192)
+ 
+             if not len(data):
+@@ -36,6 +34,7 @@ def hash_file(path):
+ 
+             h.update(data)
+ 
++    fh.close()
+     return h.hexdigest()
+ 
+ 
+@@ -135,15 +134,17 @@ class FileAvoidWrite(StringIO):
+         else:
+             try:
+                 if existing.read() == buf:
++                    existing.close()
+                     return True, False
+-            except IOError:
++                
++            #except IOError:
++            except ValueError:
+                 pass
+-            finally:
+-                existing.close()
+ 
+         ensureParentDir(self.filename)
+-        with open(self.filename, 'w') as file:
+-            file.write(buf)
++        file_ = open(self.filename, 'w')
++        file_.write(buf)
++        file_.close()
+ 
+         return existed, True
+ 
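hashlib is another 2.5 addition, so hash_file() falls back to the old sha module here. The deprecated module computes the same SHA-1 digests, so previously cached hashes stay valid; a minimal equivalence check, using sha.new(), the documented constructor (the hunk spells it sha.sha()):

    import sha          # long-deprecated SHA-1 module, present on Python 2.4
    import hashlib      # 2.5+, used here only to show the digests match

    data = 'some file contents'
    assert sha.new(data).hexdigest() == hashlib.sha1(data).hexdigest()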
+diff -up mozilla/python/mozbuild/mozpack/chrome/manifest.py.python2 mozilla/python/mozbuild/mozpack/chrome/manifest.py
+--- mozilla/python/mozbuild/mozpack/chrome/manifest.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/chrome/manifest.py	2013-12-07 22:18:39.000000000 +0100
+@@ -8,6 +8,7 @@ from urlparse import urlparse
+ import mozpack.path
+ from mozpack.chrome.flags import Flags
+ from mozpack.errors import errors
++from rhrebase import all
+ 
+ 
+ class ManifestEntry(object):
+@@ -196,8 +197,8 @@ class ManifestOverload(ManifestEntry):
+     @property
+     def localized(self):
+         u = urlparse(self.overload)
+-        return u.scheme == 'chrome' and \
+-               u.path.split('/')[0:2] == ['', 'locale']
++        return u[0] == 'chrome' and \
++               u[2].split('/')[0:2] == ['', 'locale']
+ 
+ 
+ class ManifestOverlay(ManifestOverload):
+@@ -247,7 +248,7 @@ class ManifestResource(ManifestEntry):
+ 
+     def rebase(self, base):
+         u = urlparse(self.target)
+-        if u.scheme and u.scheme != 'jar':
++        if u[0] and u[0] != 'jar':
+             return ManifestEntry.rebase(self, base)
+         clone = ManifestEntry.rebase(self, base)
+         clone.target = mozpack.path.rebase(self.base, base, self.target)
+@@ -351,7 +352,8 @@ def parse_manifest(root, path, fileobj=N
+     linenum = 0
+     for line in fileobj:
+         linenum += 1
+-        with errors.context(path, linenum):
++        #with errors.context(path, linenum):
++        if 1:
+             e = parse_manifest_line(base, line)
+             if e:
+                 yield e
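Two more 2.4 gaps show up in manifest.py: the named attributes on urlparse() results (.scheme, .path) only arrived in Python 2.5, so the code switches to positional indexing into the classic 6-tuple, and the all() builtin (also 2.5) is pulled in from the rhrebase helper module this patch bundles. The positions being relied on, plus a sketch of what such an all() stand-in boils down to:

    from urlparse import urlparse           # Python 2 module name

    # urlparse() has always returned a 6-tuple:
    #   (scheme, netloc, path, params, query, fragment)
    u = urlparse('http://example.com/locale/browser.dtd')
    assert u[0] == 'http'                    # u.scheme needs Python 2.5+
    assert u[2] == '/locale/browser.dtd'     # u.path  needs Python 2.5+

    def all_(iterable):
        # roughly what a 2.4 replacement for the 2.5 builtin all() looks like;
        # the patch imports its own version from the bundled rhrebase module.
        for element in iterable:
            if not element:
                return False
        return True

    assert all_([True, 1, 'x']) and not all_([True, 0])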
+diff -up mozilla/python/mozbuild/mozpack/copier.py.python2 mozilla/python/mozbuild/mozpack/copier.py
+--- mozilla/python/mozbuild/mozpack/copier.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/copier.py	2013-12-07 22:18:39.000000000 +0100
+@@ -10,7 +10,7 @@ from mozpack.files import (
+ )
+ import mozpack.path
+ import errno
+-from collections import (
++from namedtuple import (
+     namedtuple
+ )
+ from ordereddict import OrderedDict
+@@ -22,7 +22,7 @@ def ensure_parent_dir(file):
+         return
+     try:
+         os.makedirs(dir)
+-    except OSError as error:
++    except OSError, error:
+         if error.errno != errno.EEXIST:
+             raise
+ 
+@@ -278,8 +278,12 @@ class Jarrer(FileRegistry, BaseFile):
+         old_contents = dict([(f.filename, f) for f in old_jar])
+         print "Old content", old_contents
+         print "JarWriter"
+-        with JarWriter(fileobj=dest, compress=self.compress,
+-                       optimize=self.optimize) as jar:
++        jar = JarWriter(fileobj=dest, compress=self.compress,
++                       optimize=self.optimize)
++#        with JarWriter(fileobj=dest, compress=self.compress,
++#                       optimize=self.optimize) as jar:
++        jar.__enter__()
++        if 1:
+             for path, file in self:
+                 if path in old_contents:
+                     deflater = DeflaterDest(old_contents[path], self.compress)
+@@ -289,6 +293,7 @@ class Jarrer(FileRegistry, BaseFile):
+                 jar.add(path, deflater.deflater)
+             if self._preload:
+                 jar.preload(self._preload)
++        jar.__exit__(0, 0, 0)
+ 
+     def open(self):
+         raise RuntimeError('unsupported')
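copier.py swaps collections.namedtuple (new in Python 2.6) for a locally bundled namedtuple module and unrolls the JarWriter with-block the same way mach_commands.py does above, this time calling __exit__(0, 0, 0) explicitly. namedtuple usage is unchanged by where the factory comes from:

    from collections import namedtuple   # 2.6+; on 2.4 the bundled backport provides it

    Result = namedtuple('Result', ['existed', 'updated'])
    r = Result(existed=True, updated=False)
    assert (r.existed, r.updated) == (True, False)
    assert tuple(r) == (True, False)      # still a plain tuple underneath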
+diff -up mozilla/python/mozbuild/mozpack/errors.py.python2 mozilla/python/mozbuild/mozpack/errors.py
+--- mozilla/python/mozbuild/mozpack/errors.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/errors.py	2013-12-07 22:18:39.000000000 +0100
+@@ -3,7 +3,7 @@
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+ import sys
+-from contextlib import contextmanager
++# not available on Python 2.4: from contextlib import contextmanager
+ 
+ 
+ class ErrorMessage(Exception):
+@@ -110,19 +110,19 @@ class ErrorCollector(object):
+         if self._context:
+             return self._context[-1]
+ 
+-    @contextmanager
++    #@contextmanager
+     def context(self, file, line):
+         if file and line:
+             self._context.append((file, line))
+-        yield
++       # yield dropped: no @contextmanager on Python 2.4
+         if file and line:
+             self._context.pop()
+ 
+-    @contextmanager
++    #@contextmanager
+     def accumulate(self):
+         assert self._count is None
+         self._count = 0
+-        yield
++        # yield dropped: no @contextmanager on Python 2.4
+         count = self._count
+         self._count = None
+         if count:
+@@ -131,7 +131,11 @@ class ErrorCollector(object):
+     @property
+     def count(self):
+         # _count can be None.
+-        return self._count if self._count else 0
++ 
++        ret = 0
++        if self._count:
++          ret = self._count
++        return ret
+ 
+ 
+ errors = ErrorCollector()
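contextlib itself is a Python 2.5 module, so the @contextmanager decorators on ErrorCollector.context() and accumulate() are dropped and the yields removed, turning them into plain methods; that is also why every `with errors.context(...)` call site later in the patch degrades to `if 1:`. For reference, this is what the decorator provides on newer interpreters, i.e. what is being given up here (a generic sketch, not the mozpack code):

    from __future__ import with_statement        # needed on 2.5 only
    from contextlib import contextmanager        # Python 2.5+

    @contextmanager
    def context(stack, item):
        stack.append(item)        # runs on entry to the with-block
        try:
            yield
        finally:
            stack.pop()           # runs on exit, even if the block raised

    stack = []
    with context(stack, ('chrome.manifest', 1)):
        assert stack == [('chrome.manifest', 1)]
    assert stack == []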
+diff -up mozilla/python/mozbuild/mozpack/executables.py.python2 mozilla/python/mozbuild/mozpack/executables.py
+--- mozilla/python/mozbuild/mozpack/executables.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/executables.py	2013-12-07 22:18:39.000000000 +0100
+@@ -31,16 +31,21 @@ def get_type(path):
+     Check the signature of the give file and returns what kind of executable
+     matches.
+     '''
+-    with open(path, 'rb') as f:
++    f = open(path, 'rb')
++    if 1:
+         signature = f.read(4)
+         if len(signature) < 4:
++            f.close()
+             return UNKNOWN
+         signature = struct.unpack('>L', signature)[0]
+         if signature == ELF_SIGNATURE:
++            f.close()
+             return ELF
+         if signature in MACHO_SIGNATURES:
++            f.close()
+             return MACHO
+         if signature != FAT_SIGNATURE:
++            f.close()
+             return UNKNOWN
+         # We have to sanity check the second four bytes, because Java class
+         # files use the same magic number as Mach-O fat binaries.
+@@ -51,10 +56,13 @@ def get_type(path):
+         # class format used the version 43.0.
+         num = f.read(4)
+         if len(num) < 4:
++            f.close()
+             return UNKNOWN
+         num = struct.unpack('>L', num)[0]
+         if num < 20:
++            f.close()
+             return MACHO
++        f.close()
+         return UNKNOWN
+ 
+ 
+@@ -93,7 +101,9 @@ def strip(path):
+     Execute the STRIP command with STRIP_FLAGS on the given path.
+     '''
+     strip = substs['STRIP']
+-    flags = substs['STRIP_FLAGS'].split() if 'STRIP_FLAGS' in substs else []
++    flags = []
++    if 'STRIP_FLAGS' in substs:
++      flags = substs['STRIP_FLAGS'].split()
+     cmd = [strip] + flags + [path]
+     if subprocess.call(cmd) != 0:
+         errors.fatal('Error executing ' + ' '.join(cmd))
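Conditional expressions (`x if cond else y`) are 2.5 syntax as well, which is why one-liners like the STRIP_FLAGS lookup get expanded into plain if/else statements here and in several later hunks. The transformation is mechanical:

    substs = {'STRIP': 'strip'}                   # toy stand-in for the build config dict

    # 2.5+ one-liner:
    #   flags = substs['STRIP_FLAGS'].split() if 'STRIP_FLAGS' in substs else []
    # 2.4-compatible form used by the patch:
    flags = []
    if 'STRIP_FLAGS' in substs:
        flags = substs['STRIP_FLAGS'].split()

    assert flags == []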
+diff -up mozilla/python/mozbuild/mozpack/files.py.python2 mozilla/python/mozbuild/mozpack/files.py
+--- mozilla/python/mozbuild/mozpack/files.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/files.py	2013-12-07 22:18:39.000000000 +0100
+@@ -13,7 +13,8 @@ from mozpack.executables import (
+     elfhack,
+ )
+ from mozpack.chrome.manifest import ManifestEntry
+-from io import BytesIO
++from StringIO import StringIO as BytesIO
++#from io import BytesIO
+ from mozpack.errors import ErrorMessage
+ from mozpack.mozjar import JarReader
+ import mozpack.path
+@@ -275,7 +276,9 @@ class ManifestFile(BaseFile):
+         currently but could in the future.
+     '''
+     def __init__(self, base, entries=None):
+-        self._entries = entries if entries else []
++        self._entries = []
++        if entries:
++           self._entries = entries 
+         self._base = base
+ 
+     def add(self, entry):
+diff -up mozilla/python/mozbuild/mozpack/mozjar.py.python2 mozilla/python/mozbuild/mozpack/mozjar.py
+--- mozilla/python/mozbuild/mozpack/mozjar.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/mozjar.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,8 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from io import BytesIO
++#from io import BytesIO
++from StringIO import StringIO as BytesIO
+ import struct
+ import zlib
+ import os
+@@ -11,7 +12,8 @@ from zipfile import (
+     ZIP_DEFLATED,
+ )
+ from ordereddict import OrderedDict
+-from urlparse import urlparse, ParseResult
++from urlparse import urlparse
++#, ParseResult
+ import mozpack.path
+ 
+ JAR_STORED = ZIP_STORED
+@@ -108,7 +110,9 @@ class JarStruct(object):
+         for name, t in self.STRUCT.iteritems():
+             if name in self.size_fields:
+                 continue
+-            self._values[name] = 0 if t in JarStruct.TYPE_MAPPING else ''
++            self._values[name] = ''
++            if t in JarStruct.TYPE_MAPPING:
++                self._values[name] = 0
+ 
+     @staticmethod
+     def get_data(type, data):
+@@ -283,7 +287,8 @@ class JarFileReader(object):
+         '''
+         return iter(self.readlines())
+ 
+-    def seek(self, pos, whence=os.SEEK_SET):
++    def seek(self, pos, whence=0):
++    #def seek(self, pos, whence=os.SEEK_SET):
+         '''
+         Change the current position in the uncompressed data. Subsequent reads
+         will start from there.
+@@ -767,6 +772,7 @@ class JarLog(dict):
+         - jar:jar:file:///{path}!/{subpath}!/{subpath2} becomes
+            ({path}, {subpath}, {subpath2})
+         '''
++        print url
+         if not isinstance(url, ParseResult):
+             # Assume that if it doesn't start with jar: or file:, it's a path.
+             if not url.startswith(('jar:', 'file:')):
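mozjar.py loses three more post-2.4 conveniences: the io module (2.6) is swapped for StringIO, os.SEEK_SET (2.5) becomes the literal 0 it has always equalled, and the ParseResult import is commented out because urlparse only grew named result classes in later releases. Note that the isinstance(url, ParseResult) check visible in the JarLog hunk still references the name, so that code path presumably is not exercised during an EL5 build. The constants and the StringIO substitution, for reference:

    import os

    # os.SEEK_SET / SEEK_CUR / SEEK_END only exist from Python 2.5; their
    # values are fixed, so hard-coding 0 for "seek from the start" is equivalent.
    assert (os.SEEK_SET, os.SEEK_CUR, os.SEEK_END) == (0, 1, 2)

    from StringIO import StringIO as BytesIO   # 2.4 stand-in for io.BytesIO (io is 2.6+)
    buf = BytesIO()
    buf.write('PK\x03\x04')                    # bytes and str are the same type on Python 2
    buf.seek(0, 0)                             # whence=0 == SEEK_SET
    assert buf.read(2) == 'PK'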
+diff -up mozilla/python/mozbuild/mozpack/packager/formats.py.python2 mozilla/python/mozbuild/mozpack/packager/formats.py
+--- mozilla/python/mozbuild/mozpack/packager/formats.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/packager/formats.py	2013-12-07 22:18:39.000000000 +0100
+@@ -20,6 +20,7 @@ from mozpack.copier import (
+     Jarrer,
+ )
+ 
++from rhrebase import any
+ STARTUP_CACHE_PATHS = [
+     'jsloader',
+     'jssubloader',
+diff -up mozilla/python/mozbuild/mozpack/packager/__init__.py.python2 mozilla/python/mozbuild/mozpack/packager/__init__.py
+--- mozilla/python/mozbuild/mozpack/packager/__init__.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/packager/__init__.py	2013-12-07 22:18:39.000000000 +0100
+@@ -99,7 +99,9 @@ class Component(object):
+             raise ValueError('No component found')
+         if not re.match('[a-zA-Z0-9_\-]+$', component):
+             raise ValueError('Bad component name ' + component)
+-        options = Component._split_options(splits[1]) if len(splits) > 1 else {}
++        options = {}
++        if len(splits) > 1:
++          options = Component._split_options(splits[1])
+         return component, options
+ 
+     @staticmethod
+@@ -109,7 +111,7 @@ class Component(object):
+         '''
+         try:
+             name, options = Component._split_component_and_options(string)
+-        except ValueError as e:
++        except ValueError, e:
+             errors.fatal('Malformed manifest: %s' % e)
+             return
+         destdir = options.pop('destdir', '')
+@@ -174,8 +176,8 @@ class PreprocessorOutputWrapper(object):
+ 
+     def write(self, str):
+         file = os.path.normpath(os.path.abspath(self._pp.context['FILE']))
+-        with errors.context(file, self._pp.context['LINE']):
+-            self._parser.handle_line(str)
++        #with errors.context(file, self._pp.context['LINE']):
++        self._parser.handle_line(str)
+ 
+ 
+ def preprocess(input, parser, defines={}):
+@@ -213,8 +215,8 @@ class CallDeque(deque):
+             except IndexError:
+                 return
+             if context:
+-                with errors.context(context[0], context[1]):
+-                    function(*args)
++                #with errors.context(context[0], context[1]):
++                function(*args)
+             else:
+                 function(*args)
+ 
+diff -up mozilla/python/mozbuild/mozpack/packager/unpack.py.python2 mozilla/python/mozbuild/mozpack/packager/unpack.py
+--- mozilla/python/mozbuild/mozpack/packager/unpack.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/packager/unpack.py	2013-12-07 22:18:39.000000000 +0100
+@@ -68,8 +68,12 @@ class UnpackFinder(FileFinder):
+             # jar: urls. If there are some, the files contained in the jar they
+             # point to, go under a directory named after the jar.
+             if is_manifest(p):
+-                m = self.files[p] if self.files.contains(p) \
+-                    else ManifestFile(base)
++                m = False
++                if self.files.contains(p):
++                  m = self.files[p]
++                else:
++                  m = ManifestFile(base)
++
+                 for e in parse_manifest(self.base, p, f.open()):
+                     m.add(self._handle_manifest_entry(e, jars))
+                 if self.files.contains(p):
+@@ -82,8 +86,11 @@ class UnpackFinder(FileFinder):
+         for j in jar:
+             path = mozpack.path.join(base, j.filename)
+             if is_manifest(j.filename):
+-                m = self.files[path] if self.files.contains(path) \
+-                    else ManifestFile(mozpack.path.dirname(path))
++                m = False
++                if self.files.contains(path):
++                  m = self.files[path] 
++                else:
++                  m = ManifestFile(mozpack.path.dirname(path))
+                 for e in parse_manifest(None, path, j):
+                     m.add(e)
+                 if not self.files.contains(path):
+@@ -95,10 +102,10 @@ class UnpackFinder(FileFinder):
+     def _handle_manifest_entry(self, entry, jars):
+         jarpath = None
+         if isinstance(entry, ManifestEntryWithRelPath) and \
+-                urlparse(entry.relpath).scheme == 'jar':
++                urlparse(entry.relpath)[0] == 'jar':
+             jarpath, entry = self._unjarize(entry, entry.relpath)
+         elif isinstance(entry, ManifestResource) and \
+-                urlparse(entry.target).scheme == 'jar':
++                urlparse(entry.target)[0] == 'jar':
+             jarpath, entry = self._unjarize(entry, entry.target)
+         if jarpath:
+             # Don't defer unpacking the jar file. If we already saw
+@@ -153,7 +160,7 @@ class UnpackFinder(FileFinder):
+         the new entry.
+         '''
+         base = entry.base
+-        jar, relpath = urlparse(relpath).path.split('!', 1)
++        jar, relpath = urlparse(relpath)[2].split('!', 1)
+         entry = entry.rebase(mozpack.path.join(base, 'jar:%s!' % jar)) \
+             .move(mozpack.path.join(base, mozpack.path.splitext(jar)[0])) \
+             .rebase(base)
+diff -up mozilla/python/mozbuild/mozpack/path.py.python2 mozilla/python/mozbuild/mozpack/path.py
+--- mozilla/python/mozbuild/mozpack/path.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/path.py	2013-12-07 22:18:39.000000000 +0100
+@@ -5,6 +5,59 @@
+ import posixpath
+ import os
+ import re
++import sys
++
++if not hasattr(os, 'relpath'):
++    if os.path is sys.modules.get('ntpath'):
++        def relpath(path, start=os.path.curdir):
++            """Return a relative version of a path"""
++        
++            if not path:
++                raise ValueError("no path specified")
++            start_list = os.path.abspath(start).split(os.path.sep)
++            path_list = os.path.abspath(path).split(os.path.sep)
++            if start_list[0].lower() != path_list[0].lower():
++                unc_path, rest = os.path.splitunc(path)
++                unc_start, rest = os.path.splitunc(start)
++                if bool(unc_path) ^ bool(unc_start):
++                    raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
++                                                                        % (path, start))
++                else:
++                    raise ValueError("path is on drive %s, start on drive %s"
++                                                        % (path_list[0], start_list[0]))
++            # Work out how much of the filepath is shared by start and path.
++            for i in range(min(len(start_list), len(path_list))):
++                if start_list[i].lower() != path_list[i].lower():
++                    break
++            else:
++                i += 1
++        
++            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
++            if not rel_list:
++                return os.path.curdir
++            return os.path.join(*rel_list)
++    
++    else:
++        # default to posixpath definition
++        def relpath(path, start=os.path.curdir):
++            """Return a relative version of a path"""
++        
++            if not path:
++                raise ValueError("no path specified")
++        
++            start_list = os.path.abspath(start).split(os.path.sep)
++            path_list = os.path.abspath(path).split(os.path.sep)
++        
++            # Work out how much of the filepath is shared by start and path.
++            i = len(os.path.commonprefix([start_list, path_list]))
++        
++            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
++            if not rel_list:
++                return os.path.curdir
++            return os.path.join(*rel_list)
++        
++    os.path.relpath = relpath
++
+ 
+ '''
+ Like os.path, with a reduced set of functions, and with normalized path
+@@ -25,8 +78,10 @@ def normsep(path):
+ 
+ def relpath(path, start):
+     rel = normsep(os.path.relpath(path, start))
+-    return '' if rel == '.' else rel
+-
++    if rel == '.':
++      return ''
++    else:
++      return rel
+ 
+ def join(*paths):
+     paths = [normsep(p) for p in paths]
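os.path.relpath is a 2.6 addition, so this hunk inlines the ntpath/posixpath implementations and monkey-patches the result onto os.path. One quirk worth noting: the guard tests hasattr(os, 'relpath'), but relpath lives in os.path rather than os, so the backport is installed even on interpreters that already ship it; since it mirrors the stock implementation this is harmless, just redundant. Expected behaviour either way:

    import os.path

    # relpath(path, start) expresses `path` relative to `start`.
    assert os.path.relpath('/a/b/c', '/a') == os.path.join('b', 'c')
    assert os.path.relpath('/a/b', '/a/b') == os.path.curdir   # '.', which mozpack.path.relpath maps to ''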
+diff -up mozilla/python/mozbuild/mozpack/test/test_files.py.python2 mozilla/python/mozbuild/mozpack/test/test_files.py
+--- mozilla/python/mozbuild/mozpack/test/test_files.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/mozbuild/mozpack/test/test_files.py	2013-12-07 22:18:39.000000000 +0100
+@@ -126,8 +126,9 @@ class TestFile(TestWithTmpDir):
+         dest = self.tmppath('dest')
+ 
+         for content in samples:
+-            with open(src, 'wb') as tmp:
+-                tmp.write(content)
++            tmp = open(src, 'wb')
++            tmp.write(content)
++            tmp.close()
+             # Ensure the destination file, when it exists, is older than the
+             # source
+             if os.path.exists(dest):
+@@ -150,8 +151,9 @@ class TestFile(TestWithTmpDir):
+         dest = MockDest()
+ 
+         for content in samples:
+-            with open(src, 'wb') as tmp:
+-                tmp.write(content)
++            tmp = open(src, 'wb')
++            tmp.write(content)
++            tmp.close()
+             f = File(src)
+             f.copy(dest)
+             self.assertEqual(content, dest.getvalue())
+@@ -162,8 +164,9 @@ class TestFile(TestWithTmpDir):
+         '''
+         src = self.tmppath('src')
+         content = ''.join(samples)
+-        with open(src, 'wb') as tmp:
+-            tmp.write(content)
++        tmp = open(src, 'wb')
++        tmp.write(content)
++        tmp.close()
+ 
+         f = File(src)
+         self.assertEqual(content[:42], f.open().read(42))
+@@ -177,8 +180,9 @@ class TestFile(TestWithTmpDir):
+         src = self.tmppath('src')
+         dest = self.tmppath('dest')
+ 
+-        with open(src, 'wb') as tmp:
+-            tmp.write('test')
++        tmp = open(src, 'wb')
++        tmp.write('test')
++        tmp.close()
+ 
+         # Initial copy
+         f = File(src)
+@@ -197,8 +201,9 @@ class TestFile(TestWithTmpDir):
+ 
+         # When the source file is older than the destination file, even with
+         # different content, no copy should occur.
+-        with open(src, 'wb') as tmp:
+-            tmp.write('fooo')
++        tmp = open(src, 'wb')
++        tmp.write('fooo')
++        tmp.close()
+         time = os.path.getmtime(dest) - 1
+         os.utime(src, (time, time))
+         f.copy(DestNoWrite(dest))
+diff -up mozilla/python/psutil/psutil/__init__.py.python2 mozilla/python/psutil/psutil/__init__.py
+--- mozilla/python/psutil/psutil/__init__.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/psutil/psutil/__init__.py	2013-12-07 22:18:39.000000000 +0100
+@@ -10,7 +10,6 @@ processes and gather system information 
+ Python.
+ """
+ 
+-from __future__ import division
+ 
+ __version__ = "0.7.1"
+ version_info = tuple([int(num) for num in __version__.split('.')])
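Removing `from __future__ import division` is the one __future__ cleanup in this patch that can change behaviour rather than just syntax: without it, `/` between two integers truncates, so any integer-over-integer ratios psutil computes would round down on EL5. The difference in two lines (Python 2 semantics):

    # without the __future__ import, as in the patched file:
    assert 1 / 2 == 0            # two ints: floor division
    assert 1 / 2.0 == 0.5        # promoting either operand restores true division
    # with `from __future__ import division`, 1 / 2 would be 0.5 as well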
+diff -up mozilla/python/virtualenv/virtualenv_embedded/site.py.python2 mozilla/python/virtualenv/virtualenv_embedded/site.py
+--- mozilla/python/virtualenv/virtualenv_embedded/site.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/virtualenv/virtualenv_embedded/site.py	2013-12-07 22:18:39.000000000 +0100
+@@ -238,7 +238,7 @@ def addsitepackages(known_paths, sys_pre
+                 lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages")
+                 if (os.path.exists(lib64_dir) and 
+                     os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]):
+-                    if sys.maxsize > 2**32:
++                    if sys.maxint > 2**32:
+                         sitedirs.insert(0, lib64_dir)
+                     else:
+                         sitedirs.append(lib64_dir)
+@@ -580,7 +580,7 @@ def virtual_install_main_packages():
+         hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below
+         lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3])
+         if os.path.exists(lib64_path):
+-            if sys.maxsize > 2**32:
++            if sys.maxint > 2**32:
+                 paths.insert(0, lib64_path)
+             else:
+                 paths.append(lib64_path)
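sys.maxsize only exists from Python 2.6; on older interpreters the equivalent 64-bit check is sys.maxint, which is 2**31 - 1 on a 32-bit build and 2**63 - 1 on a 64-bit one, so the `> 2**32` comparison keeps working:

    import sys

    # True on a 64-bit CPython 2.4/2.5, False on a 32-bit one; this is what
    # decides whether the lib64 site-packages directory is searched first.
    is_64bit = sys.maxint > 2**32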
+diff -up mozilla/python/virtualenv/virtualenv.py.python2 mozilla/python/virtualenv/virtualenv.py
+--- mozilla/python/virtualenv/virtualenv.py.python2	2013-12-06 16:45:18.000000000 +0100
++++ mozilla/python/virtualenv/virtualenv.py	2013-12-07 22:18:39.000000000 +0100
+@@ -24,10 +24,14 @@ from distutils.util import strtobool
+ import struct
+ import subprocess
+ 
+-if sys.version_info < (2, 5):
+-    print('ERROR: %s' % sys.exc_info()[1])
+-    print('ERROR: this script requires Python 2.5 or greater.')
+-    sys.exit(101)
++# FIXME: should also handle /usr/lib, not just lib64
++sys.path.append('/usr/lib64/python2.4')
++
++# No check for python24
++#if sys.version_info < (2, 5):
++#    print('ERROR: %s' % sys.exc_info()[1])
++#    print('ERROR: this script requires Python 2.5 or greater.')
++#    sys.exit(101)
+ 
+ try:
+     set
+@@ -512,7 +516,8 @@ def _install_req(py_executable, unzip=Fa
+             tgz_path = 'distribute-*.tar.gz'
+             found, tgz_path = _find_file(tgz_path, search_dirs)
+             bootstrap_script = DISTRIBUTE_SETUP_PY
+-
++    # FIXME: prepend bootstrap_script with the extra sys.path entry
++    bootstrap_script = "import sys;sys.path.append('/usr/lib64/python2.4');"+ bootstrap_script
+     if is_jython and os._name == 'nt':
+         # Jython's .bat sys.executable can't handle a command line
+         # argument with newlines
+@@ -982,6 +987,7 @@ def call_subprocess(cmd, show_stdout=Tru
+                 part = part.decode(sys.getfilesystemencoding())
+         cmd_parts.append(part)
+     cmd_desc = ' '.join(cmd_parts)
++    print ' '.join(cmd)
+     if show_stdout:
+         stdout = None
+     else:
+@@ -1267,7 +1273,9 @@ def install_python(home_dir, lib_dir, in
+         site_filename = site_filename.replace('$py.class', '.py')
+     site_filename_dst = change_prefix(site_filename, home_dir)
+     site_dir = os.path.dirname(site_filename_dst)
+-    writefile(site_filename_dst, SITE_PY)
++    #writefile(site_filename_dst, SITE_PY)
++    # copy patched file instead
++    shutil.copyfile(os.path.dirname(os.path.realpath(__file__))+"/virtualenv_embedded/site.py", site_filename_dst)
+     writefile(join(site_dir, 'orig-prefix.txt'), prefix)
+     site_packages_filename = join(site_dir, 'no-global-site-packages.txt')
+     if not site_packages:
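The virtualenv hunks are the most site-specific part of the patch: they disable the interpreter version check, hard-code /usr/lib64/python2.4 onto sys.path (both for virtualenv itself and for the distribute bootstrap script), and copy the patched site.py into the new environment instead of the embedded SITE_PY blob. A less arch-specific way to compute that directory, shown only as a hypothetical alternative to the hard-coded path, would be distutils:

    from distutils.sysconfig import get_python_lib

    # The platform-specific standard-library directory of the running
    # interpreter, e.g. /usr/lib64/python2.4 on a 64-bit EL5 box; the patch
    # pins the lib64 spelling instead.
    stdlib_dir = get_python_lib(plat_specific=1, standard_lib=1)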
+diff -up mozilla/rebase/abc.py.python2 mozilla/rebase/abc.py
+--- mozilla/rebase/abc.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/rebase/abc.py	2013-12-07 22:18:39.000000000 +0100
+@@ -0,0 +1,182 @@
++# Copyright 2007 Google, Inc. All Rights Reserved.
++# Licensed to PSF under a Contributor Agreement.
++
++"""Abstract Base Classes (ABCs) according to PEP 3119."""
++
++
++# Instance of old-style class
++class _C: pass
++_InstanceType = type(_C())
++
++
++def abstractmethod(funcobj):
++    """A decorator indicating abstract methods.
++
++    Requires that the metaclass is ABCMeta or derived from it.  A
++    class that has a metaclass derived from ABCMeta cannot be
++    instantiated unless all of its abstract methods are overridden.
++    The abstract methods can be called using any of the normal
++    'super' call mechanisms.
++
++    Usage:
++
++        class C:
++            __metaclass__ = ABCMeta
++            @abstractmethod
++            def my_abstract_method(self, ...):
++                ...
++    """
++    funcobj.__isabstractmethod__ = True
++    return funcobj
++
++
++class abstractproperty(property):
++    """A decorator indicating abstract properties.
++
++    Requires that the metaclass is ABCMeta or derived from it.  A
++    class that has a metaclass derived from ABCMeta cannot be
++    instantiated unless all of its abstract properties are overridden.
++    The abstract properties can be called using any of the normal
++    'super' call mechanisms.
++
++    Usage:
++
++        class C:
++            __metaclass__ = ABCMeta
++            @abstractproperty
++            def my_abstract_property(self):
++                ...
++
++    This defines a read-only property; you can also define a read-write
++    abstract property using the 'long' form of property declaration:
++
++        class C:
++            __metaclass__ = ABCMeta
++            def getx(self): ...
++            def setx(self, value): ...
++            x = abstractproperty(getx, setx)
++    """
++    __isabstractmethod__ = True
++
++
++class ABCMeta(type):
++
++    """Metaclass for defining Abstract Base Classes (ABCs).
++
++    Use this metaclass to create an ABC.  An ABC can be subclassed
++    directly, and then acts as a mix-in class.  You can also register
++    unrelated concrete classes (even built-in classes) and unrelated
++    ABCs as 'virtual subclasses' -- these and their descendants will
++    be considered subclasses of the registering ABC by the built-in
++    issubclass() function, but the registering ABC won't show up in
++    their MRO (Method Resolution Order) nor will method
++    implementations defined by the registering ABC be callable (not
++    even via super()).
++
++    """
++
++    # A global counter that is incremented each time a class is
++    # registered as a virtual subclass of anything.  It forces the
++    # negative cache to be cleared before its next use.
++    _abc_invalidation_counter = 0
++
++    def __new__(mcls, name, bases, namespace):
++        cls = super(ABCMeta, mcls).__new__(mcls, name, bases, namespace)
++        # Compute set of abstract method names
++        abstracts = set(name
++                     for name, value in namespace.items()
++                     if getattr(value, "__isabstractmethod__", False))
++        for base in bases:
++            for name in getattr(base, "__abstractmethods__", set()):
++                value = getattr(cls, name, None)
++                if getattr(value, "__isabstractmethod__", False):
++                    abstracts.add(name)
++        cls.__abstractmethods__ = frozenset(abstracts)
++        # Set up inheritance registry
++        cls._abc_registry = set()
++        cls._abc_cache = set()
++        cls._abc_negative_cache = set()
++        cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
++        return cls
++
++    def register(cls, subclass):
++        """Register a virtual subclass of an ABC."""
++        if not isinstance(cls, type):
++            raise TypeError("Can only register classes")
++        if issubclass(subclass, cls):
++            return  # Already a subclass
++        # Subtle: test for cycles *after* testing for "already a subclass";
++        # this means we allow X.register(X) and interpret it as a no-op.
++        if issubclass(cls, subclass):
++            # This would create a cycle, which is bad for the algorithm below
++            raise RuntimeError("Refusing to create an inheritance cycle")
++        cls._abc_registry.add(subclass)
++        ABCMeta._abc_invalidation_counter += 1  # Invalidate negative cache
++
++    def _dump_registry(cls, file=None):
++        """Debug helper to print the ABC registry."""
++        print >> file, "Class: %s.%s" % (cls.__module__, cls.__name__)
++        print >> file, "Inv.counter: %s" % ABCMeta._abc_invalidation_counter
++        for name in sorted(cls.__dict__.keys()):
++            if name.startswith("_abc_"):
++                value = getattr(cls, name)
++                print >> file, "%s: %r" % (name, value)
++
++    def __instancecheck__(cls, instance):
++        """Override for isinstance(instance, cls)."""
++        # Inline the cache checking when it's simple.
++        subclass = getattr(instance, '__class__', None)
++        if subclass in cls._abc_cache:
++            return True
++        subtype = type(instance)
++        # Old-style instances
++        if subtype is _InstanceType:
++            subtype = subclass
++        if subtype is subclass or subclass is None:
++            if (cls._abc_negative_cache_version ==
++                ABCMeta._abc_invalidation_counter and
++                subtype in cls._abc_negative_cache):
++                return False
++            # Fall back to the subclass check.
++            return cls.__subclasscheck__(subtype)
++        return (cls.__subclasscheck__(subclass) or
++                cls.__subclasscheck__(subtype))
++
++    def __subclasscheck__(cls, subclass):
++        """Override for issubclass(subclass, cls)."""
++        # Check cache
++        if subclass in cls._abc_cache:
++            return True
++        # Check negative cache; may have to invalidate
++        if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter:
++            # Invalidate the negative cache
++            cls._abc_negative_cache = set()
++            cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
++        elif subclass in cls._abc_negative_cache:
++            return False
++        # Check the subclass hook
++        ok = cls.__subclasshook__(subclass)
++        if ok is not NotImplemented:
++            assert isinstance(ok, bool)
++            if ok:
++                cls._abc_cache.add(subclass)
++            else:
++                cls._abc_negative_cache.add(subclass)
++            return ok
++        # Check if it's a direct subclass
++        if cls in getattr(subclass, '__mro__', ()):
++            cls._abc_cache.add(subclass)
++            return True
++        # Check if it's a subclass of a registered class (recursive)
++        for rcls in cls._abc_registry:
++            if issubclass(subclass, rcls):
++                cls._abc_cache.add(subclass)
++                return True
++        # Check if it's a subclass of a subclass (recursive)
++        for scls in cls.__subclasses__():
++            if issubclass(subclass, scls):
++                cls._abc_cache.add(subclass)
++                return True
++        # No dice; update negative cache
++        cls._abc_negative_cache.add(subclass)
++        return False
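mozilla/rebase/abc.py is essentially a copy of the stock Python 2.6 abc module, added so that `from abc import ABCMeta, abstractmethod` keeps importing on 2.4. The enforcement half of ABCs lives in the interpreter, though: isinstance()/issubclass() only consult __instancecheck__/__subclasscheck__ from 2.6, and the refusal to instantiate classes with pending abstract methods is wired into object.__new__ from 2.6 as well, so on 2.4 this file mostly lets the importing modules load and record __abstractmethods__ without enforcing anything. Declaring an ABC with it still works, using the old-style __metaclass__ hook:

    from abc import ABCMeta, abstractmethod

    class Hasher(object):
        __metaclass__ = ABCMeta            # Python 2 spelling of metaclass=

        @abstractmethod
        def digest(self):
            pass

    class Sha1Hasher(Hasher):
        def digest(self):
            return '<20 bytes>'

    assert Hasher.__abstractmethods__ == frozenset(['digest'])
    assert Sha1Hasher().digest() == '<20 bytes>'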
+diff -up mozilla/rebase/argparse.py.python2 mozilla/rebase/argparse.py
+--- mozilla/rebase/argparse.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/rebase/argparse.py	2013-12-07 22:18:39.000000000 +0100
+@@ -0,0 +1,2362 @@
++# Author: Steven J. Bethard <steven.bethard at gmail.com>.
++
++"""Command-line parsing library
++
++This module is an optparse-inspired command-line parsing library that:
++
++    - handles both optional and positional arguments
++    - produces highly informative usage messages
++    - supports parsers that dispatch to sub-parsers
++
++The following is a simple usage example that sums integers from the
++command-line and writes the result to a file::
++
++    parser = argparse.ArgumentParser(
++        description='sum the integers at the command line')
++    parser.add_argument(
++        'integers', metavar='int', nargs='+', type=int,
++        help='an integer to be summed')
++    parser.add_argument(
++        '--log', default=sys.stdout, type=argparse.FileType('w'),
++        help='the file where the sum should be written')
++    args = parser.parse_args()
++    args.log.write('%s' % sum(args.integers))
++    args.log.close()
++
++The module contains the following public classes:
++
++    - ArgumentParser -- The main entry point for command-line parsing. As the
++        example above shows, the add_argument() method is used to populate
++        the parser with actions for optional and positional arguments. Then
++        the parse_args() method is invoked to convert the args at the
++        command-line into an object with attributes.
++
++    - ArgumentError -- The exception raised by ArgumentParser objects when
++        there are errors with the parser's actions. Errors raised while
++        parsing the command-line are caught by ArgumentParser and emitted
++        as command-line messages.
++
++    - FileType -- A factory for defining types of files to be created. As the
++        example above shows, instances of FileType are typically passed as
++        the type= argument of add_argument() calls.
++
++    - Action -- The base class for parser actions. Typically actions are
++        selected by passing strings like 'store_true' or 'append_const' to
++        the action= argument of add_argument(). However, for greater
++        customization of ArgumentParser actions, subclasses of Action may
++        be defined and passed as the action= argument.
++
++    - HelpFormatter, RawDescriptionHelpFormatter, RawTextHelpFormatter,
++        ArgumentDefaultsHelpFormatter -- Formatter classes which
++        may be passed as the formatter_class= argument to the
++        ArgumentParser constructor. HelpFormatter is the default,
++        RawDescriptionHelpFormatter and RawTextHelpFormatter tell the parser
++        not to change the formatting for help text, and
++        ArgumentDefaultsHelpFormatter adds information about argument defaults
++        to the help.
++
++All other classes in this module are considered implementation details.
++(Also note that HelpFormatter and RawDescriptionHelpFormatter are only
++considered public as object names -- the API of the formatter objects is
++still considered an implementation detail.)
++"""
++
++__version__ = '1.2.1'
++__all__ = [
++    'ArgumentParser',
++    'ArgumentError',
++    'ArgumentTypeError',
++    'FileType',
++    'HelpFormatter',
++    'ArgumentDefaultsHelpFormatter',
++    'RawDescriptionHelpFormatter',
++    'RawTextHelpFormatter',
++    'Namespace',
++    'Action',
++    'ONE_OR_MORE',
++    'OPTIONAL',
++    'PARSER',
++    'REMAINDER',
++    'SUPPRESS',
++    'ZERO_OR_MORE',
++]
++
++
++import copy as _copy
++import os as _os
++import re as _re
++import sys as _sys
++import textwrap as _textwrap
++
++from gettext import gettext as _
++
++try:
++    set
++except NameError:
++    # for python < 2.4 compatibility (sets module is there since 2.3):
++    from sets import Set as set
++
++try:
++    basestring
++except NameError:
++    basestring = str
++
++try:
++    sorted
++except NameError:
++    # for python < 2.4 compatibility:
++    def sorted(iterable, reverse=False):
++        result = list(iterable)
++        result.sort()
++        if reverse:
++            result.reverse()
++        return result
++
++
++def _callable(obj):
++    return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
++
++
++SUPPRESS = '==SUPPRESS=='
++
++OPTIONAL = '?'
++ZERO_OR_MORE = '*'
++ONE_OR_MORE = '+'
++PARSER = 'A...'
++REMAINDER = '...'
++_UNRECOGNIZED_ARGS_ATTR = '_unrecognized_args'
++
++# =============================
++# Utility functions and classes
++# =============================
++
++class _AttributeHolder(object):
++    """Abstract base class that provides __repr__.
++
++    The __repr__ method returns a string in the format::
++        ClassName(attr=name, attr=name, ...)
++    The attributes are determined either by a class-level attribute,
++    '_kwarg_names', or by inspecting the instance __dict__.
++    """
++
++    def __repr__(self):
++        type_name = type(self).__name__
++        arg_strings = []
++        for arg in self._get_args():
++            arg_strings.append(repr(arg))
++        for name, value in self._get_kwargs():
++            arg_strings.append('%s=%r' % (name, value))
++        return '%s(%s)' % (type_name, ', '.join(arg_strings))
++
++    def _get_kwargs(self):
++        return sorted(self.__dict__.items())
++
++    def _get_args(self):
++        return []
++
++
++def _ensure_value(namespace, name, value):
++    if getattr(namespace, name, None) is None:
++        setattr(namespace, name, value)
++    return getattr(namespace, name)
++
++
++# ===============
++# Formatting Help
++# ===============
++
++class HelpFormatter(object):
++    """Formatter for generating usage messages and argument help strings.
++
++    Only the name of this class is considered a public API. All the methods
++    provided by the class are considered an implementation detail.
++    """
++
++    def __init__(self,
++                 prog,
++                 indent_increment=2,
++                 max_help_position=24,
++                 width=None):
++
++        # default setting for width
++        if width is None:
++            try:
++                width = int(_os.environ['COLUMNS'])
++            except (KeyError, ValueError):
++                width = 80
++            width -= 2
++
++        self._prog = prog
++        self._indent_increment = indent_increment
++        self._max_help_position = max_help_position
++        self._width = width
++
++        self._current_indent = 0
++        self._level = 0
++        self._action_max_length = 0
++
++        self._root_section = self._Section(self, None)
++        self._current_section = self._root_section
++
++        self._whitespace_matcher = _re.compile(r'\s+')
++        self._long_break_matcher = _re.compile(r'\n\n\n+')
++
++    # ===============================
++    # Section and indentation methods
++    # ===============================
++    def _indent(self):
++        self._current_indent += self._indent_increment
++        self._level += 1
++
++    def _dedent(self):
++        self._current_indent -= self._indent_increment
++        assert self._current_indent >= 0, 'Indent decreased below 0.'
++        self._level -= 1
++
++    class _Section(object):
++
++        def __init__(self, formatter, parent, heading=None):
++            self.formatter = formatter
++            self.parent = parent
++            self.heading = heading
++            self.items = []
++
++        def format_help(self):
++            # format the indented section
++            if self.parent is not None:
++                self.formatter._indent()
++            join = self.formatter._join_parts
++            for func, args in self.items:
++                func(*args)
++            item_help = join([func(*args) for func, args in self.items])
++            if self.parent is not None:
++                self.formatter._dedent()
++
++            # return nothing if the section was empty
++            if not item_help:
++                return ''
++
++            # add the heading if the section was non-empty
++            if self.heading is not SUPPRESS and self.heading is not None:
++                current_indent = self.formatter._current_indent
++                heading = '%*s%s:\n' % (current_indent, '', self.heading)
++            else:
++                heading = ''
++
++            # join the section-initial newline, the heading and the help
++            return join(['\n', heading, item_help, '\n'])
++
++    def _add_item(self, func, args):
++        self._current_section.items.append((func, args))
++
++    # ========================
++    # Message building methods
++    # ========================
++    def start_section(self, heading):
++        self._indent()
++        section = self._Section(self, self._current_section, heading)
++        self._add_item(section.format_help, [])
++        self._current_section = section
++
++    def end_section(self):
++        self._current_section = self._current_section.parent
++        self._dedent()
++
++    def add_text(self, text):
++        if text is not SUPPRESS and text is not None:
++            self._add_item(self._format_text, [text])
++
++    def add_usage(self, usage, actions, groups, prefix=None):
++        if usage is not SUPPRESS:
++            args = usage, actions, groups, prefix
++            self._add_item(self._format_usage, args)
++
++    def add_argument(self, action):
++        if action.help is not SUPPRESS:
++
++            # find all invocations
++            get_invocation = self._format_action_invocation
++            invocations = [get_invocation(action)]
++            for subaction in self._iter_indented_subactions(action):
++                invocations.append(get_invocation(subaction))
++
++            # update the maximum item length
++            invocation_length = max([len(s) for s in invocations])
++            action_length = invocation_length + self._current_indent
++            self._action_max_length = max(self._action_max_length,
++                                          action_length)
++
++            # add the item to the list
++            self._add_item(self._format_action, [action])
++
++    def add_arguments(self, actions):
++        for action in actions:
++            self.add_argument(action)
++
++    # =======================
++    # Help-formatting methods
++    # =======================
++    def format_help(self):
++        help = self._root_section.format_help()
++        if help:
++            help = self._long_break_matcher.sub('\n\n', help)
++            help = help.strip('\n') + '\n'
++        return help
++
++    def _join_parts(self, part_strings):
++        return ''.join([part
++                        for part in part_strings
++                        if part and part is not SUPPRESS])
++
++    def _format_usage(self, usage, actions, groups, prefix):
++        if prefix is None:
++            prefix = _('usage: ')
++
++        # if usage is specified, use that
++        if usage is not None:
++            usage = usage % dict(prog=self._prog)
++
++        # if no optionals or positionals are available, usage is just prog
++        elif usage is None and not actions:
++            usage = '%(prog)s' % dict(prog=self._prog)
++
++        # if optionals and positionals are available, calculate usage
++        elif usage is None:
++            prog = '%(prog)s' % dict(prog=self._prog)
++
++            # split optionals from positionals
++            optionals = []
++            positionals = []
++            for action in actions:
++                if action.option_strings:
++                    optionals.append(action)
++                else:
++                    positionals.append(action)
++
++            # build full usage string
++            format = self._format_actions_usage
++            action_usage = format(optionals + positionals, groups)
++            usage = ' '.join([s for s in [prog, action_usage] if s])
++
++            # wrap the usage parts if it's too long
++            text_width = self._width - self._current_indent
++            if len(prefix) + len(usage) > text_width:
++
++                # break usage into wrappable parts
++                part_regexp = r'\(.*?\)+|\[.*?\]+|\S+'
++                opt_usage = format(optionals, groups)
++                pos_usage = format(positionals, groups)
++                opt_parts = _re.findall(part_regexp, opt_usage)
++                pos_parts = _re.findall(part_regexp, pos_usage)
++                assert ' '.join(opt_parts) == opt_usage
++                assert ' '.join(pos_parts) == pos_usage
++
++                # helper for wrapping lines
++                def get_lines(parts, indent, prefix=None):
++                    lines = []
++                    line = []
++                    if prefix is not None:
++                        line_len = len(prefix) - 1
++                    else:
++                        line_len = len(indent) - 1
++                    for part in parts:
++                        if line_len + 1 + len(part) > text_width:
++                            lines.append(indent + ' '.join(line))
++                            line = []
++                            line_len = len(indent) - 1
++                        line.append(part)
++                        line_len += len(part) + 1
++                    if line:
++                        lines.append(indent + ' '.join(line))
++                    if prefix is not None:
++                        lines[0] = lines[0][len(indent):]
++                    return lines
++
++                # if prog is short, follow it with optionals or positionals
++                if len(prefix) + len(prog) <= 0.75 * text_width:
++                    indent = ' ' * (len(prefix) + len(prog) + 1)
++                    if opt_parts:
++                        lines = get_lines([prog] + opt_parts, indent, prefix)
++                        lines.extend(get_lines(pos_parts, indent))
++                    elif pos_parts:
++                        lines = get_lines([prog] + pos_parts, indent, prefix)
++                    else:
++                        lines = [prog]
++
++                # if prog is long, put it on its own line
++                else:
++                    indent = ' ' * len(prefix)
++                    parts = opt_parts + pos_parts
++                    lines = get_lines(parts, indent)
++                    if len(lines) > 1:
++                        lines = []
++                        lines.extend(get_lines(opt_parts, indent))
++                        lines.extend(get_lines(pos_parts, indent))
++                    lines = [prog] + lines
++
++                # join lines into usage
++                usage = '\n'.join(lines)
++
++        # prefix with 'usage:'
++        return '%s%s\n\n' % (prefix, usage)
++
++    def _format_actions_usage(self, actions, groups):
++        # find group indices and identify actions in groups
++        group_actions = set()
++        inserts = {}
++        for group in groups:
++            try:
++                start = actions.index(group._group_actions[0])
++            except ValueError:
++                continue
++            else:
++                end = start + len(group._group_actions)
++                if actions[start:end] == group._group_actions:
++                    for action in group._group_actions:
++                        group_actions.add(action)
++                    if not group.required:
++                        if start in inserts:
++                            inserts[start] += ' ['
++                        else:
++                            inserts[start] = '['
++                        inserts[end] = ']'
++                    else:
++                        if start in inserts:
++                            inserts[start] += ' ('
++                        else:
++                            inserts[start] = '('
++                        inserts[end] = ')'
++                    for i in range(start + 1, end):
++                        inserts[i] = '|'
++
++        # collect all actions format strings
++        parts = []
++        for i, action in enumerate(actions):
++
++            # suppressed arguments are marked with None
++            # remove | separators for suppressed arguments
++            if action.help is SUPPRESS:
++                parts.append(None)
++                if inserts.get(i) == '|':
++                    inserts.pop(i)
++                elif inserts.get(i + 1) == '|':
++                    inserts.pop(i + 1)
++
++            # produce all arg strings
++            elif not action.option_strings:
++                part = self._format_args(action, action.dest)
++
++                # if it's in a group, strip the outer []
++                if action in group_actions:
++                    if part[0] == '[' and part[-1] == ']':
++                        part = part[1:-1]
++
++                # add the action string to the list
++                parts.append(part)
++
++            # produce the first way to invoke the option in brackets
++            else:
++                option_string = action.option_strings[0]
++
++                # if the Optional doesn't take a value, format is:
++                #    -s or --long
++                if action.nargs == 0:
++                    part = '%s' % option_string
++
++                # if the Optional takes a value, format is:
++                #    -s ARGS or --long ARGS
++                else:
++                    default = action.dest.upper()
++                    args_string = self._format_args(action, default)
++                    part = '%s %s' % (option_string, args_string)
++
++                # make it look optional if it's not required or in a group
++                if not action.required and action not in group_actions:
++                    part = '[%s]' % part
++
++                # add the action string to the list
++                parts.append(part)
++
++        # insert things at the necessary indices
++        for i in sorted(inserts, reverse=True):
++            parts[i:i] = [inserts[i]]
++
++        # join all the action items with spaces
++        text = ' '.join([item for item in parts if item is not None])
++
++        # clean up separators for mutually exclusive groups
++        open = r'[\[(]'
++        close = r'[\])]'
++        text = _re.sub(r'(%s) ' % open, r'\1', text)
++        text = _re.sub(r' (%s)' % close, r'\1', text)
++        text = _re.sub(r'%s *%s' % (open, close), r'', text)
++        text = _re.sub(r'\(([^|]*)\)', r'\1', text)
++        text = text.strip()
++
++        # return the text
++        return text
++
++    def _format_text(self, text):
++        if '%(prog)' in text:
++            text = text % dict(prog=self._prog)
++        text_width = self._width - self._current_indent
++        indent = ' ' * self._current_indent
++        return self._fill_text(text, text_width, indent) + '\n\n'
++
++    def _format_action(self, action):
++        # determine the required width and the entry label
++        help_position = min(self._action_max_length + 2,
++                            self._max_help_position)
++        help_width = self._width - help_position
++        action_width = help_position - self._current_indent - 2
++        action_header = self._format_action_invocation(action)
++
++        # no help; start on same line and add a final newline
++        if not action.help:
++            tup = self._current_indent, '', action_header
++            action_header = '%*s%s\n' % tup
++
++        # short action name; start on the same line and pad two spaces
++        elif len(action_header) <= action_width:
++            tup = self._current_indent, '', action_width, action_header
++            action_header = '%*s%-*s  ' % tup
++            indent_first = 0
++
++        # long action name; start on the next line
++        else:
++            tup = self._current_indent, '', action_header
++            action_header = '%*s%s\n' % tup
++            indent_first = help_position
++
++        # collect the pieces of the action help
++        parts = [action_header]
++
++        # if there was help for the action, add lines of help text
++        if action.help:
++            help_text = self._expand_help(action)
++            help_lines = self._split_lines(help_text, help_width)
++            parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
++            for line in help_lines[1:]:
++                parts.append('%*s%s\n' % (help_position, '', line))
++
++        # or add a newline if the description doesn't end with one
++        elif not action_header.endswith('\n'):
++            parts.append('\n')
++
++        # if there are any sub-actions, add their help as well
++        for subaction in self._iter_indented_subactions(action):
++            parts.append(self._format_action(subaction))
++
++        # return a single string
++        return self._join_parts(parts)
++
++    def _format_action_invocation(self, action):
++        if not action.option_strings:
++            metavar, = self._metavar_formatter(action, action.dest)(1)
++            return metavar
++
++        else:
++            parts = []
++
++            # if the Optional doesn't take a value, format is:
++            #    -s, --long
++            if action.nargs == 0:
++                parts.extend(action.option_strings)
++
++            # if the Optional takes a value, format is:
++            #    -s ARGS, --long ARGS
++            else:
++                default = action.dest.upper()
++                args_string = self._format_args(action, default)
++                for option_string in action.option_strings:
++                    parts.append('%s %s' % (option_string, args_string))
++
++            return ', '.join(parts)
++
++    def _metavar_formatter(self, action, default_metavar):
++        if action.metavar is not None:
++            result = action.metavar
++        elif action.choices is not None:
++            choice_strs = [str(choice) for choice in action.choices]
++            result = '{%s}' % ','.join(choice_strs)
++        else:
++            result = default_metavar
++
++        def format(tuple_size):
++            if isinstance(result, tuple):
++                return result
++            else:
++                return (result, ) * tuple_size
++        return format
++
++    def _format_args(self, action, default_metavar):
++        get_metavar = self._metavar_formatter(action, default_metavar)
++        if action.nargs is None:
++            result = '%s' % get_metavar(1)
++        elif action.nargs == OPTIONAL:
++            result = '[%s]' % get_metavar(1)
++        elif action.nargs == ZERO_OR_MORE:
++            result = '[%s [%s ...]]' % get_metavar(2)
++        elif action.nargs == ONE_OR_MORE:
++            result = '%s [%s ...]' % get_metavar(2)
++        elif action.nargs == REMAINDER:
++            result = '...'
++        elif action.nargs == PARSER:
++            result = '%s ...' % get_metavar(1)
++        else:
++            formats = ['%s' for _ in range(action.nargs)]
++            result = ' '.join(formats) % get_metavar(action.nargs)
++        return result
++
++    def _expand_help(self, action):
++        params = dict(vars(action), prog=self._prog)
++        for name in list(params):
++            if params[name] is SUPPRESS:
++                del params[name]
++        for name in list(params):
++            if hasattr(params[name], '__name__'):
++                params[name] = params[name].__name__
++        if params.get('choices') is not None:
++            choices_str = ', '.join([str(c) for c in params['choices']])
++            params['choices'] = choices_str
++        return self._get_help_string(action) % params
++
++    def _iter_indented_subactions(self, action):
++        try:
++            get_subactions = action._get_subactions
++        except AttributeError:
++            pass
++        else:
++            self._indent()
++            for subaction in get_subactions():
++                yield subaction
++            self._dedent()
++
++    def _split_lines(self, text, width):
++        text = self._whitespace_matcher.sub(' ', text).strip()
++        return _textwrap.wrap(text, width)
++
++    def _fill_text(self, text, width, indent):
++        text = self._whitespace_matcher.sub(' ', text).strip()
++        return _textwrap.fill(text, width, initial_indent=indent,
++                                           subsequent_indent=indent)
++
++    def _get_help_string(self, action):
++        return action.help
++
++
++class RawDescriptionHelpFormatter(HelpFormatter):
++    """Help message formatter which retains any formatting in descriptions.
++
++    Only the name of this class is considered a public API. All the methods
++    provided by the class are considered an implementation detail.
++    """
++
++    def _fill_text(self, text, width, indent):
++        return ''.join([indent + line for line in text.splitlines(True)])
++
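++# Illustrative use (the parser and description text below are examples only):
++# passing this class as formatter_class preserves explicit newlines in the
++# description and epilog instead of re-wrapping them.
++#
++#   parser = ArgumentParser(
++#       description='line one\nline two',
++#       formatter_class=RawDescriptionHelpFormatter)
++#   parser.format_help()   # description keeps its line break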
++
++class RawTextHelpFormatter(RawDescriptionHelpFormatter):
++    """Help message formatter which retains formatting of all help text.
++
++    Only the name of this class is considered a public API. All the methods
++    provided by the class are considered an implementation detail.
++    """
++
++    def _split_lines(self, text, width):
++        return text.splitlines()
++
++
++class ArgumentDefaultsHelpFormatter(HelpFormatter):
++    """Help message formatter which adds default values to argument help.
++
++    Only the name of this class is considered a public API. All the methods
++    provided by the class are considered an implementation detail.
++    """
++
++    def _get_help_string(self, action):
++        help = action.help
++        if '%(default)' not in action.help:
++            if action.default is not SUPPRESS:
++                defaulting_nargs = [OPTIONAL, ZERO_OR_MORE]
++                if action.option_strings or action.nargs in defaulting_nargs:
++                    help += ' (default: %(default)s)'
++        return help
++
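++# Illustrative use (the option name below is an example only): this formatter
++# appends the default value to each option's help text automatically.
++#
++#   parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
++#   parser.add_argument('--retries', type=int, default=3, help='retry count')
++#   # help output shows: --retries RETRIES  retry count (default: 3)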
++
++# =====================
++# Options and Arguments
++# =====================
++
++def _get_action_name(argument):
++    if argument is None:
++        return None
++    elif argument.option_strings:
++        return '/'.join(argument.option_strings)
++    elif argument.metavar not in (None, SUPPRESS):
++        return argument.metavar
++    elif argument.dest not in (None, SUPPRESS):
++        return argument.dest
++    else:
++        return None
++
++
++class ArgumentError(Exception):
++    """An error from creating or using an argument (optional or positional).
++
++    The string value of this exception is the message, augmented with
++    information about the argument that caused it.
++    """
++
++    def __init__(self, argument, message):
++        self.argument_name = _get_action_name(argument)
++        self.message = message
++
++    def __str__(self):
++        if self.argument_name is None:
++            format = '%(message)s'
++        else:
++            format = 'argument %(argument_name)s: %(message)s'
++        return format % dict(message=self.message,
++                             argument_name=self.argument_name)
++
++
++class ArgumentTypeError(Exception):
++    """An error from trying to convert a command line string to a type."""
++    pass
++
++
++# ==============
++# Action classes
++# ==============
++
++class Action(_AttributeHolder):
++    """Information about how to convert command line strings to Python objects.
++
++    Action objects are used by an ArgumentParser to represent the information
++    needed to parse a single argument from one or more strings from the
++    command line. The keyword arguments to the Action constructor are also
++    all attributes of Action instances.
++
++    Keyword Arguments:
++
++        - option_strings -- A list of command-line option strings which
++            should be associated with this action.
++
++        - dest -- The name of the attribute to hold the created object(s)
++
++        - nargs -- The number of command-line arguments that should be
++            consumed. By default, one argument will be consumed and a single
++            value will be produced.  Other values include:
++                - N (an integer) consumes N arguments (and produces a list)
++                - '?' consumes zero or one arguments
++                - '*' consumes zero or more arguments (and produces a list)
++                - '+' consumes one or more arguments (and produces a list)
++            Note that the difference between the default and nargs=1 is that
++            with the default, a single value will be produced, while with
++            nargs=1, a list containing a single value will be produced.
++
++        - const -- The value to be produced if the option is specified and the
++            option uses an action that takes no values.
++
++        - default -- The value to be produced if the option is not specified.
++
++        - type -- The type which the command-line arguments should be converted
++            to, should be one of 'string', 'int', 'float', 'complex' or a
++            callable object that accepts a single string argument. If None,
++            'string' is assumed.
++
++        - choices -- A container of values that should be allowed. If not None,
++            after a command-line argument has been converted to the appropriate
++            type, an exception will be raised if it is not a member of this
++            collection.
++
++        - required -- True if the action must always be specified at the
++            command line. This is only meaningful for optional command-line
++            arguments.
++
++        - help -- The help string describing the argument.
++
++        - metavar -- The name to be used for the option's argument with the
++            help string. If None, the 'dest' value will be used as the name.
++    """
++
++    def __init__(self,
++                 option_strings,
++                 dest,
++                 nargs=None,
++                 const=None,
++                 default=None,
++                 type=None,
++                 choices=None,
++                 required=False,
++                 help=None,
++                 metavar=None):
++        self.option_strings = option_strings
++        self.dest = dest
++        self.nargs = nargs
++        self.const = const
++        self.default = default
++        self.type = type
++        self.choices = choices
++        self.required = required
++        self.help = help
++        self.metavar = metavar
++
++    def _get_kwargs(self):
++        names = [
++            'option_strings',
++            'dest',
++            'nargs',
++            'const',
++            'default',
++            'type',
++            'choices',
++            'help',
++            'metavar',
++        ]
++        return [(name, getattr(self, name)) for name in names]
++
++    def __call__(self, parser, namespace, values, option_string=None):
++        raise NotImplementedError(_('.__call__() not defined'))
++
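++# A minimal sketch of a user-defined action (the class and option names are
++# hypothetical, not used by this module): __call__ receives the converted
++# values and decides how to store them on the namespace.
++#
++#   class UpperAction(Action):
++#       def __call__(self, parser, namespace, values, option_string=None):
++#           setattr(namespace, self.dest, values.upper())
++#
++#   parser = ArgumentParser()
++#   parser.add_argument('--name', action=UpperAction)
++#   parser.parse_args(['--name', 'bob'])   # Namespace(name='BOB')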
++
++class _StoreAction(Action):
++
++    def __init__(self,
++                 option_strings,
++                 dest,
++                 nargs=None,
++                 const=None,
++                 default=None,
++                 type=None,
++                 choices=None,
++                 required=False,
++                 help=None,
++                 metavar=None):
++        if nargs == 0:
++            raise ValueError('nargs for store actions must be > 0; if you '
++                             'have nothing to store, actions such as store '
++                             'true or store const may be more appropriate')
++        if const is not None and nargs != OPTIONAL:
++            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
++        super(_StoreAction, self).__init__(
++            option_strings=option_strings,
++            dest=dest,
++            nargs=nargs,
++            const=const,
++            default=default,
++            type=type,
++            choices=choices,
++            required=required,
++            help=help,
++            metavar=metavar)
++
++    def __call__(self, parser, namespace, values, option_string=None):
++        setattr(namespace, self.dest, values)
++
++
++class _StoreConstAction(Action):
++
++    def __init__(self,
++                 option_strings,
++                 dest,
++                 const,
++                 default=None,
++                 required=False,
++                 help=None,
++                 metavar=None):
++        super(_StoreConstAction, self).__init__(
++            option_strings=option_strings,
++            dest=dest,
++            nargs=0,
++            const=const,
++            default=default,
++            required=required,
++            help=help)
++
++    def __call__(self, parser, namespace, values, option_string=None):
++        setattr(namespace, self.dest, self.const)
++
++
++class _StoreTrueAction(_StoreConstAction):
++
++    def __init__(self,
++                 option_strings,
++                 dest,
++                 default=False,
++                 required=False,
++                 help=None):
++        super(_StoreTrueAction, self).__init__(
++            option_strings=option_strings,
++            dest=dest,
++            const=True,
++            default=default,
++            required=required,
++            help=help)
++
++
++class _StoreFalseAction(_StoreConstAction):
++
++    def __init__(self,
++                 option_strings,
++                 dest,
++                 default=True,
++                 required=False,
++                 help=None):
++        super(_StoreFalseAction, self).__init__(
++            option_strings=option_strings,
++            dest=dest,
++            const=False,
++            default=default,
++            required=required,
++            help=help)
++
++
++class _AppendAction(Action):
++
++    def __init__(self,
++                 option_strings,
++                 dest,
++                 nargs=None,
++                 const=None,
++                 default=None,
++                 type=None,
++                 choices=None,
++                 required=False,
++                 help=None,
++                 metavar=None):
++        if nargs == 0:
++            raise ValueError('nargs for append actions must be > 0; if arg '
++                             'strings are not supplying the value to append, '
++                             'the append const action may be more appropriate')
++        if const is not None and nargs != OPTIONAL:
++            raise ValueError('nargs must be %r to supply const' % OPTIONAL)
++        super(_AppendAction, self).__init__(
++            option_strings=option_strings,
++            dest=dest,
++            nargs=nargs,
++            const=const,
++            default=default,
++            type=type,
++            choices=choices,
++            required=required,
++            help=help,
++            metavar=metavar)
++
++    def __call__(self, parser, namespace, values, option_string=None):
++        items = _copy.copy(_ensure_value(namespace, self.dest, []))
++        items.append(values)
++        setattr(namespace, self.dest, items)
++
++
++class _AppendConstAction(Action):
++
++    def __init__(self,
++                 option_strings,
++                 dest,
++                 const,
++                 default=None,
++                 required=False,
++                 help=None,
++                 metavar=None):
++        super(_AppendConstAction, self).__init__(
++            option_strings=option_strings,
++            dest=dest,
++            nargs=0,
++            const=const,
++            default=default,
++            required=required,
++            help=help,
++            metavar=metavar)
++
++    def __call__(self, parser, namespace, values, option_string=None):
++        items = _copy.copy(_ensure_value(namespace, self.dest, []))
++        items.append(self.const)
++        setattr(namespace, self.dest, items)
++
++
++class _CountAction(Action):
++
++    def __init__(self,
++                 option_strings,
++                 dest,
++                 default=None,
++                 required=False,
++                 help=None):
++        super(_CountAction, self).__init__(
++            option_strings=option_strings,
++            dest=dest,
++            nargs=0,
++            default=default,
++            required=required,
++            help=help)
++
++    def __call__(self, parser, namespace, values, option_string=None):
++        new_count = _ensure_value(namespace, self.dest, 0) + 1
++        setattr(namespace, self.dest, new_count)
++
++
++class _HelpAction(Action):
++
++    def __init__(self,
++                 option_strings,
++                 dest=SUPPRESS,
++                 default=SUPPRESS,
++                 help=None):
++        super(_HelpAction, self).__init__(
++            option_strings=option_strings,
++            dest=dest,
++            default=default,
++            nargs=0,
++            help=help)
++
++    def __call__(self, parser, namespace, values, option_string=None):
++        parser.print_help()
++        parser.exit()
++
++
++class _VersionAction(Action):
++
++    def __init__(self,
++                 option_strings,
++                 version=None,
++                 dest=SUPPRESS,
++                 default=SUPPRESS,
++                 help="show program's version number and exit"):
++        super(_VersionAction, self).__init__(
++            option_strings=option_strings,
++            dest=dest,
++            default=default,
++            nargs=0,
++            help=help)
++        self.version = version
++
++    def __call__(self, parser, namespace, values, option_string=None):
++        version = self.version
++        if version is None:
++            version = parser.version
++        formatter = parser._get_formatter()
++        formatter.add_text(version)
++        parser.exit(message=formatter.format_help())
++
++
++class _SubParsersAction(Action):
++
++    class _ChoicesPseudoAction(Action):
++
++        def __init__(self, name, help):
++            sup = super(_SubParsersAction._ChoicesPseudoAction, self)
++            sup.__init__(option_strings=[], dest=name, help=help)
++
++    def __init__(self,
++                 option_strings,
++                 prog,
++                 parser_class,
++                 dest=SUPPRESS,
++                 help=None,
++                 metavar=None):
++
++        self._prog_prefix = prog
++        self._parser_class = parser_class
++        self._name_parser_map = {}
++        self._choices_actions = []
++
++        super(_SubParsersAction, self).__init__(
++            option_strings=option_strings,
++            dest=dest,
++            nargs=PARSER,
++            choices=self._name_parser_map,
++            help=help,
++            metavar=metavar)
++
++    def add_parser(self, name, **kwargs):
++        # set prog from the existing prefix
++        if kwargs.get('prog') is None:
++            kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
++
++        # create a pseudo-action to hold the choice help
++        if 'help' in kwargs:
++            help = kwargs.pop('help')
++            choice_action = self._ChoicesPseudoAction(name, help)
++            self._choices_actions.append(choice_action)
++
++        # create the parser and add it to the map
++        parser = self._parser_class(**kwargs)
++        self._name_parser_map[name] = parser
++        return parser
++
++    def _get_subactions(self):
++        return self._choices_actions
++
++    def __call__(self, parser, namespace, values, option_string=None):
++        parser_name = values[0]
++        arg_strings = values[1:]
++
++        # set the parser name if requested
++        if self.dest is not SUPPRESS:
++            setattr(namespace, self.dest, parser_name)
++
++        # select the parser
++        try:
++            parser = self._name_parser_map[parser_name]
++        except KeyError:
++            tup = parser_name, ', '.join(self._name_parser_map)
++            msg = _('unknown parser %r (choices: %s)' % tup)
++            raise ArgumentError(self, msg)
++
++        # parse all the remaining options into the namespace
++        # store any unrecognized options on the object, so that the top
++        # level parser can decide what to do with them
++        namespace, arg_strings = parser.parse_known_args(arg_strings, namespace)
++        if arg_strings:
++            vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
++            getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
++
++
++# ==============
++# Type classes
++# ==============
++
++class FileType(object):
++    """Factory for creating file object types
++
++    Instances of FileType are typically passed as type= arguments to the
++    ArgumentParser add_argument() method.
++
++    Keyword Arguments:
++        - mode -- A string indicating how the file is to be opened. Accepts the
++            same values as the builtin open() function.
++        - bufsize -- The file's desired buffer size. Accepts the same values as
++            the builtin open() function.
++    """
++
++    def __init__(self, mode='r', bufsize=None):
++        self._mode = mode
++        self._bufsize = bufsize
++
++    def __call__(self, string):
++        # the special argument "-" means sys.std{in,out}
++        if string == '-':
++            if 'r' in self._mode:
++                return _sys.stdin
++            elif 'w' in self._mode:
++                return _sys.stdout
++            else:
++                msg = _('argument "-" with mode %r' % self._mode)
++                raise ValueError(msg)
++
++        # all other arguments are used as file names
++        if self._bufsize:
++            return open(string, self._mode, self._bufsize)
++        else:
++            return open(string, self._mode)
++
++    def __repr__(self):
++        args = [self._mode, self._bufsize]
++        args_str = ', '.join([repr(arg) for arg in args if arg is not None])
++        return '%s(%s)' % (type(self).__name__, args_str)
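++
++# Illustrative use (file and option names are examples only): FileType
++# instances are passed as the type= argument so that parsed strings are
++# returned as open file objects, with '-' mapping to stdin/stdout.
++#
++#   parser = ArgumentParser()
++#   parser.add_argument('infile', type=FileType('r'))
++#   parser.add_argument('--log', type=FileType('w'))
++#   args = parser.parse_args(['data.txt', '--log', 'out.txt'])
++#   args.infile.read()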
++
++# ===========================
++# Optional and Positional Parsing
++# ===========================
++
++class Namespace(_AttributeHolder):
++    """Simple object for storing attributes.
++
++    Implements equality by attribute names and values, and provides a simple
++    string representation.
++    """
++
++    def __init__(self, **kwargs):
++        for name in kwargs:
++            setattr(self, name, kwargs[name])
++
++    __hash__ = None
++
++    def __eq__(self, other):
++        return vars(self) == vars(other)
++
++    def __ne__(self, other):
++        return not (self == other)
++
++    def __contains__(self, key):
++        return key in self.__dict__
++
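++# Illustrative use: Namespace is a plain attribute bag, so parse results can
++# be inspected with attribute access, equality and membership tests.
++#
++#   ns = Namespace(foo=1, bar='x')
++#   ns.foo                             # 1
++#   'bar' in ns                        # True
++#   ns == Namespace(foo=1, bar='x')    # True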
++
++class _ActionsContainer(object):
++
++    def __init__(self,
++                 description,
++                 prefix_chars,
++                 argument_default,
++                 conflict_handler):
++        super(_ActionsContainer, self).__init__()
++
++        self.description = description
++        self.argument_default = argument_default
++        self.prefix_chars = prefix_chars
++        self.conflict_handler = conflict_handler
++
++        # set up registries
++        self._registries = {}
++
++        # register actions
++        self.register('action', None, _StoreAction)
++        self.register('action', 'store', _StoreAction)
++        self.register('action', 'store_const', _StoreConstAction)
++        self.register('action', 'store_true', _StoreTrueAction)
++        self.register('action', 'store_false', _StoreFalseAction)
++        self.register('action', 'append', _AppendAction)
++        self.register('action', 'append_const', _AppendConstAction)
++        self.register('action', 'count', _CountAction)
++        self.register('action', 'help', _HelpAction)
++        self.register('action', 'version', _VersionAction)
++        self.register('action', 'parsers', _SubParsersAction)
++
++        # raise an exception if the conflict handler is invalid
++        self._get_handler()
++
++        # action storage
++        self._actions = []
++        self._option_string_actions = {}
++
++        # groups
++        self._action_groups = []
++        self._mutually_exclusive_groups = []
++
++        # defaults storage
++        self._defaults = {}
++
++        # determines whether an "option" looks like a negative number
++        self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
++
++        # whether or not there are any optionals that look like negative
++        # numbers -- uses a list so it can be shared and edited
++        self._has_negative_number_optionals = []
++
++    # ====================
++    # Registration methods
++    # ====================
++    def register(self, registry_name, value, object):
++        registry = self._registries.setdefault(registry_name, {})
++        registry[value] = object
++
++    def _registry_get(self, registry_name, value, default=None):
++        return self._registries[registry_name].get(value, default)
++
++    # ==================================
++    # Namespace default accessor methods
++    # ==================================
++    def set_defaults(self, **kwargs):
++        self._defaults.update(kwargs)
++
++        # if these defaults match any existing arguments, replace
++        # the previous default on the object with the new one
++        for action in self._actions:
++            if action.dest in kwargs:
++                action.default = kwargs[action.dest]
++
++    def get_default(self, dest):
++        for action in self._actions:
++            if action.dest == dest and action.default is not None:
++                return action.default
++        return self._defaults.get(dest, None)
++
++
++    # =======================
++    # Adding argument actions
++    # =======================
++    def add_argument(self, *args, **kwargs):
++        """
++        add_argument(dest, ..., name=value, ...)
++        add_argument(option_string, option_string, ..., name=value, ...)
++        """
++
++        # if no positional args are supplied or only one is supplied and
++        # it doesn't look like an option string, parse a positional
++        # argument
++        chars = self.prefix_chars
++        if not args or len(args) == 1 and args[0][0] not in chars:
++            if args and 'dest' in kwargs:
++                raise ValueError('dest supplied twice for positional argument')
++            kwargs = self._get_positional_kwargs(*args, **kwargs)
++
++        # otherwise, we're adding an optional argument
++        else:
++            kwargs = self._get_optional_kwargs(*args, **kwargs)
++
++        # if no default was supplied, use the parser-level default
++        if 'default' not in kwargs:
++            dest = kwargs['dest']
++            if dest in self._defaults:
++                kwargs['default'] = self._defaults[dest]
++            elif self.argument_default is not None:
++                kwargs['default'] = self.argument_default
++
++        # create the action object, and add it to the parser
++        action_class = self._pop_action_class(kwargs)
++        if not _callable(action_class):
++            raise ValueError('unknown action "%s"' % action_class)
++        action = action_class(**kwargs)
++
++        # raise an error if the action type is not callable
++        type_func = self._registry_get('type', action.type, action.type)
++        if not _callable(type_func):
++            raise ValueError('%r is not callable' % type_func)
++
++        return self._add_action(action)
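++
++    # Illustrative calls (option names are examples only) matching the two
++    # forms documented in the docstring above:
++    #
++    #   parser.add_argument('filename')                       # positional
++    #   parser.add_argument('-v', '--verbose', action='store_true')
++    #   parser.add_argument('--count', type=int, default=1)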
++
++    def add_argument_group(self, *args, **kwargs):
++        group = _ArgumentGroup(self, *args, **kwargs)
++        self._action_groups.append(group)
++        return group
++
++    def add_mutually_exclusive_group(self, **kwargs):
++        group = _MutuallyExclusiveGroup(self, **kwargs)
++        self._mutually_exclusive_groups.append(group)
++        return group
++
++    def _add_action(self, action):
++        # resolve any conflicts
++        self._check_conflict(action)
++
++        # add to actions list
++        self._actions.append(action)
++        action.container = self
++
++        # index the action by any option strings it has
++        for option_string in action.option_strings:
++            self._option_string_actions[option_string] = action
++
++        # set the flag if any option strings look like negative numbers
++        for option_string in action.option_strings:
++            if self._negative_number_matcher.match(option_string):
++                if not self._has_negative_number_optionals:
++                    self._has_negative_number_optionals.append(True)
++
++        # return the created action
++        return action
++
++    def _remove_action(self, action):
++        self._actions.remove(action)
++
++    def _add_container_actions(self, container):
++        # collect groups by titles
++        title_group_map = {}
++        for group in self._action_groups:
++            if group.title in title_group_map:
++                msg = _('cannot merge actions - two groups are named %r')
++                raise ValueError(msg % (group.title))
++            title_group_map[group.title] = group
++
++        # map each action to its group
++        group_map = {}
++        for group in container._action_groups:
++
++            # if a group with the title exists, use that, otherwise
++            # create a new group matching the container's group
++            if group.title not in title_group_map:
++                title_group_map[group.title] = self.add_argument_group(
++                    title=group.title,
++                    description=group.description,
++                    conflict_handler=group.conflict_handler)
++
++            # map the actions to their new group
++            for action in group._group_actions:
++                group_map[action] = title_group_map[group.title]
++
++        # add container's mutually exclusive groups
++        # NOTE: if add_mutually_exclusive_group ever gains title= and
++        # description= then this code will need to be expanded as above
++        for group in container._mutually_exclusive_groups:
++            mutex_group = self.add_mutually_exclusive_group(
++                required=group.required)
++
++            # map the actions to their new mutex group
++            for action in group._group_actions:
++                group_map[action] = mutex_group
++
++        # add all actions to this container or their group
++        for action in container._actions:
++            group_map.get(action, self)._add_action(action)
++
++    def _get_positional_kwargs(self, dest, **kwargs):
++        # make sure required is not specified
++        if 'required' in kwargs:
++            msg = _("'required' is an invalid argument for positionals")
++            raise TypeError(msg)
++
++        # mark positional arguments as required if at least one is
++        # always required
++        if kwargs.get('nargs') not in [OPTIONAL, ZERO_OR_MORE]:
++            kwargs['required'] = True
++        if kwargs.get('nargs') == ZERO_OR_MORE and 'default' not in kwargs:
++            kwargs['required'] = True
++
++        # return the keyword arguments with no option strings
++        return dict(kwargs, dest=dest, option_strings=[])
++
++    def _get_optional_kwargs(self, *args, **kwargs):
++        # determine short and long option strings
++        option_strings = []
++        long_option_strings = []
++        for option_string in args:
++            # error on strings that don't start with an appropriate prefix
++            if not option_string[0] in self.prefix_chars:
++                msg = _('invalid option string %r: '
++                        'must start with a character %r')
++                tup = option_string, self.prefix_chars
++                raise ValueError(msg % tup)
++
++            # strings starting with two prefix characters are long options
++            option_strings.append(option_string)
++            if option_string[0] in self.prefix_chars:
++                if len(option_string) > 1:
++                    if option_string[1] in self.prefix_chars:
++                        long_option_strings.append(option_string)
++
++        # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
++        dest = kwargs.pop('dest', None)
++        if dest is None:
++            if long_option_strings:
++                dest_option_string = long_option_strings[0]
++            else:
++                dest_option_string = option_strings[0]
++            dest = dest_option_string.lstrip(self.prefix_chars)
++            if not dest:
++                msg = _('dest= is required for options like %r')
++                raise ValueError(msg % option_string)
++            dest = dest.replace('-', '_')
++
++        # return the updated keyword arguments
++        return dict(kwargs, dest=dest, option_strings=option_strings)
++
++    def _pop_action_class(self, kwargs, default=None):
++        action = kwargs.pop('action', default)
++        return self._registry_get('action', action, action)
++
++    def _get_handler(self):
++        # determine function from conflict handler string
++        handler_func_name = '_handle_conflict_%s' % self.conflict_handler
++        try:
++            return getattr(self, handler_func_name)
++        except AttributeError:
++            msg = _('invalid conflict_resolution value: %r')
++            raise ValueError(msg % self.conflict_handler)
++
++    def _check_conflict(self, action):
++
++        # find all options that conflict with this option
++        confl_optionals = []
++        for option_string in action.option_strings:
++            if option_string in self._option_string_actions:
++                confl_optional = self._option_string_actions[option_string]
++                confl_optionals.append((option_string, confl_optional))
++
++        # resolve any conflicts
++        if confl_optionals:
++            conflict_handler = self._get_handler()
++            conflict_handler(action, confl_optionals)
++
++    def _handle_conflict_error(self, action, conflicting_actions):
++        message = _('conflicting option string(s): %s')
++        conflict_string = ', '.join([option_string
++                                     for option_string, action
++                                     in conflicting_actions])
++        raise ArgumentError(action, message % conflict_string)
++
++    def _handle_conflict_resolve(self, action, conflicting_actions):
++
++        # remove all conflicting options
++        for option_string, action in conflicting_actions:
++
++            # remove the conflicting option
++            action.option_strings.remove(option_string)
++            self._option_string_actions.pop(option_string, None)
++
++            # if the option now has no option string, remove it from the
++            # container holding it
++            if not action.option_strings:
++                action.container._remove_action(action)
++
++
++class _ArgumentGroup(_ActionsContainer):
++
++    def __init__(self, container, title=None, description=None, **kwargs):
++        # add any missing keyword arguments by checking the container
++        update = kwargs.setdefault
++        update('conflict_handler', container.conflict_handler)
++        update('prefix_chars', container.prefix_chars)
++        update('argument_default', container.argument_default)
++        super_init = super(_ArgumentGroup, self).__init__
++        super_init(description=description, **kwargs)
++
++        # group attributes
++        self.title = title
++        self._group_actions = []
++
++        # share most attributes with the container
++        self._registries = container._registries
++        self._actions = container._actions
++        self._option_string_actions = container._option_string_actions
++        self._defaults = container._defaults
++        self._has_negative_number_optionals = \
++            container._has_negative_number_optionals
++
++    def _add_action(self, action):
++        action = super(_ArgumentGroup, self)._add_action(action)
++        self._group_actions.append(action)
++        return action
++
++    def _remove_action(self, action):
++        super(_ArgumentGroup, self)._remove_action(action)
++        self._group_actions.remove(action)
++
++
++class _MutuallyExclusiveGroup(_ArgumentGroup):
++
++    def __init__(self, container, required=False):
++        super(_MutuallyExclusiveGroup, self).__init__(container)
++        self.required = required
++        self._container = container
++
++    def _add_action(self, action):
++        if action.required:
++            msg = _('mutually exclusive arguments must be optional')
++            raise ValueError(msg)
++        action = self._container._add_action(action)
++        self._group_actions.append(action)
++        return action
++
++    def _remove_action(self, action):
++        self._container._remove_action(action)
++        self._group_actions.remove(action)
++
++
++class ArgumentParser(_AttributeHolder, _ActionsContainer):
++    """Object for parsing command line strings into Python objects.
++
++    Keyword Arguments:
++        - prog -- The name of the program (default: sys.argv[0])
++        - usage -- A usage message (default: auto-generated from arguments)
++        - description -- A description of what the program does
++        - epilog -- Text following the argument descriptions
++        - parents -- Parsers whose arguments should be copied into this one
++        - formatter_class -- HelpFormatter class for printing help messages
++        - prefix_chars -- Characters that prefix optional arguments
++        - fromfile_prefix_chars -- Characters that prefix files containing
++            additional arguments
++        - argument_default -- The default value for all arguments
++        - conflict_handler -- String indicating how to handle conflicts
++        - add_help -- Add a -h/--help option
++    """
++
++    def __init__(self,
++                 prog=None,
++                 usage=None,
++                 description=None,
++                 epilog=None,
++                 version=None,
++                 parents=[],
++                 formatter_class=HelpFormatter,
++                 prefix_chars='-',
++                 fromfile_prefix_chars=None,
++                 argument_default=None,
++                 conflict_handler='error',
++                 add_help=True):
++
++        if version is not None:
++            import warnings
++            warnings.warn(
++                """The "version" argument to ArgumentParser is deprecated. """
++                """Please use """
++                """"add_argument(..., action='version', version="N", ...)" """
++                """instead""", DeprecationWarning)
++
++        superinit = super(ArgumentParser, self).__init__
++        superinit(description=description,
++                  prefix_chars=prefix_chars,
++                  argument_default=argument_default,
++                  conflict_handler=conflict_handler)
++
++        # default setting for prog
++        if prog is None:
++            prog = _os.path.basename(_sys.argv[0])
++
++        self.prog = prog
++        self.usage = usage
++        self.epilog = epilog
++        self.version = version
++        self.formatter_class = formatter_class
++        self.fromfile_prefix_chars = fromfile_prefix_chars
++        self.add_help = add_help
++
++        add_group = self.add_argument_group
++        self._positionals = add_group(_('positional arguments'))
++        self._optionals = add_group(_('optional arguments'))
++        self._subparsers = None
++
++        # register types
++        def identity(string):
++            return string
++        self.register('type', None, identity)
++
++        # add help and version arguments if necessary
++        # (using explicit default to override global argument_default)
++        if '-' in prefix_chars:
++            default_prefix = '-'
++        else:
++            default_prefix = prefix_chars[0]
++        if self.add_help:
++            self.add_argument(
++                default_prefix+'h', default_prefix*2+'help',
++                action='help', default=SUPPRESS,
++                help=_('show this help message and exit'))
++        if self.version:
++            self.add_argument(
++                default_prefix+'v', default_prefix*2+'version',
++                action='version', default=SUPPRESS,
++                version=self.version,
++                help=_("show program's version number and exit"))
++
++        # add parent arguments and defaults
++        for parent in parents:
++            self._add_container_actions(parent)
++            try:
++                defaults = parent._defaults
++            except AttributeError:
++                pass
++            else:
++                self._defaults.update(defaults)
++
++    # =======================
++    # Pretty __repr__ methods
++    # =======================
++    def _get_kwargs(self):
++        names = [
++            'prog',
++            'usage',
++            'description',
++            'version',
++            'formatter_class',
++            'conflict_handler',
++            'add_help',
++        ]
++        return [(name, getattr(self, name)) for name in names]
++
++    # ==================================
++    # Optional/Positional adding methods
++    # ==================================
++    def add_subparsers(self, **kwargs):
++        if self._subparsers is not None:
++            self.error(_('cannot have multiple subparser arguments'))
++
++        # add the parser class to the arguments if it's not present
++        kwargs.setdefault('parser_class', type(self))
++
++        if 'title' in kwargs or 'description' in kwargs:
++            title = _(kwargs.pop('title', 'subcommands'))
++            description = _(kwargs.pop('description', None))
++            self._subparsers = self.add_argument_group(title, description)
++        else:
++            self._subparsers = self._positionals
++
++        # prog defaults to the usage message of this parser, skipping
++        # optional arguments and with no "usage:" prefix
++        if kwargs.get('prog') is None:
++            formatter = self._get_formatter()
++            positionals = self._get_positional_actions()
++            groups = self._mutually_exclusive_groups
++            formatter.add_usage(self.usage, positionals, groups, '')
++            kwargs['prog'] = formatter.format_help().strip()
++
++        # create the parsers action and add it to the positionals list
++        parsers_class = self._pop_action_class(kwargs, 'parsers')
++        action = parsers_class(option_strings=[], **kwargs)
++        self._subparsers._add_action(action)
++
++        # return the created parsers action
++        return action
++
++    def _add_action(self, action):
++        if action.option_strings:
++            self._optionals._add_action(action)
++        else:
++            self._positionals._add_action(action)
++        return action
++
++    def _get_optional_actions(self):
++        return [action
++                for action in self._actions
++                if action.option_strings]
++
++    def _get_positional_actions(self):
++        return [action
++                for action in self._actions
++                if not action.option_strings]
++
++    # =====================================
++    # Command line argument parsing methods
++    # =====================================
++    def parse_args(self, args=None, namespace=None):
++        args, argv = self.parse_known_args(args, namespace)
++        if argv:
++            msg = _('unrecognized arguments: %s')
++            self.error(msg % ' '.join(argv))
++        return args
++
++    def parse_known_args(self, args=None, namespace=None):
++        # args default to the system args
++        if args is None:
++            args = _sys.argv[1:]
++
++        # default Namespace built from parser defaults
++        if namespace is None:
++            namespace = Namespace()
++
++        # add any action defaults that aren't present
++        for action in self._actions:
++            if action.dest is not SUPPRESS:
++                if not hasattr(namespace, action.dest):
++                    if action.default is not SUPPRESS:
++                        default = action.default
++                        if isinstance(action.default, basestring):
++                            default = self._get_value(action, default)
++                        setattr(namespace, action.dest, default)
++
++        # add any parser defaults that aren't present
++        for dest in self._defaults:
++            if not hasattr(namespace, dest):
++                setattr(namespace, dest, self._defaults[dest])
++
++        # parse the arguments and exit if there are any errors
++        try:
++            namespace, args = self._parse_known_args(args, namespace)
++            if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
++                args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
++                delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
++            return namespace, args
++        except ArgumentError:
++            err = _sys.exc_info()[1]
++            self.error(str(err))
++
++    def _parse_known_args(self, arg_strings, namespace):
++        # replace arg strings that are file references
++        if self.fromfile_prefix_chars is not None:
++            arg_strings = self._read_args_from_files(arg_strings)
++
++        # map all mutually exclusive arguments to the other arguments
++        # they can't occur with
++        action_conflicts = {}
++        for mutex_group in self._mutually_exclusive_groups:
++            group_actions = mutex_group._group_actions
++            for i, mutex_action in enumerate(mutex_group._group_actions):
++                conflicts = action_conflicts.setdefault(mutex_action, [])
++                conflicts.extend(group_actions[:i])
++                conflicts.extend(group_actions[i + 1:])
++
++        # find all option indices, and determine the arg_string_pattern
++        # which has an 'O' if there is an option at an index,
++        # an 'A' if there is an argument, or a '-' if there is a '--'
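++        # (e.g. ['-x', '3', 'foo', '--', 'bar'] gives the pattern 'OAA-A')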
++        option_string_indices = {}
++        arg_string_pattern_parts = []
++        arg_strings_iter = iter(arg_strings)
++        for i, arg_string in enumerate(arg_strings_iter):
++
++            # all args after -- are non-options
++            if arg_string == '--':
++                arg_string_pattern_parts.append('-')
++                for arg_string in arg_strings_iter:
++                    arg_string_pattern_parts.append('A')
++
++            # otherwise, add the arg to the arg strings
++            # and note the index if it was an option
++            else:
++                option_tuple = self._parse_optional(arg_string)
++                if option_tuple is None:
++                    pattern = 'A'
++                else:
++                    option_string_indices[i] = option_tuple
++                    pattern = 'O'
++                arg_string_pattern_parts.append(pattern)
++
++        # join the pieces together to form the pattern
++        arg_strings_pattern = ''.join(arg_string_pattern_parts)
++
++        # converts arg strings to the appropriate type and then takes the action
++        seen_actions = set()
++        seen_non_default_actions = set()
++
++        def take_action(action, argument_strings, option_string=None):
++            seen_actions.add(action)
++            argument_values = self._get_values(action, argument_strings)
++
++            # error if this argument is not allowed with other previously
++            # seen arguments, assuming that actions that use the default
++            # value don't really count as "present"
++            if argument_values is not action.default:
++                seen_non_default_actions.add(action)
++                for conflict_action in action_conflicts.get(action, []):
++                    if conflict_action in seen_non_default_actions:
++                        msg = _('not allowed with argument %s')
++                        action_name = _get_action_name(conflict_action)
++                        raise ArgumentError(action, msg % action_name)
++
++            # take the action if we didn't receive a SUPPRESS value
++            # (e.g. from a default)
++            if argument_values is not SUPPRESS:
++                action(self, namespace, argument_values, option_string)
++
++        # function to convert arg_strings into an optional action
++        def consume_optional(start_index):
++
++            # get the optional identified at this index
++            option_tuple = option_string_indices[start_index]
++            action, option_string, explicit_arg = option_tuple
++
++            # identify additional optionals in the same arg string
++            # (e.g. -xyz is the same as -x -y -z if no args are required)
++            match_argument = self._match_argument
++            action_tuples = []
++            while True:
++
++                # if we found no optional action, skip it
++                if action is None:
++                    extras.append(arg_strings[start_index])
++                    return start_index + 1
++
++                # if there is an explicit argument, try to match the
++                # optional's string arguments to only this
++                if explicit_arg is not None:
++                    arg_count = match_argument(action, 'A')
++
++                    # if the action is a single-dash option and takes no
++                    # arguments, try to parse more single-dash options out
++                    # of the tail of the option string
++                    chars = self.prefix_chars
++                    if arg_count == 0 and option_string[1] not in chars:
++                        action_tuples.append((action, [], option_string))
++                        char = option_string[0]
++                        option_string = char + explicit_arg[0]
++                        new_explicit_arg = explicit_arg[1:] or None
++                        optionals_map = self._option_string_actions
++                        if option_string in optionals_map:
++                            action = optionals_map[option_string]
++                            explicit_arg = new_explicit_arg
++                        else:
++                            msg = _('ignored explicit argument %r')
++                            raise ArgumentError(action, msg % explicit_arg)
++
++                    # if the action expects exactly one argument, we've
++                    # successfully matched the option; exit the loop
++                    elif arg_count == 1:
++                        stop = start_index + 1
++                        args = [explicit_arg]
++                        action_tuples.append((action, args, option_string))
++                        break
++
++                    # error if a double-dash option did not use the
++                    # explicit argument
++                    else:
++                        msg = _('ignored explicit argument %r')
++                        raise ArgumentError(action, msg % explicit_arg)
++
++                # if there is no explicit argument, try to match the
++                # optional's string arguments with the following strings
++                # if successful, exit the loop
++                else:
++                    start = start_index + 1
++                    selected_patterns = arg_strings_pattern[start:]
++                    arg_count = match_argument(action, selected_patterns)
++                    stop = start + arg_count
++                    args = arg_strings[start:stop]
++                    action_tuples.append((action, args, option_string))
++                    break
++
++            # add the Optional to the list and return the index at which
++            # the Optional's string args stopped
++            assert action_tuples
++            for action, args, option_string in action_tuples:
++                take_action(action, args, option_string)
++            return stop
++
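++        # As the comments inside consume_optional() note, a cluster such as
++        # '-xyz' is unpacked here into '-x', '-y', '-z' when none of those
++        # options takes a value; if, say, '-y' required an argument, the
++        # trailing 'z' would instead be consumed as that argument.  (The
++        # option letters are only an illustration, not options this parser
++        # defines.)
++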
++        # the list of Positionals left to be parsed; this is modified
++        # by consume_positionals()
++        positionals = self._get_positional_actions()
++
++        # function to convert arg_strings into positional actions
++        def consume_positionals(start_index):
++            # match as many Positionals as possible
++            match_partial = self._match_arguments_partial
++            selected_pattern = arg_strings_pattern[start_index:]
++            arg_counts = match_partial(positionals, selected_pattern)
++
++            # slice off the appropriate arg strings for each Positional
++            # and add the Positional and its args to the list
++            for action, arg_count in zip(positionals, arg_counts):
++                args = arg_strings[start_index: start_index + arg_count]
++                start_index += arg_count
++                take_action(action, args)
++
++            # slice off the Positionals that we just parsed and return the
++            # index at which the Positionals' string args stopped
++            positionals[:] = positionals[len(arg_counts):]
++            return start_index
++
++        # consume Positionals and Optionals alternately, until we have
++        # passed the last option string
++        extras = []
++        start_index = 0
++        if option_string_indices:
++            max_option_string_index = max(option_string_indices)
++        else:
++            max_option_string_index = -1
++        while start_index <= max_option_string_index:
++
++            # consume any Positionals preceding the next option
++            next_option_string_index = min([
++                index
++                for index in option_string_indices
++                if index >= start_index])
++            if start_index != next_option_string_index:
++                positionals_end_index = consume_positionals(start_index)
++
++                # only try to parse the next optional if we didn't consume
++                # the option string during the positionals parsing
++                if positionals_end_index > start_index:
++                    start_index = positionals_end_index
++                    continue
++                else:
++                    start_index = positionals_end_index
++
++            # if we consumed all the positionals we could and we're not
++            # at the index of an option string, there were extra arguments
++            if start_index not in option_string_indices:
++                strings = arg_strings[start_index:next_option_string_index]
++                extras.extend(strings)
++                start_index = next_option_string_index
++
++            # consume the next optional and any arguments for it
++            start_index = consume_optional(start_index)
++
++        # consume any positionals following the last Optional
++        stop_index = consume_positionals(start_index)
++
++        # if we didn't consume all the argument strings, there were extras
++        extras.extend(arg_strings[stop_index:])
++
++        # if we didn't use all the Positional objects, there were too few
++        # arg strings supplied.
++        if positionals:
++            self.error(_('too few arguments'))
++
++        # make sure all required actions were present
++        for action in self._actions:
++            if action.required:
++                if action not in seen_actions:
++                    name = _get_action_name(action)
++                    self.error(_('argument %s is required') % name)
++
++        # make sure all required groups had one option present
++        for group in self._mutually_exclusive_groups:
++            if group.required:
++                for action in group._group_actions:
++                    if action in seen_non_default_actions:
++                        break
++
++                # if no actions were used, report the error
++                else:
++                    names = [_get_action_name(action)
++                             for action in group._group_actions
++                             if action.help is not SUPPRESS]
++                    msg = _('one of the arguments %s is required')
++                    self.error(msg % ' '.join(names))
++
++        # return the updated namespace and the extra arguments
++        return namespace, extras
++
++    def _read_args_from_files(self, arg_strings):
++        # expand arguments referencing files
++        new_arg_strings = []
++        for arg_string in arg_strings:
++
++            # for regular arguments, just add them back into the list
++            if arg_string[0] not in self.fromfile_prefix_chars:
++                new_arg_strings.append(arg_string)
++
++            # replace arguments referencing files with the file content
++            else:
++                try:
++                    args_file = open(arg_string[1:])
++                    try:
++                        arg_strings = []
++                        for arg_line in args_file.read().splitlines():
++                            for arg in self.convert_arg_line_to_args(arg_line):
++                                arg_strings.append(arg)
++                        arg_strings = self._read_args_from_files(arg_strings)
++                        new_arg_strings.extend(arg_strings)
++                    finally:
++                        args_file.close()
++                except IOError:
++                    err = _sys.exc_info()[1]
++                    self.error(str(err))
++
++        # return the modified argument list
++        return new_arg_strings
++
++    def convert_arg_line_to_args(self, arg_line):
++        return [arg_line]
++
++    def _match_argument(self, action, arg_strings_pattern):
++        # match the pattern for this action to the arg strings
++        nargs_pattern = self._get_nargs_pattern(action)
++        match = _re.match(nargs_pattern, arg_strings_pattern)
++
++        # raise an exception if we weren't able to find a match
++        if match is None:
++            nargs_errors = {
++                None: _('expected one argument'),
++                OPTIONAL: _('expected at most one argument'),
++                ONE_OR_MORE: _('expected at least one argument'),
++            }
++            default = _('expected %s argument(s)') % action.nargs
++            msg = nargs_errors.get(action.nargs, default)
++            raise ArgumentError(action, msg)
++
++        # return the number of arguments matched
++        return len(match.group(1))
++
++    def _match_arguments_partial(self, actions, arg_strings_pattern):
++        # progressively shorten the actions list by slicing off the
++        # final actions until we find a match
++        result = []
++        for i in range(len(actions), 0, -1):
++            actions_slice = actions[:i]
++            pattern = ''.join([self._get_nargs_pattern(action)
++                               for action in actions_slice])
++            match = _re.match(pattern, arg_strings_pattern)
++            if match is not None:
++                result.extend([len(string) for string in match.groups()])
++                break
++
++        # return the list of arg string counts
++        return result
++
++    def _parse_optional(self, arg_string):
++        # if it's an empty string, it was meant to be a positional
++        if not arg_string:
++            return None
++
++        # if it doesn't start with a prefix, it was meant to be positional
++        if not arg_string[0] in self.prefix_chars:
++            return None
++
++        # if the option string is present in the parser, return the action
++        if arg_string in self._option_string_actions:
++            action = self._option_string_actions[arg_string]
++            return action, arg_string, None
++
++        # if it's just a single character, it was meant to be positional
++        if len(arg_string) == 1:
++            return None
++
++        # if the option string before the "=" is present, return the action
++        if '=' in arg_string:
++            option_string, explicit_arg = arg_string.split('=', 1)
++            if option_string in self._option_string_actions:
++                action = self._option_string_actions[option_string]
++                return action, option_string, explicit_arg
++
++        # search through all possible prefixes of the option string
++        # and all actions in the parser for possible interpretations
++        option_tuples = self._get_option_tuples(arg_string)
++
++        # if multiple actions match, the option string was ambiguous
++        if len(option_tuples) > 1:
++            options = ', '.join([option_string
++                for action, option_string, explicit_arg in option_tuples])
++            tup = arg_string, options
++            self.error(_('ambiguous option: %s could match %s') % tup)
++
++        # if exactly one action matched, this segmentation is good,
++        # so return the parsed action
++        elif len(option_tuples) == 1:
++            option_tuple, = option_tuples
++            return option_tuple
++
++        # if it was not found as an option, but it looks like a negative
++        # number, it was meant to be positional
++        # unless there are negative-number-like options
++        if self._negative_number_matcher.match(arg_string):
++            if not self._has_negative_number_optionals:
++                return None
++
++        # if it contains a space, it was meant to be a positional
++        if ' ' in arg_string:
++            return None
++
++        # it was meant to be an optional but there is no such option
++        # in this parser (though it might be a valid option in a subparser)
++        return None, arg_string, None
++
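++    # Rough sketch of the classification above, assuming '-x' and '--foo'
++    # are registered options and '--nope' is not:
++    #   ''          -> None                        (empty: positional)
++    #   '-x'        -> (action, '-x', None)
++    #   '--foo=bar' -> (action, '--foo', 'bar')
++    #   '-1'        -> None when it looks like a negative number and no
++    #                  negative-number-like options are defined
++    #   '--nope'    -> (None, '--nope', None)      (unknown optional)
++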
++    def _get_option_tuples(self, option_string):
++        result = []
++
++        # option strings starting with two prefix characters are only
++        # split at the '='
++        chars = self.prefix_chars
++        if option_string[0] in chars and option_string[1] in chars:
++            if '=' in option_string:
++                option_prefix, explicit_arg = option_string.split('=', 1)
++            else:
++                option_prefix = option_string
++                explicit_arg = None
++            for option_string in self._option_string_actions:
++                if option_string.startswith(option_prefix):
++                    action = self._option_string_actions[option_string]
++                    tup = action, option_string, explicit_arg
++                    result.append(tup)
++
++        # single character options can be concatenated with their arguments
++        # but multiple character options always have to have their argument
++        # separate
++        elif option_string[0] in chars and option_string[1] not in chars:
++            option_prefix = option_string
++            explicit_arg = None
++            short_option_prefix = option_string[:2]
++            short_explicit_arg = option_string[2:]
++
++            for option_string in self._option_string_actions:
++                if option_string == short_option_prefix:
++                    action = self._option_string_actions[option_string]
++                    tup = action, option_string, short_explicit_arg
++                    result.append(tup)
++                elif option_string.startswith(option_prefix):
++                    action = self._option_string_actions[option_string]
++                    tup = action, option_string, explicit_arg
++                    result.append(tup)
++
++        # shouldn't ever get here
++        else:
++            self.error(_('unexpected option string: %s') % option_string)
++
++        # return the collected option tuples
++        return result
++
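++    # For example, with hypothetical options '--verbose' and '--version'
++    # registered, _get_option_tuples('--ver') returns one tuple for each,
++    # and _parse_optional() then reports '--ver' as ambiguous.
++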
++    def _get_nargs_pattern(self, action):
++        # in all examples below, we have to allow for '--' args
++        # which are represented as '-' in the pattern
++        nargs = action.nargs
++
++        # the default (None) is assumed to be a single argument
++        if nargs is None:
++            nargs_pattern = '(-*A-*)'
++
++        # allow zero or one arguments
++        elif nargs == OPTIONAL:
++            nargs_pattern = '(-*A?-*)'
++
++        # allow zero or more arguments
++        elif nargs == ZERO_OR_MORE:
++            nargs_pattern = '(-*[A-]*)'
++
++        # allow one or more arguments
++        elif nargs == ONE_OR_MORE:
++            nargs_pattern = '(-*A[A-]*)'
++
++        # allow any number of options or arguments
++        elif nargs == REMAINDER:
++            nargs_pattern = '([-AO]*)'
++
++        # allow one argument followed by any number of options or arguments
++        elif nargs == PARSER:
++            nargs_pattern = '(-*A[-AO]*)'
++
++        # all others should be integers
++        else:
++            nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
++
++        # if this is an optional action, -- is not allowed
++        if action.option_strings:
++            nargs_pattern = nargs_pattern.replace('-*', '')
++            nargs_pattern = nargs_pattern.replace('-', '')
++
++        # return the pattern
++        return nargs_pattern
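++
++    # A concrete reading of the patterns above (values invented): a
++    # positional with nargs=2 yields '(-*A-*A-*)'; matched against an
++    # argument-string pattern like 'AAO' (two plain arguments, then an
++    # option string), the group captures 'AA', so _match_argument()
++    # reports two consumed argument strings.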
++
++    # ========================
++    # Value conversion methods
++    # ========================
++    def _get_values(self, action, arg_strings):
++        # for everything but PARSER args, strip out '--'
++        if action.nargs not in [PARSER, REMAINDER]:
++            arg_strings = [s for s in arg_strings if s != '--']
++
++        # optional argument produces a default when not present
++        if not arg_strings and action.nargs == OPTIONAL:
++            if action.option_strings:
++                value = action.const
++            else:
++                value = action.default
++            if isinstance(value, basestring):
++                value = self._get_value(action, value)
++                self._check_value(action, value)
++
++        # when nargs='*' on a positional, if there were no command-line
++        # args, use the default if it is anything other than None
++        elif (not arg_strings and action.nargs == ZERO_OR_MORE and
++              not action.option_strings):
++            if action.default is not None:
++                value = action.default
++            else:
++                value = arg_strings
++            self._check_value(action, value)
++
++        # single argument or optional argument produces a single value
++        elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
++            arg_string, = arg_strings
++            value = self._get_value(action, arg_string)
++            self._check_value(action, value)
++
++        # REMAINDER arguments convert all values, checking none
++        elif action.nargs == REMAINDER:
++            value = [self._get_value(action, v) for v in arg_strings]
++
++        # PARSER arguments convert all values, but check only the first
++        elif action.nargs == PARSER:
++            value = [self._get_value(action, v) for v in arg_strings]
++            self._check_value(action, value[0])
++
++        # all other types of nargs produce a list
++        else:
++            value = [self._get_value(action, v) for v in arg_strings]
++            for v in value:
++                self._check_value(action, v)
++
++        # return the converted value
++        return value
++
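++    # Reading aid with invented values: an option declared with nargs='*'
++    # and type=int turns arg_strings ['1', '2'] into [1, 2], each element
++    # passed through _check_value(), while the default nargs=None converts
++    # a single string and checks the single resulting value.
++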
++    def _get_value(self, action, arg_string):
++        type_func = self._registry_get('type', action.type, action.type)
++        if not _callable(type_func):
++            msg = _('%r is not callable')
++            raise ArgumentError(action, msg % type_func)
++
++        # convert the value to the appropriate type
++        try:
++            result = type_func(arg_string)
++
++        # ArgumentTypeErrors indicate errors
++        except ArgumentTypeError:
++            name = getattr(action.type, '__name__', repr(action.type))
++            msg = str(_sys.exc_info()[1])
++            raise ArgumentError(action, msg)
++
++        # TypeErrors or ValueErrors also indicate errors
++        except (TypeError, ValueError):
++            name = getattr(action.type, '__name__', repr(action.type))
++            msg = _('invalid %s value: %r')
++            raise ArgumentError(action, msg % (name, arg_string))
++
++        # return the converted value
++        return result
++
++    def _check_value(self, action, value):
++        # converted value must be one of the choices (if specified)
++        if action.choices is not None and value not in action.choices:
++            tup = value, ', '.join(map(repr, action.choices))
++            msg = _('invalid choice: %r (choose from %s)') % tup
++            raise ArgumentError(action, msg)
++
++    # =======================
++    # Help-formatting methods
++    # =======================
++    def format_usage(self):
++        formatter = self._get_formatter()
++        formatter.add_usage(self.usage, self._actions,
++                            self._mutually_exclusive_groups)
++        return formatter.format_help()
++
++    def format_help(self):
++        formatter = self._get_formatter()
++
++        # usage
++        formatter.add_usage(self.usage, self._actions,
++                            self._mutually_exclusive_groups)
++
++        # description
++        formatter.add_text(self.description)
++
++        # positionals, optionals and user-defined groups
++        for action_group in self._action_groups:
++            formatter.start_section(action_group.title)
++            formatter.add_text(action_group.description)
++            formatter.add_arguments(action_group._group_actions)
++            formatter.end_section()
++
++        # epilog
++        formatter.add_text(self.epilog)
++
++        # determine help from format above
++        return formatter.format_help()
++
++    def format_version(self):
++        import warnings
++        warnings.warn(
++            'The format_version method is deprecated -- the "version" '
++            'argument to ArgumentParser is no longer supported.',
++            DeprecationWarning)
++        formatter = self._get_formatter()
++        formatter.add_text(self.version)
++        return formatter.format_help()
++
++    def _get_formatter(self):
++        return self.formatter_class(prog=self.prog)
++
++    # =====================
++    # Help-printing methods
++    # =====================
++    def print_usage(self, file=None):
++        if file is None:
++            file = _sys.stdout
++        self._print_message(self.format_usage(), file)
++
++    def print_help(self, file=None):
++        if file is None:
++            file = _sys.stdout
++        self._print_message(self.format_help(), file)
++
++    def print_version(self, file=None):
++        import warnings
++        warnings.warn(
++            'The print_version method is deprecated -- the "version" '
++            'argument to ArgumentParser is no longer supported.',
++            DeprecationWarning)
++        self._print_message(self.format_version(), file)
++
++    def _print_message(self, message, file=None):
++        if message:
++            if file is None:
++                file = _sys.stderr
++            file.write(message)
++
++    # ===============
++    # Exiting methods
++    # ===============
++    def exit(self, status=0, message=None):
++        if message:
++            self._print_message(message, _sys.stderr)
++        _sys.exit(status)
++
++    def error(self, message):
++        """error(message: string)
++
++        Prints a usage message incorporating the message to stderr and
++        exits.
++
++        If you override this in a subclass, it should not return -- it
++        should either exit or raise an exception.
++        """
++        self.print_usage(_sys.stderr)
++        self.exit(2, _('%s: error: %s\n') % (self.prog, message))
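++
++# Minimal usage sketch of the parser machinery above, kept as a comment so
++# that importing this module stays free of side effects (the option and
++# attribute names are invented for the example):
++#
++#     parser = ArgumentParser(prog='example')
++#     parser.add_argument('--verbose', action='store_true')
++#     parser.add_argument('paths', nargs='+')
++#     ns = parser.parse_args(['--verbose', 'a.txt', 'b.txt'])
++#     assert ns.verbose and ns.paths == ['a.txt', 'b.txt']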
+diff -up mozilla/rebase/mypickle.py.python2 mozilla/rebase/mypickle.py
+--- mozilla/rebase/mypickle.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/rebase/mypickle.py	2013-12-07 22:18:39.000000000 +0100
+@@ -0,0 +1,1403 @@
++"""Create portable serialized representations of Python objects.
++
++See module cPickle for a (much) faster implementation.
++See module copy_reg for a mechanism for registering custom picklers.
++See module pickletools source for extensive comments.
++
++Classes:
++
++    Pickler
++    Unpickler
++
++Functions:
++
++    dump(object, file)
++    dumps(object) -> string
++    load(file) -> object
++    loads(string) -> object
++
++Misc variables:
++
++    __version__
++    format_version
++    compatible_formats
++
++"""
++
++__version__ = "$Revision: 72223 $"       # Code version
++
++from types import *
++from copy_reg import dispatch_table
++from copy_reg import _extension_registry, _inverted_registry, _extension_cache
++import marshal
++import sys
++import struct
++import re
++
++__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
++           "Unpickler", "dump", "dumps", "load", "loads"]
++
++# These are purely informational; no code uses these.
++format_version = "2.0"                  # File format version we write
++compatible_formats = ["1.0",            # Original protocol 0
++                      "1.1",            # Protocol 0 with INST added
++                      "1.2",            # Original protocol 1
++                      "1.3",            # Protocol 1 with BINFLOAT added
++                      "2.0",            # Protocol 2
++                      ]                 # Old format versions we can read
++
++# Keep in synch with cPickle.  This is the highest protocol number we
++# know how to read.
++HIGHEST_PROTOCOL = 2
++
++# Why use struct.pack() for pickling but marshal.loads() for
++# unpickling?  struct.pack() is 40% faster than marshal.dumps(), but
++# marshal.loads() is twice as fast as struct.unpack()!
++mloads = marshal.loads
++
++class PickleError(Exception):
++    """A common base class for the other pickling exceptions."""
++    pass
++
++class PicklingError(PickleError):
++    """This exception is raised when an unpicklable object is passed to the
++    dump() method.
++
++    """
++    pass
++
++class UnpicklingError(PickleError):
++    """This exception is raised when there is a problem unpickling an object,
++    such as a security violation.
++
++    Note that other exceptions may also be raised during unpickling, including
++    (but not necessarily limited to) AttributeError, EOFError, ImportError,
++    and IndexError.
++
++    """
++    pass
++
++# An instance of _Stop is raised by Unpickler.load_stop() in response to
++# the STOP opcode, passing the object that is the result of unpickling.
++class _Stop(Exception):
++    def __init__(self, value):
++        self.value = value
++
++# Jython has PyStringMap; it's a dict subclass with string keys
++try:
++    from org.python.core import PyStringMap
++except ImportError:
++    PyStringMap = None
++
++# UnicodeType may or may not be exported (normally imported from types)
++try:
++    UnicodeType
++except NameError:
++    UnicodeType = None
++
++# Pickle opcodes.  See pickletools.py for extensive docs.  The listing
++# here is in kind-of alphabetical order of 1-character pickle code.
++# pickletools groups them by purpose.
++
++MARK            = '('   # push special markobject on stack
++STOP            = '.'   # every pickle ends with STOP
++POP             = '0'   # discard topmost stack item
++POP_MARK        = '1'   # discard stack top through topmost markobject
++DUP             = '2'   # duplicate top stack item
++FLOAT           = 'F'   # push float object; decimal string argument
++INT             = 'I'   # push integer or bool; decimal string argument
++BININT          = 'J'   # push four-byte signed int
++BININT1         = 'K'   # push 1-byte unsigned int
++LONG            = 'L'   # push long; decimal string argument
++BININT2         = 'M'   # push 2-byte unsigned int
++NONE            = 'N'   # push None
++PERSID          = 'P'   # push persistent object; id is taken from string arg
++BINPERSID       = 'Q'   #  "       "         "  ;  "  "   "     "  stack
++REDUCE          = 'R'   # apply callable to argtuple, both on stack
++STRING          = 'S'   # push string; NL-terminated string argument
++BINSTRING       = 'T'   # push string; counted binary string argument
++SHORT_BINSTRING = 'U'   #  "     "   ;    "      "       "      " < 256 bytes
++UNICODE         = 'V'   # push Unicode string; raw-unicode-escaped'd argument
++BINUNICODE      = 'X'   #   "     "       "  ; counted UTF-8 string argument
++APPEND          = 'a'   # append stack top to list below it
++BUILD           = 'b'   # call __setstate__ or __dict__.update()
++GLOBAL          = 'c'   # push self.find_class(modname, name); 2 string args
++DICT            = 'd'   # build a dict from stack items
++EMPTY_DICT      = '}'   # push empty dict
++APPENDS         = 'e'   # extend list on stack by topmost stack slice
++GET             = 'g'   # push item from memo on stack; index is string arg
++BINGET          = 'h'   #   "    "    "    "   "   "  ;   "    " 1-byte arg
++INST            = 'i'   # build & push class instance
++LONG_BINGET     = 'j'   # push item from memo on stack; index is 4-byte arg
++LIST            = 'l'   # build list from topmost stack items
++EMPTY_LIST      = ']'   # push empty list
++OBJ             = 'o'   # build & push class instance
++PUT             = 'p'   # store stack top in memo; index is string arg
++BINPUT          = 'q'   #   "     "    "   "   " ;   "    " 1-byte arg
++LONG_BINPUT     = 'r'   #   "     "    "   "   " ;   "    " 4-byte arg
++SETITEM         = 's'   # add key+value pair to dict
++TUPLE           = 't'   # build tuple from topmost stack items
++EMPTY_TUPLE     = ')'   # push empty tuple
++SETITEMS        = 'u'   # modify dict by adding topmost key+value pairs
++BINFLOAT        = 'G'   # push float; arg is 8-byte float encoding
++
++TRUE            = 'I01\n'  # not an opcode; see INT docs in pickletools.py
++FALSE           = 'I00\n'  # not an opcode; see INT docs in pickletools.py
++
++# Protocol 2
++
++PROTO           = '\x80'  # identify pickle protocol
++NEWOBJ          = '\x81'  # build object by applying cls.__new__ to argtuple
++EXT1            = '\x82'  # push object from extension registry; 1-byte index
++EXT2            = '\x83'  # ditto, but 2-byte index
++EXT4            = '\x84'  # ditto, but 4-byte index
++TUPLE1          = '\x85'  # build 1-tuple from stack top
++TUPLE2          = '\x86'  # build 2-tuple from two topmost stack items
++TUPLE3          = '\x87'  # build 3-tuple from three topmost stack items
++NEWTRUE         = '\x88'  # push True
++NEWFALSE        = '\x89'  # push False
++LONG1           = '\x8a'  # push long from < 256 bytes
++LONG4           = '\x8b'  # push really big long
++
++_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3]
++
++
++__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$",x)])
++del x
++
++
++# Pickling machinery
++
++class Pickler:
++
++    def __init__(self, file, protocol=None):
++        """This takes a file-like object for writing a pickle data stream.
++
++        The optional protocol argument tells the pickler to use the
++        given protocol; supported protocols are 0, 1, 2.  The default
++        protocol is 0, to be backwards compatible.  (Protocol 0 is the
++        only protocol that can be written to a file opened in text
++        mode and read back successfully.  When using a protocol higher
++        than 0, make sure the file is opened in binary mode, both when
++        pickling and unpickling.)
++
++        Protocol 1 is more efficient than protocol 0; protocol 2 is
++        more efficient than protocol 1.
++
++        Specifying a negative protocol version selects the highest
++        protocol version supported.  The higher the protocol used, the
++        more recent the version of Python needed to read the pickle
++        produced.
++
++        The file parameter must have a write() method that accepts a single
++        string argument.  It can thus be an open file object, a StringIO
++        object, or any other custom object that meets this interface.
++
++        """
++        if protocol is None:
++            protocol = 0
++        if protocol < 0:
++            protocol = HIGHEST_PROTOCOL
++        elif not 0 <= protocol <= HIGHEST_PROTOCOL:
++            raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL)
++        self.write = file.write
++        self.memo = {}
++        self.proto = int(protocol)
++        self.bin = protocol >= 1
++        self.fast = 0
++
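++    # Typical use, as a comment-only sketch (cStringIO is assumed to be
++    # importable, as on any CPython 2.x build):
++    #
++    #     from cStringIO import StringIO
++    #     out = StringIO()
++    #     Pickler(out, 2).dump({'answer': 42})
++    #     data = out.getvalue()
++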
++    def clear_memo(self):
++        """Clears the pickler's "memo".
++
++        The memo is the data structure that remembers which objects the
++        pickler has already seen, so that shared or recursive objects are
++        pickled by reference and not by value.  This method is useful when
++        re-using picklers.
++
++        """
++        self.memo.clear()
++
++    def dump(self, obj):
++        """Write a pickled representation of obj to the open file."""
++        if self.proto >= 2:
++            self.write(PROTO + chr(self.proto))
++        self.save(obj)
++        self.write(STOP)
++
++    def memoize(self, obj):
++        """Store an object in the memo."""
++
++        # The Pickler memo is a dictionary mapping object ids to 2-tuples
++        # that contain the Unpickler memo key and the object being memoized.
++        # The memo key is written to the pickle and will become
++        # the key in the Unpickler's memo.  The object is stored in the
++        # Pickler memo so that transient objects are kept alive during
++        # pickling.
++
++        # The use of the Unpickler memo length as the memo key is just a
++        # convention.  The only requirement is that the memo values be unique.
++        # But there appears no advantage to any other scheme, and this
++        # scheme allows the Unpickler memo to be implemented as a plain (but
++        # growable) array, indexed by memo key.
++        if self.fast:
++            return
++        assert id(obj) not in self.memo
++        memo_len = len(self.memo)
++        self.write(self.put(memo_len))
++        self.memo[id(obj)] = memo_len, obj
++
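++    # For instance, when pickling [x, x] in binary mode (where x is itself
++    # a list), x is written once followed by a BINPUT; the second reference
++    # becomes a BINGET for the same memo key, so after unpickling both list
++    # slots still refer to a single shared object, as in the original.
++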
++    # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
++    def put(self, i, pack=struct.pack):
++        if self.bin:
++            if i < 256:
++                return BINPUT + chr(i)
++            else:
++                return LONG_BINPUT + pack("<i", i)
++
++        return PUT + repr(i) + '\n'
++
++    # Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
++    def get(self, i, pack=struct.pack):
++        if self.bin:
++            if i < 256:
++                return BINGET + chr(i)
++            else:
++                return LONG_BINGET + pack("<i", i)
++
++        return GET + repr(i) + '\n'
++
++    def save(self, obj):
++        # Check for persistent id (defined by a subclass)
++        pid = self.persistent_id(obj)
++        if pid:
++            self.save_pers(pid)
++            return
++
++        # Check the memo
++        x = self.memo.get(id(obj))
++        if x:
++            self.write(self.get(x[0]))
++            return
++
++        # Check the type dispatch table
++        t = type(obj)
++        f = self.dispatch.get(t)
++        if f:
++            f(self, obj) # Call unbound method with explicit self
++            return
++
++        # Check copy_reg.dispatch_table
++        reduce = dispatch_table.get(t)
++        if reduce:
++            rv = reduce(obj)
++        else:
++            # Check for a class with a custom metaclass; treat as regular class
++            try:
++                issc = issubclass(t, TypeType)
++            except TypeError: # t is not a class (old Boost; see SF #502085)
++                issc = 0
++            if issc:
++                self.save_global(obj)
++                return
++
++            # Check for a __reduce_ex__ method, fall back to __reduce__
++            reduce = getattr(obj, "__reduce_ex__", None)
++            if reduce:
++                rv = reduce(self.proto)
++            else:
++                reduce = getattr(obj, "__reduce__", None)
++                if reduce:
++                    rv = reduce()
++                else:
++                    raise PicklingError("Can't pickle %r object: %r" %
++                                        (t.__name__, obj))
++
++        # Check for string returned by reduce(), meaning "save as global"
++        if type(rv) is StringType:
++            self.save_global(obj, rv)
++            return
++
++        # Assert that reduce() returned a tuple
++        if type(rv) is not TupleType:
++            raise PicklingError("%s must return string or tuple" % reduce)
++
++        # Assert that it returned an appropriately sized tuple
++        l = len(rv)
++        if not (2 <= l <= 5):
++            raise PicklingError("Tuple returned by %s must have "
++                                "two to five elements" % reduce)
++
++        # Save the reduce() output and finally memoize the object
++        self.save_reduce(obj=obj, *rv)
++
++    def persistent_id(self, obj):
++        # This exists so a subclass can override it
++        return None
++
++    def save_pers(self, pid):
++        # Save a persistent id reference
++        if self.bin:
++            self.save(pid)
++            self.write(BINPERSID)
++        else:
++            self.write(PERSID + str(pid) + '\n')
++
++    def save_reduce(self, func, args, state=None,
++                    listitems=None, dictitems=None, obj=None):
++        # This API is called by some subclasses
++
++        # Assert that args is a tuple or None
++        if not isinstance(args, TupleType):
++            raise PicklingError("args from reduce() should be a tuple")
++
++        # Assert that func is callable
++        if not hasattr(func, '__call__'):
++            raise PicklingError("func from reduce should be callable")
++
++        save = self.save
++        write = self.write
++
++        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
++        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
++            # A __reduce__ implementation can direct protocol 2 to
++            # use the more efficient NEWOBJ opcode, while still
++            # allowing protocol 0 and 1 to work normally.  For this to
++            # work, the function returned by __reduce__ should be
++            # called __newobj__, and its first argument should be a
++            # new-style class.  The implementation for __newobj__
++            # should be as follows, although pickle has no way to
++            # verify this:
++            #
++            # def __newobj__(cls, *args):
++            #     return cls.__new__(cls, *args)
++            #
++            # Protocols 0 and 1 will pickle a reference to __newobj__,
++            # while protocol 2 (and above) will pickle a reference to
++            # cls, the remaining args tuple, and the NEWOBJ code,
++            # which calls cls.__new__(cls, *args) at unpickling time
++            # (see load_newobj below).  If __reduce__ returns a
++            # three-tuple, the state from the third tuple item will be
++            # pickled regardless of the protocol, calling __setstate__
++            # at unpickling time (see load_build below).
++            #
++            # Note that no standard __newobj__ implementation exists;
++            # you have to provide your own.  This is to enforce
++            # compatibility with Python 2.2 (pickles written using
++            # protocol 0 or 1 in Python 2.3 should be unpicklable by
++            # Python 2.2).
++            cls = args[0]
++            if not hasattr(cls, "__new__"):
++                raise PicklingError(
++                    "args[0] from __newobj__ args has no __new__")
++            if obj is not None and cls is not obj.__class__:
++                raise PicklingError(
++                    "args[0] from __newobj__ args has the wrong class")
++            args = args[1:]
++            save(cls)
++            save(args)
++            write(NEWOBJ)
++        else:
++            save(func)
++            save(args)
++            write(REDUCE)
++
++        if obj is not None:
++            self.memoize(obj)
++
++        # More new special cases (that work with older protocols as
++        # well): when __reduce__ returns a tuple with 4 or 5 items,
++        # the 4th and 5th item should be iterators that provide list
++        # items and dict items (as (key, value) tuples), or None.
++
++        if listitems is not None:
++            self._batch_appends(listitems)
++
++        if dictitems is not None:
++            self._batch_setitems(dictitems)
++
++        if state is not None:
++            save(state)
++            write(BUILD)
++
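++    # For orientation, with an invented reduce value: if __reduce__ returns
++    #     (MyClass._rebuild, (1, 2), {'cached': None})
++    # save_reduce() emits save(func), save(args) and REDUCE, then save(state)
++    # and BUILD, so unpickling calls MyClass._rebuild(1, 2) and applies the
++    # state through __setstate__ or __dict__.update().
++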
++    # Methods below this point are dispatched through the dispatch table
++
++    dispatch = {}
++
++    def save_none(self, obj):
++        self.write(NONE)
++    dispatch[NoneType] = save_none
++
++    def save_bool(self, obj):
++        if self.proto >= 2:
++            self.write(obj and NEWTRUE or NEWFALSE)
++        else:
++            self.write(obj and TRUE or FALSE)
++    dispatch[bool] = save_bool
++
++    def save_int(self, obj, pack=struct.pack):
++        if self.bin:
++            # If the int is small enough to fit in a signed 4-byte 2's-comp
++            # format, we can store it more efficiently than the general
++            # case.
++            # First one- and two-byte unsigned ints:
++            if obj >= 0:
++                if obj <= 0xff:
++                    self.write(BININT1 + chr(obj))
++                    return
++                if obj <= 0xffff:
++                    self.write("%c%c%c" % (BININT2, obj&0xff, obj>>8))
++                    return
++            # Next check for 4-byte signed ints:
++            high_bits = obj >> 31  # note that Python shift sign-extends
++            if high_bits == 0 or high_bits == -1:
++                # All high bits are copies of bit 2**31, so the value
++                # fits in a 4-byte signed int.
++                self.write(BININT + pack("<i", obj))
++                return
++        # Text pickle, or int too big to fit in signed 4-byte format.
++        self.write(INT + repr(obj) + '\n')
++    dispatch[IntType] = save_int
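++
++    # Concrete encodings produced above in binary mode (example values):
++    # 5 -> 'K\x05' (BININT1), 300 -> 'M\x2c\x01' (BININT2, low byte first),
++    # -1 -> 'J\xff\xff\xff\xff' (BININT, packed as a little-endian int).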
++
++    def save_long(self, obj, pack=struct.pack):
++        if self.proto >= 2:
++            bytes = encode_long(obj)
++            n = len(bytes)
++            if n < 256:
++                self.write(LONG1 + chr(n) + bytes)
++            else:
++                self.write(LONG4 + pack("<i", n) + bytes)
++            return
++        self.write(LONG + repr(obj) + '\n')
++    dispatch[LongType] = save_long
++
++    def save_float(self, obj, pack=struct.pack):
++        # Positive and negative infinity are written as sentinel magnitudes
++        # (a "very cheap trick"; load_float below reverses it).
++        if obj == float('inf'):
++            self.write(FLOAT + repr(float('9999999999999.0')) + '\n')
++        elif obj == float('-inf'):
++            self.write(FLOAT + repr(float('-9999999999999.0')) + '\n')
++        elif self.bin:
++            self.write(BINFLOAT + pack('>d', obj))
++        else:
++            self.write(FLOAT + repr(obj) + '\n')
++    dispatch[FloatType] = save_float
++
++    def save_string(self, obj, pack=struct.pack):
++        if self.bin:
++            n = len(obj)
++            if n < 256:
++                self.write(SHORT_BINSTRING + chr(n) + obj)
++            else:
++                self.write(BINSTRING + pack("<i", n) + obj)
++        else:
++            self.write(STRING + repr(obj) + '\n')
++        self.memoize(obj)
++    dispatch[StringType] = save_string
++
++    def save_unicode(self, obj, pack=struct.pack):
++        if self.bin:
++            encoding = obj.encode('utf-8')
++            n = len(encoding)
++            self.write(BINUNICODE + pack("<i", n) + encoding)
++        else:
++            obj = obj.replace("\\", "\\u005c")
++            obj = obj.replace("\n", "\\u000a")
++            self.write(UNICODE + obj.encode('raw-unicode-escape') + '\n')
++        self.memoize(obj)
++    dispatch[UnicodeType] = save_unicode
++
++    if StringType is UnicodeType:
++        # This is true for Jython
++        def save_string(self, obj, pack=struct.pack):
++            unicode = obj.isunicode()
++
++            if self.bin:
++                if unicode:
++                    obj = obj.encode("utf-8")
++                l = len(obj)
++                if l < 256 and not unicode:
++                    self.write(SHORT_BINSTRING + chr(l) + obj)
++                else:
++                    s = pack("<i", l)
++                    if unicode:
++                        self.write(BINUNICODE + s + obj)
++                    else:
++                        self.write(BINSTRING + s + obj)
++            else:
++                if unicode:
++                    obj = obj.replace("\\", "\\u005c")
++                    obj = obj.replace("\n", "\\u000a")
++                    obj = obj.encode('raw-unicode-escape')
++                    self.write(UNICODE + obj + '\n')
++                else:
++                    self.write(STRING + repr(obj) + '\n')
++            self.memoize(obj)
++        dispatch[StringType] = save_string
++
++    def save_tuple(self, obj):
++        write = self.write
++        proto = self.proto
++
++        n = len(obj)
++        if n == 0:
++            if proto:
++                write(EMPTY_TUPLE)
++            else:
++                write(MARK + TUPLE)
++            return
++
++        save = self.save
++        memo = self.memo
++        if n <= 3 and proto >= 2:
++            for element in obj:
++                save(element)
++            # Subtle.  Same as in the big comment below.
++            if id(obj) in memo:
++                get = self.get(memo[id(obj)][0])
++                write(POP * n + get)
++            else:
++                write(_tuplesize2code[n])
++                self.memoize(obj)
++            return
++
++        # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple
++        # has more than 3 elements.
++        write(MARK)
++        for element in obj:
++            save(element)
++
++        if id(obj) in memo:
++            # Subtle.  d was not in memo when we entered save_tuple(), so
++            # the process of saving the tuple's elements must have saved
++            # the tuple itself:  the tuple is recursive.  The proper action
++            # now is to throw away everything we put on the stack, and
++            # simply GET the tuple (it's already constructed).  This check
++            # could have been done in the "for element" loop instead, but
++            # recursive tuples are a rare thing.
++            get = self.get(memo[id(obj)][0])
++            if proto:
++                write(POP_MARK + get)
++            else:   # proto 0 -- POP_MARK not available
++                write(POP * (n+1) + get)
++            return
++
++        # No recursion.
++        self.write(TUPLE)
++        self.memoize(obj)
++
++    dispatch[TupleType] = save_tuple
++
++    # save_empty_tuple() isn't used by anything in Python 2.3.  However, I
++    # found a Pickler subclass in Zope3 that calls it, so it's not harmless
++    # to remove it.
++    def save_empty_tuple(self, obj):
++        self.write(EMPTY_TUPLE)
++
++    def save_list(self, obj):
++        write = self.write
++
++        if self.bin:
++            write(EMPTY_LIST)
++        else:   # proto 0 -- can't use EMPTY_LIST
++            write(MARK + LIST)
++
++        self.memoize(obj)
++        self._batch_appends(iter(obj))
++
++    dispatch[ListType] = save_list
++
++    # Keep in synch with cPickle's BATCHSIZE.  Nothing will break if it gets
++    # out of synch, though.
++    _BATCHSIZE = 1000
++
++    def _batch_appends(self, items):
++        # Helper to batch up APPENDS sequences
++        save = self.save
++        write = self.write
++
++        if not self.bin:
++            for x in items:
++                save(x)
++                write(APPEND)
++            return
++
++        r = xrange(self._BATCHSIZE)
++        while items is not None:
++            tmp = []
++            for i in r:
++                try:
++                    x = items.next()
++                    tmp.append(x)
++                except StopIteration:
++                    items = None
++                    break
++            n = len(tmp)
++            if n > 1:
++                write(MARK)
++                for x in tmp:
++                    save(x)
++                write(APPENDS)
++            elif n:
++                save(tmp[0])
++                write(APPEND)
++            # else tmp is empty, and we're done
++
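++    # With this batching, a 2500-element list (size chosen only for the
++    # example) is emitted as three MARK ... APPENDS groups of 1000, 1000
++    # and 500 items rather than as 2500 separate APPEND opcodes.
++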
++    def save_dict(self, obj):
++        write = self.write
++
++        if self.bin:
++            write(EMPTY_DICT)
++        else:   # proto 0 -- can't use EMPTY_DICT
++            write(MARK + DICT)
++
++        self.memoize(obj)
++        self._batch_setitems(obj.iteritems())
++
++    dispatch[DictionaryType] = save_dict
++    if not PyStringMap is None:
++        dispatch[PyStringMap] = save_dict
++
++    def _batch_setitems(self, items):
++        # Helper to batch up SETITEMS sequences; proto >= 1 only
++        save = self.save
++        write = self.write
++
++        if not self.bin:
++            for k, v in items:
++                save(k)
++                save(v)
++                write(SETITEM)
++            return
++
++        r = xrange(self._BATCHSIZE)
++        while items is not None:
++            tmp = []
++            for i in r:
++                try:
++                    tmp.append(items.next())
++                except StopIteration:
++                    items = None
++                    break
++            n = len(tmp)
++            if n > 1:
++                write(MARK)
++                for k, v in tmp:
++                    save(k)
++                    save(v)
++                write(SETITEMS)
++            elif n:
++                k, v = tmp[0]
++                save(k)
++                save(v)
++                write(SETITEM)
++            # else tmp is empty, and we're done
++
++    def save_inst(self, obj):
++        cls = obj.__class__
++
++        memo  = self.memo
++        write = self.write
++        save  = self.save
++
++        if hasattr(obj, '__getinitargs__'):
++            args = obj.__getinitargs__()
++            len(args) # XXX Assert it's a sequence
++            _keep_alive(args, memo)
++        else:
++            args = ()
++
++        write(MARK)
++
++        if self.bin:
++            save(cls)
++            for arg in args:
++                save(arg)
++            write(OBJ)
++        else:
++            for arg in args:
++                save(arg)
++            write(INST + cls.__module__ + '\n' + cls.__name__ + '\n')
++
++        self.memoize(obj)
++
++        try:
++            getstate = obj.__getstate__
++        except AttributeError:
++            stuff = obj.__dict__
++        else:
++            stuff = getstate()
++            _keep_alive(stuff, memo)
++        save(stuff)
++        write(BUILD)
++
++    dispatch[InstanceType] = save_inst
++
++    def save_global(self, obj, name=None, pack=struct.pack):
++        write = self.write
++        memo = self.memo
++
++        if name is None:
++            name = obj.__name__
++
++        module = getattr(obj, "__module__", None)
++        if module is None:
++            module = whichmodule(obj, name)
++
++        try:
++            __import__(module)
++            mod = sys.modules[module]
++            klass = getattr(mod, name)
++        except (ImportError, KeyError, AttributeError):
++            raise PicklingError(
++                "Can't pickle %r: it's not found as %s.%s" %
++                (obj, module, name))
++        else:
++            if klass is not obj:
++                raise PicklingError(
++                    "Can't pickle %r: it's not the same object as %s.%s" %
++                    (obj, module, name))
++
++        if self.proto >= 2:
++            code = _extension_registry.get((module, name))
++            if code:
++                assert code > 0
++                if code <= 0xff:
++                    write(EXT1 + chr(code))
++                elif code <= 0xffff:
++                    write("%c%c%c" % (EXT2, code&0xff, code>>8))
++                else:
++                    write(EXT4 + pack("<i", code))
++                return
++
++        write(GLOBAL + module + '\n' + name + '\n')
++        self.memoize(obj)
++
++    dispatch[ClassType] = save_global
++    dispatch[FunctionType] = save_global
++    dispatch[BuiltinFunctionType] = save_global
++    dispatch[TypeType] = save_global
++
++# Pickling helpers
++
++def _keep_alive(x, memo):
++    """Keeps a reference to the object x in the memo.
++
++    Because we remember objects by their id, we have
++    to assure that possibly temporary objects are kept
++    alive by referencing them.
++    We store a reference at the id of the memo, which should
++    normally not be used unless someone tries to deepcopy
++    the memo itself...
++    """
++    try:
++        memo[id(memo)].append(x)
++    except KeyError:
++        # aha, this is the first one :-)
++        memo[id(memo)]=[x]
++
++
++# A cache for whichmodule(), mapping a function object to the name of
++# the module in which the function was found.
++
++classmap = {} # called classmap for backwards compatibility
++
++def whichmodule(func, funcname):
++    """Figure out the module in which a function occurs.
++
++    Search sys.modules for the module.
++    Cache in classmap.
++    Return a module name.
++    If the function cannot be found, return "__main__".
++    """
++    # Python functions should always get an __module__ from their globals.
++    mod = getattr(func, "__module__", None)
++    if mod is not None:
++        return mod
++    if func in classmap:
++        return classmap[func]
++
++    for name, module in sys.modules.items():
++        if module is None:
++            continue # skip dummy package entries
++        if name != '__main__' and getattr(module, funcname, None) is func:
++            break
++    else:
++        name = '__main__'
++    classmap[func] = name
++    return name
++
++
++# Unpickling machinery
++
++class Unpickler:
++
++    def __init__(self, file):
++        """This takes a file-like object for reading a pickle data stream.
++
++        The protocol version of the pickle is detected automatically, so no
++        proto argument is needed.
++
++        The file-like object must have two methods, a read() method that
++        takes an integer argument, and a readline() method that requires no
++        arguments.  Both methods should return a string.  Thus the file-like
++        object can be a file object opened for reading, a StringIO object,
++        or any other custom object that meets this interface.
++        """
++        self.readline = file.readline
++        self.read = file.read
++        self.memo = {}
++
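++    # Comment-only round-trip sketch (variable names invented; StringIO is
++    # assumed, as in the Pickler example above):
++    #
++    #     data = StringIO()
++    #     Pickler(data, 2).dump(obj)
++    #     data.seek(0)
++    #     copy = Unpickler(data).load()
++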
++    def load(self):
++        """Read a pickled object representation from the open file.
++
++        Return the reconstituted object hierarchy specified in the file.
++        """
++        self.mark = object() # any new unique object
++        self.stack = []
++        self.append = self.stack.append
++        read = self.read
++        dispatch = self.dispatch
++        try:
++            while 1:
++                key = read(1)
++                dispatch[key](self)
++        except _Stop, stopinst:
++            return stopinst.value
++
++    # Return largest index k such that self.stack[k] is self.mark.
++    # If the stack doesn't contain a mark, eventually raises IndexError.
++    # This could be sped by maintaining another stack, of indices at which
++    # the mark appears.  For that matter, the latter stack would suffice,
++    # and we wouldn't need to push mark objects on self.stack at all.
++    # Doing so is probably a good thing, though, since if the pickle is
++    # corrupt (or hostile) we may get a clue from finding self.mark embedded
++    # in unpickled objects.
++    def marker(self):
++        stack = self.stack
++        mark = self.mark
++        k = len(stack)-1
++        while stack[k] is not mark: k = k-1
++        return k
++
++    dispatch = {}
++
++    def load_eof(self):
++        raise EOFError
++    dispatch[''] = load_eof
++
++    def load_proto(self):
++        proto = ord(self.read(1))
++        if not 0 <= proto <= 2:
++            raise ValueError, "unsupported pickle protocol: %d" % proto
++    dispatch[PROTO] = load_proto
++
++    def load_persid(self):
++        pid = self.readline()[:-1]
++        self.append(self.persistent_load(pid))
++    dispatch[PERSID] = load_persid
++
++    def load_binpersid(self):
++        pid = self.stack.pop()
++        self.append(self.persistent_load(pid))
++    dispatch[BINPERSID] = load_binpersid
++
++    def load_none(self):
++        self.append(None)
++    dispatch[NONE] = load_none
++
++    def load_false(self):
++        self.append(False)
++    dispatch[NEWFALSE] = load_false
++
++    def load_true(self):
++        self.append(True)
++    dispatch[NEWTRUE] = load_true
++
++    def load_int(self):
++        data = self.readline()
++        if data == FALSE[1:]:
++            val = False
++        elif data == TRUE[1:]:
++            val = True
++        else:
++            try:
++                val = int(data)
++            except ValueError:
++                val = long(data)
++        self.append(val)
++    dispatch[INT] = load_int
++
++    def load_binint(self):
++        self.append(mloads('i' + self.read(4)))
++    dispatch[BININT] = load_binint
++
++    def load_binint1(self):
++        self.append(ord(self.read(1)))
++    dispatch[BININT1] = load_binint1
++
++    def load_binint2(self):
++        self.append(mloads('i' + self.read(2) + '\000\000'))
++    dispatch[BININT2] = load_binint2
++
++    def load_long(self):
++        self.append(long(self.readline()[:-1], 0))
++    dispatch[LONG] = load_long
++
++    def load_long1(self):
++        n = ord(self.read(1))
++        bytes = self.read(n)
++        self.append(decode_long(bytes))
++    dispatch[LONG1] = load_long1
++
++    def load_long4(self):
++        n = mloads('i' + self.read(4))
++        bytes = self.read(n)
++        self.append(decode_long(bytes))
++    dispatch[LONG4] = load_long4
++
++    def load_float(self):
++        num = float(self.readline()[:-1])
++        if num == 9999999999999.0: # very cheap trick
++            num = float('inf')
++        if num == -9999999999999.0: # very cheap trick
++            num = float('-inf')
++        self.append(num)
++    dispatch[FLOAT] = load_float
++
++    def load_binfloat(self, unpack=struct.unpack):
++        self.append(unpack('>d', self.read(8))[0])
++    dispatch[BINFLOAT] = load_binfloat
++
++    def load_string(self):
++        rep = self.readline()[:-1]
++        for q in "\"'": # double or single quote
++            if rep.startswith(q):
++                if len(rep) < 2 or not rep.endswith(q):
++                    raise ValueError, "insecure string pickle"
++                rep = rep[len(q):-len(q)]
++                break
++        else:
++            raise ValueError, "insecure string pickle"
++        self.append(rep.decode("string-escape"))
++    dispatch[STRING] = load_string
++
++    def load_binstring(self):
++        len = mloads('i' + self.read(4))
++        self.append(self.read(len))
++    dispatch[BINSTRING] = load_binstring
++
++    def load_unicode(self):
++        self.append(unicode(self.readline()[:-1],'raw-unicode-escape'))
++    dispatch[UNICODE] = load_unicode
++
++    def load_binunicode(self):
++        len = mloads('i' + self.read(4))
++        self.append(unicode(self.read(len),'utf-8'))
++    dispatch[BINUNICODE] = load_binunicode
++
++    def load_short_binstring(self):
++        len = ord(self.read(1))
++        self.append(self.read(len))
++    dispatch[SHORT_BINSTRING] = load_short_binstring
++
++    def load_tuple(self):
++        k = self.marker()
++        self.stack[k:] = [tuple(self.stack[k+1:])]
++    dispatch[TUPLE] = load_tuple
++
++    def load_empty_tuple(self):
++        self.stack.append(())
++    dispatch[EMPTY_TUPLE] = load_empty_tuple
++
++    def load_tuple1(self):
++        self.stack[-1] = (self.stack[-1],)
++    dispatch[TUPLE1] = load_tuple1
++
++    def load_tuple2(self):
++        self.stack[-2:] = [(self.stack[-2], self.stack[-1])]
++    dispatch[TUPLE2] = load_tuple2
++
++    def load_tuple3(self):
++        self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])]
++    dispatch[TUPLE3] = load_tuple3
++
++    def load_empty_list(self):
++        self.stack.append([])
++    dispatch[EMPTY_LIST] = load_empty_list
++
++    def load_empty_dictionary(self):
++        self.stack.append({})
++    dispatch[EMPTY_DICT] = load_empty_dictionary
++
++    def load_list(self):
++        k = self.marker()
++        self.stack[k:] = [self.stack[k+1:]]
++    dispatch[LIST] = load_list
++
++    def load_dict(self):
++        k = self.marker()
++        d = {}
++        items = self.stack[k+1:]
++        for i in range(0, len(items), 2):
++            key = items[i]
++            value = items[i+1]
++            d[key] = value
++        self.stack[k:] = [d]
++    dispatch[DICT] = load_dict
++
++    # INST and OBJ differ only in how they get a class object.  It's not
++    # only sensible to do the rest in a common routine, the two routines
++    # previously diverged and grew different bugs.
++    # klass is the class to instantiate, and k points to the topmost mark
++    # object, following which are the arguments for klass.__init__.
++    def _instantiate(self, klass, k):
++        args = tuple(self.stack[k+1:])
++        del self.stack[k:]
++        instantiated = 0
++        if (not args and
++                type(klass) is ClassType and
++                not hasattr(klass, "__getinitargs__")):
++            try:
++                value = _EmptyClass()
++                value.__class__ = klass
++                instantiated = 1
++            except RuntimeError:
++                # In restricted execution, assignment to inst.__class__ is
++                # prohibited
++                pass
++        if not instantiated:
++            try:
++                value = klass(*args)
++            except TypeError, err:
++                raise TypeError, "in constructor for %s: %s" % (
++                    klass.__name__, str(err)), sys.exc_info()[2]
++        self.append(value)
++
++    def load_inst(self):
++        module = self.readline()[:-1]
++        name = self.readline()[:-1]
++        klass = self.find_class(module, name)
++        self._instantiate(klass, self.marker())
++    dispatch[INST] = load_inst
++
++    def load_obj(self):
++        # Stack is ... markobject classobject arg1 arg2 ...
++        k = self.marker()
++        klass = self.stack.pop(k+1)
++        self._instantiate(klass, k)
++    dispatch[OBJ] = load_obj
++
++    def load_newobj(self):
++        args = self.stack.pop()
++        cls = self.stack[-1]
++        obj = cls.__new__(cls, *args)
++        self.stack[-1] = obj
++    dispatch[NEWOBJ] = load_newobj
++
++    def load_global(self):
++        module = self.readline()[:-1]
++        name = self.readline()[:-1]
++        klass = self.find_class(module, name)
++        self.append(klass)
++    dispatch[GLOBAL] = load_global
++
++    def load_ext1(self):
++        code = ord(self.read(1))
++        self.get_extension(code)
++    dispatch[EXT1] = load_ext1
++
++    def load_ext2(self):
++        code = mloads('i' + self.read(2) + '\000\000')
++        self.get_extension(code)
++    dispatch[EXT2] = load_ext2
++
++    def load_ext4(self):
++        code = mloads('i' + self.read(4))
++        self.get_extension(code)
++    dispatch[EXT4] = load_ext4
++
++    def get_extension(self, code):
++        nil = []
++        obj = _extension_cache.get(code, nil)
++        if obj is not nil:
++            self.append(obj)
++            return
++        key = _inverted_registry.get(code)
++        if not key:
++            raise ValueError("unregistered extension code %d" % code)
++        obj = self.find_class(*key)
++        _extension_cache[code] = obj
++        self.append(obj)
++
++    def find_class(self, module, name):
++        # Subclasses may override this
++        __import__(module)
++        mod = sys.modules[module]
++        klass = getattr(mod, name)
++        return klass
++
++    def load_reduce(self):
++        stack = self.stack
++        args = stack.pop()
++        func = stack[-1]
++        value = func(*args)
++        stack[-1] = value
++    dispatch[REDUCE] = load_reduce
++
++    def load_pop(self):
++        del self.stack[-1]
++    dispatch[POP] = load_pop
++
++    def load_pop_mark(self):
++        k = self.marker()
++        del self.stack[k:]
++    dispatch[POP_MARK] = load_pop_mark
++
++    def load_dup(self):
++        self.append(self.stack[-1])
++    dispatch[DUP] = load_dup
++
++    def load_get(self):
++        self.append(self.memo[self.readline()[:-1]])
++    dispatch[GET] = load_get
++
++    def load_binget(self):
++        i = ord(self.read(1))
++        self.append(self.memo[repr(i)])
++    dispatch[BINGET] = load_binget
++
++    def load_long_binget(self):
++        i = mloads('i' + self.read(4))
++        self.append(self.memo[repr(i)])
++    dispatch[LONG_BINGET] = load_long_binget
++
++    def load_put(self):
++        self.memo[self.readline()[:-1]] = self.stack[-1]
++    dispatch[PUT] = load_put
++
++    def load_binput(self):
++        i = ord(self.read(1))
++        self.memo[repr(i)] = self.stack[-1]
++    dispatch[BINPUT] = load_binput
++
++    def load_long_binput(self):
++        i = mloads('i' + self.read(4))
++        self.memo[repr(i)] = self.stack[-1]
++    dispatch[LONG_BINPUT] = load_long_binput
++
++    def load_append(self):
++        stack = self.stack
++        value = stack.pop()
++        list = stack[-1]
++        list.append(value)
++    dispatch[APPEND] = load_append
++
++    def load_appends(self):
++        stack = self.stack
++        mark = self.marker()
++        list = stack[mark - 1]
++        list.extend(stack[mark + 1:])
++        del stack[mark:]
++    dispatch[APPENDS] = load_appends
++
++    def load_setitem(self):
++        stack = self.stack
++        value = stack.pop()
++        key = stack.pop()
++        dict = stack[-1]
++        dict[key] = value
++    dispatch[SETITEM] = load_setitem
++
++    def load_setitems(self):
++        stack = self.stack
++        mark = self.marker()
++        dict = stack[mark - 1]
++        for i in range(mark + 1, len(stack), 2):
++            dict[stack[i]] = stack[i + 1]
++
++        del stack[mark:]
++    dispatch[SETITEMS] = load_setitems
++
++    def load_build(self):
++        stack = self.stack
++        state = stack.pop()
++        inst = stack[-1]
++        setstate = getattr(inst, "__setstate__", None)
++        if setstate:
++            setstate(state)
++            return
++        slotstate = None
++        if isinstance(state, tuple) and len(state) == 2:
++            state, slotstate = state
++        if state:
++            try:
++                d = inst.__dict__
++                try:
++                    for k, v in state.iteritems():
++                        d[intern(k)] = v
++                # keys in state don't have to be strings
++                # don't blow up, but don't go out of our way
++                except TypeError:
++                    d.update(state)
++
++            except RuntimeError:
++                # XXX In restricted execution, the instance's __dict__
++                # is not accessible.  Use the old way of unpickling
++                # the instance variables.  This is a semantic
++                # difference when unpickling in restricted
++                # vs. unrestricted modes.
++                # Note, however, that cPickle has never tried to do the
++                # .update() business, and always uses
++                #     PyObject_SetItem(inst.__dict__, key, value) in a
++                # loop over state.items().
++                for k, v in state.items():
++                    setattr(inst, k, v)
++        if slotstate:
++            for k, v in slotstate.items():
++                setattr(inst, k, v)
++    dispatch[BUILD] = load_build
++
++    def load_mark(self):
++        self.append(self.mark)
++    dispatch[MARK] = load_mark
++
++    def load_stop(self):
++        value = self.stack.pop()
++        raise _Stop(value)
++    dispatch[STOP] = load_stop
++
++# Helper class for load_inst/load_obj
++
++class _EmptyClass:
++    pass
++
++# Encode/decode longs in linear time.
++
++import binascii as _binascii
++
++def encode_long(x):
++    r"""Encode a long to a two's complement little-endian binary string.
++    Note that 0L is a special case, returning an empty string, to save a
++    byte in the LONG1 pickling context.
++
++    >>> encode_long(0L)
++    ''
++    >>> encode_long(255L)
++    '\xff\x00'
++    >>> encode_long(32767L)
++    '\xff\x7f'
++    >>> encode_long(-256L)
++    '\x00\xff'
++    >>> encode_long(-32768L)
++    '\x00\x80'
++    >>> encode_long(-128L)
++    '\x80'
++    >>> encode_long(127L)
++    '\x7f'
++    >>>
++    """
++
++    if x == 0:
++        return ''
++    if x > 0:
++        ashex = hex(x)
++        assert ashex.startswith("0x")
++        njunkchars = 2 + ashex.endswith('L')
++        nibbles = len(ashex) - njunkchars
++        if nibbles & 1:
++            # need an even # of nibbles for unhexlify
++            ashex = "0x0" + ashex[2:]
++        elif int(ashex[2], 16) >= 8:
++            # "looks negative", so need a byte of sign bits
++            ashex = "0x00" + ashex[2:]
++    else:
++        # Build the 256's-complement:  (1L << nbytes) + x.  The trick is
++        # to find the number of bytes in linear time (although that should
++        # really be a constant-time task).
++        ashex = hex(-x)
++        assert ashex.startswith("0x")
++        njunkchars = 2 + ashex.endswith('L')
++        nibbles = len(ashex) - njunkchars
++        if nibbles & 1:
++            # Extend to a full byte.
++            nibbles += 1
++        nbits = nibbles * 4
++        x += 1L << nbits
++        assert x > 0
++        ashex = hex(x)
++        njunkchars = 2 + ashex.endswith('L')
++        newnibbles = len(ashex) - njunkchars
++        if newnibbles < nibbles:
++            ashex = "0x" + "0" * (nibbles - newnibbles) + ashex[2:]
++        if int(ashex[2], 16) < 8:
++            # "looks positive", so need a byte of sign bits
++            ashex = "0xff" + ashex[2:]
++
++    if ashex.endswith('L'):
++        ashex = ashex[2:-1]
++    else:
++        ashex = ashex[2:]
++    assert len(ashex) & 1 == 0, (x, ashex)
++    binary = _binascii.unhexlify(ashex)
++    return binary[::-1]
++
++def decode_long(data):
++    r"""Decode a long from a two's complement little-endian binary string.
++
++    >>> decode_long('')
++    0L
++    >>> decode_long("\xff\x00")
++    255L
++    >>> decode_long("\xff\x7f")
++    32767L
++    >>> decode_long("\x00\xff")
++    -256L
++    >>> decode_long("\x00\x80")
++    -32768L
++    >>> decode_long("\x80")
++    -128L
++    >>> decode_long("\x7f")
++    127L
++    """
++
++    nbytes = len(data)
++    if nbytes == 0:
++        return 0L
++    ashex = _binascii.hexlify(data[::-1])
++    n = long(ashex, 16) # quadratic time before Python 2.3; linear now
++    if data[-1] >= '\x80':
++        n -= 1L << (nbytes * 8)
++    return n
++
++# Shorthands
++
++try:
++    from cStringIO import StringIO
++except ImportError:
++    from StringIO import StringIO
++
++def dump(obj, file, protocol=None):
++    Pickler(file, protocol).dump(obj)
++
++def dumps(obj, protocol=None):
++    file = StringIO()
++    Pickler(file, protocol).dump(obj)
++    return file.getvalue()
++
++def load(file):
++    return Unpickler(file).load()
++
++def loads(str):
++    file = StringIO(str)
++    return Unpickler(file).load()
++
++# Doctest
++
++def _test():
++    import doctest
++    return doctest.testmod()
++
++if __name__ == "__main__":
++    _test()
+diff -up mozilla/rebase/mystruct.py.python2 mozilla/rebase/mystruct.py
+--- mozilla/rebase/mystruct.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/rebase/mystruct.py	2013-12-07 22:18:39.000000000 +0100
+@@ -0,0 +1,15 @@
++import struct
++
++def unpack_from(format, data, start):
++  return struct.unpack(format, data[start:start + struct.calcsize(format)])
++
++class Struct:
++  def __init__(self, format):
++    self.format = format
++    self.size = struct.calcsize(format)
++  def unpack_from(self, buf, offset=0):
++    return unpack_from(self.format, buf, offset)
++  def pack(self, *args):
++    return struct.pack(self.format, *args)
++
++
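The mystruct module added above is a minimal stand-in for struct.Struct and struct.unpack_from, which only appeared in Python 2.5 and are therefore missing from the Python 2.4 shipped with EL5. A rough usage sketch follows (not taken from the patch; the format string and sample bytes are invented for illustration):

    import struct
    import mystruct   # the shim added by the hunk above; assumes it is importable

    data = struct.pack(">HB", 513, 7)           # 3 sample bytes: \x02\x01\x07

    s = mystruct.Struct(">HB")                  # same constructor as struct.Struct (2.5+)
    print s.size                                # 3, computed via struct.calcsize
    print s.unpack_from(data)                   # (513, 7)
    print mystruct.unpack_from(">B", data, 2)   # (7,), module-level helper

Note that the shim provides only size, pack and unpack_from; the rest of the struct.Struct API is not covered.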
+diff -up mozilla/rebase/namedtuple.py.python2 mozilla/rebase/namedtuple.py
+--- mozilla/rebase/namedtuple.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/rebase/namedtuple.py	2013-12-07 22:18:39.000000000 +0100
+@@ -0,0 +1,147 @@
++from operator import itemgetter as _itemgetter
++from keyword import iskeyword as _iskeyword
++import sys as _sys
++
++def namedtuple(typename, field_names, verbose=False, rename=False):
++    """Returns a new subclass of tuple with named fields.
++
++    >>> Point = namedtuple('Point', 'x y')
++    >>> Point.__doc__                   # docstring for the new class
++    'Point(x, y)'
++    >>> p = Point(11, y=22)             # instantiate with positional args or keywords
++    >>> p[0] + p[1]                     # indexable like a plain tuple
++    33
++    >>> x, y = p                        # unpack like a regular tuple
++    >>> x, y
++    (11, 22)
++    >>> p.x + p.y                       # fields also accessible by name
++    33
++    >>> d = p._asdict()                 # convert to a dictionary
++    >>> d['x']
++    11
++    >>> Point(**d)                      # convert from a dictionary
++    Point(x=11, y=22)
++    >>> p._replace(x=100)               # _replace() is like str.replace() but targets named fields
++    Point(x=100, y=22)
++
++    """
++
++    # Parse and validate the field names.  Validation serves two purposes,
++    # generating informative error messages and preventing template injection attacks.
++    if isinstance(field_names, basestring):
++        field_names = field_names.replace(',', ' ').split() # names separated by whitespace and/or commas
++    field_names = tuple(map(str, field_names))
++    if rename:
++        names = list(field_names)
++        seen = set()
++        for i, name in enumerate(names):
++            if (not min(c.isalnum() or c=='_' for c in name) or _iskeyword(name)
++                or not name or name[0].isdigit() or name.startswith('_')
++                or name in seen):
++                    names[i] = '_%d' % i
++            seen.add(name)
++        field_names = tuple(names)
++    for name in (typename,) + field_names:
++        if not min(c.isalnum() or c=='_' for c in name):
++            raise ValueError('Type names and field names can only contain alphanumeric characters and underscores: %r' % name)
++        if _iskeyword(name):
++            raise ValueError('Type names and field names cannot be a keyword: %r' % name)
++        if name[0].isdigit():
++            raise ValueError('Type names and field names cannot start with a number: %r' % name)
++    seen_names = set()
++    for name in field_names:
++        if name.startswith('_') and not rename:
++            raise ValueError('Field names cannot start with an underscore: %r' % name)
++        if name in seen_names:
++            raise ValueError('Encountered duplicate field name: %r' % name)
++        seen_names.add(name)
++
++    # Create and fill-in the class template
++    numfields = len(field_names)
++    argtxt = repr(field_names).replace("'", "")[1:-1]   # tuple repr without parens or quotes
++    reprtxt = ', '.join('%s=%%r' % name for name in field_names)
++    template = '''class %(typename)s(tuple):
++        '%(typename)s(%(argtxt)s)' \n
++        __slots__ = () \n
++        _fields = %(field_names)r \n
++        def __new__(_cls, %(argtxt)s):
++            return _tuple.__new__(_cls, (%(argtxt)s)) \n
++        @classmethod
++        def _make(cls, iterable, new=tuple.__new__, len=len):
++            'Make a new %(typename)s object from a sequence or iterable'
++            result = new(cls, iterable)
++            if len(result) != %(numfields)d:
++                raise TypeError('Expected %(numfields)d arguments, got %%d' %% len(result))
++            return result \n
++        def __repr__(self):
++            return '%(typename)s(%(reprtxt)s)' %% self \n
++        def _asdict(self):
++            'Return a new dict which maps field names to their values'
++            return dict(zip(self._fields, self)) \n
++        def _replace(_self, **kwds):
++            'Return a new %(typename)s object replacing specified fields with new values'
++            result = _self._make(map(kwds.pop, %(field_names)r, _self))
++            if kwds:
++                raise ValueError('Got unexpected field names: %%r' %% kwds.keys())
++            return result \n
++        def __getnewargs__(self):
++            return tuple(self) \n\n''' % locals()
++    for i, name in enumerate(field_names):
++        template += '        %s = _property(_itemgetter(%d))\n' % (name, i)
++    if verbose:
++        print template
++
++    # Execute the template string in a temporary namespace
++    namespace = dict(_itemgetter=_itemgetter, __name__='namedtuple_%s' % typename,
++                     _property=property, _tuple=tuple)
++    try:
++        exec template in namespace
++    except SyntaxError, e:
++        raise SyntaxError(e.message + ':\n' + template)
++    result = namespace[typename]
++
++    # For pickling to work, the __module__ variable needs to be set to the frame
++    # where the named tuple is created.  Bypass this step in environments where
++    # sys._getframe is not defined (Jython for example) or sys._getframe is not
++    # defined for arguments greater than 0 (IronPython).
++    try:
++        result.__module__ = _sys._getframe(1).f_globals.get('__name__', '__main__')
++    except (AttributeError, ValueError):
++        pass
++
++    return result
++
++
++
++
++
++
++if __name__ == '__main__':
++    # verify that instances can be pickled
++    from cPickle import loads, dumps
++    Point = namedtuple('Point', 'x, y', True)
++    p = Point(x=10, y=20)
++    assert p == loads(dumps(p, -1))
++
++    # test and demonstrate ability to override methods
++    class Point(namedtuple('Point', 'x y')):
++        @property
++        def hypot(self):
++            return (self.x ** 2 + self.y ** 2) ** 0.5
++        def __str__(self):
++            return 'Point: x=%6.3f y=%6.3f hypot=%6.3f' % (self.x, self.y, self.hypot)
++
++    for p in Point(3,4), Point(14,5), Point(9./7,6):
++        print p
++
++    class Point(namedtuple('Point', 'x y')):
++        'Point class with optimized _make() and _replace() without error-checking'
++        _make = classmethod(tuple.__new__)
++        def _replace(self, _map=map, **kwds):
++            return self._make(_map(kwds.get, ('x', 'y'), self))
++
++    print Point(11, 22)._replace(x=100)
++
++    import doctest
++    TestResults = namedtuple('TestResults', 'failed attempted')
++    print TestResults(*doctest.testmod())
+diff -up mozilla/rebase/rhrebase.py.python2 mozilla/rebase/rhrebase.py
+--- mozilla/rebase/rhrebase.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/rebase/rhrebase.py	2013-12-07 22:18:39.000000000 +0100
+@@ -0,0 +1,40 @@
++def print24(msg, file):
++  if file:
++    file.write(msg)
++  else:
++    print msg
++
++# hack to not import this file instead of system logging
++def import_non_local(name, custom_name=None):
++    import imp, sys
++
++    custom_name = custom_name or name
++
++    f, pathname, desc = imp.find_module(name, sys.path[1:])
++    module = imp.load_module(custom_name, f, pathname, desc)
++    #f.close()
++
++    return module
++
++def get_first_if_true(first, second, cond):
++   if cond:
++      return first
++   else:
++      return second
++
++def all(iterable):
++    for element in iterable:
++        if not element:
++            return False
++    return True
++
++def any(iterable):
++    for element in iterable:
++        if element:
++            return True
++    return False
++
++
++
++
++
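rhrebase.py backfills a few things the EL5 Python 2.4 interpreter lacks: the all() and any() builtins and the conditional expression, both new in Python 2.5. Later hunks lean on it, for example gen-histogram-data.py swaps its ternary for get_first_if_true(). A small equivalence sketch, illustrative only (the flag and values are made up):

    import rhrebase   # assumes the helper module is on the Python path

    ok = True
    # Python 2.5+:  label = "true" if ok else "false"
    label = rhrebase.get_first_if_true("true", "false", ok)
    assert label == "true"

    # Python 2.5+ builtins all()/any(), reimplemented for 2.4:
    assert rhrebase.all([1, 2, 3])
    assert not rhrebase.any([0, "", None])

Unlike the real conditional expression, both arguments of get_first_if_true() are evaluated before the call, which is harmless for the simple string cases the patch uses it for.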
+diff -up mozilla/testing/marionette/mach_commands.py.python2 mozilla/testing/marionette/mach_commands.py
+--- mozilla/testing/marionette/mach_commands.py.python2	2013-12-06 16:45:19.000000000 +0100
++++ mozilla/testing/marionette/mach_commands.py	2013-12-07 22:18:39.000000000 +0100
+@@ -54,7 +54,7 @@ class MachCommands(MachCommandBase):
+             try:
+                 bin = self.get_binary_path('app')
+                 options.bin = bin
+-            except Exception as e:
++            except Exception, e:
+                 print("It looks like your program isn't built.",
+                       "You can run |mach build| to build it.")
+                 print(e)
+diff -up mozilla/testing/mozbase/mozfile/rhrebase.py.python2 mozilla/testing/mozbase/mozfile/rhrebase.py
+--- mozilla/testing/mozbase/mozfile/rhrebase.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/testing/mozbase/mozfile/rhrebase.py	2013-12-07 22:18:39.000000000 +0100
+@@ -0,0 +1,40 @@
++def print24(msg, file):
++  if file:
++    file.write(msg)
++  else:
++    print msg
++
++# hack to not import this file instead of system logging
++def import_non_local(name, custom_name=None):
++    import imp, sys
++
++    custom_name = custom_name or name
++
++    f, pathname, desc = imp.find_module(name, sys.path[1:])
++    module = imp.load_module(custom_name, f, pathname, desc)
++    #f.close()
++
++    return module
++
++def get_first_if_true(first, second, cond):
++   if cond:
++      return first
++   else:
++      return second
++
++def all(iterable):
++    for element in iterable:
++        if not element:
++            return False
++    return True
++
++def any(iterable):
++    for element in iterable:
++        if element:
++            return True
++    return False
++
++
++
++
++
+diff -up mozilla/testing/xpcshell/mach_commands.py.python2 mozilla/testing/xpcshell/mach_commands.py
+--- mozilla/testing/xpcshell/mach_commands.py.python2	2013-12-06 16:45:20.000000000 +0100
++++ mozilla/testing/xpcshell/mach_commands.py	2013-12-07 22:18:39.000000000 +0100
+@@ -4,7 +4,6 @@
+ 
+ # Integrates the xpcshell test runner with mach.
+ 
+-from __future__ import unicode_literals
+ 
+ import mozpack.path
+ import os
+@@ -177,6 +176,6 @@ class MachCommands(MachCommandBase):
+ 
+         try:
+             return xpcshell.run_test(**params)
+-        except InvalidTestPathError as e:
++        except InvalidTestPathError, e:
+             print(e.message)
+             return 1
+diff -up mozilla/testing/xpcshell/remotexpcshelltests.py.python2 mozilla/testing/xpcshell/remotexpcshelltests.py
+--- mozilla/testing/xpcshell/remotexpcshelltests.py.python2	2013-12-06 16:45:20.000000000 +0100
++++ mozilla/testing/xpcshell/remotexpcshelltests.py	2013-12-07 22:18:39.000000000 +0100
+@@ -74,7 +74,7 @@ class XPCShellRemote(xpcshell.XPCShellTe
+                 packageName = self.localAPKContents.read("package-name.txt")
+                 if packageName:
+                     self.appRoot = self.device.getAppRoot(packageName.strip())
+-            except Exception as detail:
++            except Exception, detail:
+                 print "unable to determine app root: " + str(detail)
+                 pass
+         return None
+diff -up mozilla/toolkit/components/telemetry/gen-histogram-data.py.python2 mozilla/toolkit/components/telemetry/gen-histogram-data.py
+--- mozilla/toolkit/components/telemetry/gen-histogram-data.py.python2	2013-12-06 16:45:20.000000000 +0100
++++ mozilla/toolkit/components/telemetry/gen-histogram-data.py	2013-12-07 22:18:39.000000000 +0100
+@@ -5,11 +5,11 @@
+ # Write out histogram information for C++.  The histograms are defined
+ # in a file provided as a command-line argument.
+ 
+-from __future__ import with_statement
+ 
+ import sys
+ import histogram_tools
+ import itertools
++import rhrebase
+ 
+ banner = """/* This file is auto-generated, see gen-histogram-data.py.  */
+ """
+@@ -60,7 +60,7 @@ def print_array_entry(histogram, name_in
+         % (histogram.low(), histogram.high(),
+            histogram.n_buckets(), histogram.nsITelemetry_kind(),
+            name_index, desc_index,
+-           "true" if histogram.extended_statistics_ok() else "false")
++           rhrebase.get_first_if_true("true", "false", histogram.extended_statistics_ok()) )
+     if cpp_guard:
+         print "#endif"
+ 
+diff -up mozilla/toolkit/components/telemetry/histogram_tools.py.python2 mozilla/toolkit/components/telemetry/histogram_tools.py
+--- mozilla/toolkit/components/telemetry/histogram_tools.py.python2	2013-12-06 16:45:20.000000000 +0100
++++ mozilla/toolkit/components/telemetry/histogram_tools.py	2013-12-07 22:18:39.000000000 +0100
+@@ -3,6 +3,7 @@
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+ import math
++import sys
+ 
+ # For compatibility with Python 2.6
+ try:
+@@ -18,12 +19,16 @@ def table_dispatch(kind, table, body):
+     if kind in table:
+         return body(table[kind])
+     else:
+-        raise BaseException, "don't know how to handle a histogram of kind %s" % kind
++        raise "don't know how to handle a histogram of kind %s" % kind
+ 
+-class DefinitionException(BaseException):
++#class DefinitionException(BaseException):
++class DefinitionException:
+     pass
+ 
+ def check_numeric_limits(dmin, dmax, n_buckets):
++    sys.stderr.write("MAXIMUM IS: %s\n" % (type(dmax)))
++    if type(dmax) == unicode:
++       sys.stderr.write("VAL: %s\n" % (dmax))
+     if type(dmin) != int:
+         raise DefinitionException, "minimum is not a number"
+     if type(dmax) != int:
+@@ -32,6 +37,10 @@ def check_numeric_limits(dmin, dmax, n_b
+         raise DefinitionException, "number of buckets is not a number"
+ 
+ def linear_buckets(dmin, dmax, n_buckets):
++    if type(n_buckets) != int:
++      n_buckets = 1
++    if type(dmax) == unicode:
++       dmax=100
+     check_numeric_limits(dmin, dmax, n_buckets)
+     ret_array = [0] * n_buckets
+     dmin = float(dmin)
+@@ -42,6 +51,8 @@ def linear_buckets(dmin, dmax, n_buckets
+     return ret_array
+ 
+ def exponential_buckets(dmin, dmax, n_buckets):
++    if type(dmax) == unicode:
++       dmax=100
+     check_numeric_limits(dmin, dmax, n_buckets)
+     log_max = math.log(dmax);
+     bucket_index = 2;
+@@ -208,8 +219,9 @@ is enabled."""
+ def from_file(filename):
+     """Return an iterator that provides a sequence of Histograms for
+ the histograms defined in filename.
+-    """
+-    with open(filename, 'r') as f:
+-        histograms = json.load(f, object_pairs_hook=OrderedDict)
+-        for (name, definition) in histograms.iteritems():
++    """   
++    f = open(filename, 'r')
++    histograms = json.load(f, object_pairs_hook=OrderedDict)
++    f.close()
++    for (name, definition) in histograms.iteritems():
+             yield Histogram(name, definition)
+diff -up mozilla/toolkit/mozapps/installer/find-dupes.py.python2 mozilla/toolkit/mozapps/installer/find-dupes.py
+--- mozilla/toolkit/mozapps/installer/find-dupes.py.python2	2013-12-07 22:18:39.000000000 +0100
++++ mozilla/toolkit/mozapps/installer/find-dupes.py	2013-12-07 22:18:39.000000000 +0100
+@@ -3,7 +3,7 @@
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+ import sys
+-import hashlib
++import md5 as hashlib
+ from mozpack.packager.unpack import UnpackFinder
+ from ordereddict import OrderedDict
+ 
+@@ -25,8 +25,11 @@ def find_dupes(source):
+     num_dupes = 0
+     for m, (size, paths) in md5s.iteritems():
+         if len(paths) > 1:
+-            print 'Duplicates %d bytes%s:' % (size,
+-                  ' (%d times)' % (len(paths) - 1) if len(paths) > 2 else '')
++            if len(paths) > 2:             
++              print 'Duplicates %d bytes%s:' % (size,
++                    ' (%d times)' % (len(paths) - 1))
++            else:
++              print 'Duplicates %d bytes%s:' % (size, '')
+             print ''.join('  %s\n' % p for p in paths)
+             total += (len(paths) - 1) * size
+             num_dupes += 1
+diff -up mozilla/toolkit/mozapps/installer/informulate.py.python2 mozilla/toolkit/mozapps/installer/informulate.py
+--- mozilla/toolkit/mozapps/installer/informulate.py.python2	2013-12-06 16:45:21.000000000 +0100
++++ mozilla/toolkit/mozapps/installer/informulate.py	2013-12-07 22:18:39.000000000 +0100
+@@ -37,9 +37,10 @@ def main():
+     all_key_value_pairs = dict([(x.lower(), buildconfig.substs[x]) for x in important_substitutions])
+     all_key_value_pairs.update(parse_cmdline(sys.argv[2:]))
+ 
+-    with open(sys.argv[1], "w+") as f:
+-        json.dump(all_key_value_pairs, f, indent=2, sort_keys=True)
+-        f.write('\n')
++    f = open(sys.argv[1], "w+")
++    json.dump(all_key_value_pairs, f, indent=2, sort_keys=True)
++    f.write('\n')
++    f.close()
+ 
+ 
+ if __name__=="__main__":
+diff -up mozilla/toolkit/mozapps/installer/packager.py.python2 mozilla/toolkit/mozapps/installer/packager.py
+--- mozilla/toolkit/mozapps/installer/packager.py.python2	2013-12-06 16:45:21.000000000 +0100
++++ mozilla/toolkit/mozapps/installer/packager.py	2013-12-07 22:18:39.000000000 +0100
+@@ -32,6 +32,7 @@ import os
+ from StringIO import StringIO
+ import subprocess
+ import platform
++from rhrebase import any
+ 
+ # List of libraries to shlibsign.
+ SIGN_LIBS = [
+@@ -300,7 +301,8 @@ def main():
+     elif not buildconfig.substs['CROSS_COMPILE']:
+         launcher.tooldir = buildconfig.substs['LIBXUL_DIST']
+ 
+-    with errors.accumulate():
++    #with errors.accumulate():
++    if 1:
+         if args.unify:
+             finder = UnifiedBuildFinder(FileFinder(args.source),
+                                         FileFinder(args.unify),
+diff -up mozilla/tools/mach_commands.py.python2 mozilla/tools/mach_commands.py
+--- mozilla/tools/mach_commands.py.python2	2013-12-06 16:45:22.000000000 +0100
++++ mozilla/tools/mach_commands.py	2013-12-07 22:18:39.000000000 +0100
+@@ -2,7 +2,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import unicode_literals
+ 
+ from mach.decorators import (
+     CommandArgument,
+diff -up mozilla/tools/rb/fix_stack_using_bpsyms.py.python2 mozilla/tools/rb/fix_stack_using_bpsyms.py
+--- mozilla/tools/rb/fix_stack_using_bpsyms.py.python2	2013-12-06 16:45:22.000000000 +0100
++++ mozilla/tools/rb/fix_stack_using_bpsyms.py	2013-12-07 22:18:39.000000000 +0100
+@@ -4,7 +4,6 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-from __future__ import with_statement
+ 
+ import sys
+ import os
+diff -up mozilla/xpcom/idl-parser/header.py.python2 mozilla/xpcom/idl-parser/header.py
+--- mozilla/xpcom/idl-parser/header.py.python2	2013-12-06 16:45:23.000000000 +0100
++++ mozilla/xpcom/idl-parser/header.py	2013-12-07 22:18:39.000000000 +0100
+@@ -159,7 +159,8 @@ forward_decl = """class %(name)s; /* for
+ 
+ def idl_basename(f):
+     """returns the base name of a file with the last extension stripped"""
+-    return os.path.basename(f).rpartition('.')[0]
++    return os.path.basename(f).rsplit('.', 1)[0]
++    #return os.path.basename(f).rpartition('.')[0]
+ 
+ def print_header(idl, fd, filename):
+     fd.write(header % {'filename': filename,
+@@ -333,11 +334,17 @@ def write_interface(iface, fd):
+ 
+         fd.write("  %s = 0;\n" % attributeAsNative(a, True))
+         if a.infallible:
++            args2 = ''
++            if a.implicit_jscontext:
++               args2 = 'JSContext* cx'
++            argnames = ''
++            if a.implicit_jscontext:
++               argnames = 'cx, ' 
+             fd.write(attr_infallible_tmpl %
+                      {'realtype': a.realtype.nativeType('in'),
+                       'nativename': attributeNativeName(a, getter=True),
+-                      'args': '' if not a.implicit_jscontext else 'JSContext* cx',
+-                      'argnames': '' if not a.implicit_jscontext else 'cx, '})
++                      'args': args2,
++                      'argnames': argnames})
+ 
+         if not a.readonly:
+             fd.write("  %s = 0;\n" % attributeAsNative(a, False))
+@@ -486,7 +493,10 @@ if __name__ == '__main__':
+     o.add_option('--regen', action='store_true', dest='regen', default=False,
+                  help="Regenerate IDL Parser cache")
+     options, args = o.parse_args()
+-    file = args[0] if args else None
++    
++    file2 = None
++    if args:
++       file2 = args[0]
+ 
+     if options.cachedir is not None:
+         if not os.path.isdir(options.cachedir):
+@@ -520,9 +530,9 @@ if __name__ == '__main__':
+         outfd = sys.stdout
+         closeoutfd = False
+ 
+-    idl = p.parse(open(file).read(), filename=file)
++    idl = p.parse(open(file2).read(), filename=file2)
+     idl.resolve(options.incdirs, p)
+-    print_header(idl, outfd, file)
++    print_header(idl, outfd, file2)
+ 
+     if closeoutfd:
+         outfd.close()
+diff -up mozilla/xpcom/idl-parser/typelib.py.python2 mozilla/xpcom/idl-parser/typelib.py
+--- mozilla/xpcom/idl-parser/typelib.py.python2	2013-12-06 16:45:23.000000000 +0100
++++ mozilla/xpcom/idl-parser/typelib.py	2013-12-07 22:18:39.000000000 +0100
+@@ -260,7 +260,9 @@ if __name__ == '__main__':
+     o.add_option('--regen', action='store_true', dest='regen', default=False,
+                  help="Regenerate IDL Parser cache")
+     options, args = o.parse_args()
+-    file = args[0] if args else None
++    file2 = None
++    if args:
++      file2 = args[0]
+ 
+     if options.cachedir is not None:
+         if not os.path.isdir(options.cachedir):
+@@ -286,9 +288,9 @@ if __name__ == '__main__':
+         raise "typelib generation requires an output file"
+ 
+     p = xpidl.IDLParser(outputdir=options.cachedir)
+-    idl = p.parse(open(file).read(), filename=file)
++    idl = p.parse(open(file2).read(), filename=file2)
+     idl.resolve(options.incdirs, p)
+-    write_typelib(idl, outfd, file)
++    write_typelib(idl, outfd, file2)
+ 
+     if closeoutfd:
+         outfd.close()
+diff -up mozilla/xpcom/typelib/xpt/tools/xpt.py.python2 mozilla/xpcom/typelib/xpt/tools/xpt.py
+--- mozilla/xpcom/typelib/xpt/tools/xpt.py.python2	2013-12-06 16:45:23.000000000 +0100
++++ mozilla/xpcom/typelib/xpt/tools/xpt.py	2013-12-07 22:18:39.000000000 +0100
+@@ -65,10 +65,11 @@ InterfaceType()        - construct a new
+ 
+ """
+ 
+-from __future__ import with_statement
+ import os, sys
+ import struct
+ import operator
++import mystruct
++setattr(struct, 'Struct', mystruct.Struct)
+ 
+ # header magic
+ XPT_MAGIC = "XPCOM\nTypeLib\r\n\x1a"
+@@ -102,7 +103,7 @@ class Type(object):
+     this class directly. Rather, use one of its subclasses.
+     
+     """
+-    _prefixdescriptor = struct.Struct(">B")
++    _prefixdescriptor = mystruct.Struct(">B")
+     Tags = enum(
+         # The first 18 entries are SimpleTypeDescriptor
+         'int8',
+@@ -801,7 +802,7 @@ class Constant(object):
+         if isinstance(t, SimpleType) and t.tag in Constant.typemap:
+             tt = Constant.typemap[t.tag]
+             start = data_pool + offset - 1
+-            (val,) = struct.unpack_from(tt, map, start)
++            (val,) = mystruct.unpack_from(tt, map, start)
+             offset += struct.calcsize(tt)
+             c = Constant(name, t, val)
+         return c, offset
+@@ -919,14 +920,14 @@ class Interface(object):
+             self.methods.append(m)
+         # Read constants
+         start = data_pool + offset - 1
+-        (num_constants, ) = struct.unpack_from(">H", map, start)
++        (num_constants, ) = mystruct.unpack_from(">H", map, start)
+         offset = offset + struct.calcsize(">H")
+         for i in range(num_constants):
+             c, offset = Constant.read(typelib, map, data_pool, offset)
+             self.constants.append(c)
+         # Read flags
+         start = data_pool + offset - 1
+-        (flags, ) = struct.unpack_from(">B", map, start)
++        (flags, ) = mystruct.unpack_from(">B", map, start)
+         offset = offset + struct.calcsize(">B")
+         # only the first two bits are flags
+         flags &= 0xE0
+@@ -1067,13 +1068,13 @@ class Typelib(object):
+         expected_size = None
+         if isinstance(input_file, basestring):
+             filename = input_file
+-            with open(input_file, "rb") as f:
+-                st = os.fstat(f.fileno())
+-                data = f.read(st.st_size)
+-                expected_size = st.st_size
++            f = open(input_file, "rb")
++            st = os.fstat(f.fileno())
++            data = f.read(st.st_size)
++            expected_size = st.st_size
++            f.close()
+         else:
+             data = input_file.read()
+-
+         (magic,
+          major_ver,
+          minor_ver,
+@@ -1096,7 +1097,7 @@ class Typelib(object):
+         # make a half-hearted attempt to read Annotations,
+         # since XPIDL doesn't produce any anyway.
+         start = Typelib._header.size
+-        (anno, ) = struct.unpack_from(">B", data, start)
++        (anno, ) = mystruct.unpack_from(">B", data, start)
+         islast = anno & 0x80
+         tag = anno & 0x7F
+         if tag == 0: # EmptyAnnotation
+@@ -1184,8 +1185,9 @@ class Typelib(object):
+         """
+         self._sanityCheck()
+         if isinstance(output_file, basestring):
+-            with open(output_file, "wb") as f:
+-                self.writefd(f)
++            f = open(output_file, "wb")
++            self.writefd(f)
++            f.close()
+         else:
+             self.writefd(output_file)
+ 
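The recurring rewrite in this patch (automationutils.py, histogram_tools.py, informulate.py and xpt.py above) drops with-statement blocks, since the with statement needs a __future__ import on Python 2.5 and does not exist at all on EL5's Python 2.4, and replaces them with plain open() and close() calls. A sketch of the idea, with an invented file name; the patch itself uses the simpler open/read/close sequence without the try/finally:

    # Python 2.5+ original:
    #     with open("application.ini", "rb") as f:
    #         data = f.read()
    # Python 2.4-compatible equivalent; try/finally keeps the close on error paths:
    f = open("application.ini", "rb")
    try:
        data = f.read()
    finally:
        f.close()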
diff --git a/thunderbird-lightning.spec b/thunderbird-lightning.spec
index dc96486..8391c80 100644
--- a/thunderbird-lightning.spec
+++ b/thunderbird-lightning.spec
@@ -52,24 +52,16 @@ Patch1:         mozilla-build-arm.patch
 # Fix build on secondary arches (patches copied from xulrunner)
 Patch2:         xulrunner-10.0-secondary-ipc.patch
 
-Patch097:       mozilla-python.patch
 Patch099:       mozilla-build.patch
 Patch100:       xulrunner-2.0-chromium-types.patch
 Patch101:       mozilla-abort.patch
 Patch103:       xulrunner-gc-sections-ppc.patch
-Patch104:       mozilla-694594-gcc-4.4.patch
-Patch105:       mozilla-gtk-close.patch
 # For RHEL5 only
 Patch201:       mozilla-python.patch
-Patch202:       mozilla-py24.patch
-Patch203:       xulrunner-build-gcc41.patch
 Patch204:       xulrunner-missing-pysqlite.patch
-Patch205:       xulrunner-hidden.patch
-# This is fix for ia64 jemmaloc, ia64 and ppc js
-Patch206:       xulrunner-pagesize-ia64.patch
+Patch210:       mozilla-python2.patch
 
 # Thunderbird specific
-Patch301:       thunderbird-python.patch
 
 # Lightning
 # Fix alarm handling after suspend (bug #910976)
@@ -150,23 +142,16 @@ sed -e 's/__RPM_VERSION_INTERNAL__/%{version_internal}/' %{P:%%PATCH0} \
 cd mozilla
 %patch1 -p2 -b .arm-fix
 %patch2 -p3 -b .secondary-ipc
-%patch097 -p1 -b .python
 %patch099 -p1 -b .build
 %patch100 -p2 -b .chromium-types
 %patch101 -p1 -b .abort
-%patch104 -p1 -b .gcc-4.4
-%patch105 -p1 -b .gtk-close
-%patch201 -p2 -b .python
-%patch202 -p2 -b .py24
-%patch203 -p2 -b .gcc41
-%patch204 -p2 -b .pysqlite
-%patch205 -p2 -b .hidden
-%patch206 -p2 -b .pagesize-ia64
+%patch201 -p1 -b .python
+%patch204 -p1 -b .pysqlite
+%patch210 -p1 -b .python2
 %patch103 -p2 -b .gc-sections
 cd ..
 
 # thunderbird specific patches
-%patch301 -p1 -b .tb-python
 
 %{__rm} -f .mozconfig
 %{__cp} %{SOURCE10} .mozconfig
diff --git a/xulrunner-missing-pysqlite.patch b/xulrunner-missing-pysqlite.patch
index 9ed49f0..a1fe4b7 100644
--- a/xulrunner-missing-pysqlite.patch
+++ b/xulrunner-missing-pysqlite.patch
@@ -1,15 +1,18 @@
-diff -up comm-beta/mozilla/testing/mozbase/mozprofile/setup.py.pysqlite comm-beta/mozilla/testing/mozbase/mozprofile/setup.py
---- comm-beta/mozilla/testing/mozbase/mozprofile/setup.py.pysqlite	2012-10-11 01:37:25.000000000 +0200
-+++ comm-beta/mozilla/testing/mozbase/mozprofile/setup.py	2012-10-24 16:00:05.000000000 +0200
-@@ -5,6 +5,7 @@
- import os
- import sys
- from setuptools import setup, find_packages
+diff -up mozilla-esr24/testing/mozbase/mozprofile/setup.py.pysqlite mozilla-esr24/testing/mozbase/mozprofile/setup.py
+--- mozilla-esr24/testing/mozbase/mozprofile/setup.py.pysqlite	2013-10-22 08:19:20.000000000 +0200
++++ mozilla-esr24/testing/mozbase/mozprofile/setup.py	2013-11-27 15:44:14.323731312 +0100
+@@ -2,8 +2,8 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+-import sys
+-from setuptools import setup
++import sys; from setuptools import setup
 +from setuptools.command import easy_install
  
- PACKAGE_VERSION = '0.4'
+ PACKAGE_VERSION = '0.9'
  
-@@ -20,6 +21,7 @@ except ImportError:
+@@ -20,6 +20,7 @@ except ImportError:
  try:
      import sqlite3
  except ImportError:

