jjames pushed to python-zodbpickle (master). "New upstream version. (..more)"

notifications at fedoraproject.org
Tue Apr 14 17:07:56 UTC 2015


From 10b1feddd0ed0a98135ac08f9ab08c98ccc7834e Mon Sep 17 00:00:00 2001
From: Jerry James <jamesjer at betterlinux.com>
Date: Tue, 14 Apr 2015 11:07:29 -0600
Subject: New upstream version.

Also, drop upstreamed -python34 patch.

diff --git a/.gitignore b/.gitignore
index 12a5e25..04b120f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
 /zodbpickle-0.5.2.tar.gz
+/zodbpickle-0.6.0.tar.gz
diff --git a/python-zodbpickle-python34.patch b/python-zodbpickle-python34.patch
deleted file mode 100644
index ddaff63..0000000
--- a/python-zodbpickle-python34.patch
+++ /dev/null
@@ -1,15416 +0,0 @@
-diff --git a/setup.py b/setup.py
-index e248073..90ef6a4 100644
---- a/setup.py
-+++ b/setup.py
-@@ -26,8 +26,10 @@ if sys.version_info[:1] < (3,):
-     EXT = 'src/zodbpickle/_pickle_27.c'
- elif sys.version_info[:2] == (3, 2):
-     EXT = 'src/zodbpickle/_pickle_32.c'
--else:
-+elif sys.version_info[:2] == (3, 3):
-     EXT = 'src/zodbpickle/_pickle_33.c'
-+else:
-+    EXT = 'src/zodbpickle/_pickle_34.c'
- 
- setup(
-     name='zodbpickle',
-diff --git a/src/zodbpickle/_pickle_34.c b/src/zodbpickle/_pickle_34.c
-new file mode 100644
-index 0000000..db2c49d
---- /dev/null
-+++ b/src/zodbpickle/_pickle_34.c
-@@ -0,0 +1,8181 @@
-+#include "Python.h"
-+#include "structmember.h"
-+
-+PyDoc_STRVAR(pickle_module_doc,
-+"Optimized C implementation for the Python pickle module.");
-+
-+/*[clinic input]
-+output preset file
-+module _pickle
-+class _pickle.Pickler "PicklerObject *" "&Pickler_Type"
-+class _pickle.PicklerMemoProxy "PicklerMemoProxyObject *" "&PicklerMemoProxyType"
-+class _pickle.Unpickler "UnpicklerObject *" "&Unpickler_Type"
-+class _pickle.UnpicklerMemoProxy "UnpicklerMemoProxyObject *" "&UnpicklerMemoProxyType"
-+[clinic start generated code]*/
-+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=11c45248a41dd3fc]*/
-+
-+/* Bump this when new opcodes are added to the pickle protocol. */
-+enum {
-+    HIGHEST_PROTOCOL = 4,
-+    DEFAULT_PROTOCOL = 3
-+};
-+
-+/* Pickle opcodes. These must be kept updated with pickle.py.
-+   Extensive docs are in pickletools_34.py. */
-+enum opcode {
-+    MARK            = '(',
-+    STOP            = '.',
-+    POP             = '0',
-+    POP_MARK        = '1',
-+    DUP             = '2',
-+    FLOAT           = 'F',
-+    INT             = 'I',
-+    BININT          = 'J',
-+    BININT1         = 'K',
-+    LONG            = 'L',
-+    BININT2         = 'M',
-+    NONE            = 'N',
-+    PERSID          = 'P',
-+    BINPERSID       = 'Q',
-+    REDUCE          = 'R',
-+    STRING          = 'S',
-+    BINSTRING       = 'T',
-+    SHORT_BINSTRING = 'U',
-+    UNICODE         = 'V',
-+    BINUNICODE      = 'X',
-+    APPEND          = 'a',
-+    BUILD           = 'b',
-+    GLOBAL          = 'c',
-+    DICT            = 'd',
-+    EMPTY_DICT      = '}',
-+    APPENDS         = 'e',
-+    GET             = 'g',
-+    BINGET          = 'h',
-+    INST            = 'i',
-+    LONG_BINGET     = 'j',
-+    LIST            = 'l',
-+    EMPTY_LIST      = ']',
-+    OBJ             = 'o',
-+    PUT             = 'p',
-+    BINPUT          = 'q',
-+    LONG_BINPUT     = 'r',
-+    SETITEM         = 's',
-+    TUPLE           = 't',
-+    EMPTY_TUPLE     = ')',
-+    SETITEMS        = 'u',
-+    BINFLOAT        = 'G',
-+
-+    /* Protocol 2. */
-+    PROTO       = '\x80',
-+    NEWOBJ      = '\x81',
-+    EXT1        = '\x82',
-+    EXT2        = '\x83',
-+    EXT4        = '\x84',
-+    TUPLE1      = '\x85',
-+    TUPLE2      = '\x86',
-+    TUPLE3      = '\x87',
-+    NEWTRUE     = '\x88',
-+    NEWFALSE    = '\x89',
-+    LONG1       = '\x8a',
-+    LONG4       = '\x8b',
-+
-+    /* Protocol 3 (Python 3.x) */
-+    BINBYTES       = 'B',
-+    SHORT_BINBYTES = 'C',
-+
-+    /* Protocol 4 */
-+    SHORT_BINUNICODE = '\x8c',
-+    BINUNICODE8      = '\x8d',
-+    BINBYTES8        = '\x8e',
-+    EMPTY_SET        = '\x8f',
-+    ADDITEMS         = '\x90',
-+    FROZENSET        = '\x91',
-+    NEWOBJ_EX        = '\x92',
-+    STACK_GLOBAL     = '\x93',
-+    MEMOIZE          = '\x94',
-+    FRAME            = '\x95'
-+};
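
For reference (not part of the patch): a minimal standalone sketch of how these opcode values appear on the wire, using the five-byte stream produced by pickling the integer 1 at protocol 2 (PROTO 2, BININT1 0x01, STOP).

    #include <stdio.h>

    int main(void)
    {
        /* b"\x80\x02K\x01." -- PROTO 2, BININT1 0x01, STOP */
        const unsigned char stream[] = { 0x80, 0x02, 'K', 0x01, '.' };
        for (int i = 0; i < (int)sizeof stream; i++)
            printf("%02x ", stream[i]);
        printf("\n");
        return 0;
    }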
-+
-+enum {
-+   /* Keep in synch with pickle.Pickler._BATCHSIZE.  This is how many elements
-+      batch_list/dict() pumps out before doing APPENDS/SETITEMS.  Nothing will
-+      break if this gets out of synch with pickle.py, but it's unclear that it
-+      would help anything either. */
-+    BATCHSIZE = 1000,
-+
-+    /* Nesting limit until Pickler, when running in "fast mode", starts
-+       checking for self-referential data-structures. */
-+    FAST_NESTING_LIMIT = 50,
-+
-+    /* Initial size of the write buffer of Pickler. */
-+    WRITE_BUF_SIZE = 4096,
-+
-+    /* Prefetch size when unpickling (disabled on unpeekable streams) */
-+    PREFETCH = 8192 * 16,
-+
-+    FRAME_SIZE_TARGET = 64 * 1024,
-+
-+    FRAME_HEADER_SIZE = 9
-+};
-+
-+/*************************************************************************/
-+
-+/* State of the pickle module, per PEP 3121. */
-+typedef struct {
-+    /* Exception classes for pickle. */
-+    PyObject *PickleError;
-+    PyObject *PicklingError;
-+    PyObject *UnpicklingError;
-+
-+    /* copyreg.dispatch_table, {type_object: pickling_function} */
-+    PyObject *dispatch_table;
-+
-+    /* For the extension opcodes EXT1, EXT2 and EXT4. */
-+
-+    /* copyreg._extension_registry, {(module_name, function_name): code} */
-+    PyObject *extension_registry;
-+    /* copyreg._extension_cache, {code: object} */
-+    PyObject *extension_cache;
-+    /* copyreg._inverted_registry, {code: (module_name, function_name)} */
-+    PyObject *inverted_registry;
-+
-+    /* Import mappings for compatibility with Python 2.x */
-+
-+    /* _compat_pickle.NAME_MAPPING,
-+       {(oldmodule, oldname): (newmodule, newname)} */
-+    PyObject *name_mapping_2to3;
-+    /* _compat_pickle.IMPORT_MAPPING, {oldmodule: newmodule} */
-+    PyObject *import_mapping_2to3;
-+    /* Same, but with REVERSE_NAME_MAPPING / REVERSE_IMPORT_MAPPING */
-+    PyObject *name_mapping_3to2;
-+    PyObject *import_mapping_3to2;
-+
-+    /* codecs.encode, used for saving bytes in older protocols */
-+    PyObject *codecs_encode;
-+} PickleState;
-+
-+/* Forward declaration of the _pickle module definition. */
-+static struct PyModuleDef _picklemodule;
-+
-+/* Given a module object, get its per-module state. */
-+static PickleState *
-+_Pickle_GetState(PyObject *module)
-+{
-+    return (PickleState *)PyModule_GetState(module);
-+}
-+
-+/* Find the module instance imported in the currently running sub-interpreter
-+   and get its state. */
-+static PickleState *
-+_Pickle_GetGlobalState(void)
-+{
-+    return _Pickle_GetState(PyState_FindModule(&_picklemodule));
-+}
-+
-+/* Clear the given pickle module state. */
-+static void
-+_Pickle_ClearState(PickleState *st)
-+{
-+    Py_CLEAR(st->PickleError);
-+    Py_CLEAR(st->PicklingError);
-+    Py_CLEAR(st->UnpicklingError);
-+    Py_CLEAR(st->dispatch_table);
-+    Py_CLEAR(st->extension_registry);
-+    Py_CLEAR(st->extension_cache);
-+    Py_CLEAR(st->inverted_registry);
-+    Py_CLEAR(st->name_mapping_2to3);
-+    Py_CLEAR(st->import_mapping_2to3);
-+    Py_CLEAR(st->name_mapping_3to2);
-+    Py_CLEAR(st->import_mapping_3to2);
-+    Py_CLEAR(st->codecs_encode);
-+}
-+
-+/* Initialize the given pickle module state. */
-+static int
-+_Pickle_InitState(PickleState *st)
-+{
-+    PyObject *copyreg = NULL;
-+    PyObject *compat_pickle = NULL;
-+    PyObject *codecs = NULL;
-+
-+    copyreg = PyImport_ImportModule("copyreg");
-+    if (!copyreg)
-+        goto error;
-+    st->dispatch_table = PyObject_GetAttrString(copyreg, "dispatch_table");
-+    if (!st->dispatch_table)
-+        goto error;
-+    if (!PyDict_CheckExact(st->dispatch_table)) {
-+        PyErr_Format(PyExc_RuntimeError,
-+                     "copyreg.dispatch_table should be a dict, not %.200s",
-+                     Py_TYPE(st->dispatch_table)->tp_name);
-+        goto error;
-+    }
-+    st->extension_registry = \
-+        PyObject_GetAttrString(copyreg, "_extension_registry");
-+    if (!st->extension_registry)
-+        goto error;
-+    if (!PyDict_CheckExact(st->extension_registry)) {
-+        PyErr_Format(PyExc_RuntimeError,
-+                     "copyreg._extension_registry should be a dict, "
-+                     "not %.200s", Py_TYPE(st->extension_registry)->tp_name);
-+        goto error;
-+    }
-+    st->inverted_registry = \
-+        PyObject_GetAttrString(copyreg, "_inverted_registry");
-+    if (!st->inverted_registry)
-+        goto error;
-+    if (!PyDict_CheckExact(st->inverted_registry)) {
-+        PyErr_Format(PyExc_RuntimeError,
-+                     "copyreg._inverted_registry should be a dict, "
-+                     "not %.200s", Py_TYPE(st->inverted_registry)->tp_name);
-+        goto error;
-+    }
-+    st->extension_cache = PyObject_GetAttrString(copyreg, "_extension_cache");
-+    if (!st->extension_cache)
-+        goto error;
-+    if (!PyDict_CheckExact(st->extension_cache)) {
-+        PyErr_Format(PyExc_RuntimeError,
-+                     "copyreg._extension_cache should be a dict, "
-+                     "not %.200s", Py_TYPE(st->extension_cache)->tp_name);
-+        goto error;
-+    }
-+    Py_CLEAR(copyreg);
-+
-+    /* Load the 2.x -> 3.x stdlib module mapping tables */
-+    compat_pickle = PyImport_ImportModule("_compat_pickle");
-+    if (!compat_pickle)
-+        goto error;
-+    st->name_mapping_2to3 = \
-+        PyObject_GetAttrString(compat_pickle, "NAME_MAPPING");
-+    if (!st->name_mapping_2to3)
-+        goto error;
-+    if (!PyDict_CheckExact(st->name_mapping_2to3)) {
-+        PyErr_Format(PyExc_RuntimeError,
-+                     "_compat_pickle.NAME_MAPPING should be a dict, not %.200s",
-+                     Py_TYPE(st->name_mapping_2to3)->tp_name);
-+        goto error;
-+    }
-+    st->import_mapping_2to3 = \
-+        PyObject_GetAttrString(compat_pickle, "IMPORT_MAPPING");
-+    if (!st->import_mapping_2to3)
-+        goto error;
-+    if (!PyDict_CheckExact(st->import_mapping_2to3)) {
-+        PyErr_Format(PyExc_RuntimeError,
-+                     "_compat_pickle.IMPORT_MAPPING should be a dict, "
-+                     "not %.200s", Py_TYPE(st->import_mapping_2to3)->tp_name);
-+        goto error;
-+    }
-+    /* ... and the 3.x -> 2.x mapping tables */
-+    st->name_mapping_3to2 = \
-+        PyObject_GetAttrString(compat_pickle, "REVERSE_NAME_MAPPING");
-+    if (!st->name_mapping_3to2)
-+        goto error;
-+    if (!PyDict_CheckExact(st->name_mapping_3to2)) {
-+        PyErr_Format(PyExc_RuntimeError,
-+                     "_compat_pickle.REVERSE_NAME_MAPPING should be a dict, "
-+                     "not %.200s", Py_TYPE(st->name_mapping_3to2)->tp_name);
-+        goto error;
-+    }
-+    st->import_mapping_3to2 = \
-+        PyObject_GetAttrString(compat_pickle, "REVERSE_IMPORT_MAPPING");
-+    if (!st->import_mapping_3to2)
-+        goto error;
-+    if (!PyDict_CheckExact(st->import_mapping_3to2)) {
-+        PyErr_Format(PyExc_RuntimeError,
-+                     "_compat_pickle.REVERSE_IMPORT_MAPPING should be a dict, "
-+                     "not %.200s", Py_TYPE(st->import_mapping_3to2)->tp_name);
-+        goto error;
-+    }
-+    Py_CLEAR(compat_pickle);
-+
-+    codecs = PyImport_ImportModule("codecs");
-+    if (codecs == NULL)
-+        goto error;
-+    st->codecs_encode = PyObject_GetAttrString(codecs, "encode");
-+    if (st->codecs_encode == NULL) {
-+        goto error;
-+    }
-+    if (!PyCallable_Check(st->codecs_encode)) {
-+        PyErr_Format(PyExc_RuntimeError,
-+                     "codecs.encode should be a callable, not %.200s",
-+                     Py_TYPE(st->codecs_encode)->tp_name);
-+        goto error;
-+    }
-+    Py_CLEAR(codecs);
-+
-+    return 0;
-+
-+  error:
-+    Py_CLEAR(copyreg);
-+    Py_CLEAR(compat_pickle);
-+    Py_CLEAR(codecs);
-+    _Pickle_ClearState(st);
-+    return -1;
-+}
-+
-+/* Helper for calling a function with a single argument quickly.
-+
-+   This function steals the reference of the given argument. */
-+static PyObject *
-+_Pickle_FastCall(PyObject *func, PyObject *obj)
-+{
-+    PyObject *result;
-+    PyObject *arg_tuple = PyTuple_New(1);
-+
-+    /* Note: this function used to reuse the argument tuple. This used to give
-+       a slight performance boost with older pickle implementations where many
-+       unbuffered reads occurred (thus needing many function calls).
-+
-+       However, this optimization was removed because it was too complicated
-+       to get right. It abused the C API for tuples to mutate them which led
-+       to subtle reference counting and concurrency bugs. Furthermore, the
-+       introduction of protocol 4 and the prefetching optimization via peek()
-+       significantly reduced the number of function calls we do. Thus, the
-+       benefits became marginal at best. */
-+
-+    if (arg_tuple == NULL) {
-+        Py_DECREF(obj);
-+        return NULL;
-+    }
-+    PyTuple_SET_ITEM(arg_tuple, 0, obj);
-+    result = PyObject_Call(func, arg_tuple, NULL);
-+    Py_CLEAR(arg_tuple);
-+    return result;
-+}
-+
-+/*************************************************************************/
-+
-+static int
-+stack_underflow(void)
-+{
-+    PickleState *st = _Pickle_GetGlobalState();
-+    PyErr_SetString(st->UnpicklingError, "unpickling stack underflow");
-+    return -1;
-+}
-+
-+/* Internal data type used as the unpickling stack. */
-+typedef struct {
-+    PyObject_VAR_HEAD
-+    PyObject **data;
-+    Py_ssize_t allocated;  /* number of slots in data allocated */
-+} Pdata;
-+
-+static void
-+Pdata_dealloc(Pdata *self)
-+{
-+    Py_ssize_t i = Py_SIZE(self);
-+    while (--i >= 0) {
-+        Py_DECREF(self->data[i]);
-+    }
-+    PyMem_FREE(self->data);
-+    PyObject_Del(self);
-+}
-+
-+static PyTypeObject Pdata_Type = {
-+    PyVarObject_HEAD_INIT(NULL, 0)
-+    "_pickle.Pdata",              /*tp_name*/
-+    sizeof(Pdata),                /*tp_basicsize*/
-+    0,                            /*tp_itemsize*/
-+    (destructor)Pdata_dealloc,    /*tp_dealloc*/
-+};
-+
-+static PyObject *
-+Pdata_New(void)
-+{
-+    Pdata *self;
-+
-+    if (!(self = PyObject_New(Pdata, &Pdata_Type)))
-+        return NULL;
-+    Py_SIZE(self) = 0;
-+    self->allocated = 8;
-+    self->data = PyMem_MALLOC(self->allocated * sizeof(PyObject *));
-+    if (self->data)
-+        return (PyObject *)self;
-+    Py_DECREF(self);
-+    return PyErr_NoMemory();
-+}
-+
-+
-+/* Retain only the initial clearto items.  If clearto >= the current
-+ * number of items, this is a (non-erroneous) NOP.
-+ */
-+static int
-+Pdata_clear(Pdata *self, Py_ssize_t clearto)
-+{
-+    Py_ssize_t i = Py_SIZE(self);
-+
-+    if (clearto < 0)
-+        return stack_underflow();
-+    if (clearto >= i)
-+        return 0;
-+
-+    while (--i >= clearto) {
-+        Py_CLEAR(self->data[i]);
-+    }
-+    Py_SIZE(self) = clearto;
-+    return 0;
-+}
-+
-+static int
-+Pdata_grow(Pdata *self)
-+{
-+    PyObject **data = self->data;
-+    Py_ssize_t allocated = self->allocated;
-+    Py_ssize_t new_allocated;
-+
-+    new_allocated = (allocated >> 3) + 6;
-+    /* check for integer overflow */
-+    if (new_allocated > PY_SSIZE_T_MAX - allocated)
-+        goto nomemory;
-+    new_allocated += allocated;
-+    if (new_allocated > (PY_SSIZE_T_MAX / sizeof(PyObject *)))
-+        goto nomemory;
-+    data = PyMem_REALLOC(data, new_allocated * sizeof(PyObject *));
-+    if (data == NULL)
-+        goto nomemory;
-+
-+    self->data = data;
-+    self->allocated = new_allocated;
-+    return 0;
-+
-+  nomemory:
-+    PyErr_NoMemory();
-+    return -1;
-+}
-+
-+/* D is a Pdata*.  Pop the topmost element and store it into V, which
-+ * must be an lvalue holding PyObject*.  On stack underflow, UnpicklingError
-+ * is raised and V is set to NULL.
-+ */
-+static PyObject *
-+Pdata_pop(Pdata *self)
-+{
-+    PickleState *st = _Pickle_GetGlobalState();
-+    if (Py_SIZE(self) == 0) {
-+        PyErr_SetString(st->UnpicklingError, "bad pickle data");
-+        return NULL;
-+    }
-+    return self->data[--Py_SIZE(self)];
-+}
-+#define PDATA_POP(D, V) do { (V) = Pdata_pop((D)); } while (0)
-+
-+static int
-+Pdata_push(Pdata *self, PyObject *obj)
-+{
-+    if (Py_SIZE(self) == self->allocated && Pdata_grow(self) < 0) {
-+        return -1;
-+    }
-+    self->data[Py_SIZE(self)++] = obj;
-+    return 0;
-+}
-+
-+/* Push an object on stack, transferring its ownership to the stack. */
-+#define PDATA_PUSH(D, O, ER) do {                               \
-+        if (Pdata_push((D), (O)) < 0) return (ER); } while(0)
-+
-+/* Push an object on stack, adding a new reference to the object. */
-+#define PDATA_APPEND(D, O, ER) do {                             \
-+        Py_INCREF((O));                                         \
-+        if (Pdata_push((D), (O)) < 0) return (ER); } while(0)
-+
-+static PyObject *
-+Pdata_poptuple(Pdata *self, Py_ssize_t start)
-+{
-+    PyObject *tuple;
-+    Py_ssize_t len, i, j;
-+
-+    len = Py_SIZE(self) - start;
-+    tuple = PyTuple_New(len);
-+    if (tuple == NULL)
-+        return NULL;
-+    for (i = start, j = 0; j < len; i++, j++)
-+        PyTuple_SET_ITEM(tuple, j, self->data[i]);
-+
-+    Py_SIZE(self) = start;
-+    return tuple;
-+}
-+
-+static PyObject *
-+Pdata_poplist(Pdata *self, Py_ssize_t start)
-+{
-+    PyObject *list;
-+    Py_ssize_t len, i, j;
-+
-+    len = Py_SIZE(self) - start;
-+    list = PyList_New(len);
-+    if (list == NULL)
-+        return NULL;
-+    for (i = start, j = 0; j < len; i++, j++)
-+        PyList_SET_ITEM(list, j, self->data[i]);
-+
-+    Py_SIZE(self) = start;
-+    return list;
-+}
-+
-+typedef struct {
-+    PyObject *me_key;
-+    Py_ssize_t me_value;
-+} PyMemoEntry;
-+
-+typedef struct {
-+    Py_ssize_t mt_mask;
-+    Py_ssize_t mt_used;
-+    Py_ssize_t mt_allocated;
-+    PyMemoEntry *mt_table;
-+} PyMemoTable;
-+
-+typedef struct PicklerObject {
-+    PyObject_HEAD
-+    PyMemoTable *memo;          /* Memo table; keeps track of objects already
-+                                   seen, to support pickling of
-+                                   self-referential objects. */
-+    PyObject *pers_func;        /* persistent_id() method, can be NULL */
-+    PyObject *dispatch_table;   /* private dispatch_table, can be NULL */
-+
-+    PyObject *write;            /* write() method of the output stream. */
-+    PyObject *output_buffer;    /* Write into a local bytearray buffer before
-+                                   flushing to the stream. */
-+    Py_ssize_t output_len;      /* Length of output_buffer. */
-+    Py_ssize_t max_output_len;  /* Allocation size of output_buffer. */
-+    int proto;                  /* Pickle protocol number, >= 0 */
-+    int bin;                    /* Boolean, true if proto > 0 */
-+    int framing;                /* True when framing is enabled, proto >= 4 */
-+    Py_ssize_t frame_start;     /* Position in output_buffer where the
-+                                   current frame begins. -1 if there
-+                                   is no frame currently open. */
-+
-+    Py_ssize_t buf_size;        /* Size of the current buffered pickle data */
-+    int fast;                   /* Enable fast mode if set to a true value.
-+                                   Fast mode disables the use of the memo,
-+                                   speeding up the pickling process by not
-+                                   generating superfluous PUT opcodes. It
-+                                   should not be used with self-referential
-+                                   objects. */
-+    int fast_nesting;
-+    int fix_imports;            /* Indicate whether Pickler should fix
-+                                   the name of globals for Python 2.x. */
-+    PyObject *fast_memo;
-+} PicklerObject;
-+
-+typedef struct UnpicklerObject {
-+    PyObject_HEAD
-+    Pdata *stack;               /* Pickle data stack, store unpickled objects. */
-+
-+    /* The unpickler memo is just an array of PyObject *s. Using a dict
-+       is unnecessary, since the keys are contiguous ints. */
-+    PyObject **memo;
-+    Py_ssize_t memo_size;       /* Capacity of the memo array */
-+    Py_ssize_t memo_len;        /* Number of objects in the memo */
-+
-+    PyObject *pers_func;        /* persistent_load() method, can be NULL. */
-+
-+    Py_buffer buffer;
-+    char *input_buffer;
-+    char *input_line;
-+    Py_ssize_t input_len;
-+    Py_ssize_t next_read_idx;
-+    Py_ssize_t prefetched_idx;  /* index of first prefetched byte */
-+
-+    PyObject *read;             /* read() method of the input stream. */
-+    PyObject *readline;         /* readline() method of the input stream. */
-+    PyObject *peek;             /* peek() method of the input stream, or NULL */
-+
-+    char *encoding;             /* Name of the encoding to be used for
-+                                   decoding strings pickled using Python
-+                                   2.x. The default value is "ASCII" */
-+    char *errors;               /* Name of the error handling scheme to use
-+                                   when decoding strings. The default value
-+                                   is "strict". */
-+    Py_ssize_t *marks;          /* Mark stack, used for unpickling container
-+                                   objects. */
-+    Py_ssize_t num_marks;       /* Number of marks in the mark stack. */
-+    Py_ssize_t marks_size;      /* Current allocated size of the mark stack. */
-+    int proto;                  /* Protocol of the pickle loaded. */
-+    int fix_imports;            /* Indicate whether Unpickler should fix
-+                                   the name of globals pickled by Python 2.x. */
-+} UnpicklerObject;
-+
-+typedef struct {
-+    PyObject_HEAD
-+    PicklerObject *pickler; /* Pickler whose memo table we're proxying. */
-+}  PicklerMemoProxyObject;
-+
-+typedef struct {
-+    PyObject_HEAD
-+    UnpicklerObject *unpickler;
-+} UnpicklerMemoProxyObject;
-+
-+/* Forward declarations */
-+static int save(PicklerObject *, PyObject *, int);
-+static int save_reduce(PicklerObject *, PyObject *, PyObject *);
-+static PyTypeObject Pickler_Type;
-+static PyTypeObject Unpickler_Type;
-+
-+/*[clinic input]
-+preserve
-+[clinic start generated code]*/
-+
-+PyDoc_STRVAR(_pickle_Pickler_clear_memo__doc__,
-+"clear_memo($self, /)\n"
-+"--\n"
-+"\n"
-+"Clears the pickler\'s \"memo\".\n"
-+"\n"
-+"The memo is the data structure that remembers which objects the\n"
-+"pickler has already seen, so that shared or recursive objects are\n"
-+"pickled by reference and not by value.  This method is useful when\n"
-+"re-using picklers.");
-+
-+#define _PICKLE_PICKLER_CLEAR_MEMO_METHODDEF    \
-+    {"clear_memo", (PyCFunction)_pickle_Pickler_clear_memo, METH_NOARGS, _pickle_Pickler_clear_memo__doc__},
-+
-+static PyObject *
-+_pickle_Pickler_clear_memo_impl(PicklerObject *self);
-+
-+static PyObject *
-+_pickle_Pickler_clear_memo(PicklerObject *self, PyObject *Py_UNUSED(ignored))
-+{
-+    return _pickle_Pickler_clear_memo_impl(self);
-+}
-+
-+PyDoc_STRVAR(_pickle_Pickler_dump__doc__,
-+"dump($self, obj, /)\n"
-+"--\n"
-+"\n"
-+"Write a pickled representation of the given object to the open file.");
-+
-+#define _PICKLE_PICKLER_DUMP_METHODDEF    \
-+    {"dump", (PyCFunction)_pickle_Pickler_dump, METH_O, _pickle_Pickler_dump__doc__},
-+
-+PyDoc_STRVAR(_pickle_Pickler___init____doc__,
-+"Pickler(file, protocol=None, fix_imports=True)\n"
-+"--\n"
-+"\n"
-+"This takes a binary file for writing a pickle data stream.\n"
-+"\n"
-+"The optional *protocol* argument tells the pickler to use the given\n"
-+"protocol; supported protocols are 0, 1, 2, 3 and 4.  The default\n"
-+"protocol is 3; a backward-incompatible protocol designed for Python 3.\n"
-+"\n"
-+"Specifying a negative protocol version selects the highest protocol\n"
-+"version supported.  The higher the protocol used, the more recent the\n"
-+"version of Python needed to read the pickle produced.\n"
-+"\n"
-+"The *file* argument must have a write() method that accepts a single\n"
-+"bytes argument. It can thus be a file object opened for binary\n"
-+"writing, a io.BytesIO instance, or any other custom object that meets\n"
-+"this interface.\n"
-+"\n"
-+"If *fix_imports* is True and protocol is less than 3, pickle will try\n"
-+"to map the new Python 3 names to the old module names used in Python\n"
-+"2, so that the pickle data stream is readable with Python 2.");
-+
-+static int
-+_pickle_Pickler___init___impl(PicklerObject *self, PyObject *file, PyObject *protocol, int fix_imports);
-+
-+static int
-+_pickle_Pickler___init__(PyObject *self, PyObject *args, PyObject *kwargs)
-+{
-+    int return_value = -1;
-+    static char *_keywords[] = {"file", "protocol", "fix_imports", NULL};
-+    PyObject *file;
-+    PyObject *protocol = NULL;
-+    int fix_imports = 1;
-+
-+    if (!PyArg_ParseTupleAndKeywords(args, kwargs,
-+        "O|Op:Pickler", _keywords,
-+        &file, &protocol, &fix_imports))
-+        goto exit;
-+    return_value = _pickle_Pickler___init___impl((PicklerObject *)self, file, protocol, fix_imports);
-+
-+exit:
-+    return return_value;
-+}
-+
-+PyDoc_STRVAR(_pickle_PicklerMemoProxy_clear__doc__,
-+"clear($self, /)\n"
-+"--\n"
-+"\n"
-+"Remove all items from memo.");
-+
-+#define _PICKLE_PICKLERMEMOPROXY_CLEAR_METHODDEF    \
-+    {"clear", (PyCFunction)_pickle_PicklerMemoProxy_clear, METH_NOARGS, _pickle_PicklerMemoProxy_clear__doc__},
-+
-+static PyObject *
-+_pickle_PicklerMemoProxy_clear_impl(PicklerMemoProxyObject *self);
-+
-+static PyObject *
-+_pickle_PicklerMemoProxy_clear(PicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored))
-+{
-+    return _pickle_PicklerMemoProxy_clear_impl(self);
-+}
-+
-+PyDoc_STRVAR(_pickle_PicklerMemoProxy_copy__doc__,
-+"copy($self, /)\n"
-+"--\n"
-+"\n"
-+"Copy the memo to a new object.");
-+
-+#define _PICKLE_PICKLERMEMOPROXY_COPY_METHODDEF    \
-+    {"copy", (PyCFunction)_pickle_PicklerMemoProxy_copy, METH_NOARGS, _pickle_PicklerMemoProxy_copy__doc__},
-+
-+static PyObject *
-+_pickle_PicklerMemoProxy_copy_impl(PicklerMemoProxyObject *self);
-+
-+static PyObject *
-+_pickle_PicklerMemoProxy_copy(PicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored))
-+{
-+    return _pickle_PicklerMemoProxy_copy_impl(self);
-+}
-+
-+PyDoc_STRVAR(_pickle_PicklerMemoProxy___reduce____doc__,
-+"__reduce__($self, /)\n"
-+"--\n"
-+"\n"
-+"Implement pickle support.");
-+
-+#define _PICKLE_PICKLERMEMOPROXY___REDUCE___METHODDEF    \
-+    {"__reduce__", (PyCFunction)_pickle_PicklerMemoProxy___reduce__, METH_NOARGS, _pickle_PicklerMemoProxy___reduce____doc__},
-+
-+static PyObject *
-+_pickle_PicklerMemoProxy___reduce___impl(PicklerMemoProxyObject *self);
-+
-+static PyObject *
-+_pickle_PicklerMemoProxy___reduce__(PicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored))
-+{
-+    return _pickle_PicklerMemoProxy___reduce___impl(self);
-+}
-+
-+PyDoc_STRVAR(_pickle_Unpickler_load__doc__,
-+"load($self, /)\n"
-+"--\n"
-+"\n"
-+"Load a pickle.\n"
-+"\n"
-+"Read a pickled object representation from the open file object given\n"
-+"in the constructor, and return the reconstituted object hierarchy\n"
-+"specified therein.");
-+
-+#define _PICKLE_UNPICKLER_LOAD_METHODDEF    \
-+    {"load", (PyCFunction)_pickle_Unpickler_load, METH_NOARGS, _pickle_Unpickler_load__doc__},
-+
-+static PyObject *
-+_pickle_Unpickler_load_impl(UnpicklerObject *self);
-+
-+static PyObject *
-+_pickle_Unpickler_load(UnpicklerObject *self, PyObject *Py_UNUSED(ignored))
-+{
-+    return _pickle_Unpickler_load_impl(self);
-+}
-+
-+PyDoc_STRVAR(_pickle_Unpickler_find_class__doc__,
-+"find_class($self, module_name, global_name, /)\n"
-+"--\n"
-+"\n"
-+"Return an object from a specified module.\n"
-+"\n"
-+"If necessary, the module will be imported. Subclasses may override\n"
-+"this method (e.g. to restrict unpickling of arbitrary classes and\n"
-+"functions).\n"
-+"\n"
-+"This method is called whenever a class or a function object is\n"
-+"needed.  Both arguments passed are str objects.");
-+
-+#define _PICKLE_UNPICKLER_FIND_CLASS_METHODDEF    \
-+    {"find_class", (PyCFunction)_pickle_Unpickler_find_class, METH_VARARGS, _pickle_Unpickler_find_class__doc__},
-+
-+static PyObject *
-+_pickle_Unpickler_find_class_impl(UnpicklerObject *self, PyObject *module_name, PyObject *global_name);
-+
-+static PyObject *
-+_pickle_Unpickler_find_class(UnpicklerObject *self, PyObject *args)
-+{
-+    PyObject *return_value = NULL;
-+    PyObject *module_name;
-+    PyObject *global_name;
-+
-+    if (!PyArg_UnpackTuple(args, "find_class",
-+        2, 2,
-+        &module_name, &global_name))
-+        goto exit;
-+    return_value = _pickle_Unpickler_find_class_impl(self, module_name, global_name);
-+
-+exit:
-+    return return_value;
-+}
-+
-+PyDoc_STRVAR(_pickle_Unpickler___init____doc__,
-+"Unpickler(file, *, fix_imports=True, encoding=\'ASCII\', errors=\'strict\')\n"
-+"--\n"
-+"\n"
-+"This takes a binary file for reading a pickle data stream.\n"
-+"\n"
-+"The protocol version of the pickle is detected automatically, so no\n"
-+"protocol argument is needed.  Bytes past the pickled object\'s\n"
-+"representation are ignored.\n"
-+"\n"
-+"The argument *file* must have two methods, a read() method that takes\n"
-+"an integer argument, and a readline() method that requires no\n"
-+"arguments.  Both methods should return bytes.  Thus *file* can be a\n"
-+"binary file object opened for reading, a io.BytesIO object, or any\n"
-+"other custom object that meets this interface.\n"
-+"\n"
-+"Optional keyword arguments are *fix_imports*, *encoding* and *errors*,\n"
-+"which are used to control compatiblity support for pickle stream\n"
-+"generated by Python 2.  If *fix_imports* is True, pickle will try to\n"
-+"map the old Python 2 names to the new names used in Python 3.  The\n"
-+"*encoding* and *errors* tell pickle how to decode 8-bit string\n"
-+"instances pickled by Python 2; these default to \'ASCII\' and \'strict\',\n"
-+"respectively.  The *encoding* can be \'bytes\' to read these 8-bit\n"
-+"string instances as bytes objects.");
-+
-+static int
-+_pickle_Unpickler___init___impl(UnpicklerObject *self, PyObject *file, int fix_imports, const char *encoding, const char *errors);
-+
-+static int
-+_pickle_Unpickler___init__(PyObject *self, PyObject *args, PyObject *kwargs)
-+{
-+    int return_value = -1;
-+    static char *_keywords[] = {"file", "fix_imports", "encoding", "errors", NULL};
-+    PyObject *file;
-+    int fix_imports = 1;
-+    const char *encoding = "ASCII";
-+    const char *errors = "strict";
-+
-+    if (!PyArg_ParseTupleAndKeywords(args, kwargs,
-+        "O|$pss:Unpickler", _keywords,
-+        &file, &fix_imports, &encoding, &errors))
-+        goto exit;
-+    return_value = _pickle_Unpickler___init___impl((UnpicklerObject *)self, file, fix_imports, encoding, errors);
-+
-+exit:
-+    return return_value;
-+}
-+
-+PyDoc_STRVAR(_pickle_UnpicklerMemoProxy_clear__doc__,
-+"clear($self, /)\n"
-+"--\n"
-+"\n"
-+"Remove all items from memo.");
-+
-+#define _PICKLE_UNPICKLERMEMOPROXY_CLEAR_METHODDEF    \
-+    {"clear", (PyCFunction)_pickle_UnpicklerMemoProxy_clear, METH_NOARGS, _pickle_UnpicklerMemoProxy_clear__doc__},
-+
-+static PyObject *
-+_pickle_UnpicklerMemoProxy_clear_impl(UnpicklerMemoProxyObject *self);
-+
-+static PyObject *
-+_pickle_UnpicklerMemoProxy_clear(UnpicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored))
-+{
-+    return _pickle_UnpicklerMemoProxy_clear_impl(self);
-+}
-+
-+PyDoc_STRVAR(_pickle_UnpicklerMemoProxy_copy__doc__,
-+"copy($self, /)\n"
-+"--\n"
-+"\n"
-+"Copy the memo to a new object.");
-+
-+#define _PICKLE_UNPICKLERMEMOPROXY_COPY_METHODDEF    \
-+    {"copy", (PyCFunction)_pickle_UnpicklerMemoProxy_copy, METH_NOARGS, _pickle_UnpicklerMemoProxy_copy__doc__},
-+
-+static PyObject *
-+_pickle_UnpicklerMemoProxy_copy_impl(UnpicklerMemoProxyObject *self);
-+
-+static PyObject *
-+_pickle_UnpicklerMemoProxy_copy(UnpicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored))
-+{
-+    return _pickle_UnpicklerMemoProxy_copy_impl(self);
-+}
-+
-+PyDoc_STRVAR(_pickle_UnpicklerMemoProxy___reduce____doc__,
-+"__reduce__($self, /)\n"
-+"--\n"
-+"\n"
-+"Implement pickling support.");
-+
-+#define _PICKLE_UNPICKLERMEMOPROXY___REDUCE___METHODDEF    \
-+    {"__reduce__", (PyCFunction)_pickle_UnpicklerMemoProxy___reduce__, METH_NOARGS, _pickle_UnpicklerMemoProxy___reduce____doc__},
-+
-+static PyObject *
-+_pickle_UnpicklerMemoProxy___reduce___impl(UnpicklerMemoProxyObject *self);
-+
-+static PyObject *
-+_pickle_UnpicklerMemoProxy___reduce__(UnpicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored))
-+{
-+    return _pickle_UnpicklerMemoProxy___reduce___impl(self);
-+}
-+
-+PyDoc_STRVAR(_pickle_dump__doc__,
-+"dump($module, /, obj, file, protocol=None, *, fix_imports=True)\n"
-+"--\n"
-+"\n"
-+"Write a pickled representation of obj to the open file object file.\n"
-+"\n"
-+"This is equivalent to ``Pickler(file, protocol).dump(obj)``, but may\n"
-+"be more efficient.\n"
-+"\n"
-+"The optional *protocol* argument tells the pickler to use the given\n"
-+"protocol supported protocols are 0, 1, 2, 3 and 4.  The default\n"
-+"protocol is 3; a backward-incompatible protocol designed for Python 3.\n"
-+"\n"
-+"Specifying a negative protocol version selects the highest protocol\n"
-+"version supported.  The higher the protocol used, the more recent the\n"
-+"version of Python needed to read the pickle produced.\n"
-+"\n"
-+"The *file* argument must have a write() method that accepts a single\n"
-+"bytes argument.  It can thus be a file object opened for binary\n"
-+"writing, a io.BytesIO instance, or any other custom object that meets\n"
-+"this interface.\n"
-+"\n"
-+"If *fix_imports* is True and protocol is less than 3, pickle will try\n"
-+"to map the new Python 3 names to the old module names used in Python\n"
-+"2, so that the pickle data stream is readable with Python 2.");
-+
-+#define _PICKLE_DUMP_METHODDEF    \
-+    {"dump", (PyCFunction)_pickle_dump, METH_VARARGS|METH_KEYWORDS, _pickle_dump__doc__},
-+
-+static PyObject *
-+_pickle_dump_impl(PyModuleDef *module, PyObject *obj, PyObject *file, PyObject *protocol, int fix_imports);
-+
-+static PyObject *
-+_pickle_dump(PyModuleDef *module, PyObject *args, PyObject *kwargs)
-+{
-+    PyObject *return_value = NULL;
-+    static char *_keywords[] = {"obj", "file", "protocol", "fix_imports", NULL};
-+    PyObject *obj;
-+    PyObject *file;
-+    PyObject *protocol = NULL;
-+    int fix_imports = 1;
-+
-+    if (!PyArg_ParseTupleAndKeywords(args, kwargs,
-+        "OO|O$p:dump", _keywords,
-+        &obj, &file, &protocol, &fix_imports))
-+        goto exit;
-+    return_value = _pickle_dump_impl(module, obj, file, protocol, fix_imports);
-+
-+exit:
-+    return return_value;
-+}
-+
-+PyDoc_STRVAR(_pickle_dumps__doc__,
-+"dumps($module, /, obj, protocol=None, *, fix_imports=True)\n"
-+"--\n"
-+"\n"
-+"Return the pickled representation of the object as a bytes object.\n"
-+"\n"
-+"The optional *protocol* argument tells the pickler to use the given\n"
-+"protocol; supported protocols are 0, 1, 2, 3 and 4.  The default\n"
-+"protocol is 3; a backward-incompatible protocol designed for Python 3.\n"
-+"\n"
-+"Specifying a negative protocol version selects the highest protocol\n"
-+"version supported.  The higher the protocol used, the more recent the\n"
-+"version of Python needed to read the pickle produced.\n"
-+"\n"
-+"If *fix_imports* is True and *protocol* is less than 3, pickle will\n"
-+"try to map the new Python 3 names to the old module names used in\n"
-+"Python 2, so that the pickle data stream is readable with Python 2.");
-+
-+#define _PICKLE_DUMPS_METHODDEF    \
-+    {"dumps", (PyCFunction)_pickle_dumps, METH_VARARGS|METH_KEYWORDS, _pickle_dumps__doc__},
-+
-+static PyObject *
-+_pickle_dumps_impl(PyModuleDef *module, PyObject *obj, PyObject *protocol, int fix_imports);
-+
-+static PyObject *
-+_pickle_dumps(PyModuleDef *module, PyObject *args, PyObject *kwargs)
-+{
-+    PyObject *return_value = NULL;
-+    static char *_keywords[] = {"obj", "protocol", "fix_imports", NULL};
-+    PyObject *obj;
-+    PyObject *protocol = NULL;
-+    int fix_imports = 1;
-+
-+    if (!PyArg_ParseTupleAndKeywords(args, kwargs,
-+        "O|O$p:dumps", _keywords,
-+        &obj, &protocol, &fix_imports))
-+        goto exit;
-+    return_value = _pickle_dumps_impl(module, obj, protocol, fix_imports);
-+
-+exit:
-+    return return_value;
-+}
-+
-+PyDoc_STRVAR(_pickle_load__doc__,
-+"load($module, /, file, *, fix_imports=True, encoding=\'ASCII\',\n"
-+"     errors=\'strict\')\n"
-+"--\n"
-+"\n"
-+"Read and return an object from the pickle data stored in a file.\n"
-+"\n"
-+"This is equivalent to ``Unpickler(file).load()``, but may be more\n"
-+"efficient.\n"
-+"\n"
-+"The protocol version of the pickle is detected automatically, so no\n"
-+"protocol argument is needed.  Bytes past the pickled object\'s\n"
-+"representation are ignored.\n"
-+"\n"
-+"The argument *file* must have two methods, a read() method that takes\n"
-+"an integer argument, and a readline() method that requires no\n"
-+"arguments.  Both methods should return bytes.  Thus *file* can be a\n"
-+"binary file object opened for reading, a io.BytesIO object, or any\n"
-+"other custom object that meets this interface.\n"
-+"\n"
-+"Optional keyword arguments are *fix_imports*, *encoding* and *errors*,\n"
-+"which are used to control compatiblity support for pickle stream\n"
-+"generated by Python 2.  If *fix_imports* is True, pickle will try to\n"
-+"map the old Python 2 names to the new names used in Python 3.  The\n"
-+"*encoding* and *errors* tell pickle how to decode 8-bit string\n"
-+"instances pickled by Python 2; these default to \'ASCII\' and \'strict\',\n"
-+"respectively.  The *encoding* can be \'bytes\' to read these 8-bit\n"
-+"string instances as bytes objects.");
-+
-+#define _PICKLE_LOAD_METHODDEF    \
-+    {"load", (PyCFunction)_pickle_load, METH_VARARGS|METH_KEYWORDS, _pickle_load__doc__},
-+
-+static PyObject *
-+_pickle_load_impl(PyModuleDef *module, PyObject *file, int fix_imports, const char *encoding, const char *errors);
-+
-+static PyObject *
-+_pickle_load(PyModuleDef *module, PyObject *args, PyObject *kwargs)
-+{
-+    PyObject *return_value = NULL;
-+    static char *_keywords[] = {"file", "fix_imports", "encoding", "errors", NULL};
-+    PyObject *file;
-+    int fix_imports = 1;
-+    const char *encoding = "ASCII";
-+    const char *errors = "strict";
-+
-+    if (!PyArg_ParseTupleAndKeywords(args, kwargs,
-+        "O|$pss:load", _keywords,
-+        &file, &fix_imports, &encoding, &errors))
-+        goto exit;
-+    return_value = _pickle_load_impl(module, file, fix_imports, encoding, errors);
-+
-+exit:
-+    return return_value;
-+}
-+
-+PyDoc_STRVAR(_pickle_loads__doc__,
-+"loads($module, /, data, *, fix_imports=True, encoding=\'ASCII\',\n"
-+"      errors=\'strict\')\n"
-+"--\n"
-+"\n"
-+"Read and return an object from the given pickle data.\n"
-+"\n"
-+"The protocol version of the pickle is detected automatically, so no\n"
-+"protocol argument is needed.  Bytes past the pickled object\'s\n"
-+"representation are ignored.\n"
-+"\n"
-+"Optional keyword arguments are *fix_imports*, *encoding* and *errors*,\n"
-+"which are used to control compatiblity support for pickle stream\n"
-+"generated by Python 2.  If *fix_imports* is True, pickle will try to\n"
-+"map the old Python 2 names to the new names used in Python 3.  The\n"
-+"*encoding* and *errors* tell pickle how to decode 8-bit string\n"
-+"instances pickled by Python 2; these default to \'ASCII\' and \'strict\',\n"
-+"respectively.  The *encoding* can be \'bytes\' to read these 8-bit\n"
-+"string instances as bytes objects.");
-+
-+#define _PICKLE_LOADS_METHODDEF    \
-+    {"loads", (PyCFunction)_pickle_loads, METH_VARARGS|METH_KEYWORDS, _pickle_loads__doc__},
-+
-+static PyObject *
-+_pickle_loads_impl(PyModuleDef *module, PyObject *data, int fix_imports, const char *encoding, const char *errors);
-+
-+static PyObject *
-+_pickle_loads(PyModuleDef *module, PyObject *args, PyObject *kwargs)
-+{
-+    PyObject *return_value = NULL;
-+    static char *_keywords[] = {"data", "fix_imports", "encoding", "errors", NULL};
-+    PyObject *data;
-+    int fix_imports = 1;
-+    const char *encoding = "ASCII";
-+    const char *errors = "strict";
-+
-+    if (!PyArg_ParseTupleAndKeywords(args, kwargs,
-+        "O|$pss:loads", _keywords,
-+        &data, &fix_imports, &encoding, &errors))
-+        goto exit;
-+    return_value = _pickle_loads_impl(module, data, fix_imports, encoding, errors);
-+
-+exit:
-+    return return_value;
-+}
-+/*[clinic end generated code: output=f965b6c7018c898d input=a9049054013a1b77]*/
-+
-+/*************************************************************************
-+ A custom hashtable mapping void* to Python ints. This is used by the pickler
-+ for memoization. Using a custom hashtable rather than PyDict allows us to skip
-+ a bunch of unnecessary object creation. This makes a huge performance
-+ difference. */
-+
-+#define MT_MINSIZE 8
-+#define PERTURB_SHIFT 5
-+
-+
-+static PyMemoTable *
-+PyMemoTable_New(void)
-+{
-+    PyMemoTable *memo = PyMem_MALLOC(sizeof(PyMemoTable));
-+    if (memo == NULL) {
-+        PyErr_NoMemory();
-+        return NULL;
-+    }
-+
-+    memo->mt_used = 0;
-+    memo->mt_allocated = MT_MINSIZE;
-+    memo->mt_mask = MT_MINSIZE - 1;
-+    memo->mt_table = PyMem_MALLOC(MT_MINSIZE * sizeof(PyMemoEntry));
-+    if (memo->mt_table == NULL) {
-+        PyMem_FREE(memo);
-+        PyErr_NoMemory();
-+        return NULL;
-+    }
-+    memset(memo->mt_table, 0, MT_MINSIZE * sizeof(PyMemoEntry));
-+
-+    return memo;
-+}
-+
-+static PyMemoTable *
-+PyMemoTable_Copy(PyMemoTable *self)
-+{
-+    Py_ssize_t i;
-+    PyMemoTable *new = PyMemoTable_New();
-+    if (new == NULL)
-+        return NULL;
-+
-+    new->mt_used = self->mt_used;
-+    new->mt_allocated = self->mt_allocated;
-+    new->mt_mask = self->mt_mask;
-+    /* The table we get from _New() is probably smaller than we wanted.
-+       Free it and allocate one that's the right size. */
-+    PyMem_FREE(new->mt_table);
-+    new->mt_table = PyMem_MALLOC(self->mt_allocated * sizeof(PyMemoEntry));
-+    if (new->mt_table == NULL) {
-+        PyMem_FREE(new);
-+        PyErr_NoMemory();
-+        return NULL;
-+    }
-+    for (i = 0; i < self->mt_allocated; i++) {
-+        Py_XINCREF(self->mt_table[i].me_key);
-+    }
-+    memcpy(new->mt_table, self->mt_table,
-+           sizeof(PyMemoEntry) * self->mt_allocated);
-+
-+    return new;
-+}
-+
-+static Py_ssize_t
-+PyMemoTable_Size(PyMemoTable *self)
-+{
-+    return self->mt_used;
-+}
-+
-+static int
-+PyMemoTable_Clear(PyMemoTable *self)
-+{
-+    Py_ssize_t i = self->mt_allocated;
-+
-+    while (--i >= 0) {
-+        Py_XDECREF(self->mt_table[i].me_key);
-+    }
-+    self->mt_used = 0;
-+    memset(self->mt_table, 0, self->mt_allocated * sizeof(PyMemoEntry));
-+    return 0;
-+}
-+
-+static void
-+PyMemoTable_Del(PyMemoTable *self)
-+{
-+    if (self == NULL)
-+        return;
-+    PyMemoTable_Clear(self);
-+
-+    PyMem_FREE(self->mt_table);
-+    PyMem_FREE(self);
-+}
-+
-+/* Since entries cannot be deleted from this hashtable, _PyMemoTable_Lookup()
-+   can be considerably simpler than dictobject.c's lookdict(). */
-+static PyMemoEntry *
-+_PyMemoTable_Lookup(PyMemoTable *self, PyObject *key)
-+{
-+    size_t i;
-+    size_t perturb;
-+    size_t mask = (size_t)self->mt_mask;
-+    PyMemoEntry *table = self->mt_table;
-+    PyMemoEntry *entry;
-+    Py_hash_t hash = (Py_hash_t)key >> 3;
-+
-+    i = hash & mask;
-+    entry = &table[i];
-+    if (entry->me_key == NULL || entry->me_key == key)
-+        return entry;
-+
-+    for (perturb = hash; ; perturb >>= PERTURB_SHIFT) {
-+        i = (i << 2) + i + perturb + 1;
-+        entry = &table[i & mask];
-+        if (entry->me_key == NULL || entry->me_key == key)
-+            return entry;
-+    }
-+    assert(0);  /* Never reached */
-+    return NULL;
-+}
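
For reference (not part of the patch): a standalone sketch of the open-addressing probe sequence used by _PyMemoTable_Lookup() above, with an assumed pointer-derived hash and an 8-slot table; each step applies i = (i << 2) + i + perturb + 1 and shifts perturb right by PERTURB_SHIFT (5).

    #include <stdio.h>

    int main(void)
    {
        size_t hash = (size_t)0xdeadbeef >> 3;  /* example key address >> 3 */
        size_t mask = 7;                        /* 8-slot table */
        size_t perturb = hash;
        size_t i = hash & mask;

        for (int step = 0; step < 5; step++) {
            printf("probe %d -> slot %zu\n", step, i & mask);
            i = (i << 2) + i + perturb + 1;
            perturb >>= 5;                      /* PERTURB_SHIFT */
        }
        return 0;
    }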
-+
-+/* Returns -1 on failure, 0 on success. */
-+static int
-+_PyMemoTable_ResizeTable(PyMemoTable *self, Py_ssize_t min_size)
-+{
-+    PyMemoEntry *oldtable = NULL;
-+    PyMemoEntry *oldentry, *newentry;
-+    Py_ssize_t new_size = MT_MINSIZE;
-+    Py_ssize_t to_process;
-+
-+    assert(min_size > 0);
-+
-+    /* Find the smallest valid table size >= min_size. */
-+    while (new_size < min_size && new_size > 0)
-+        new_size <<= 1;
-+    if (new_size <= 0) {
-+        PyErr_NoMemory();
-+        return -1;
-+    }
-+    /* new_size needs to be a power of two. */
-+    assert((new_size & (new_size - 1)) == 0);
-+
-+    /* Allocate new table. */
-+    oldtable = self->mt_table;
-+    self->mt_table = PyMem_MALLOC(new_size * sizeof(PyMemoEntry));
-+    if (self->mt_table == NULL) {
-+        self->mt_table = oldtable;
-+        PyErr_NoMemory();
-+        return -1;
-+    }
-+    self->mt_allocated = new_size;
-+    self->mt_mask = new_size - 1;
-+    memset(self->mt_table, 0, sizeof(PyMemoEntry) * new_size);
-+
-+    /* Copy entries from the old table. */
-+    to_process = self->mt_used;
-+    for (oldentry = oldtable; to_process > 0; oldentry++) {
-+        if (oldentry->me_key != NULL) {
-+            to_process--;
-+            /* newentry is a pointer to a chunk of the new
-+               mt_table, so we're setting the key:value pair
-+               in-place. */
-+            newentry = _PyMemoTable_Lookup(self, oldentry->me_key);
-+            newentry->me_key = oldentry->me_key;
-+            newentry->me_value = oldentry->me_value;
-+        }
-+    }
-+
-+    /* Deallocate the old table. */
-+    PyMem_FREE(oldtable);
-+    return 0;
-+}
-+
-+/* Returns NULL on failure, a pointer to the value otherwise. */
-+static Py_ssize_t *
-+PyMemoTable_Get(PyMemoTable *self, PyObject *key)
-+{
-+    PyMemoEntry *entry = _PyMemoTable_Lookup(self, key);
-+    if (entry->me_key == NULL)
-+        return NULL;
-+    return &entry->me_value;
-+}
-+
-+/* Returns -1 on failure, 0 on success. */
-+static int
-+PyMemoTable_Set(PyMemoTable *self, PyObject *key, Py_ssize_t value)
-+{
-+    PyMemoEntry *entry;
-+
-+    assert(key != NULL);
-+
-+    entry = _PyMemoTable_Lookup(self, key);
-+    if (entry->me_key != NULL) {
-+        entry->me_value = value;
-+        return 0;
-+    }
-+    Py_INCREF(key);
-+    entry->me_key = key;
-+    entry->me_value = value;
-+    self->mt_used++;
-+
-+    /* If we added a key, we can safely resize. Otherwise just return!
-+     * If used >= 2/3 size, adjust size. Normally, this quadruples the size.
-+     *
-+     * Quadrupling the size improves average table sparseness
-+     * (reducing collisions) at the cost of some memory. It also halves
-+     * the number of expensive resize operations in a growing memo table.
-+     *
-+     * Very large memo tables (over 50K items) use doubling instead.
-+     * This may help applications with severe memory constraints.
-+     */
-+    if (!(self->mt_used * 3 >= (self->mt_mask + 1) * 2))
-+        return 0;
-+    return _PyMemoTable_ResizeTable(self,
-+        (self->mt_used > 50000 ? 2 : 4) * self->mt_used);
-+}
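
For reference (not part of the patch): a small sketch of the resize policy in PyMemoTable_Set() above, assuming an 8-slot table. Growth triggers once the table is 2/3 full, and the new size is the smallest power of two (at least MT_MINSIZE) covering 4 * mt_used, or 2 * mt_used for tables over 50000 entries.

    #include <stdio.h>

    int main(void)
    {
        long mask = 7;                              /* 8-slot table */
        for (long used = 1; used <= 8; used++) {
            int grows = used * 3 >= (mask + 1) * 2; /* 2/3 load factor check */
            long min_size = (used > 50000 ? 2 : 4) * used;
            long new_size = 8;                      /* MT_MINSIZE */
            while (new_size < min_size)
                new_size <<= 1;
            printf("used=%ld grows=%d size=%ld\n",
                   used, grows, grows ? new_size : mask + 1);
        }
        return 0;
    }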
-+
-+#undef MT_MINSIZE
-+#undef PERTURB_SHIFT
-+
-+/*************************************************************************/
-+
-+
-+static int
-+_Pickler_ClearBuffer(PicklerObject *self)
-+{
-+    Py_CLEAR(self->output_buffer);
-+    self->output_buffer =
-+        PyBytes_FromStringAndSize(NULL, self->max_output_len);
-+    if (self->output_buffer == NULL)
-+        return -1;
-+    self->output_len = 0;
-+    self->frame_start = -1;
-+    return 0;
-+}
-+
-+static void
-+_write_size64(char *out, size_t value)
-+{
-+    int i;
-+
-+    assert(sizeof(size_t) <= 8);
-+
-+    for (i = 0; i < sizeof(size_t); i++) {
-+        out[i] = (unsigned char)((value >> (8 * i)) & 0xff);
-+    }
-+    for (i = sizeof(size_t); i < 8; i++) {
-+        out[i] = 0;
-+    }
-+}
-+
-+static void
-+_Pickler_WriteFrameHeader(PicklerObject *self, char *qdata, size_t frame_len)
-+{
-+    qdata[0] = FRAME;
-+    _write_size64(qdata + 1, frame_len);
-+}
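
For reference (not part of the patch): a standalone sketch of the 9-byte frame header written by _Pickler_WriteFrameHeader() above, i.e. the FRAME opcode followed by the frame length as a 64-bit little-endian integer, for an assumed 70000-byte frame.

    #include <stdio.h>

    int main(void)
    {
        unsigned char header[9];
        unsigned long long frame_len = 70000;   /* example payload length */

        header[0] = 0x95;                       /* FRAME opcode */
        for (int i = 0; i < 8; i++)
            header[i + 1] = (unsigned char)((frame_len >> (8 * i)) & 0xff);

        for (int i = 0; i < 9; i++)
            printf("%02x ", header[i]);
        printf("\n");
        return 0;
    }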
-+
-+static int
-+_Pickler_CommitFrame(PicklerObject *self)
-+{
-+    size_t frame_len;
-+    char *qdata;
-+
-+    if (!self->framing || self->frame_start == -1)
-+        return 0;
-+    frame_len = self->output_len - self->frame_start - FRAME_HEADER_SIZE;
-+    qdata = PyBytes_AS_STRING(self->output_buffer) + self->frame_start;
-+    _Pickler_WriteFrameHeader(self, qdata, frame_len);
-+    self->frame_start = -1;
-+    return 0;
-+}
-+
-+static int
-+_Pickler_OpcodeBoundary(PicklerObject *self)
-+{
-+    Py_ssize_t frame_len;
-+
-+    if (!self->framing || self->frame_start == -1)
-+        return 0;
-+    frame_len = self->output_len - self->frame_start - FRAME_HEADER_SIZE;
-+    if (frame_len >= FRAME_SIZE_TARGET)
-+        return _Pickler_CommitFrame(self);
-+    else
-+        return 0;
-+}
-+
-+static PyObject *
-+_Pickler_GetString(PicklerObject *self)
-+{
-+    PyObject *output_buffer = self->output_buffer;
-+
-+    assert(self->output_buffer != NULL);
-+
-+    if (_Pickler_CommitFrame(self))
-+        return NULL;
-+
-+    self->output_buffer = NULL;
-+    /* Resize down to exact size */
-+    if (_PyBytes_Resize(&output_buffer, self->output_len) < 0)
-+        return NULL;
-+    return output_buffer;
-+}
-+
-+static int
-+_Pickler_FlushToFile(PicklerObject *self)
-+{
-+    PyObject *output, *result;
-+
-+    assert(self->write != NULL);
-+
-+    /* This will commit the frame first */
-+    output = _Pickler_GetString(self);
-+    if (output == NULL)
-+        return -1;
-+
-+    result = _Pickle_FastCall(self->write, output);
-+    Py_XDECREF(result);
-+    return (result == NULL) ? -1 : 0;
-+}
-+
-+static Py_ssize_t
-+_Pickler_Write(PicklerObject *self, const char *s, Py_ssize_t data_len)
-+{
-+    Py_ssize_t i, n, required;
-+    char *buffer;
-+    int need_new_frame;
-+
-+    assert(s != NULL);
-+    need_new_frame = (self->framing && self->frame_start == -1);
-+
-+    if (need_new_frame)
-+        n = data_len + FRAME_HEADER_SIZE;
-+    else
-+        n = data_len;
-+
-+    required = self->output_len + n;
-+    if (required > self->max_output_len) {
-+        /* Make place in buffer for the pickle chunk */
-+        if (self->output_len >= PY_SSIZE_T_MAX / 2 - n) {
-+            PyErr_NoMemory();
-+            return -1;
-+        }
-+        self->max_output_len = (self->output_len + n) / 2 * 3;
-+        if (_PyBytes_Resize(&self->output_buffer, self->max_output_len) < 0)
-+            return -1;
-+    }
-+    buffer = PyBytes_AS_STRING(self->output_buffer);
-+    if (need_new_frame) {
-+        /* Setup new frame */
-+        Py_ssize_t frame_start = self->output_len;
-+        self->frame_start = frame_start;
-+        for (i = 0; i < FRAME_HEADER_SIZE; i++) {
-+            /* Write an invalid value, for debugging */
-+            buffer[frame_start + i] = 0xFE;
-+        }
-+        self->output_len += FRAME_HEADER_SIZE;
-+    }
-+    if (data_len < 8) {
-+        /* This is faster than memcpy when the string is short. */
-+        for (i = 0; i < data_len; i++) {
-+            buffer[self->output_len + i] = s[i];
-+        }
-+    }
-+    else {
-+        memcpy(buffer + self->output_len, s, data_len);
-+    }
-+    self->output_len += data_len;
-+    return data_len;
-+}
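
For reference (not part of the patch): when the output buffer is too small, _Pickler_Write() above allocates roughly 1.5x the space needed. A trivial sketch with an assumed 4096-byte buffer receiving a 100-byte chunk:

    #include <stdio.h>

    int main(void)
    {
        long output_len = 4096;   /* bytes already buffered */
        long n = 100;             /* incoming chunk */
        long new_max = (output_len + n) / 2 * 3;   /* same growth rule as above */
        printf("required=%ld new capacity=%ld\n", output_len + n, new_max);
        return 0;
    }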
-+
-+static PicklerObject *
-+_Pickler_New(void)
-+{
-+    PicklerObject *self;
-+
-+    self = PyObject_GC_New(PicklerObject, &Pickler_Type);
-+    if (self == NULL)
-+        return NULL;
-+
-+    self->pers_func = NULL;
-+    self->dispatch_table = NULL;
-+    self->write = NULL;
-+    self->proto = 0;
-+    self->bin = 0;
-+    self->framing = 0;
-+    self->frame_start = -1;
-+    self->fast = 0;
-+    self->fast_nesting = 0;
-+    self->fix_imports = 0;
-+    self->fast_memo = NULL;
-+    self->max_output_len = WRITE_BUF_SIZE;
-+    self->output_len = 0;
-+
-+    self->memo = PyMemoTable_New();
-+    self->output_buffer = PyBytes_FromStringAndSize(NULL,
-+                                                    self->max_output_len);
-+
-+    if (self->memo == NULL || self->output_buffer == NULL) {
-+        Py_DECREF(self);
-+        return NULL;
-+    }
-+    return self;
-+}
-+
-+static int
-+_Pickler_SetProtocol(PicklerObject *self, PyObject *protocol, int fix_imports)
-+{
-+    long proto;
-+
-+    if (protocol == NULL || protocol == Py_None) {
-+        proto = DEFAULT_PROTOCOL;
-+    }
-+    else {
-+        proto = PyLong_AsLong(protocol);
-+        if (proto < 0) {
-+            if (proto == -1 && PyErr_Occurred())
-+                return -1;
-+            proto = HIGHEST_PROTOCOL;
-+        }
-+        else if (proto > HIGHEST_PROTOCOL) {
-+            PyErr_Format(PyExc_ValueError, "pickle protocol must be <= %d",
-+                         HIGHEST_PROTOCOL);
-+            return -1;
-+        }
-+    }
-+    self->proto = (int)proto;
-+    self->bin = proto > 0;
-+    self->fix_imports = fix_imports && proto < 3;
-+    return 0;
-+}
-+
-+/* Returns -1 (with an exception set) on failure, 0 on success. This may
-+   be called once on a freshly created Pickler. */
-+static int
-+_Pickler_SetOutputStream(PicklerObject *self, PyObject *file)
-+{
-+    _Py_IDENTIFIER(write);
-+    assert(file != NULL);
-+    self->write = _PyObject_GetAttrId(file, &PyId_write);
-+    if (self->write == NULL) {
-+        if (PyErr_ExceptionMatches(PyExc_AttributeError))
-+            PyErr_SetString(PyExc_TypeError,
-+                            "file must have a 'write' attribute");
-+        return -1;
-+    }
-+
-+    return 0;
-+}
-+
-+/* Returns the size of the input on success, -1 on failure. This takes its
-+   own reference to `input`. */
-+static Py_ssize_t
-+_Unpickler_SetStringInput(UnpicklerObject *self, PyObject *input)
-+{
-+    if (self->buffer.buf != NULL)
-+        PyBuffer_Release(&self->buffer);
-+    if (PyObject_GetBuffer(input, &self->buffer, PyBUF_CONTIG_RO) < 0)
-+        return -1;
-+    self->input_buffer = self->buffer.buf;
-+    self->input_len = self->buffer.len;
-+    self->next_read_idx = 0;
-+    self->prefetched_idx = self->input_len;
-+    return self->input_len;
-+}
-+
-+static int
-+_Unpickler_SkipConsumed(UnpicklerObject *self)
-+{
-+    Py_ssize_t consumed;
-+    PyObject *r;
-+
-+    consumed = self->next_read_idx - self->prefetched_idx;
-+    if (consumed <= 0)
-+        return 0;
-+
-+    assert(self->peek);  /* otherwise we did something wrong */
-+    /* This makes a useless copy... */
-+    r = PyObject_CallFunction(self->read, "n", consumed);
-+    if (r == NULL)
-+        return -1;
-+    Py_DECREF(r);
-+
-+    self->prefetched_idx = self->next_read_idx;
-+    return 0;
-+}
-+
-+static const Py_ssize_t READ_WHOLE_LINE = -1;
-+
-+/* If reading from a file, we need to only pull the bytes we need, since there
-+   may be multiple pickle objects arranged contiguously in the same input
-+   buffer.
-+
-+   If `n` is READ_WHOLE_LINE, read a whole line. Otherwise, read up to `n`
-+   bytes from the input stream/buffer.
-+
-+   Update the unpickler's input buffer with the newly-read data. Returns -1 on
-+   failure; on success, returns the number of bytes read from the file.
-+
-+   On success, self->input_len will be 0; this is intentional so that when
-+   unpickling from a file, the "we've run out of data" code paths will trigger,
-+   causing the Unpickler to go back to the file for more data. Use the returned
-+   size to tell you how much data you can process. */
-+static Py_ssize_t
-+_Unpickler_ReadFromFile(UnpicklerObject *self, Py_ssize_t n)
-+{
-+    PyObject *data;
-+    Py_ssize_t read_size;
-+
-+    assert(self->read != NULL);
-+
-+    if (_Unpickler_SkipConsumed(self) < 0)
-+        return -1;
-+
-+    if (n == READ_WHOLE_LINE) {
-+        PyObject *empty_tuple = PyTuple_New(0);
-+        data = PyObject_Call(self->readline, empty_tuple, NULL);
-+        Py_DECREF(empty_tuple);
-+    }
-+    else {
-+        PyObject *len;
-+        /* Prefetch some data without advancing the file pointer, if possible */
-+        if (self->peek && n < PREFETCH) {
-+            len = PyLong_FromSsize_t(PREFETCH);
-+            if (len == NULL)
-+                return -1;
-+            data = _Pickle_FastCall(self->peek, len);
-+            if (data == NULL) {
-+                if (!PyErr_ExceptionMatches(PyExc_NotImplementedError))
-+                    return -1;
-+                /* peek() is probably not supported by the given file object */
-+                PyErr_Clear();
-+                Py_CLEAR(self->peek);
-+            }
-+            else {
-+                read_size = _Unpickler_SetStringInput(self, data);
-+                Py_DECREF(data);
-+                self->prefetched_idx = 0;
-+                if (n <= read_size)
-+                    return n;
-+            }
-+        }
-+        len = PyLong_FromSsize_t(n);
-+        if (len == NULL)
-+            return -1;
-+        data = _Pickle_FastCall(self->read, len);
-+    }
-+    if (data == NULL)
-+        return -1;
-+
-+    read_size = _Unpickler_SetStringInput(self, data);
-+    Py_DECREF(data);
-+    return read_size;
-+}
-+
-+/* Read `n` bytes from the unpickler's data source, storing the result in `*s`.
-+
-+   This should be used for all data reads, rather than accessing the unpickler's
-+   input buffer directly. This method deals correctly with reading from input
-+   streams, which the input buffer doesn't deal with.
-+
-+   Note that when reading from a file-like object, self->next_read_idx won't
-+   be updated (it should remain at 0 for the entire unpickling process). You
-+   should use this function's return value to know how many bytes you can
-+   consume.
-+
-+   Returns -1 (with an exception set) on failure. On success, return the
-+   number of chars read. */
-+static Py_ssize_t
-+_Unpickler_Read(UnpicklerObject *self, char **s, Py_ssize_t n)
-+{
-+    Py_ssize_t num_read;
-+
-+    if (self->next_read_idx + n <= self->input_len) {
-+        *s = self->input_buffer + self->next_read_idx;
-+        self->next_read_idx += n;
-+        return n;
-+    }
-+    if (!self->read) {
-+        PyErr_Format(PyExc_EOFError, "Ran out of input");
-+        return -1;
-+    }
-+    num_read = _Unpickler_ReadFromFile(self, n);
-+    if (num_read < 0)
-+        return -1;
-+    if (num_read < n) {
-+        PyErr_Format(PyExc_EOFError, "Ran out of input");
-+        return -1;
-+    }
-+    *s = self->input_buffer;
-+    self->next_read_idx = n;
-+    return n;
-+}
-+
-+static Py_ssize_t
-+_Unpickler_CopyLine(UnpicklerObject *self, char *line, Py_ssize_t len,
-+                    char **result)
-+{
-+    char *input_line = PyMem_Realloc(self->input_line, len + 1);
-+    if (input_line == NULL) {
-+        PyErr_NoMemory();
-+        return -1;
-+    }
-+
-+    memcpy(input_line, line, len);
-+    input_line[len] = '\0';
-+    self->input_line = input_line;
-+    *result = self->input_line;
-+    return len;
-+}
-+
-+/* Read a line from the input stream/buffer. If we run off the end of the input
-+   before hitting \n, return the data we found.
-+
-+   Returns the number of chars read, or -1 on failure. */
-+static Py_ssize_t
-+_Unpickler_Readline(UnpicklerObject *self, char **result)
-+{
-+    Py_ssize_t i, num_read;
-+
-+    for (i = self->next_read_idx; i < self->input_len; i++) {
-+        if (self->input_buffer[i] == '\n') {
-+            char *line_start = self->input_buffer + self->next_read_idx;
-+            num_read = i - self->next_read_idx + 1;
-+            self->next_read_idx = i + 1;
-+            return _Unpickler_CopyLine(self, line_start, num_read, result);
-+        }
-+    }
-+    if (self->read) {
-+        num_read = _Unpickler_ReadFromFile(self, READ_WHOLE_LINE);
-+        if (num_read < 0)
-+            return -1;
-+        self->next_read_idx = num_read;
-+        return _Unpickler_CopyLine(self, self->input_buffer, num_read, result);
-+    }
-+
-+    /* If we get here, we've run off the end of the input string. Return the
-+       remaining string and let the caller figure it out. */
-+    *result = self->input_buffer + self->next_read_idx;
-+    num_read = i - self->next_read_idx;
-+    self->next_read_idx = i;
-+    return num_read;
-+}
-+
-+/* Returns -1 (with an exception set) on failure, 0 on success. The memo array
-+   will be modified in place. */
-+static int
-+_Unpickler_ResizeMemoList(UnpicklerObject *self, Py_ssize_t new_size)
-+{
-+    Py_ssize_t i;
-+    PyObject **memo;
-+
-+    assert(new_size > self->memo_size);
-+
-+    memo = PyMem_REALLOC(self->memo, new_size * sizeof(PyObject *));
-+    if (memo == NULL) {
-+        PyErr_NoMemory();
-+        return -1;
-+    }
-+    self->memo = memo;
-+    for (i = self->memo_size; i < new_size; i++)
-+        self->memo[i] = NULL;
-+    self->memo_size = new_size;
-+    return 0;
-+}
-+
-+/* Returns NULL if idx is out of bounds. */
-+static PyObject *
-+_Unpickler_MemoGet(UnpicklerObject *self, Py_ssize_t idx)
-+{
-+    if (idx < 0 || idx >= self->memo_size)
-+        return NULL;
-+
-+    return self->memo[idx];
-+}
-+
-+/* Returns -1 (with an exception set) on failure, 0 on success.
-+   This takes its own reference to `value`. */
-+static int
-+_Unpickler_MemoPut(UnpicklerObject *self, Py_ssize_t idx, PyObject *value)
-+{
-+    PyObject *old_item;
-+
-+    if (idx >= self->memo_size) {
-+        if (_Unpickler_ResizeMemoList(self, idx * 2) < 0)
-+            return -1;
-+        assert(idx < self->memo_size);
-+    }
-+    Py_INCREF(value);
-+    old_item = self->memo[idx];
-+    self->memo[idx] = value;
-+    if (old_item != NULL) {
-+        Py_DECREF(old_item);
-+    }
-+    else {
-+        self->memo_len++;
-+    }
-+    return 0;
-+}
-+
-+static PyObject **
-+_Unpickler_NewMemo(Py_ssize_t new_size)
-+{
-+    PyObject **memo = PyMem_MALLOC(new_size * sizeof(PyObject *));
-+    if (memo == NULL) {
-+        PyErr_NoMemory();
-+        return NULL;
-+    }
-+    memset(memo, 0, new_size * sizeof(PyObject *));
-+    return memo;
-+}
-+
-+/* Free the unpickler's memo, taking care to decref any items left in it. */
-+static void
-+_Unpickler_MemoCleanup(UnpicklerObject *self)
-+{
-+    Py_ssize_t i;
-+    PyObject **memo = self->memo;
-+
-+    if (self->memo == NULL)
-+        return;
-+    self->memo = NULL;
-+    i = self->memo_size;
-+    while (--i >= 0) {
-+        Py_XDECREF(memo[i]);
-+    }
-+    PyMem_FREE(memo);
-+}
-+
-+static UnpicklerObject *
-+_Unpickler_New(void)
-+{
-+    UnpicklerObject *self;
-+
-+    self = PyObject_GC_New(UnpicklerObject, &Unpickler_Type);
-+    if (self == NULL)
-+        return NULL;
-+
-+    self->pers_func = NULL;
-+    self->input_buffer = NULL;
-+    self->input_line = NULL;
-+    self->input_len = 0;
-+    self->next_read_idx = 0;
-+    self->prefetched_idx = 0;
-+    self->read = NULL;
-+    self->readline = NULL;
-+    self->peek = NULL;
-+    self->encoding = NULL;
-+    self->errors = NULL;
-+    self->marks = NULL;
-+    self->num_marks = 0;
-+    self->marks_size = 0;
-+    self->proto = 0;
-+    self->fix_imports = 0;
-+    memset(&self->buffer, 0, sizeof(Py_buffer));
-+    self->memo_size = 32;
-+    self->memo_len = 0;
-+    self->memo = _Unpickler_NewMemo(self->memo_size);
-+    self->stack = (Pdata *)Pdata_New();
-+
-+    if (self->memo == NULL || self->stack == NULL) {
-+        Py_DECREF(self);
-+        return NULL;
-+    }
-+
-+    return self;
-+}
-+
-+/* Returns -1 (with an exception set) on failure, 0 on success. This may
-+   be called once on a freshly created Unpickler. */
-+static int
-+_Unpickler_SetInputStream(UnpicklerObject *self, PyObject *file)
-+{
-+    _Py_IDENTIFIER(peek);
-+    _Py_IDENTIFIER(read);
-+    _Py_IDENTIFIER(readline);
-+
-+    self->peek = _PyObject_GetAttrId(file, &PyId_peek);
-+    if (self->peek == NULL) {
-+        if (PyErr_ExceptionMatches(PyExc_AttributeError))
-+            PyErr_Clear();
-+        else
-+            return -1;
-+    }
-+    self->read = _PyObject_GetAttrId(file, &PyId_read);
-+    self->readline = _PyObject_GetAttrId(file, &PyId_readline);
-+    if (self->readline == NULL || self->read == NULL) {
-+        if (PyErr_ExceptionMatches(PyExc_AttributeError))
-+            PyErr_SetString(PyExc_TypeError,
-+                            "file must have 'read' and 'readline' attributes");
-+        Py_CLEAR(self->read);
-+        Py_CLEAR(self->readline);
-+        Py_CLEAR(self->peek);
-+        return -1;
-+    }
-+    return 0;
-+}
-+
-+/* Returns -1 (with an exception set) on failure, 0 on success. This may
-+   be called once on a freshly created Unpickler. */
-+static int
-+_Unpickler_SetInputEncoding(UnpicklerObject *self,
-+                            const char *encoding,
-+                            const char *errors)
-+{
-+    if (encoding == NULL)
-+        encoding = "ASCII";
-+    if (errors == NULL)
-+        errors = "strict";
-+
-+    self->encoding = _PyMem_Strdup(encoding);
-+    self->errors = _PyMem_Strdup(errors);
-+    if (self->encoding == NULL || self->errors == NULL) {
-+        PyErr_NoMemory();
-+        return -1;
-+    }
-+    return 0;
-+}
-+
-+/* Generate a GET opcode for an object stored in the memo. */
-+static int
-+memo_get(PicklerObject *self, PyObject *key)
-+{
-+    Py_ssize_t *value;
-+    char pdata[30];
-+    Py_ssize_t len;
-+
-+    value = PyMemoTable_Get(self->memo, key);
-+    if (value == NULL)  {
-+        PyErr_SetObject(PyExc_KeyError, key);
-+        return -1;
-+    }
-+
-+    if (!self->bin) {
-+        pdata[0] = GET;
-+        PyOS_snprintf(pdata + 1, sizeof(pdata) - 1,
-+                      "%" PY_FORMAT_SIZE_T "d\n", *value);
-+        len = strlen(pdata);
-+    }
-+    else {
-+        if (*value < 256) {
-+            pdata[0] = BINGET;
-+            pdata[1] = (unsigned char)(*value & 0xff);
-+            len = 2;
-+        }
-+        else if (*value <= 0xffffffffL) {
-+            pdata[0] = LONG_BINGET;
-+            pdata[1] = (unsigned char)(*value & 0xff);
-+            pdata[2] = (unsigned char)((*value >> 8) & 0xff);
-+            pdata[3] = (unsigned char)((*value >> 16) & 0xff);
-+            pdata[4] = (unsigned char)((*value >> 24) & 0xff);
-+            len = 5;
-+        }
-+        else { /* unlikely */
-+            PickleState *st = _Pickle_GetGlobalState();
-+            PyErr_SetString(st->PicklingError,
-+                            "memo id too large for LONG_BINGET");
-+            return -1;
-+        }
-+    }
-+
-+    if (_Pickler_Write(self, pdata, len) < 0)
-+        return -1;
-+
-+    return 0;
-+}
-+
-+/* Store an object in the memo, assign it a new unique ID based on the number
-+   of objects currently stored in the memo and generate a PUT opcode. */
-+static int
-+memo_put(PicklerObject *self, PyObject *obj)
-+{
-+    char pdata[30];
-+    Py_ssize_t len;
-+    Py_ssize_t idx;
-+
-+    const char memoize_op = MEMOIZE;
-+
-+    if (self->fast)
-+        return 0;
-+
-+    idx = PyMemoTable_Size(self->memo);
-+    if (PyMemoTable_Set(self->memo, obj, idx) < 0)
-+        return -1;
-+
-+    if (self->proto >= 4) {
-+        if (_Pickler_Write(self, &memoize_op, 1) < 0)
-+            return -1;
-+        return 0;
-+    }
-+    else if (!self->bin) {
-+        pdata[0] = PUT;
-+        PyOS_snprintf(pdata + 1, sizeof(pdata) - 1,
-+                      "%" PY_FORMAT_SIZE_T "d\n", idx);
-+        len = strlen(pdata);
-+    }
-+    else {
-+        if (idx < 256) {
-+            pdata[0] = BINPUT;
-+            pdata[1] = (unsigned char)idx;
-+            len = 2;
-+        }
-+        else if (idx <= 0xffffffffL) {
-+            pdata[0] = LONG_BINPUT;
-+            pdata[1] = (unsigned char)(idx & 0xff);
-+            pdata[2] = (unsigned char)((idx >> 8) & 0xff);
-+            pdata[3] = (unsigned char)((idx >> 16) & 0xff);
-+            pdata[4] = (unsigned char)((idx >> 24) & 0xff);
-+            len = 5;
-+        }
-+        else { /* unlikely */
-+            PickleState *st = _Pickle_GetGlobalState();
-+            PyErr_SetString(st->PicklingError,
-+                            "memo id too large for LONG_BINPUT");
-+            return -1;
-+        }
-+    }
-+    if (_Pickler_Write(self, pdata, len) < 0)
-+        return -1;
-+
-+    return 0;
-+}
-+
-+static PyObject *
-+getattribute(PyObject *obj, PyObject *name, int allow_qualname) {
-+    PyObject *dotted_path;
-+    Py_ssize_t i;
-+    _Py_static_string(PyId_dot, ".");
-+    _Py_static_string(PyId_locals, "<locals>");
-+
-+    dotted_path = PyUnicode_Split(name, _PyUnicode_FromId(&PyId_dot), -1);
-+    if (dotted_path == NULL) {
-+        return NULL;
-+    }
-+    assert(Py_SIZE(dotted_path) >= 1);
-+    if (!allow_qualname && Py_SIZE(dotted_path) > 1) {
-+        PyErr_Format(PyExc_AttributeError,
-+                     "Can't get qualified attribute %R on %R; "
-+                     "use protocols >= 4 to enable support",
-+                     name, obj);
-+        Py_DECREF(dotted_path);
-+        return NULL;
-+    }
-+    Py_INCREF(obj);
-+    for (i = 0; i < Py_SIZE(dotted_path); i++) {
-+        PyObject *subpath = PyList_GET_ITEM(dotted_path, i);
-+        PyObject *tmp;
-+        PyObject *result = PyUnicode_RichCompare(
-+            subpath, _PyUnicode_FromId(&PyId_locals), Py_EQ);
-+        int is_equal = (result == Py_True);
-+        assert(PyBool_Check(result));
-+        Py_DECREF(result);
-+        if (is_equal) {
-+            PyErr_Format(PyExc_AttributeError,
-+                         "Can't get local attribute %R on %R", name, obj);
-+            Py_DECREF(dotted_path);
-+            Py_DECREF(obj);
-+            return NULL;
-+        }
-+        tmp = PyObject_GetAttr(obj, subpath);
-+        Py_DECREF(obj);
-+        if (tmp == NULL) {
-+            if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
-+                PyErr_Clear();
-+                PyErr_Format(PyExc_AttributeError,
-+                             "Can't get attribute %R on %R", name, obj);
-+            }
-+            Py_DECREF(dotted_path);
-+            return NULL;
-+        }
-+        obj = tmp;
-+    }
-+    Py_DECREF(dotted_path);
-+    return obj;
-+}
-+
-+static PyObject *
-+whichmodule(PyObject *global, PyObject *global_name, int allow_qualname)
-+{
-+    PyObject *module_name;
-+    PyObject *modules_dict;
-+    PyObject *module;
-+    PyObject *obj;
-+    Py_ssize_t i, j;
-+    _Py_IDENTIFIER(__module__);
-+    _Py_IDENTIFIER(modules);
-+    _Py_IDENTIFIER(__main__);
-+
-+    module_name = _PyObject_GetAttrId(global, &PyId___module__);
-+
-+    if (module_name == NULL) {
-+        if (!PyErr_ExceptionMatches(PyExc_AttributeError))
-+            return NULL;
-+        PyErr_Clear();
-+    }
-+    else {
-+        /* In some rare cases (e.g., bound methods of extension types),
-+           __module__ can be None. If so, then search sys.modules for
-+           the module of global. */
-+        if (module_name != Py_None)
-+            return module_name;
-+        Py_CLEAR(module_name);
-+    }
-+    assert(module_name == NULL);
-+
-+    modules_dict = _PySys_GetObjectId(&PyId_modules);
-+    if (modules_dict == NULL) {
-+        PyErr_SetString(PyExc_RuntimeError, "unable to get sys.modules");
-+        return NULL;
-+    }
-+
-+    i = 0;
-+    while ((j = PyDict_Next(modules_dict, &i, &module_name, &module))) {
-+        PyObject *result = PyUnicode_RichCompare(
-+            module_name, _PyUnicode_FromId(&PyId___main__), Py_EQ);
-+        int is_equal = (result == Py_True);
-+        assert(PyBool_Check(result));
-+        Py_DECREF(result);
-+        if (is_equal)
-+            continue;
-+        if (module == Py_None)
-+            continue;
-+
-+        obj = getattribute(module, global_name, allow_qualname);
-+        if (obj == NULL) {
-+            if (!PyErr_ExceptionMatches(PyExc_AttributeError))
-+                return NULL;
-+            PyErr_Clear();
-+            continue;
-+        }
-+
-+        if (obj == global) {
-+            Py_DECREF(obj);
-+            Py_INCREF(module_name);
-+            return module_name;
-+        }
-+        Py_DECREF(obj);
-+    }
-+
-+    /* If no module is found, use __main__. */
-+    module_name = _PyUnicode_FromId(&PyId___main__);
-+    Py_INCREF(module_name);
-+    return module_name;
-+}
-+
-+/* fast_save_enter() and fast_save_leave() are guards against recursive
-+   objects when Pickler is used with the "fast mode" (i.e., with object
-+   memoization disabled). If the nesting of a list or dict object exceeds
-+   FAST_NESTING_LIMIT, these guards will start keeping an internal
-+   reference to the seen list or dict objects and check whether these objects
-+   are recursive. These are not strictly necessary, since save() has a
-+   hard-coded recursion limit, but they give a nicer error message than the
-+   typical RuntimeError. */
-+static int
-+fast_save_enter(PicklerObject *self, PyObject *obj)
-+{
-+    /* if fast_nesting < 0, we're doing an error exit. */
-+    if (++self->fast_nesting >= FAST_NESTING_LIMIT) {
-+        PyObject *key = NULL;
-+        if (self->fast_memo == NULL) {
-+            self->fast_memo = PyDict_New();
-+            if (self->fast_memo == NULL) {
-+                self->fast_nesting = -1;
-+                return 0;
-+            }
-+        }
-+        key = PyLong_FromVoidPtr(obj);
-+        if (key == NULL)
-+            return 0;
-+        if (PyDict_GetItemWithError(self->fast_memo, key)) {
-+            Py_DECREF(key);
-+            PyErr_Format(PyExc_ValueError,
-+                         "fast mode: can't pickle cyclic objects "
-+                         "including object type %.200s at %p",
-+                         obj->ob_type->tp_name, obj);
-+            self->fast_nesting = -1;
-+            return 0;
-+        }
-+        if (PyErr_Occurred()) {
-+            return 0;
-+        }
-+        if (PyDict_SetItem(self->fast_memo, key, Py_None) < 0) {
-+            Py_DECREF(key);
-+            self->fast_nesting = -1;
-+            return 0;
-+        }
-+        Py_DECREF(key);
-+    }
-+    return 1;
-+}
-+
-+static int
-+fast_save_leave(PicklerObject *self, PyObject *obj)
-+{
-+    if (self->fast_nesting-- >= FAST_NESTING_LIMIT) {
-+        PyObject *key = PyLong_FromVoidPtr(obj);
-+        if (key == NULL)
-+            return 0;
-+        if (PyDict_DelItem(self->fast_memo, key) < 0) {
-+            Py_DECREF(key);
-+            return 0;
-+        }
-+        Py_DECREF(key);
-+    }
-+    return 1;
-+}
-+
-+static int
-+save_none(PicklerObject *self, PyObject *obj)
-+{
-+    const char none_op = NONE;
-+    if (_Pickler_Write(self, &none_op, 1) < 0)
-+        return -1;
-+
-+    return 0;
-+}
-+
-+static int
-+save_bool(PicklerObject *self, PyObject *obj)
-+{
-+    if (self->proto >= 2) {
-+        const char bool_op = (obj == Py_True) ? NEWTRUE : NEWFALSE;
-+        if (_Pickler_Write(self, &bool_op, 1) < 0)
-+            return -1;
-+    }
-+    else {
-+        /* These aren't opcodes -- they're ways to pickle bools before protocol 2
-+         * so that unpicklers written before bools were introduced unpickle them
-+         * as ints, but unpicklers after can recognize that bools were intended.
-+         * Note that protocol 2 added direct ways to pickle bools.
-+         */
-+        const char *bool_str = (obj == Py_True) ? "I01\n" : "I00\n";
-+        if (_Pickler_Write(self, bool_str, strlen(bool_str)) < 0)
-+            return -1;
-+    }
-+    return 0;
-+}
-+
-+static int
-+save_long(PicklerObject *self, PyObject *obj)
-+{
-+    PyObject *repr = NULL;
-+    Py_ssize_t size;
-+    long val;
-+    int status = 0;
-+
-+    const char long_op = LONG;
-+
-+    val = PyLong_AsLong(obj);
-+    if (val == -1 && PyErr_Occurred()) {
-+        /* out of range for int pickling */
-+        PyErr_Clear();
-+    }
-+    else if (self->bin &&
-+             (sizeof(long) <= 4 ||
-+              (val <= 0x7fffffffL && val >= (-0x7fffffffL - 1)))) {
-+        /* result fits in a signed 4-byte integer.
-+
-+           Note: we can't use -0x80000000L in the above condition because some
-+           compilers (e.g., MSVC) will promote 0x80000000L to an unsigned type
-+           before applying the unary minus when sizeof(long) <= 4. The
-+           resulting value stays unsigned which is commonly not what we want,
-+           so MSVC happily warns us about it.  However, that result would have
-+           been fine because we guard for sizeof(long) <= 4 which turns the
-+           condition true in that particular case. */
-+        char pdata[32];
-+        Py_ssize_t len = 0;
-+
-+        pdata[1] = (unsigned char)(val & 0xff);
-+        pdata[2] = (unsigned char)((val >> 8) & 0xff);
-+        pdata[3] = (unsigned char)((val >> 16) & 0xff);
-+        pdata[4] = (unsigned char)((val >> 24) & 0xff);
-+
-+        if ((pdata[4] == 0) && (pdata[3] == 0)) {
-+            if (pdata[2] == 0) {
-+                pdata[0] = BININT1;
-+                len = 2;
-+            }
-+            else {
-+                pdata[0] = BININT2;
-+                len = 3;
-+            }
-+        }
-+        else {
-+            pdata[0] = BININT;
-+            len = 5;
-+        }
-+
-+        if (_Pickler_Write(self, pdata, len) < 0)
-+            return -1;
-+
-+        return 0;
-+    }
-+
-+    if (self->proto >= 2) {
-+        /* Linear-time pickling. */
-+        size_t nbits;
-+        size_t nbytes;
-+        unsigned char *pdata;
-+        char header[5];
-+        int i;
-+        int sign = _PyLong_Sign(obj);
-+
-+        if (sign == 0) {
-+            header[0] = LONG1;
-+            header[1] = 0;      /* It's 0 -- an empty bytestring. */
-+            if (_Pickler_Write(self, header, 2) < 0)
-+                goto error;
-+            return 0;
-+        }
-+        nbits = _PyLong_NumBits(obj);
-+        if (nbits == (size_t)-1 && PyErr_Occurred())
-+            goto error;
-+        /* How many bytes do we need?  There are nbits >> 3 full
-+         * bytes of data, and nbits & 7 leftover bits.  If there
-+         * are any leftover bits, then we clearly need another
-+         * byte.  What's not so obvious is that we *probably*
-+         * need another byte even if there aren't any leftovers:
-+         * the most-significant bit of the most-significant byte
-+         * acts like a sign bit, and it's usually got a sense
-+         * opposite of the one we need.  The exception is ints
-+         * of the form -(2**(8*j-1)) for j > 0.  Such an int is
-+         * its own 256's-complement, so has the right sign bit
-+         * even without the extra byte.  That's a pain to check
-+         * for in advance, though, so we always grab an extra
-+         * byte at the start, and cut it back later if possible.
-+         */
-+        nbytes = (nbits >> 3) + 1;
-+        if (nbytes > 0x7fffffffL) {
-+            PyErr_SetString(PyExc_OverflowError,
-+                            "int too large to pickle");
-+            goto error;
-+        }
-+        repr = PyBytes_FromStringAndSize(NULL, (Py_ssize_t)nbytes);
-+        if (repr == NULL)
-+            goto error;
-+        pdata = (unsigned char *)PyBytes_AS_STRING(repr);
-+        i = _PyLong_AsByteArray((PyLongObject *)obj,
-+                                pdata, nbytes,
-+                                1 /* little endian */ , 1 /* signed */ );
-+        if (i < 0)
-+            goto error;
-+        /* If the int is negative, this may be a byte more than
-+         * needed.  This is so iff the MSB is all redundant sign
-+         * bits.
-+         */
-+        if (sign < 0 &&
-+            nbytes > 1 &&
-+            pdata[nbytes - 1] == 0xff &&
-+            (pdata[nbytes - 2] & 0x80) != 0) {
-+            nbytes--;
-+        }
-+
-+        if (nbytes < 256) {
-+            header[0] = LONG1;
-+            header[1] = (unsigned char)nbytes;
-+            size = 2;
-+        }
-+        else {
-+            header[0] = LONG4;
-+            size = (Py_ssize_t) nbytes;
-+            for (i = 1; i < 5; i++) {
-+                header[i] = (unsigned char)(size & 0xff);
-+                size >>= 8;
-+            }
-+            size = 5;
-+        }
-+        if (_Pickler_Write(self, header, size) < 0 ||
-+            _Pickler_Write(self, (char *)pdata, (int)nbytes) < 0)
-+            goto error;
-+    }
-+    else {
-+        char *string;
-+
-+        /* proto < 2: write the repr and newline.  This is quadratic-time (in
-+           the number of digits), in both directions.  We add a trailing 'L'
-+           to the repr, for compatibility with Python 2.x. */
-+
-+        repr = PyObject_Repr(obj);
-+        if (repr == NULL)
-+            goto error;
-+
-+        string = _PyUnicode_AsStringAndSize(repr, &size);
-+        if (string == NULL)
-+            goto error;
-+
-+        if (_Pickler_Write(self, &long_op, 1) < 0 ||
-+            _Pickler_Write(self, string, size) < 0 ||
-+            _Pickler_Write(self, "L\n", 2) < 0)
-+            goto error;
-+    }
-+
-+    if (0) {
-+  error:
-+      status = -1;
-+    }
-+    Py_XDECREF(repr);
-+
-+    return status;
-+}
-+
-+static int
-+save_float(PicklerObject *self, PyObject *obj)
-+{
-+    double x = PyFloat_AS_DOUBLE((PyFloatObject *)obj);
-+
-+    if (self->bin) {
-+        char pdata[9];
-+        pdata[0] = BINFLOAT;
-+        if (_PyFloat_Pack8(x, (unsigned char *)&pdata[1], 0) < 0)
-+            return -1;
-+        if (_Pickler_Write(self, pdata, 9) < 0)
-+            return -1;
-+    }
-+    else {
-+        int result = -1;
-+        char *buf = NULL;
-+        char op = FLOAT;
-+
-+        if (_Pickler_Write(self, &op, 1) < 0)
-+            goto done;
-+
-+        buf = PyOS_double_to_string(x, 'g', 17, 0, NULL);
-+        if (!buf) {
-+            PyErr_NoMemory();
-+            goto done;
-+        }
-+
-+        if (_Pickler_Write(self, buf, strlen(buf)) < 0)
-+            goto done;
-+
-+        if (_Pickler_Write(self, "\n", 1) < 0)
-+            goto done;
-+
-+        result = 0;
-+done:
-+        PyMem_Free(buf);
-+        return result;
-+    }
-+
-+    return 0;
-+}
-+
-+static int
-+save_bytes(PicklerObject *self, PyObject *obj)
-+{
-+    if (self->proto < 3) {
-+        /* Older pickle protocols do not have an opcode for pickling bytes
-+           objects. Therefore, we need to fake the copy protocol (i.e.,
-+           the __reduce__ method) to permit bytes object unpickling.
-+
-+           Here we use a hack to be compatible with Python 2. Since in Python
-+           2 'bytes' is just an alias for 'str' (which has different
-+           parameters than the actual bytes object), we use codecs.encode
-+           to create the appropriate 'str' object when unpickled using
-+           Python 2 *and* the appropriate 'bytes' object when unpickled
-+           using Python 3. Again this is a hack and we don't need to do this
-+           with newer protocols. */
-+        PyObject *reduce_value = NULL;
-+        int status;
-+
-+        if (PyBytes_GET_SIZE(obj) == 0) {
-+            reduce_value = Py_BuildValue("(O())", (PyObject*)&PyBytes_Type);
-+        }
-+        else {
-+            PickleState *st = _Pickle_GetGlobalState();
-+            PyObject *unicode_str =
-+                PyUnicode_DecodeLatin1(PyBytes_AS_STRING(obj),
-+                                       PyBytes_GET_SIZE(obj),
-+                                       "strict");
-+            _Py_IDENTIFIER(latin1);
-+
-+            if (unicode_str == NULL)
-+                return -1;
-+            reduce_value = Py_BuildValue("(O(OO))",
-+                                         st->codecs_encode, unicode_str,
-+                                         _PyUnicode_FromId(&PyId_latin1));
-+            Py_DECREF(unicode_str);
-+        }
-+
-+        if (reduce_value == NULL)
-+            return -1;
-+
-+        /* save_reduce() will memoize the object automatically. */
-+        status = save_reduce(self, reduce_value, obj);
-+        Py_DECREF(reduce_value);
-+        return status;
-+    }
-+    else {
-+        Py_ssize_t size;
-+        char header[9];
-+        Py_ssize_t len;
-+
-+        size = PyBytes_GET_SIZE(obj);
-+        if (size < 0)
-+            return -1;
-+
-+        if (size <= 0xff) {
-+            header[0] = SHORT_BINBYTES;
-+            header[1] = (unsigned char)size;
-+            len = 2;
-+        }
-+        else if (size <= 0xffffffffL) {
-+            header[0] = BINBYTES;
-+            header[1] = (unsigned char)(size & 0xff);
-+            header[2] = (unsigned char)((size >> 8) & 0xff);
-+            header[3] = (unsigned char)((size >> 16) & 0xff);
-+            header[4] = (unsigned char)((size >> 24) & 0xff);
-+            len = 5;
-+        }
-+        else if (self->proto >= 4) {
-+            header[0] = BINBYTES8;
-+            _write_size64(header + 1, size);
-+            len = 9;
-+        }
-+        else {
-+            PyErr_SetString(PyExc_OverflowError,
-+                            "cannot serialize a bytes object larger than 4 GiB");
-+            return -1;          /* string too large */
-+        }
-+
-+        if (_Pickler_Write(self, header, len) < 0)
-+            return -1;
-+
-+        if (_Pickler_Write(self, PyBytes_AS_STRING(obj), size) < 0)
-+            return -1;
-+
-+        if (memo_put(self, obj) < 0)
-+            return -1;
-+
-+        return 0;
-+    }
-+}
-+
-+/* A copy of PyUnicode_EncodeRawUnicodeEscape() that also translates
-+   backslash and newline characters to \uXXXX escapes. */
-+static PyObject *
-+raw_unicode_escape(PyObject *obj)
-+{
-+    PyObject *repr, *result;
-+    char *p;
-+    Py_ssize_t i, size, expandsize;
-+    void *data;
-+    unsigned int kind;
-+
-+    if (PyUnicode_READY(obj))
-+        return NULL;
-+
-+    size = PyUnicode_GET_LENGTH(obj);
-+    data = PyUnicode_DATA(obj);
-+    kind = PyUnicode_KIND(obj);
-+    if (kind == PyUnicode_4BYTE_KIND)
-+        expandsize = 10;
-+    else
-+        expandsize = 6;
-+
-+    if (size > PY_SSIZE_T_MAX / expandsize)
-+        return PyErr_NoMemory();
-+    repr = PyByteArray_FromStringAndSize(NULL, expandsize * size);
-+    if (repr == NULL)
-+        return NULL;
-+    if (size == 0)
-+        goto done;
-+
-+    p = PyByteArray_AS_STRING(repr);
-+    for (i=0; i < size; i++) {
-+        Py_UCS4 ch = PyUnicode_READ(kind, data, i);
-+        /* Map 32-bit characters to '\Uxxxxxxxx' */
-+        if (ch >= 0x10000) {
-+            *p++ = '\\';
-+            *p++ = 'U';
-+            *p++ = Py_hexdigits[(ch >> 28) & 0xf];
-+            *p++ = Py_hexdigits[(ch >> 24) & 0xf];
-+            *p++ = Py_hexdigits[(ch >> 20) & 0xf];
-+            *p++ = Py_hexdigits[(ch >> 16) & 0xf];
-+            *p++ = Py_hexdigits[(ch >> 12) & 0xf];
-+            *p++ = Py_hexdigits[(ch >> 8) & 0xf];
-+            *p++ = Py_hexdigits[(ch >> 4) & 0xf];
-+            *p++ = Py_hexdigits[ch & 15];
-+        }
-+        /* Map 16-bit characters to '\uxxxx' */
-+        else if (ch >= 256 || ch == '\\' || ch == '\n') {
-+            *p++ = '\\';
-+            *p++ = 'u';
-+            *p++ = Py_hexdigits[(ch >> 12) & 0xf];
-+            *p++ = Py_hexdigits[(ch >> 8) & 0xf];
-+            *p++ = Py_hexdigits[(ch >> 4) & 0xf];
-+            *p++ = Py_hexdigits[ch & 15];
-+        }
-+        /* Copy everything else as-is */
-+        else
-+            *p++ = (char) ch;
-+    }
-+    size = p - PyByteArray_AS_STRING(repr);
-+
-+done:
-+    result = PyBytes_FromStringAndSize(PyByteArray_AS_STRING(repr), size);
-+    Py_DECREF(repr);
-+    return result;
-+}
-+
-+static int
-+write_utf8(PicklerObject *self, char *data, Py_ssize_t size)
-+{
-+    char header[9];
-+    Py_ssize_t len;
-+
-+    if (size <= 0xff && self->proto >= 4) {
-+        header[0] = SHORT_BINUNICODE;
-+        header[1] = (unsigned char)(size & 0xff);
-+        len = 2;
-+    }
-+    else if (size <= 0xffffffffUL) {
-+        header[0] = BINUNICODE;
-+        header[1] = (unsigned char)(size & 0xff);
-+        header[2] = (unsigned char)((size >> 8) & 0xff);
-+        header[3] = (unsigned char)((size >> 16) & 0xff);
-+        header[4] = (unsigned char)((size >> 24) & 0xff);
-+        len = 5;
-+    }
-+    else if (self->proto >= 4) {
-+        header[0] = BINUNICODE8;
-+        _write_size64(header + 1, size);
-+        len = 9;
-+    }
-+    else {
-+        PyErr_SetString(PyExc_OverflowError,
-+                        "cannot serialize a string larger than 4 GiB");
-+        return -1;
-+    }
-+
-+    if (_Pickler_Write(self, header, len) < 0)
-+        return -1;
-+    if (_Pickler_Write(self, data, size) < 0)
-+        return -1;
-+
-+    return 0;
-+}
-+
-+static int
-+write_unicode_binary(PicklerObject *self, PyObject *obj)
-+{
-+    PyObject *encoded = NULL;
-+    Py_ssize_t size;
-+    char *data;
-+    int r;
-+
-+    if (PyUnicode_READY(obj))
-+        return -1;
-+
-+    data = PyUnicode_AsUTF8AndSize(obj, &size);
-+    if (data != NULL)
-+        return write_utf8(self, data, size);
-+
-+    /* Issue #8383: for strings with lone surrogates, fallback on the
-+       "surrogatepass" error handler. */
-+    PyErr_Clear();
-+    encoded = PyUnicode_AsEncodedString(obj, "utf-8", "surrogatepass");
-+    if (encoded == NULL)
-+        return -1;
-+
-+    r = write_utf8(self, PyBytes_AS_STRING(encoded),
-+                   PyBytes_GET_SIZE(encoded));
-+    Py_DECREF(encoded);
-+    return r;
-+}
-+
-+static int
-+save_unicode(PicklerObject *self, PyObject *obj)
-+{
-+    if (self->bin) {
-+        if (write_unicode_binary(self, obj) < 0)
-+            return -1;
-+    }
-+    else {
-+        PyObject *encoded;
-+        Py_ssize_t size;
-+        const char unicode_op = UNICODE;
-+
-+        encoded = raw_unicode_escape(obj);
-+        if (encoded == NULL)
-+            return -1;
-+
-+        if (_Pickler_Write(self, &unicode_op, 1) < 0) {
-+            Py_DECREF(encoded);
-+            return -1;
-+        }
-+
-+        size = PyBytes_GET_SIZE(encoded);
-+        if (_Pickler_Write(self, PyBytes_AS_STRING(encoded), size) < 0) {
-+            Py_DECREF(encoded);
-+            return -1;
-+        }
-+        Py_DECREF(encoded);
-+
-+        if (_Pickler_Write(self, "\n", 1) < 0)
-+            return -1;
-+    }
-+    if (memo_put(self, obj) < 0)
-+        return -1;
-+
-+    return 0;
-+}
-+
-+/* A helper for save_tuple.  Push the len elements in tuple t on the stack. */
-+static int
-+store_tuple_elements(PicklerObject *self, PyObject *t, Py_ssize_t len)
-+{
-+    Py_ssize_t i;
-+
-+    assert(PyTuple_Size(t) == len);
-+
-+    for (i = 0; i < len; i++) {
-+        PyObject *element = PyTuple_GET_ITEM(t, i);
-+
-+        if (element == NULL)
-+            return -1;
-+        if (save(self, element, 0) < 0)
-+            return -1;
-+    }
-+
-+    return 0;
-+}
-+
-+/* Tuples are ubiquitous in the pickle protocols, so many techniques are
-+ * used across protocols to minimize the space needed to pickle them.
-+ * Tuples are also the only builtin immutable type that can be recursive
-+ * (a tuple can be reached from itself), and that requires some subtle
-+ * magic so that it works in all cases.  IOW, this is a long routine.
-+ */
-+static int
-+save_tuple(PicklerObject *self, PyObject *obj)
-+{
-+    Py_ssize_t len, i;
-+
-+    const char mark_op = MARK;
-+    const char tuple_op = TUPLE;
-+    const char pop_op = POP;
-+    const char pop_mark_op = POP_MARK;
-+    const char len2opcode[] = {EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3};
-+
-+    if ((len = PyTuple_Size(obj)) < 0)
-+        return -1;
-+
-+    if (len == 0) {
-+        char pdata[2];
-+
-+        if (self->proto) {
-+            pdata[0] = EMPTY_TUPLE;
-+            len = 1;
-+        }
-+        else {
-+            pdata[0] = MARK;
-+            pdata[1] = TUPLE;
-+            len = 2;
-+        }
-+        if (_Pickler_Write(self, pdata, len) < 0)
-+            return -1;
-+        return 0;
-+    }
-+
-+    /* The tuple isn't in the memo now.  If it shows up there after
-+     * saving the tuple elements, the tuple must be recursive, in
-+     * which case we'll pop everything we put on the stack, and fetch
-+     * its value from the memo.
-+     */
-+    if (len <= 3 && self->proto >= 2) {
-+        /* Use TUPLE{1,2,3} opcodes. */
-+        if (store_tuple_elements(self, obj, len) < 0)
-+            return -1;
-+
-+        if (PyMemoTable_Get(self->memo, obj)) {
-+            /* pop the len elements */
-+            for (i = 0; i < len; i++)
-+                if (_Pickler_Write(self, &pop_op, 1) < 0)
-+                    return -1;
-+            /* fetch from memo */
-+            if (memo_get(self, obj) < 0)
-+                return -1;
-+
-+            return 0;
-+        }
-+        else { /* Not recursive. */
-+            if (_Pickler_Write(self, len2opcode + len, 1) < 0)
-+                return -1;
-+        }
-+        goto memoize;
-+    }
-+
-+    /* proto < 2 and len > 0, or proto >= 2 and len > 3.
-+     * Generate MARK e1 e2 ... TUPLE
-+     */
-+    if (_Pickler_Write(self, &mark_op, 1) < 0)
-+        return -1;
-+
-+    if (store_tuple_elements(self, obj, len) < 0)
-+        return -1;
-+
-+    if (PyMemoTable_Get(self->memo, obj)) {
-+        /* pop the stack stuff we pushed */
-+        if (self->bin) {
-+            if (_Pickler_Write(self, &pop_mark_op, 1) < 0)
-+                return -1;
-+        }
-+        else {
-+            /* Note that we pop one more than len, to remove
-+             * the MARK too.
-+             */
-+            for (i = 0; i <= len; i++)
-+                if (_Pickler_Write(self, &pop_op, 1) < 0)
-+                    return -1;
-+        }
-+        /* fetch from memo */
-+        if (memo_get(self, obj) < 0)
-+            return -1;
-+
-+        return 0;
-+    }
-+    else { /* Not recursive. */
-+        if (_Pickler_Write(self, &tuple_op, 1) < 0)
-+            return -1;
-+    }
-+
-+  memoize:
-+    if (memo_put(self, obj) < 0)
-+        return -1;
-+
-+    return 0;
-+}
-+
-+/* iter is an iterator giving items, and we batch up chunks of
-+ *     MARK item item ... item APPENDS
-+ * opcode sequences.  Calling code should have arranged to first create an
-+ * empty list, or list-like object, for the APPENDS to operate on.
-+ * Returns 0 on success, <0 on error.
-+ */
-+static int
-+batch_list(PicklerObject *self, PyObject *iter)
-+{
-+    PyObject *obj = NULL;
-+    PyObject *firstitem = NULL;
-+    int i, n;
-+
-+    const char mark_op = MARK;
-+    const char append_op = APPEND;
-+    const char appends_op = APPENDS;
-+
-+    assert(iter != NULL);
-+
-+    /* XXX: I think this function could be made faster by avoiding the
-+       iterator interface and fetching objects directly from list using
-+       PyList_GET_ITEM.
-+    */
-+
-+    if (self->proto == 0) {
-+        /* APPENDS isn't available; do one at a time. */
-+        for (;;) {
-+            obj = PyIter_Next(iter);
-+            if (obj == NULL) {
-+                if (PyErr_Occurred())
-+                    return -1;
-+                break;
-+            }
-+            i = save(self, obj, 0);
-+            Py_DECREF(obj);
-+            if (i < 0)
-+                return -1;
-+            if (_Pickler_Write(self, &append_op, 1) < 0)
-+                return -1;
-+        }
-+        return 0;
-+    }
-+
-+    /* proto > 0:  write in batches of BATCHSIZE. */
-+    do {
-+        /* Get first item */
-+        firstitem = PyIter_Next(iter);
-+        if (firstitem == NULL) {
-+            if (PyErr_Occurred())
-+                goto error;
-+
-+            /* nothing more to add */
-+            break;
-+        }
-+
-+        /* Try to get a second item */
-+        obj = PyIter_Next(iter);
-+        if (obj == NULL) {
-+            if (PyErr_Occurred())
-+                goto error;
-+
-+            /* Only one item to write */
-+            if (save(self, firstitem, 0) < 0)
-+                goto error;
-+            if (_Pickler_Write(self, &append_op, 1) < 0)
-+                goto error;
-+            Py_CLEAR(firstitem);
-+            break;
-+        }
-+
-+        /* More than one item to write */
-+
-+        /* Pump out MARK, items, APPENDS. */
-+        if (_Pickler_Write(self, &mark_op, 1) < 0)
-+            goto error;
-+
-+        if (save(self, firstitem, 0) < 0)
-+            goto error;
-+        Py_CLEAR(firstitem);
-+        n = 1;
-+
-+        /* Fetch and save up to BATCHSIZE items */
-+        while (obj) {
-+            if (save(self, obj, 0) < 0)
-+                goto error;
-+            Py_CLEAR(obj);
-+            n += 1;
-+
-+            if (n == BATCHSIZE)
-+                break;
-+
-+            obj = PyIter_Next(iter);
-+            if (obj == NULL) {
-+                if (PyErr_Occurred())
-+                    goto error;
-+                break;
-+            }
-+        }
-+
-+        if (_Pickler_Write(self, &appends_op, 1) < 0)
-+            goto error;
-+
-+    } while (n == BATCHSIZE);
-+    return 0;
-+
-+  error:
-+    Py_XDECREF(firstitem);
-+    Py_XDECREF(obj);
-+    return -1;
-+}
-+
-+/* This is a variant of batch_list() above, specialized for lists (with no
-+ * support for list subclasses). Like batch_list(), we batch up chunks of
-+ *     MARK item item ... item APPENDS
-+ * opcode sequences.  Calling code should have arranged to first create an
-+ * empty list, or list-like object, for the APPENDS to operate on.
-+ * Returns 0 on success, -1 on error.
-+ *
-+ * This version is considerably faster than batch_list(), if less general.
-+ *
-+ * Note that this only works for protocols > 0.
-+ */
-+static int
-+batch_list_exact(PicklerObject *self, PyObject *obj)
-+{
-+    PyObject *item = NULL;
-+    Py_ssize_t this_batch, total;
-+
-+    const char append_op = APPEND;
-+    const char appends_op = APPENDS;
-+    const char mark_op = MARK;
-+
-+    assert(obj != NULL);
-+    assert(self->proto > 0);
-+    assert(PyList_CheckExact(obj));
-+
-+    if (PyList_GET_SIZE(obj) == 1) {
-+        item = PyList_GET_ITEM(obj, 0);
-+        if (save(self, item, 0) < 0)
-+            return -1;
-+        if (_Pickler_Write(self, &append_op, 1) < 0)
-+            return -1;
-+        return 0;
-+    }
-+
-+    /* Write in batches of BATCHSIZE. */
-+    total = 0;
-+    do {
-+        this_batch = 0;
-+        if (_Pickler_Write(self, &mark_op, 1) < 0)
-+            return -1;
-+        while (total < PyList_GET_SIZE(obj)) {
-+            item = PyList_GET_ITEM(obj, total);
-+            if (save(self, item, 0) < 0)
-+                return -1;
-+            total++;
-+            if (++this_batch == BATCHSIZE)
-+                break;
-+        }
-+        if (_Pickler_Write(self, &appends_op, 1) < 0)
-+            return -1;
-+
-+    } while (total < PyList_GET_SIZE(obj));
-+
-+    return 0;
-+}
-+
-+static int
-+save_list(PicklerObject *self, PyObject *obj)
-+{
-+    char header[3];
-+    Py_ssize_t len;
-+    int status = 0;
-+
-+    if (self->fast && !fast_save_enter(self, obj))
-+        goto error;
-+
-+    /* Create an empty list. */
-+    if (self->bin) {
-+        header[0] = EMPTY_LIST;
-+        len = 1;
-+    }
-+    else {
-+        header[0] = MARK;
-+        header[1] = LIST;
-+        len = 2;
-+    }
-+
-+    if (_Pickler_Write(self, header, len) < 0)
-+        goto error;
-+
-+    /* Get list length, and bow out early if empty. */
-+    if ((len = PyList_Size(obj)) < 0)
-+        goto error;
-+
-+    if (memo_put(self, obj) < 0)
-+        goto error;
-+
-+    if (len != 0) {
-+        /* Materialize the list elements. */
-+        if (PyList_CheckExact(obj) && self->proto > 0) {
-+            if (Py_EnterRecursiveCall(" while pickling an object"))
-+                goto error;
-+            status = batch_list_exact(self, obj);
-+            Py_LeaveRecursiveCall();
-+        } else {
-+            PyObject *iter = PyObject_GetIter(obj);
-+            if (iter == NULL)
-+                goto error;
-+
-+            if (Py_EnterRecursiveCall(" while pickling an object")) {
-+                Py_DECREF(iter);
-+                goto error;
-+            }
-+            status = batch_list(self, iter);
-+            Py_LeaveRecursiveCall();
-+            Py_DECREF(iter);
-+        }
-+    }
-+    if (0) {
-+  error:
-+        status = -1;
-+    }
-+
-+    if (self->fast && !fast_save_leave(self, obj))
-+        status = -1;
-+
-+    return status;
-+}
-+
-+/* iter is an iterator giving (key, value) pairs, and we batch up chunks of
-+ *     MARK key value ... key value SETITEMS
-+ * opcode sequences.  Calling code should have arranged to first create an
-+ * empty dict, or dict-like object, for the SETITEMS to operate on.
-+ * Returns 0 on success, <0 on error.
-+ *
-+ * This is very much like batch_list().  The difference between saving
-+ * elements directly, and picking apart two-tuples, is so long-winded at
-+ * the C level, though, that attempts to combine these routines were too
-+ * ugly to bear.
-+ */
-+static int
-+batch_dict(PicklerObject *self, PyObject *iter)
-+{
-+    PyObject *obj = NULL;
-+    PyObject *firstitem = NULL;
-+    int i, n;
-+
-+    const char mark_op = MARK;
-+    const char setitem_op = SETITEM;
-+    const char setitems_op = SETITEMS;
-+
-+    assert(iter != NULL);
-+
-+    if (self->proto == 0) {
-+        /* SETITEMS isn't available; do one at a time. */
-+        for (;;) {
-+            obj = PyIter_Next(iter);
-+            if (obj == NULL) {
-+                if (PyErr_Occurred())
-+                    return -1;
-+                break;
-+            }
-+            if (!PyTuple_Check(obj) || PyTuple_Size(obj) != 2) {
-+                PyErr_SetString(PyExc_TypeError, "dict items "
-+                                "iterator must return 2-tuples");
-+                return -1;
-+            }
-+            i = save(self, PyTuple_GET_ITEM(obj, 0), 0);
-+            if (i >= 0)
-+                i = save(self, PyTuple_GET_ITEM(obj, 1), 0);
-+            Py_DECREF(obj);
-+            if (i < 0)
-+                return -1;
-+            if (_Pickler_Write(self, &setitem_op, 1) < 0)
-+                return -1;
-+        }
-+        return 0;
-+    }
-+
-+    /* proto > 0:  write in batches of BATCHSIZE. */
-+    do {
-+        /* Get first item */
-+        firstitem = PyIter_Next(iter);
-+        if (firstitem == NULL) {
-+            if (PyErr_Occurred())
-+                goto error;
-+
-+            /* nothing more to add */
-+            break;
-+        }
-+        if (!PyTuple_Check(firstitem) || PyTuple_Size(firstitem) != 2) {
-+            PyErr_SetString(PyExc_TypeError, "dict items "
-+                                "iterator must return 2-tuples");
-+            goto error;
-+        }
-+
-+        /* Try to get a second item */
-+        obj = PyIter_Next(iter);
-+        if (obj == NULL) {
-+            if (PyErr_Occurred())
-+                goto error;
-+
-+            /* Only one item to write */
-+            if (save(self, PyTuple_GET_ITEM(firstitem, 0), 0) < 0)
-+                goto error;
-+            if (save(self, PyTuple_GET_ITEM(firstitem, 1), 0) < 0)
-+                goto error;
-+            if (_Pickler_Write(self, &setitem_op, 1) < 0)
-+                goto error;
-+            Py_CLEAR(firstitem);
-+            break;
-+        }
-+
-+        /* More than one item to write */
-+
-+        /* Pump out MARK, items, SETITEMS. */
-+        if (_Pickler_Write(self, &mark_op, 1) < 0)
-+            goto error;
-+
-+        if (save(self, PyTuple_GET_ITEM(firstitem, 0), 0) < 0)
-+            goto error;
-+        if (save(self, PyTuple_GET_ITEM(firstitem, 1), 0) < 0)
-+            goto error;
-+        Py_CLEAR(firstitem);
-+        n = 1;
-+
-+        /* Fetch and save up to BATCHSIZE items */
-+        while (obj) {
-+            if (!PyTuple_Check(obj) || PyTuple_Size(obj) != 2) {
-+                PyErr_SetString(PyExc_TypeError, "dict items "
-+                    "iterator must return 2-tuples");
-+                goto error;
-+            }
-+            if (save(self, PyTuple_GET_ITEM(obj, 0), 0) < 0 ||
-+                save(self, PyTuple_GET_ITEM(obj, 1), 0) < 0)
-+                goto error;
-+            Py_CLEAR(obj);
-+            n += 1;
-+
-+            if (n == BATCHSIZE)
-+                break;
-+
-+            obj = PyIter_Next(iter);
-+            if (obj == NULL) {
-+                if (PyErr_Occurred())
-+                    goto error;
-+                break;
-+            }
-+        }
-+
-+        if (_Pickler_Write(self, &setitems_op, 1) < 0)
-+            goto error;
-+
-+    } while (n == BATCHSIZE);
-+    return 0;
-+
-+  error:
-+    Py_XDECREF(firstitem);
-+    Py_XDECREF(obj);
-+    return -1;
-+}
-+
-+/* This is a variant of batch_dict() above that specializes for dicts, with no
-+ * support for dict subclasses. Like batch_dict(), we batch up chunks of
-+ *     MARK key value ... key value SETITEMS
-+ * opcode sequences.  Calling code should have arranged to first create an
-+ * empty dict, or dict-like object, for the SETITEMS to operate on.
-+ * Returns 0 on success, -1 on error.
-+ *
-+ * Note that this currently doesn't work for protocol 0.
-+ */
-+static int
-+batch_dict_exact(PicklerObject *self, PyObject *obj)
-+{
-+    PyObject *key = NULL, *value = NULL;
-+    int i;
-+    Py_ssize_t dict_size, ppos = 0;
-+
-+    const char mark_op = MARK;
-+    const char setitem_op = SETITEM;
-+    const char setitems_op = SETITEMS;
-+
-+    assert(obj != NULL);
-+    assert(self->proto > 0);
-+
-+    dict_size = PyDict_Size(obj);
-+
-+    /* Special-case len(d) == 1 to save space. */
-+    if (dict_size == 1) {
-+        PyDict_Next(obj, &ppos, &key, &value);
-+        if (save(self, key, 0) < 0)
-+            return -1;
-+        if (save(self, value, 0) < 0)
-+            return -1;
-+        if (_Pickler_Write(self, &setitem_op, 1) < 0)
-+            return -1;
-+        return 0;
-+    }
-+
-+    /* Write in batches of BATCHSIZE. */
-+    do {
-+        i = 0;
-+        if (_Pickler_Write(self, &mark_op, 1) < 0)
-+            return -1;
-+        while (PyDict_Next(obj, &ppos, &key, &value)) {
-+            if (save(self, key, 0) < 0)
-+                return -1;
-+            if (save(self, value, 0) < 0)
-+                return -1;
-+            if (++i == BATCHSIZE)
-+                break;
-+        }
-+        if (_Pickler_Write(self, &setitems_op, 1) < 0)
-+            return -1;
-+        if (PyDict_Size(obj) != dict_size) {
-+            PyErr_Format(
-+                PyExc_RuntimeError,
-+                "dictionary changed size during iteration");
-+            return -1;
-+        }
-+
-+    } while (i == BATCHSIZE);
-+    return 0;
-+}
-+
-+static int
-+save_dict(PicklerObject *self, PyObject *obj)
-+{
-+    PyObject *items, *iter;
-+    char header[3];
-+    Py_ssize_t len;
-+    int status = 0;
-+
-+    if (self->fast && !fast_save_enter(self, obj))
-+        goto error;
-+
-+    /* Create an empty dict. */
-+    if (self->bin) {
-+        header[0] = EMPTY_DICT;
-+        len = 1;
-+    }
-+    else {
-+        header[0] = MARK;
-+        header[1] = DICT;
-+        len = 2;
-+    }
-+
-+    if (_Pickler_Write(self, header, len) < 0)
-+        goto error;
-+
-+    /* Get dict size, and bow out early if empty. */
-+    if ((len = PyDict_Size(obj)) < 0)
-+        goto error;
-+
-+    if (memo_put(self, obj) < 0)
-+        goto error;
-+
-+    if (len != 0) {
-+        /* Save the dict items. */
-+        if (PyDict_CheckExact(obj) && self->proto > 0) {
-+            /* We can take certain shortcuts if we know this is a dict and
-+               not a dict subclass. */
-+            if (Py_EnterRecursiveCall(" while pickling an object"))
-+                goto error;
-+            status = batch_dict_exact(self, obj);
-+            Py_LeaveRecursiveCall();
-+        } else {
-+            _Py_IDENTIFIER(items);
-+
-+            items = _PyObject_CallMethodId(obj, &PyId_items, "()");
-+            if (items == NULL)
-+                goto error;
-+            iter = PyObject_GetIter(items);
-+            Py_DECREF(items);
-+            if (iter == NULL)
-+                goto error;
-+            if (Py_EnterRecursiveCall(" while pickling an object")) {
-+                Py_DECREF(iter);
-+                goto error;
-+            }
-+            status = batch_dict(self, iter);
-+            Py_LeaveRecursiveCall();
-+            Py_DECREF(iter);
-+        }
-+    }
-+
-+    if (0) {
-+  error:
-+        status = -1;
-+    }
-+
-+    if (self->fast && !fast_save_leave(self, obj))
-+        status = -1;
-+
-+    return status;
-+}
-+
-+static int
-+save_set(PicklerObject *self, PyObject *obj)
-+{
-+    PyObject *item;
-+    int i;
-+    Py_ssize_t set_size, ppos = 0;
-+    Py_hash_t hash;
-+
-+    const char empty_set_op = EMPTY_SET;
-+    const char mark_op = MARK;
-+    const char additems_op = ADDITEMS;
-+
-+    if (self->proto < 4) {
-+        PyObject *items;
-+        PyObject *reduce_value;
-+        int status;
-+
-+        items = PySequence_List(obj);
-+        if (items == NULL) {
-+            return -1;
-+        }
-+        reduce_value = Py_BuildValue("(O(O))", (PyObject*)&PySet_Type, items);
-+        Py_DECREF(items);
-+        if (reduce_value == NULL) {
-+            return -1;
-+        }
-+        /* save_reduce() will memoize the object automatically. */
-+        status = save_reduce(self, reduce_value, obj);
-+        Py_DECREF(reduce_value);
-+        return status;
-+    }
-+
-+    if (_Pickler_Write(self, &empty_set_op, 1) < 0)
-+        return -1;
-+
-+    if (memo_put(self, obj) < 0)
-+        return -1;
-+
-+    set_size = PySet_GET_SIZE(obj);
-+    if (set_size == 0)
-+        return 0;  /* nothing to do */
-+
-+    /* Write in batches of BATCHSIZE. */
-+    do {
-+        i = 0;
-+        if (_Pickler_Write(self, &mark_op, 1) < 0)
-+            return -1;
-+        while (_PySet_NextEntry(obj, &ppos, &item, &hash)) {
-+            if (save(self, item, 0) < 0)
-+                return -1;
-+            if (++i == BATCHSIZE)
-+                break;
-+        }
-+        if (_Pickler_Write(self, &additems_op, 1) < 0)
-+            return -1;
-+        if (PySet_GET_SIZE(obj) != set_size) {
-+            PyErr_Format(
-+                PyExc_RuntimeError,
-+                "set changed size during iteration");
-+            return -1;
-+        }
-+    } while (i == BATCHSIZE);
-+
-+    return 0;
-+}
-+
-+static int
-+save_frozenset(PicklerObject *self, PyObject *obj)
-+{
-+    PyObject *iter;
-+
-+    const char mark_op = MARK;
-+    const char frozenset_op = FROZENSET;
-+
-+    if (self->fast && !fast_save_enter(self, obj))
-+        return -1;
-+
-+    if (self->proto < 4) {
-+        PyObject *items;
-+        PyObject *reduce_value;
-+        int status;
-+
-+        items = PySequence_List(obj);
-+        if (items == NULL) {
-+            return -1;
-+        }
-+        reduce_value = Py_BuildValue("(O(O))", (PyObject*)&PyFrozenSet_Type,
-+                                     items);
-+        Py_DECREF(items);
-+        if (reduce_value == NULL) {
-+            return -1;
-+        }
-+        /* save_reduce() will memoize the object automatically. */
-+        status = save_reduce(self, reduce_value, obj);
-+        Py_DECREF(reduce_value);
-+        return status;
-+    }
-+
-+    if (_Pickler_Write(self, &mark_op, 1) < 0)
-+        return -1;
-+
-+    iter = PyObject_GetIter(obj);
-+    if (iter == NULL) {
-+        return -1;
-+    }
-+    for (;;) {
-+        PyObject *item;
-+
-+        item = PyIter_Next(iter);
-+        if (item == NULL) {
-+            if (PyErr_Occurred()) {
-+                Py_DECREF(iter);
-+                return -1;
-+            }
-+            break;
-+        }
-+        if (save(self, item, 0) < 0) {
-+            Py_DECREF(item);
-+            Py_DECREF(iter);
-+            return -1;
-+        }
-+        Py_DECREF(item);
-+    }
-+    Py_DECREF(iter);
-+
-+    /* If the object is already in the memo, this means it is
-+       recursive. In this case, throw away everything we put on the
-+       stack, and fetch the object back from the memo. */
-+    if (PyMemoTable_Get(self->memo, obj)) {
-+        const char pop_mark_op = POP_MARK;
-+
-+        if (_Pickler_Write(self, &pop_mark_op, 1) < 0)
-+            return -1;
-+        if (memo_get(self, obj) < 0)
-+            return -1;
-+        return 0;
-+    }
-+
-+    if (_Pickler_Write(self, &frozenset_op, 1) < 0)
-+        return -1;
-+    if (memo_put(self, obj) < 0)
-+        return -1;
-+
-+    return 0;
-+}
-+
-+static int
-+fix_imports(PyObject **module_name, PyObject **global_name)
-+{
-+    PyObject *key;
-+    PyObject *item;
-+    PickleState *st = _Pickle_GetGlobalState();
-+
-+    key = PyTuple_Pack(2, *module_name, *global_name);
-+    if (key == NULL)
-+        return -1;
-+    item = PyDict_GetItemWithError(st->name_mapping_3to2, key);
-+    Py_DECREF(key);
-+    if (item) {
-+        PyObject *fixed_module_name;
-+        PyObject *fixed_global_name;
-+
-+        if (!PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 2) {
-+            PyErr_Format(PyExc_RuntimeError,
-+                         "_compat_pickle.REVERSE_NAME_MAPPING values "
-+                         "should be 2-tuples, not %.200s",
-+                         Py_TYPE(item)->tp_name);
-+            return -1;
-+        }
-+        fixed_module_name = PyTuple_GET_ITEM(item, 0);
-+        fixed_global_name = PyTuple_GET_ITEM(item, 1);
-+        if (!PyUnicode_Check(fixed_module_name) ||
-+            !PyUnicode_Check(fixed_global_name)) {
-+            PyErr_Format(PyExc_RuntimeError,
-+                         "_compat_pickle.REVERSE_NAME_MAPPING values "
-+                         "should be pairs of str, not (%.200s, %.200s)",
-+                         Py_TYPE(fixed_module_name)->tp_name,
-+                         Py_TYPE(fixed_global_name)->tp_name);
-+            return -1;
-+        }
-+
-+        Py_CLEAR(*module_name);
-+        Py_CLEAR(*global_name);
-+        Py_INCREF(fixed_module_name);
-+        Py_INCREF(fixed_global_name);
-+        *module_name = fixed_module_name;
-+        *global_name = fixed_global_name;
-+    }
-+    else if (PyErr_Occurred()) {
-+        return -1;
-+    }
-+
-+    item = PyDict_GetItemWithError(st->import_mapping_3to2, *module_name);
-+    if (item) {
-+        if (!PyUnicode_Check(item)) {
-+            PyErr_Format(PyExc_RuntimeError,
-+                         "_compat_pickle.REVERSE_IMPORT_MAPPING values "
-+                         "should be strings, not %.200s",
-+                         Py_TYPE(item)->tp_name);
-+            return -1;
-+        }
-+        Py_CLEAR(*module_name);
-+        Py_INCREF(item);
-+        *module_name = item;
-+    }
-+    else if (PyErr_Occurred()) {
-+        return -1;
-+    }
-+
-+    return 0;
-+}
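
fix_imports() consults the 3-to-2 reverse mappings maintained in Lib/_compat_pickle.py so that protocol <= 2 pickles written here stay loadable on Python 2. A tiny standalone sketch of the idea follows; the single hard-coded entry ('builtins' -> '__builtin__') is only an illustrative example, not a copy of the real table:

    #include <stdio.h>
    #include <string.h>

    /* Map a Python 3 module name back to its Python 2 spelling, mimicking
     * what fix_imports() does via REVERSE_IMPORT_MAPPING. */
    static const char *demo_fix_module_name(const char *module_name)
    {
        static const char *mapping[][2] = {
            { "builtins", "__builtin__" },   /* example entry; the real table is larger */
        };
        size_t i;

        for (i = 0; i < sizeof(mapping) / sizeof(mapping[0]); i++) {
            if (strcmp(module_name, mapping[i][0]) == 0)
                return mapping[i][1];
        }
        return module_name;   /* unmapped names pass through unchanged */
    }

    int main(void)
    {
        printf("%s\n", demo_fix_module_name("builtins"));    /* __builtin__ */
        printf("%s\n", demo_fix_module_name("zodbpickle"));  /* zodbpickle  */
        return 0;
    }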
-+
-+static int
-+save_global(PicklerObject *self, PyObject *obj, PyObject *name)
-+{
-+    PyObject *global_name = NULL;
-+    PyObject *module_name = NULL;
-+    PyObject *module = NULL;
-+    PyObject *cls;
-+    PickleState *st = _Pickle_GetGlobalState();
-+    int status = 0;
-+    _Py_IDENTIFIER(__name__);
-+    _Py_IDENTIFIER(__qualname__);
-+
-+    const char global_op = GLOBAL;
-+
-+    if (name) {
-+        Py_INCREF(name);
-+        global_name = name;
-+    }
-+    else {
-+        if (self->proto >= 4) {
-+            global_name = _PyObject_GetAttrId(obj, &PyId___qualname__);
-+            if (global_name == NULL) {
-+                if (!PyErr_ExceptionMatches(PyExc_AttributeError))
-+                    goto error;
-+                PyErr_Clear();
-+            }
-+        }
-+        if (global_name == NULL) {
-+            global_name = _PyObject_GetAttrId(obj, &PyId___name__);
-+            if (global_name == NULL)
-+                goto error;
-+        }
-+    }
-+
-+    module_name = whichmodule(obj, global_name, self->proto >= 4);
-+    if (module_name == NULL)
-+        goto error;
-+
-+    /* XXX: Change to use the import C API directly with level=0 to disallow
-+       relative imports.
-+
-+       XXX: PyImport_ImportModuleLevel could be used. However, this bypasses
-+       builtins.__import__. Therefore, _pickle, unlike pickle.py, will ignore
-+       custom import functions (IMHO, this would be a nice security
-+       feature). The import C API would need to be extended to support the
-+       extra parameters of __import__ to fix that. */
-+    module = PyImport_Import(module_name);
-+    if (module == NULL) {
-+        PyErr_Format(st->PicklingError,
-+                     "Can't pickle %R: import of module %R failed",
-+                     obj, module_name);
-+        goto error;
-+    }
-+    cls = getattribute(module, global_name, self->proto >= 4);
-+    if (cls == NULL) {
-+        PyErr_Format(st->PicklingError,
-+                     "Can't pickle %R: attribute lookup %S on %S failed",
-+                     obj, global_name, module_name);
-+        goto error;
-+    }
-+    if (cls != obj) {
-+        Py_DECREF(cls);
-+        PyErr_Format(st->PicklingError,
-+                     "Can't pickle %R: it's not the same object as %S.%S",
-+                     obj, module_name, global_name);
-+        goto error;
-+    }
-+    Py_DECREF(cls);
-+
-+    if (self->proto >= 2) {
-+        /* See whether this is in the extension registry, and if
-+         * so generate an EXT opcode.
-+         */
-+        PyObject *extension_key;
-+        PyObject *code_obj;      /* extension code as Python object */
-+        long code;               /* extension code as C value */
-+        char pdata[5];
-+        Py_ssize_t n;
-+
-+        extension_key = PyTuple_Pack(2, module_name, global_name);
-+        if (extension_key == NULL) {
-+            goto error;
-+        }
-+        code_obj = PyDict_GetItemWithError(st->extension_registry,
-+                                           extension_key);
-+        Py_DECREF(extension_key);
-+        /* The object is not registered in the extension registry.
-+           This is the most likely code path. */
-+        if (code_obj == NULL) {
-+            if (PyErr_Occurred()) {
-+                goto error;
-+            }
-+            goto gen_global;
-+        }
-+
-+        /* XXX: pickle.py checks neither the type nor the range of the
-+           value returned by the extension_registry. It should, for
-+           consistency. */
-+
-+        /* Verify code_obj has the right type and value. */
-+        if (!PyLong_Check(code_obj)) {
-+            PyErr_Format(st->PicklingError,
-+                         "Can't pickle %R: extension code %R isn't an integer",
-+                         obj, code_obj);
-+            goto error;
-+        }
-+        code = PyLong_AS_LONG(code_obj);
-+        if (code <= 0 || code > 0x7fffffffL) {
-+            if (!PyErr_Occurred())
-+                PyErr_Format(st->PicklingError, "Can't pickle %R: extension "
-+                             "code %ld is out of range", obj, code);
-+            goto error;
-+        }
-+
-+        /* Generate an EXT opcode. */
-+        if (code <= 0xff) {
-+            pdata[0] = EXT1;
-+            pdata[1] = (unsigned char)code;
-+            n = 2;
-+        }
-+        else if (code <= 0xffff) {
-+            pdata[0] = EXT2;
-+            pdata[1] = (unsigned char)(code & 0xff);
-+            pdata[2] = (unsigned char)((code >> 8) & 0xff);
-+            n = 3;
-+        }
-+        else {
-+            pdata[0] = EXT4;
-+            pdata[1] = (unsigned char)(code & 0xff);
-+            pdata[2] = (unsigned char)((code >> 8) & 0xff);
-+            pdata[3] = (unsigned char)((code >> 16) & 0xff);
-+            pdata[4] = (unsigned char)((code >> 24) & 0xff);
-+            n = 5;
-+        }
-+
-+        if (_Pickler_Write(self, pdata, n) < 0)
-+            goto error;
-+    }
-+    else {
-+  gen_global:
-+        if (self->proto >= 4) {
-+            const char stack_global_op = STACK_GLOBAL;
-+
-+            if (save(self, module_name, 0) < 0)
-+                goto error;
-+            if (save(self, global_name, 0) < 0)
-+                goto error;
-+
-+            if (_Pickler_Write(self, &stack_global_op, 1) < 0)
-+                goto error;
-+        }
-+        else {
-+            /* Generate a normal global opcode if we are using a pickle
-+               protocol < 4, or if the object is not registered in the
-+               extension registry. */
-+            PyObject *encoded;
-+            PyObject *(*unicode_encoder)(PyObject *);
-+
-+            if (_Pickler_Write(self, &global_op, 1) < 0)
-+                goto error;
-+
-+            /* For protocol < 3, unless the user disabled fix_imports, we
-+               convert module names to the old Python 2.x module names. */
-+            if (self->proto < 3 && self->fix_imports) {
-+                if (fix_imports(&module_name, &global_name) < 0) {
-+                    goto error;
-+                }
-+            }
-+
-+            /* Since Python 3.0 supports non-ASCII identifiers, we encode
-+               both the module name and the global name using UTF-8. We do
-+               so only for protocol 3; for older protocols we stick to
-+               ASCII so that the stream remains readable by Unpicklers
-+               running on Python 2.x. */
-+            if (self->proto == 3) {
-+                unicode_encoder = PyUnicode_AsUTF8String;
-+            }
-+            else {
-+                unicode_encoder = PyUnicode_AsASCIIString;
-+            }
-+            encoded = unicode_encoder(module_name);
-+            if (encoded == NULL) {
-+                if (PyErr_ExceptionMatches(PyExc_UnicodeEncodeError))
-+                    PyErr_Format(st->PicklingError,
-+                                 "can't pickle module identifier '%S' using "
-+                                 "pickle protocol %i",
-+                                 module_name, self->proto);
-+                goto error;
-+            }
-+            if (_Pickler_Write(self, PyBytes_AS_STRING(encoded),
-+                               PyBytes_GET_SIZE(encoded)) < 0) {
-+                Py_DECREF(encoded);
-+                goto error;
-+            }
-+            Py_DECREF(encoded);
-+            if(_Pickler_Write(self, "\n", 1) < 0)
-+                goto error;
-+
-+            /* Save the name of the module. */
-+            encoded = unicode_encoder(global_name);
-+            if (encoded == NULL) {
-+                if (PyErr_ExceptionMatches(PyExc_UnicodeEncodeError))
-+                    PyErr_Format(st->PicklingError,
-+                                 "can't pickle global identifier '%S' using "
-+                                 "pickle protocol %i",
-+                                 global_name, self->proto);
-+                goto error;
-+            }
-+            if (_Pickler_Write(self, PyBytes_AS_STRING(encoded),
-+                               PyBytes_GET_SIZE(encoded)) < 0) {
-+                Py_DECREF(encoded);
-+                goto error;
-+            }
-+            Py_DECREF(encoded);
-+            if (_Pickler_Write(self, "\n", 1) < 0)
-+                goto error;
-+        }
-+        /* Memoize the object. */
-+        if (memo_put(self, obj) < 0)
-+            goto error;
-+    }
-+
-+    if (0) {
-+  error:
-+        status = -1;
-+    }
-+    Py_XDECREF(module_name);
-+    Py_XDECREF(global_name);
-+    Py_XDECREF(module);
-+
-+    return status;
-+}
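
The extension-registry branch in save_global() picks the smallest EXT opcode that can carry the registered code: EXT1 for codes up to 0xff, EXT2 up to 0xffff, and EXT4 otherwise. A small standalone sketch of that size selection (the opcode names come from the pickle protocol; only the byte counts matter here):

    #include <stdio.h>

    /* Return the total number of bytes save_global() writes for a given
     * extension code: one opcode byte plus a 1-, 2- or 4-byte
     * little-endian code. */
    static int demo_ext_length(long code)
    {
        if (code <= 0xff)
            return 2;   /* EXT1 + 1 byte  */
        else if (code <= 0xffff)
            return 3;   /* EXT2 + 2 bytes */
        else
            return 5;   /* EXT4 + 4 bytes */
    }

    int main(void)
    {
        printf("%d %d %d\n",
               demo_ext_length(5),          /* -> 2 */
               demo_ext_length(4242),       /* -> 3 */
               demo_ext_length(1000000));   /* -> 5 */
        return 0;
    }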
-+
-+static int
-+save_singleton_type(PicklerObject *self, PyObject *obj, PyObject *singleton)
-+{
-+    PyObject *reduce_value;
-+    int status;
-+
-+    reduce_value = Py_BuildValue("O(O)", &PyType_Type, singleton);
-+    if (reduce_value == NULL) {
-+        return -1;
-+    }
-+    status = save_reduce(self, reduce_value, obj);
-+    Py_DECREF(reduce_value);
-+    return status;
-+}
-+
-+static int
-+save_type(PicklerObject *self, PyObject *obj)
-+{
-+    if (obj == (PyObject *)&_PyNone_Type) {
-+        return save_singleton_type(self, obj, Py_None);
-+    }
-+    else if (obj == (PyObject *)&PyEllipsis_Type) {
-+        return save_singleton_type(self, obj, Py_Ellipsis);
-+    }
-+    else if (obj == (PyObject *)&_PyNotImplemented_Type) {
-+        return save_singleton_type(self, obj, Py_NotImplemented);
-+    }
-+    return save_global(self, obj, NULL);
-+}
-+
-+static int
-+save_pers(PicklerObject *self, PyObject *obj, PyObject *func)
-+{
-+    PyObject *pid = NULL;
-+    int status = 0;
-+
-+    const char persid_op = PERSID;
-+    const char binpersid_op = BINPERSID;
-+
-+    Py_INCREF(obj);
-+    pid = _Pickle_FastCall(func, obj);
-+    if (pid == NULL)
-+        return -1;
-+
-+    if (pid != Py_None) {
-+        if (self->bin) {
-+            if (save(self, pid, 1) < 0 ||
-+                _Pickler_Write(self, &binpersid_op, 1) < 0)
-+                goto error;
-+        }
-+        else {
-+            PyObject *pid_str = NULL;
-+            char *pid_ascii_bytes;
-+            Py_ssize_t size;
-+
-+            pid_str = PyObject_Str(pid);
-+            if (pid_str == NULL)
-+                goto error;
-+
-+            /* XXX: Should it check whether the persistent id only contains
-+               ASCII characters? And what if the pid contains embedded
-+               newlines? */
-+            pid_ascii_bytes = _PyUnicode_AsStringAndSize(pid_str, &size);
-+            Py_DECREF(pid_str);
-+            if (pid_ascii_bytes == NULL)
-+                goto error;
-+
-+            if (_Pickler_Write(self, &persid_op, 1) < 0 ||
-+                _Pickler_Write(self, pid_ascii_bytes, size) < 0 ||
-+                _Pickler_Write(self, "\n", 1) < 0)
-+                goto error;
-+        }
-+        status = 1;
-+    }
-+
-+    if (0) {
-+  error:
-+        status = -1;
-+    }
-+    Py_XDECREF(pid);
-+
-+    return status;
-+}
-+
-+static PyObject *
-+get_class(PyObject *obj)
-+{
-+    PyObject *cls;
-+    _Py_IDENTIFIER(__class__);
-+
-+    cls = _PyObject_GetAttrId(obj, &PyId___class__);
-+    if (cls == NULL) {
-+        if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
-+            PyErr_Clear();
-+            cls = (PyObject *) Py_TYPE(obj);
-+            Py_INCREF(cls);
-+        }
-+    }
-+    return cls;
-+}
-+
-+/* We're saving obj, and args is the 2-thru-5 tuple returned by the
-+ * appropriate __reduce__ method for obj.
-+ */
-+static int
-+save_reduce(PicklerObject *self, PyObject *args, PyObject *obj)
-+{
-+    PyObject *callable;
-+    PyObject *argtup;
-+    PyObject *state = NULL;
-+    PyObject *listitems = Py_None;
-+    PyObject *dictitems = Py_None;
-+    PickleState *st = _Pickle_GetGlobalState();
-+    Py_ssize_t size;
-+    int use_newobj = 0, use_newobj_ex = 0;
-+
-+    const char reduce_op = REDUCE;
-+    const char build_op = BUILD;
-+    const char newobj_op = NEWOBJ;
-+    const char newobj_ex_op = NEWOBJ_EX;
-+
-+    size = PyTuple_Size(args);
-+    if (size < 2 || size > 5) {
-+        PyErr_SetString(st->PicklingError, "tuple returned by "
-+                        "__reduce__ must contain 2 through 5 elements");
-+        return -1;
-+    }
-+
-+    if (!PyArg_UnpackTuple(args, "save_reduce", 2, 5,
-+                           &callable, &argtup, &state, &listitems, &dictitems))
-+        return -1;
-+
-+    if (!PyCallable_Check(callable)) {
-+        PyErr_SetString(st->PicklingError, "first item of the tuple "
-+                        "returned by __reduce__ must be callable");
-+        return -1;
-+    }
-+    if (!PyTuple_Check(argtup)) {
-+        PyErr_SetString(st->PicklingError, "second item of the tuple "
-+                        "returned by __reduce__ must be a tuple");
-+        return -1;
-+    }
-+
-+    if (state == Py_None)
-+        state = NULL;
-+
-+    if (listitems == Py_None)
-+        listitems = NULL;
-+    else if (!PyIter_Check(listitems)) {
-+        PyErr_Format(st->PicklingError, "fourth element of the tuple "
-+                     "returned by __reduce__ must be an iterator, not %s",
-+                     Py_TYPE(listitems)->tp_name);
-+        return -1;
-+    }
-+
-+    if (dictitems == Py_None)
-+        dictitems = NULL;
-+    else if (!PyIter_Check(dictitems)) {
-+        PyErr_Format(st->PicklingError, "fifth element of the tuple "
-+                     "returned by __reduce__ must be an iterator, not %s",
-+                     Py_TYPE(dictitems)->tp_name);
-+        return -1;
-+    }
-+
-+    if (self->proto >= 2) {
-+        PyObject *name;
-+        _Py_IDENTIFIER(__name__);
-+
-+        name = _PyObject_GetAttrId(callable, &PyId___name__);
-+        if (name == NULL) {
-+            if (!PyErr_ExceptionMatches(PyExc_AttributeError)) {
-+                return -1;
-+            }
-+            PyErr_Clear();
-+        }
-+        else if (self->proto >= 4) {
-+            _Py_IDENTIFIER(__newobj_ex__);
-+            use_newobj_ex = PyUnicode_Check(name) &&
-+                PyUnicode_Compare(
-+                    name, _PyUnicode_FromId(&PyId___newobj_ex__)) == 0;
-+            Py_DECREF(name);
-+        }
-+        else {
-+            _Py_IDENTIFIER(__newobj__);
-+            use_newobj = PyUnicode_Check(name) &&
-+                PyUnicode_Compare(
-+                    name, _PyUnicode_FromId(&PyId___newobj__)) == 0;
-+            Py_DECREF(name);
-+        }
-+    }
-+
-+    if (use_newobj_ex) {
-+        PyObject *cls;
-+        PyObject *args;
-+        PyObject *kwargs;
-+
-+        if (Py_SIZE(argtup) != 3) {
-+            PyErr_Format(st->PicklingError,
-+                         "length of the NEWOBJ_EX argument tuple must be "
-+                         "exactly 3, not %zd", Py_SIZE(argtup));
-+            return -1;
-+        }
-+
-+        cls = PyTuple_GET_ITEM(argtup, 0);
-+        if (!PyType_Check(cls)) {
-+            PyErr_Format(st->PicklingError,
-+                         "first item from NEWOBJ_EX argument tuple must "
-+                         "be a class, not %.200s", Py_TYPE(cls)->tp_name);
-+            return -1;
-+        }
-+        args = PyTuple_GET_ITEM(argtup, 1);
-+        if (!PyTuple_Check(args)) {
-+            PyErr_Format(st->PicklingError,
-+                         "second item from NEWOBJ_EX argument tuple must "
-+                         "be a tuple, not %.200s", Py_TYPE(args)->tp_name);
-+            return -1;
-+        }
-+        kwargs = PyTuple_GET_ITEM(argtup, 2);
-+        if (!PyDict_Check(kwargs)) {
-+            PyErr_Format(st->PicklingError,
-+                         "third item from NEWOBJ_EX argument tuple must "
-+                         "be a dict, not %.200s", Py_TYPE(kwargs)->tp_name);
-+            return -1;
-+        }
-+
-+        if (save(self, cls, 0) < 0 ||
-+            save(self, args, 0) < 0 ||
-+            save(self, kwargs, 0) < 0 ||
-+            _Pickler_Write(self, &newobj_ex_op, 1) < 0) {
-+            return -1;
-+        }
-+    }
-+    else if (use_newobj) {
-+        PyObject *cls;
-+        PyObject *newargtup;
-+        PyObject *obj_class;
-+        int p;
-+
-+        /* Sanity checks. */
-+        if (Py_SIZE(argtup) < 1) {
-+            PyErr_SetString(st->PicklingError, "__newobj__ arglist is empty");
-+            return -1;
-+        }
-+
-+        cls = PyTuple_GET_ITEM(argtup, 0);
-+        if (!PyType_Check(cls)) {
-+            PyErr_SetString(st->PicklingError, "args[0] from "
-+                            "__newobj__ args is not a type");
-+            return -1;
-+        }
-+
-+        if (obj != NULL) {
-+            obj_class = get_class(obj);
-+            p = obj_class != cls;    /* true iff a problem */
-+            Py_DECREF(obj_class);
-+            if (p) {
-+                PyErr_SetString(st->PicklingError, "args[0] from "
-+                                "__newobj__ args has the wrong class");
-+                return -1;
-+            }
-+        }
-+        /* XXX: These calls to save() are prone to infinite recursion. Imagine
-+           what happens if the value returned by the __reduce__() method of
-+           some extension type contains another object of the same type. Ouch!
-+
-+           Here is a quick example I ran into that illustrates what I
-+           mean:
-+
-+             >>> import pickle, copyreg
-+             >>> copyreg.dispatch_table.pop(complex)
-+             >>> pickle.dumps(1+2j)
-+             Traceback (most recent call last):
-+               ...
-+             RuntimeError: maximum recursion depth exceeded
-+
-+           Removing the complex class from copyreg.dispatch_table made the
-+           __reduce_ex__() method emit another complex object:
-+
-+             >>> (1+1j).__reduce_ex__(2)
-+             (<function __newobj__ at 0xb7b71c3c>,
-+               (<class 'complex'>, (1+1j)), None, None, None)
-+
-+           Thus, when save() was called on newargtup (the 2nd item), recursion
-+           ensued. Of course, the bug was in the complex class, which had a
-+           broken __getnewargs__() that emitted another complex object. The
-+           point here is that it is quite easy to end up with a broken reduce
-+           function. */
-+
-+        /* Save the class and its __new__ arguments. */
-+        if (save(self, cls, 0) < 0)
-+            return -1;
-+
-+        newargtup = PyTuple_GetSlice(argtup, 1, Py_SIZE(argtup));
-+        if (newargtup == NULL)
-+            return -1;
-+
-+        p = save(self, newargtup, 0);
-+        Py_DECREF(newargtup);
-+        if (p < 0)
-+            return -1;
-+
-+        /* Add NEWOBJ opcode. */
-+        if (_Pickler_Write(self, &newobj_op, 1) < 0)
-+            return -1;
-+    }
-+    else { /* Not using NEWOBJ. */
-+        if (save(self, callable, 0) < 0 ||
-+            save(self, argtup, 0) < 0 ||
-+            _Pickler_Write(self, &reduce_op, 1) < 0)
-+            return -1;
-+    }
-+
-+    /* obj can be NULL when save_reduce() is used directly. A NULL obj means
-+       the caller does not want to memoize the object. Not particularly
-+       useful, but this mimics the behavior of save_reduce() in pickle.py
-+       when obj is None. */
-+    if (obj != NULL) {
-+        /* If the object is already in the memo, this means it is
-+           recursive. In this case, throw away everything we put on the
-+           stack, and fetch the object back from the memo. */
-+        if (PyMemoTable_Get(self->memo, obj)) {
-+            const char pop_op = POP;
-+
-+            if (_Pickler_Write(self, &pop_op, 1) < 0)
-+                return -1;
-+            if (memo_get(self, obj) < 0)
-+                return -1;
-+
-+            return 0;
-+        }
-+        else if (memo_put(self, obj) < 0)
-+            return -1;
-+    }
-+
-+    if (listitems && batch_list(self, listitems) < 0)
-+        return -1;
-+
-+    if (dictitems && batch_dict(self, dictitems) < 0)
-+        return -1;
-+
-+    if (state) {
-+        if (save(self, state, 0) < 0 ||
-+            _Pickler_Write(self, &build_op, 1) < 0)
-+            return -1;
-+    }
-+
-+    return 0;
-+}
-+
-+static int
-+save(PicklerObject *self, PyObject *obj, int pers_save)
-+{
-+    PyTypeObject *type;
-+    PyObject *reduce_func = NULL;
-+    PyObject *reduce_value = NULL;
-+    int status = 0;
-+
-+    if (_Pickler_OpcodeBoundary(self) < 0)
-+        return -1;
-+
-+    if (Py_EnterRecursiveCall(" while pickling an object"))
-+        return -1;
-+
-+    /* The extra pers_save argument is necessary to avoid calling save_pers()
-+       on its returned object. */
-+    if (!pers_save && self->pers_func) {
-+        /* save_pers() returns:
-+            -1   to signal an error;
-+             0   if it did nothing successfully;
-+             1   if a persistent id was saved.
-+         */
-+        if ((status = save_pers(self, obj, self->pers_func)) != 0)
-+            goto done;
-+    }
-+
-+    type = Py_TYPE(obj);
-+
-+    /* The old cPickle had an optimization that used a switch-case statement
-+       dispatching on the first letter of the type name.  This was removed
-+       since benchmarks showed that the optimization was actually slowing
-+       things down. */
-+
-+    /* Atom types; these aren't memoized, so don't check the memo. */
-+
-+    if (obj == Py_None) {
-+        status = save_none(self, obj);
-+        goto done;
-+    }
-+    else if (obj == Py_False || obj == Py_True) {
-+        status = save_bool(self, obj);
-+        goto done;
-+    }
-+    else if (type == &PyLong_Type) {
-+        status = save_long(self, obj);
-+        goto done;
-+    }
-+    else if (type == &PyFloat_Type) {
-+        status = save_float(self, obj);
-+        goto done;
-+    }
-+
-+    /* Check the memo to see if it has the object. If so, generate
-+       a GET (or BINGET) opcode, instead of pickling the object
-+       once again. */
-+    if (PyMemoTable_Get(self->memo, obj)) {
-+        if (memo_get(self, obj) < 0)
-+            goto error;
-+        goto done;
-+    }
-+
-+    if (type == &PyBytes_Type) {
-+        status = save_bytes(self, obj);
-+        goto done;
-+    }
-+    else if (type == &PyUnicode_Type) {
-+        status = save_unicode(self, obj);
-+        goto done;
-+    }
-+    else if (type == &PyDict_Type) {
-+        status = save_dict(self, obj);
-+        goto done;
-+    }
-+    else if (type == &PySet_Type) {
-+        status = save_set(self, obj);
-+        goto done;
-+    }
-+    else if (type == &PyFrozenSet_Type) {
-+        status = save_frozenset(self, obj);
-+        goto done;
-+    }
-+    else if (type == &PyList_Type) {
-+        status = save_list(self, obj);
-+        goto done;
-+    }
-+    else if (type == &PyTuple_Type) {
-+        status = save_tuple(self, obj);
-+        goto done;
-+    }
-+    else if (type == &PyType_Type) {
-+        status = save_type(self, obj);
-+        goto done;
-+    }
-+    else if (type == &PyFunction_Type) {
-+        status = save_global(self, obj, NULL);
-+        goto done;
-+    }
-+
-+    /* XXX: This part needs some unit tests. */
-+
-+    /* Get a reduction callable, and call it.  This may come from
-+     * self.dispatch_table, copyreg.dispatch_table, the object's
-+     * __reduce_ex__ method, or the object's __reduce__ method.
-+     */
-+    if (self->dispatch_table == NULL) {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        reduce_func = PyDict_GetItemWithError(st->dispatch_table,
-+                                              (PyObject *)type);
-+        if (reduce_func == NULL) {
-+            if (PyErr_Occurred()) {
-+                goto error;
-+            }
-+        } else {
-+            /* PyDict_GetItemWithError() returns a borrowed reference.
-+               Increase the reference count to be consistent with
-+               PyObject_GetItem and _PyObject_GetAttrId used below. */
-+            Py_INCREF(reduce_func);
-+        }
-+    } else {
-+        reduce_func = PyObject_GetItem(self->dispatch_table,
-+                                       (PyObject *)type);
-+        if (reduce_func == NULL) {
-+            if (PyErr_ExceptionMatches(PyExc_KeyError))
-+                PyErr_Clear();
-+            else
-+                goto error;
-+        }
-+    }
-+    if (reduce_func != NULL) {
-+        Py_INCREF(obj);
-+        reduce_value = _Pickle_FastCall(reduce_func, obj);
-+    }
-+    else if (PyType_IsSubtype(type, &PyType_Type)) {
-+        status = save_global(self, obj, NULL);
-+        goto done;
-+    }
-+    else {
-+        _Py_IDENTIFIER(__reduce__);
-+        _Py_IDENTIFIER(__reduce_ex__);
-+
-+
-+        /* XXX: If the __reduce__ method is defined, __reduce_ex__ is
-+           automatically defined as __reduce__. While this is convenient, it
-+           makes it impossible to know which method was actually called. Of
-+           course, this is not a big deal. But still, it would be nice to let
-+           the user know which method was called when something goes
-+           wrong. Incidentally, this means that if __reduce_ex__ is not
-+           defined, we don't actually have to check for a __reduce__ method. */
-+
-+        /* Check for a __reduce_ex__ method. */
-+        reduce_func = _PyObject_GetAttrId(obj, &PyId___reduce_ex__);
-+        if (reduce_func != NULL) {
-+            PyObject *proto;
-+            proto = PyLong_FromLong(self->proto);
-+            if (proto != NULL) {
-+                reduce_value = _Pickle_FastCall(reduce_func, proto);
-+            }
-+        }
-+        else {
-+            PickleState *st = _Pickle_GetGlobalState();
-+
-+            if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
-+                PyErr_Clear();
-+            }
-+            else {
-+                goto error;
-+            }
-+            /* Check for a __reduce__ method. */
-+            reduce_func = _PyObject_GetAttrId(obj, &PyId___reduce__);
-+            if (reduce_func != NULL) {
-+                PyObject *empty_tuple = PyTuple_New(0);
-+                reduce_value = PyObject_Call(reduce_func, empty_tuple,
-+                                             NULL);
-+                Py_DECREF(empty_tuple);
-+            }
-+            else {
-+                PyErr_Format(st->PicklingError,
-+                             "can't pickle '%.200s' object: %R",
-+                             type->tp_name, obj);
-+                goto error;
-+            }
-+        }
-+    }
-+
-+    if (reduce_value == NULL)
-+        goto error;
-+
-+    if (PyUnicode_Check(reduce_value)) {
-+        status = save_global(self, obj, reduce_value);
-+        goto done;
-+    }
-+
-+    if (!PyTuple_Check(reduce_value)) {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        PyErr_SetString(st->PicklingError,
-+                        "__reduce__ must return a string or tuple");
-+        goto error;
-+    }
-+
-+    status = save_reduce(self, reduce_value, obj);
-+
-+    if (0) {
-+  error:
-+        status = -1;
-+    }
-+  done:
-+
-+    Py_LeaveRecursiveCall();
-+    Py_XDECREF(reduce_func);
-+    Py_XDECREF(reduce_value);
-+
-+    return status;
-+}
-+
-+static int
-+dump(PicklerObject *self, PyObject *obj)
-+{
-+    const char stop_op = STOP;
-+
-+    if (self->proto >= 2) {
-+        char header[2];
-+
-+        header[0] = PROTO;
-+        assert(self->proto >= 0 && self->proto < 256);
-+        header[1] = (unsigned char)self->proto;
-+        if (_Pickler_Write(self, header, 2) < 0)
-+            return -1;
-+        if (self->proto >= 4)
-+            self->framing = 1;
-+    }
-+
-+    if (save(self, obj, 0) < 0 ||
-+        _Pickler_Write(self, &stop_op, 1) < 0)
-+        return -1;
-+
-+    return 0;
-+}
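
dump() only brackets the payload produced by save(): for protocol 2 and above it first writes a two-byte header (the PROTO opcode followed by the protocol number), it always finishes with a STOP opcode, and protocol 4 additionally enables framing. A minimal sketch of just that bracketing, reusing the PROTO ('\x80') and STOP ('.') values from the opcode enum earlier in this file:

    #include <stdio.h>

    /* Show the bytes dump() adds around the pickled payload. */
    static void demo_dump_framing(int proto)
    {
        if (proto >= 2)
            printf("header: 0x80 0x%02x   (PROTO, protocol number)\n",
                   (unsigned char)proto);
        printf("... payload emitted by save() ...\n");
        printf("trailer: '.'          (STOP)\n");
    }

    int main(void)
    {
        demo_dump_framing(3);   /* DEFAULT_PROTOCOL in this module */
        return 0;
    }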
-+
-+/*[clinic input]
-+
-+_pickle.Pickler.clear_memo
-+
-+Clears the pickler's "memo".
-+
-+The memo is the data structure that remembers which objects the
-+pickler has already seen, so that shared or recursive objects are
-+pickled by reference and not by value.  This method is useful when
-+re-using picklers.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_Pickler_clear_memo_impl(PicklerObject *self)
-+/*[clinic end generated code: output=8665c8658aaa094b input=01bdad52f3d93e56]*/
-+{
-+    if (self->memo)
-+        PyMemoTable_Clear(self->memo);
-+
-+    Py_RETURN_NONE;
-+}
-+
-+/*[clinic input]
-+
-+_pickle.Pickler.dump
-+
-+  obj: object
-+  /
-+
-+Write a pickled representation of the given object to the open file.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_Pickler_dump(PicklerObject *self, PyObject *obj)
-+/*[clinic end generated code: output=87ecad1261e02ac7 input=552eb1c0f52260d9]*/
-+{
-+    /* Check whether the Pickler was initialized correctly (issue3664).
-+       Developers often forget to call __init__() in their subclasses, which
-+       would trigger a segfault without this check. */
-+    if (self->write == NULL) {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        PyErr_Format(st->PicklingError,
-+                     "Pickler.__init__() was not called by %s.__init__()",
-+                     Py_TYPE(self)->tp_name);
-+        return NULL;
-+    }
-+
-+    if (_Pickler_ClearBuffer(self) < 0)
-+        return NULL;
-+
-+    if (dump(self, obj) < 0)
-+        return NULL;
-+
-+    if (_Pickler_FlushToFile(self) < 0)
-+        return NULL;
-+
-+    Py_RETURN_NONE;
-+}
-+
-+static struct PyMethodDef Pickler_methods[] = {
-+    _PICKLE_PICKLER_DUMP_METHODDEF
-+    _PICKLE_PICKLER_CLEAR_MEMO_METHODDEF
-+    {NULL, NULL}                /* sentinel */
-+};
-+
-+static void
-+Pickler_dealloc(PicklerObject *self)
-+{
-+    PyObject_GC_UnTrack(self);
-+
-+    Py_XDECREF(self->output_buffer);
-+    Py_XDECREF(self->write);
-+    Py_XDECREF(self->pers_func);
-+    Py_XDECREF(self->dispatch_table);
-+    Py_XDECREF(self->fast_memo);
-+
-+    PyMemoTable_Del(self->memo);
-+
-+    Py_TYPE(self)->tp_free((PyObject *)self);
-+}
-+
-+static int
-+Pickler_traverse(PicklerObject *self, visitproc visit, void *arg)
-+{
-+    Py_VISIT(self->write);
-+    Py_VISIT(self->pers_func);
-+    Py_VISIT(self->dispatch_table);
-+    Py_VISIT(self->fast_memo);
-+    return 0;
-+}
-+
-+static int
-+Pickler_clear(PicklerObject *self)
-+{
-+    Py_CLEAR(self->output_buffer);
-+    Py_CLEAR(self->write);
-+    Py_CLEAR(self->pers_func);
-+    Py_CLEAR(self->dispatch_table);
-+    Py_CLEAR(self->fast_memo);
-+
-+    if (self->memo != NULL) {
-+        PyMemoTable *memo = self->memo;
-+        self->memo = NULL;
-+        PyMemoTable_Del(memo);
-+    }
-+    return 0;
-+}
-+
-+
-+/*[clinic input]
-+
-+_pickle.Pickler.__init__
-+
-+  file: object
-+  protocol: object = NULL
-+  fix_imports: bool = True
-+
-+This takes a binary file for writing a pickle data stream.
-+
-+The optional *protocol* argument tells the pickler to use the given
-+protocol; supported protocols are 0, 1, 2, 3 and 4.  The default
-+protocol is 3, a backward-incompatible protocol designed for Python 3.
-+
-+Specifying a negative protocol version selects the highest protocol
-+version supported.  The higher the protocol used, the more recent the
-+version of Python needed to read the pickle produced.
-+
-+The *file* argument must have a write() method that accepts a single
-+bytes argument. It can thus be a file object opened for binary
-+writing, an io.BytesIO instance, or any other custom object that meets
-+this interface.
-+
-+If *fix_imports* is True and protocol is less than 3, pickle will try
-+to map the new Python 3 names to the old module names used in Python
-+2, so that the pickle data stream is readable with Python 2.
-+[clinic start generated code]*/
-+
-+static int
-+_pickle_Pickler___init___impl(PicklerObject *self, PyObject *file, PyObject *protocol, int fix_imports)
-+/*[clinic end generated code: output=56e229f3b1f4332f input=b8cdeb7e3f5ee674]*/
-+{
-+    _Py_IDENTIFIER(persistent_id);
-+    _Py_IDENTIFIER(dispatch_table);
-+
-+    /* In case of multiple __init__() calls, clear previous content. */
-+    if (self->write != NULL)
-+        (void)Pickler_clear(self);
-+
-+    if (_Pickler_SetProtocol(self, protocol, fix_imports) < 0)
-+        return -1;
-+
-+    if (_Pickler_SetOutputStream(self, file) < 0)
-+        return -1;
-+
-+    /* memo and output_buffer may have already been created in _Pickler_New */
-+    if (self->memo == NULL) {
-+        self->memo = PyMemoTable_New();
-+        if (self->memo == NULL)
-+            return -1;
-+    }
-+    self->output_len = 0;
-+    if (self->output_buffer == NULL) {
-+        self->max_output_len = WRITE_BUF_SIZE;
-+        self->output_buffer = PyBytes_FromStringAndSize(NULL,
-+                                                        self->max_output_len);
-+        if (self->output_buffer == NULL)
-+            return -1;
-+    }
-+
-+    self->fast = 0;
-+    self->fast_nesting = 0;
-+    self->fast_memo = NULL;
-+    self->pers_func = NULL;
-+    if (_PyObject_HasAttrId((PyObject *)self, &PyId_persistent_id)) {
-+        self->pers_func = _PyObject_GetAttrId((PyObject *)self,
-+                                              &PyId_persistent_id);
-+        if (self->pers_func == NULL)
-+            return -1;
-+    }
-+    self->dispatch_table = NULL;
-+    if (_PyObject_HasAttrId((PyObject *)self, &PyId_dispatch_table)) {
-+        self->dispatch_table = _PyObject_GetAttrId((PyObject *)self,
-+                                                   &PyId_dispatch_table);
-+        if (self->dispatch_table == NULL)
-+            return -1;
-+    }
-+
-+    return 0;
-+}
-+
-+
-+/* Define a proxy object for the Pickler's internal memo object. This is to
-+ * avoid breaking code like:
-+ *  pickler.memo.clear()
-+ * and
-+ *  pickler.memo = saved_memo
-+ * Is this a good idea? Not really, but we don't want to break code that uses
-+ * it. Note that we don't implement the entire mapping API here. This is
-+ * intentional, as these should be treated as black-box implementation details.
-+ */
-+
-+/*[clinic input]
-+_pickle.PicklerMemoProxy.clear
-+
-+Remove all items from memo.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_PicklerMemoProxy_clear_impl(PicklerMemoProxyObject *self)
-+/*[clinic end generated code: output=5fb9370d48ae8b05 input=ccc186dacd0f1405]*/
-+{
-+    if (self->pickler->memo)
-+        PyMemoTable_Clear(self->pickler->memo);
-+    Py_RETURN_NONE;
-+}
-+
-+/*[clinic input]
-+_pickle.PicklerMemoProxy.copy
-+
-+Copy the memo to a new object.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_PicklerMemoProxy_copy_impl(PicklerMemoProxyObject *self)
-+/*[clinic end generated code: output=bb83a919d29225ef input=b73043485ac30b36]*/
-+{
-+    Py_ssize_t i;
-+    PyMemoTable *memo;
-+    PyObject *new_memo = PyDict_New();
-+    if (new_memo == NULL)
-+        return NULL;
-+
-+    memo = self->pickler->memo;
-+    for (i = 0; i < memo->mt_allocated; ++i) {
-+        PyMemoEntry entry = memo->mt_table[i];
-+        if (entry.me_key != NULL) {
-+            int status;
-+            PyObject *key, *value;
-+
-+            key = PyLong_FromVoidPtr(entry.me_key);
-+            value = Py_BuildValue("nO", entry.me_value, entry.me_key);
-+
-+            if (key == NULL || value == NULL) {
-+                Py_XDECREF(key);
-+                Py_XDECREF(value);
-+                goto error;
-+            }
-+            status = PyDict_SetItem(new_memo, key, value);
-+            Py_DECREF(key);
-+            Py_DECREF(value);
-+            if (status < 0)
-+                goto error;
-+        }
-+    }
-+    return new_memo;
-+
-+  error:
-+    Py_XDECREF(new_memo);
-+    return NULL;
-+}
-+
-+/*[clinic input]
-+_pickle.PicklerMemoProxy.__reduce__
-+
-+Implement pickle support.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_PicklerMemoProxy___reduce___impl(PicklerMemoProxyObject *self)
-+/*[clinic end generated code: output=bebba1168863ab1d input=2f7c540e24b7aae4]*/
-+{
-+    PyObject *reduce_value, *dict_args;
-+    PyObject *contents = _pickle_PicklerMemoProxy_copy_impl(self);
-+    if (contents == NULL)
-+        return NULL;
-+
-+    reduce_value = PyTuple_New(2);
-+    if (reduce_value == NULL) {
-+        Py_DECREF(contents);
-+        return NULL;
-+    }
-+    dict_args = PyTuple_New(1);
-+    if (dict_args == NULL) {
-+        Py_DECREF(contents);
-+        Py_DECREF(reduce_value);
-+        return NULL;
-+    }
-+    PyTuple_SET_ITEM(dict_args, 0, contents);
-+    Py_INCREF((PyObject *)&PyDict_Type);
-+    PyTuple_SET_ITEM(reduce_value, 0, (PyObject *)&PyDict_Type);
-+    PyTuple_SET_ITEM(reduce_value, 1, dict_args);
-+    return reduce_value;
-+}
-+
-+static PyMethodDef picklerproxy_methods[] = {
-+    _PICKLE_PICKLERMEMOPROXY_CLEAR_METHODDEF
-+    _PICKLE_PICKLERMEMOPROXY_COPY_METHODDEF
-+    _PICKLE_PICKLERMEMOPROXY___REDUCE___METHODDEF
-+    {NULL, NULL} /* sentinel */
-+};
-+
-+static void
-+PicklerMemoProxy_dealloc(PicklerMemoProxyObject *self)
-+{
-+    PyObject_GC_UnTrack(self);
-+    Py_XDECREF(self->pickler);
-+    PyObject_GC_Del((PyObject *)self);
-+}
-+
-+static int
-+PicklerMemoProxy_traverse(PicklerMemoProxyObject *self,
-+                          visitproc visit, void *arg)
-+{
-+    Py_VISIT(self->pickler);
-+    return 0;
-+}
-+
-+static int
-+PicklerMemoProxy_clear(PicklerMemoProxyObject *self)
-+{
-+    Py_CLEAR(self->pickler);
-+    return 0;
-+}
-+
-+static PyTypeObject PicklerMemoProxyType = {
-+    PyVarObject_HEAD_INIT(NULL, 0)
-+    "_pickle.PicklerMemoProxy",                 /*tp_name*/
-+    sizeof(PicklerMemoProxyObject),             /*tp_basicsize*/
-+    0,
-+    (destructor)PicklerMemoProxy_dealloc,       /* tp_dealloc */
-+    0,                                          /* tp_print */
-+    0,                                          /* tp_getattr */
-+    0,                                          /* tp_setattr */
-+    0,                                          /* tp_compare */
-+    0,                                          /* tp_repr */
-+    0,                                          /* tp_as_number */
-+    0,                                          /* tp_as_sequence */
-+    0,                                          /* tp_as_mapping */
-+    PyObject_HashNotImplemented,                /* tp_hash */
-+    0,                                          /* tp_call */
-+    0,                                          /* tp_str */
-+    PyObject_GenericGetAttr,                    /* tp_getattro */
-+    PyObject_GenericSetAttr,                    /* tp_setattro */
-+    0,                                          /* tp_as_buffer */
-+    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
-+    0,                                          /* tp_doc */
-+    (traverseproc)PicklerMemoProxy_traverse,    /* tp_traverse */
-+    (inquiry)PicklerMemoProxy_clear,            /* tp_clear */
-+    0,                                          /* tp_richcompare */
-+    0,                                          /* tp_weaklistoffset */
-+    0,                                          /* tp_iter */
-+    0,                                          /* tp_iternext */
-+    picklerproxy_methods,                       /* tp_methods */
-+};
-+
-+static PyObject *
-+PicklerMemoProxy_New(PicklerObject *pickler)
-+{
-+    PicklerMemoProxyObject *self;
-+
-+    self = PyObject_GC_New(PicklerMemoProxyObject, &PicklerMemoProxyType);
-+    if (self == NULL)
-+        return NULL;
-+    Py_INCREF(pickler);
-+    self->pickler = pickler;
-+    PyObject_GC_Track(self);
-+    return (PyObject *)self;
-+}
-+
-+/*****************************************************************************/
-+
-+static PyObject *
-+Pickler_get_memo(PicklerObject *self)
-+{
-+    return PicklerMemoProxy_New(self);
-+}
-+
-+static int
-+Pickler_set_memo(PicklerObject *self, PyObject *obj)
-+{
-+    PyMemoTable *new_memo = NULL;
-+
-+    if (obj == NULL) {
-+        PyErr_SetString(PyExc_TypeError,
-+                        "attribute deletion is not supported");
-+        return -1;
-+    }
-+
-+    if (Py_TYPE(obj) == &PicklerMemoProxyType) {
-+        PicklerObject *pickler =
-+            ((PicklerMemoProxyObject *)obj)->pickler;
-+
-+        new_memo = PyMemoTable_Copy(pickler->memo);
-+        if (new_memo == NULL)
-+            return -1;
-+    }
-+    else if (PyDict_Check(obj)) {
-+        Py_ssize_t i = 0;
-+        PyObject *key, *value;
-+
-+        new_memo = PyMemoTable_New();
-+        if (new_memo == NULL)
-+            return -1;
-+
-+        while (PyDict_Next(obj, &i, &key, &value)) {
-+            Py_ssize_t memo_id;
-+            PyObject *memo_obj;
-+
-+            if (!PyTuple_Check(value) || Py_SIZE(value) != 2) {
-+                PyErr_SetString(PyExc_TypeError,
-+                                "'memo' values must be 2-item tuples");
-+                goto error;
-+            }
-+            memo_id = PyLong_AsSsize_t(PyTuple_GET_ITEM(value, 0));
-+            if (memo_id == -1 && PyErr_Occurred())
-+                goto error;
-+            memo_obj = PyTuple_GET_ITEM(value, 1);
-+            if (PyMemoTable_Set(new_memo, memo_obj, memo_id) < 0)
-+                goto error;
-+        }
-+    }
-+    else {
-+        PyErr_Format(PyExc_TypeError,
-+                     "'memo' attribute must be an PicklerMemoProxy object"
-+                     "or dict, not %.200s", Py_TYPE(obj)->tp_name);
-+        return -1;
-+    }
-+
-+    PyMemoTable_Del(self->memo);
-+    self->memo = new_memo;
-+
-+    return 0;
-+
-+  error:
-+    if (new_memo)
-+        PyMemoTable_Del(new_memo);
-+    return -1;
-+}
-+
-+static PyObject *
-+Pickler_get_persid(PicklerObject *self)
-+{
-+    if (self->pers_func == NULL)
-+        PyErr_SetString(PyExc_AttributeError, "persistent_id");
-+    else
-+        Py_INCREF(self->pers_func);
-+    return self->pers_func;
-+}
-+
-+static int
-+Pickler_set_persid(PicklerObject *self, PyObject *value)
-+{
-+    PyObject *tmp;
-+
-+    if (value == NULL) {
-+        PyErr_SetString(PyExc_TypeError,
-+                        "attribute deletion is not supported");
-+        return -1;
-+    }
-+    if (!PyCallable_Check(value)) {
-+        PyErr_SetString(PyExc_TypeError,
-+                        "persistent_id must be a callable taking one argument");
-+        return -1;
-+    }
-+
-+    tmp = self->pers_func;
-+    Py_INCREF(value);
-+    self->pers_func = value;
-+    Py_XDECREF(tmp);      /* self->pers_func can be NULL, so be careful. */
-+
-+    return 0;
-+}
-+
-+static PyMemberDef Pickler_members[] = {
-+    {"bin", T_INT, offsetof(PicklerObject, bin)},
-+    {"fast", T_INT, offsetof(PicklerObject, fast)},
-+    {"dispatch_table", T_OBJECT_EX, offsetof(PicklerObject, dispatch_table)},
-+    {NULL}
-+};
-+
-+static PyGetSetDef Pickler_getsets[] = {
-+    {"memo",          (getter)Pickler_get_memo,
-+                      (setter)Pickler_set_memo},
-+    {"persistent_id", (getter)Pickler_get_persid,
-+                      (setter)Pickler_set_persid},
-+    {NULL}
-+};
-+
-+static PyTypeObject Pickler_Type = {
-+    PyVarObject_HEAD_INIT(NULL, 0)
-+    "_pickle.Pickler"  ,                /*tp_name*/
-+    sizeof(PicklerObject),              /*tp_basicsize*/
-+    0,                                  /*tp_itemsize*/
-+    (destructor)Pickler_dealloc,        /*tp_dealloc*/
-+    0,                                  /*tp_print*/
-+    0,                                  /*tp_getattr*/
-+    0,                                  /*tp_setattr*/
-+    0,                                  /*tp_reserved*/
-+    0,                                  /*tp_repr*/
-+    0,                                  /*tp_as_number*/
-+    0,                                  /*tp_as_sequence*/
-+    0,                                  /*tp_as_mapping*/
-+    0,                                  /*tp_hash*/
-+    0,                                  /*tp_call*/
-+    0,                                  /*tp_str*/
-+    0,                                  /*tp_getattro*/
-+    0,                                  /*tp_setattro*/
-+    0,                                  /*tp_as_buffer*/
-+    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
-+    _pickle_Pickler___init____doc__,    /*tp_doc*/
-+    (traverseproc)Pickler_traverse,     /*tp_traverse*/
-+    (inquiry)Pickler_clear,             /*tp_clear*/
-+    0,                                  /*tp_richcompare*/
-+    0,                                  /*tp_weaklistoffset*/
-+    0,                                  /*tp_iter*/
-+    0,                                  /*tp_iternext*/
-+    Pickler_methods,                    /*tp_methods*/
-+    Pickler_members,                    /*tp_members*/
-+    Pickler_getsets,                    /*tp_getset*/
-+    0,                                  /*tp_base*/
-+    0,                                  /*tp_dict*/
-+    0,                                  /*tp_descr_get*/
-+    0,                                  /*tp_descr_set*/
-+    0,                                  /*tp_dictoffset*/
-+    _pickle_Pickler___init__,           /*tp_init*/
-+    PyType_GenericAlloc,                /*tp_alloc*/
-+    PyType_GenericNew,                  /*tp_new*/
-+    PyObject_GC_Del,                    /*tp_free*/
-+    0,                                  /*tp_is_gc*/
-+};
-+
-+/* Temporary helper for calling self.find_class().
-+
-+   XXX: It would be nice to be able to avoid Python function call overhead by
-+   directly using the C version of find_class() when find_class() is not
-+   overridden by a subclass. However, this could become rather hackish. A
-+   simpler optimization would be to call the C function when self is not a
-+   subclass instance. */
-+static PyObject *
-+find_class(UnpicklerObject *self, PyObject *module_name, PyObject *global_name)
-+{
-+    _Py_IDENTIFIER(find_class);
-+
-+    return _PyObject_CallMethodId((PyObject *)self, &PyId_find_class, "OO",
-+                                  module_name, global_name);
-+}
-+
-+static Py_ssize_t
-+marker(UnpicklerObject *self)
-+{
-+    PickleState *st = _Pickle_GetGlobalState();
-+    if (self->num_marks < 1) {
-+        PyErr_SetString(st->UnpicklingError, "could not find MARK");
-+        return -1;
-+    }
-+
-+    return self->marks[--self->num_marks];
-+}
-+
-+static int
-+load_none(UnpicklerObject *self)
-+{
-+    PDATA_APPEND(self->stack, Py_None, -1);
-+    return 0;
-+}
-+
-+static int
-+bad_readline(void)
-+{
-+    PickleState *st = _Pickle_GetGlobalState();
-+    PyErr_SetString(st->UnpicklingError, "pickle data was truncated");
-+    return -1;
-+}
-+
-+static int
-+load_int(UnpicklerObject *self)
-+{
-+    PyObject *value;
-+    char *endptr, *s;
-+    Py_ssize_t len;
-+    long x;
-+
-+    if ((len = _Unpickler_Readline(self, &s)) < 0)
-+        return -1;
-+    if (len < 2)
-+        return bad_readline();
-+
-+    errno = 0;
-+    /* XXX: Should the base argument of strtol() be explicitly set to 10?
-+       XXX(avassalotti): Should this use PyOS_strtol()? */
-+    x = strtol(s, &endptr, 0);
-+
-+    if (errno || (*endptr != '\n' && *endptr != '\0')) {
-+        /* Hm, maybe we've got something long.  Let's try reading
-+         * it as a Python int object. */
-+        errno = 0;
-+        /* XXX: Same thing about the base here. */
-+        value = PyLong_FromString(s, NULL, 0);
-+        if (value == NULL) {
-+            PyErr_SetString(PyExc_ValueError,
-+                            "could not convert string to int");
-+            return -1;
-+        }
-+    }
-+    else {
-+        if (len == 3 && (x == 0 || x == 1)) {
-+            if ((value = PyBool_FromLong(x)) == NULL)
-+                return -1;
-+        }
-+        else {
-+            if ((value = PyLong_FromLong(x)) == NULL)
-+                return -1;
-+        }
-+    }
-+
-+    PDATA_PUSH(self->stack, value, -1);
-+    return 0;
-+}
-+
-+static int
-+load_bool(UnpicklerObject *self, PyObject *boolean)
-+{
-+    assert(boolean == Py_True || boolean == Py_False);
-+    PDATA_APPEND(self->stack, boolean, -1);
-+    return 0;
-+}
-+
-+/* bytes contains nbytes bytes of an unsigned little-endian integer.  Return
-+ * its value as a C Py_ssize_t, or -1 if it exceeds PY_SSIZE_T_MAX.
-+ */
-+static Py_ssize_t
-+calc_binsize(char *bytes, int nbytes)
-+{
-+    unsigned char *s = (unsigned char *)bytes;
-+    int i;
-+    size_t x = 0;
-+
-+    for (i = 0; i < nbytes && i < sizeof(size_t); i++) {
-+        x |= (size_t) s[i] << (8 * i);
-+    }
-+
-+    if (x > PY_SSIZE_T_MAX)
-+        return -1;
-+    else
-+        return (Py_ssize_t) x;
-+}
-+
-+/* bytes contains nbytes bytes of a little-endian integer.  Return its value
-+ * as a C long.  Obscure:  when nbytes is 1 or 2, this is an unsigned
-+ * little-endian int, but when nbytes is 4 it's a signed one.  This is a
-+ * historical source of cross-platform bugs.
-+ */
-+static long
-+calc_binint(char *bytes, int nbytes)
-+{
-+    unsigned char *s = (unsigned char *)bytes;
-+    int i;
-+    long x = 0;
-+
-+    for (i = 0; i < nbytes; i++) {
-+        x |= (long)s[i] << (8 * i);
-+    }
-+
-+    /* Unlike BININT1 and BININT2, BININT (more accurately BININT4)
-+     * is signed, so on a box with longs bigger than 4 bytes we need
-+     * to extend a BININT's sign bit to the full width.
-+     */
-+    if (SIZEOF_LONG > 4 && nbytes == 4) {
-+        x |= -(x & (1L << 31));
-+    }
-+
-+    return x;
-+}
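
The sign-extension step in calc_binint() matters on platforms where long is wider than 32 bits: without it, the four little-endian bytes ff ff ff ff would decode to 4294967295 instead of -1. A self-contained sketch of the same arithmetic:

    #include <stdio.h>

    /* Decode nbytes little-endian bytes the way calc_binint() does,
     * including the BININT4 sign extension on 64-bit longs. */
    static long demo_calc_binint(const unsigned char *s, int nbytes)
    {
        long x = 0;
        int i;

        for (i = 0; i < nbytes; i++)
            x |= (long)s[i] << (8 * i);
        if (sizeof(long) > 4 && nbytes == 4)
            x |= -(x & (1L << 31));   /* extend the sign bit of a 4-byte value */
        return x;
    }

    int main(void)
    {
        unsigned char minus_one[4] = { 0xff, 0xff, 0xff, 0xff };
        unsigned char forty_two[4] = { 0x2a, 0x00, 0x00, 0x00 };

        printf("%ld %ld\n",
               demo_calc_binint(minus_one, 4),    /* -1 */
               demo_calc_binint(forty_two, 4));   /* 42 */
        return 0;
    }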
-+
-+static int
-+load_binintx(UnpicklerObject *self, char *s, int size)
-+{
-+    PyObject *value;
-+    long x;
-+
-+    x = calc_binint(s, size);
-+
-+    if ((value = PyLong_FromLong(x)) == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, value, -1);
-+    return 0;
-+}
-+
-+static int
-+load_binint(UnpicklerObject *self)
-+{
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, 4) < 0)
-+        return -1;
-+
-+    return load_binintx(self, s, 4);
-+}
-+
-+static int
-+load_binint1(UnpicklerObject *self)
-+{
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, 1) < 0)
-+        return -1;
-+
-+    return load_binintx(self, s, 1);
-+}
-+
-+static int
-+load_binint2(UnpicklerObject *self)
-+{
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, 2) < 0)
-+        return -1;
-+
-+    return load_binintx(self, s, 2);
-+}
-+
-+static int
-+load_long(UnpicklerObject *self)
-+{
-+    PyObject *value;
-+    char *s;
-+    Py_ssize_t len;
-+
-+    if ((len = _Unpickler_Readline(self, &s)) < 0)
-+        return -1;
-+    if (len < 2)
-+        return bad_readline();
-+
-+    /* s[len-2] will usually be 'L' (and s[len-1] is '\n'); we need to remove
-+       the 'L' before calling PyLong_FromString.  In order to maintain
-+       compatibility with Python 3.0.0, we don't actually *require*
-+       the 'L' to be present. */
-+    if (s[len-2] == 'L')
-+        s[len-2] = '\0';
-+    /* XXX: Should the base argument be explicitly set to 10? */
-+    value = PyLong_FromString(s, NULL, 0);
-+    if (value == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, value, -1);
-+    return 0;
-+}
-+
-+/* 'size' bytes contain the # of bytes of little-endian 256's-complement
-+ * data following.
-+ */
-+static int
-+load_counted_long(UnpicklerObject *self, int size)
-+{
-+    PyObject *value;
-+    char *nbytes;
-+    char *pdata;
-+
-+    assert(size == 1 || size == 4);
-+    if (_Unpickler_Read(self, &nbytes, size) < 0)
-+        return -1;
-+
-+    size = calc_binint(nbytes, size);
-+    if (size < 0) {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        /* Corrupt or hostile pickle -- we never write one like this */
-+        PyErr_SetString(st->UnpicklingError,
-+                        "LONG pickle has negative byte count");
-+        return -1;
-+    }
-+
-+    if (size == 0)
-+        value = PyLong_FromLong(0L);
-+    else {
-+        /* Read the raw little-endian bytes and convert. */
-+        if (_Unpickler_Read(self, &pdata, size) < 0)
-+            return -1;
-+        value = _PyLong_FromByteArray((unsigned char *)pdata, (size_t)size,
-+                                      1 /* little endian */ , 1 /* signed */ );
-+    }
-+    if (value == NULL)
-+        return -1;
-+    PDATA_PUSH(self->stack, value, -1);
-+    return 0;
-+}
-+
-+static int
-+load_float(UnpicklerObject *self)
-+{
-+    PyObject *value;
-+    char *endptr, *s;
-+    Py_ssize_t len;
-+    double d;
-+
-+    if ((len = _Unpickler_Readline(self, &s)) < 0)
-+        return -1;
-+    if (len < 2)
-+        return bad_readline();
-+
-+    errno = 0;
-+    d = PyOS_string_to_double(s, &endptr, PyExc_OverflowError);
-+    if (d == -1.0 && PyErr_Occurred())
-+        return -1;
-+    if ((endptr[0] != '\n') && (endptr[0] != '\0')) {
-+        PyErr_SetString(PyExc_ValueError, "could not convert string to float");
-+        return -1;
-+    }
-+    value = PyFloat_FromDouble(d);
-+    if (value == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, value, -1);
-+    return 0;
-+}
-+
-+static int
-+load_binfloat(UnpicklerObject *self)
-+{
-+    PyObject *value;
-+    double x;
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, 8) < 0)
-+        return -1;
-+
-+    x = _PyFloat_Unpack8((unsigned char *)s, 0);
-+    if (x == -1.0 && PyErr_Occurred())
-+        return -1;
-+
-+    if ((value = PyFloat_FromDouble(x)) == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, value, -1);
-+    return 0;
-+}
-+
-+static int
-+load_string(UnpicklerObject *self)
-+{
-+    PyObject *bytes;
-+    PyObject *obj;
-+    Py_ssize_t len;
-+    char *s, *p;
-+
-+    if ((len = _Unpickler_Readline(self, &s)) < 0)
-+        return -1;
-+    /* Strip the newline */
-+    len--;
-+    /* Strip outermost quotes */
-+    if (len >= 2 && s[0] == s[len - 1] && (s[0] == '\'' || s[0] == '"')) {
-+        p = s + 1;
-+        len -= 2;
-+    }
-+    else {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        PyErr_SetString(st->UnpicklingError,
-+                        "the STRING opcode argument must be quoted");
-+        return -1;
-+    }
-+    assert(len >= 0);
-+
-+    /* Use the PyBytes API to decode the string, since that is what is used
-+       to encode, and then coerce the result to Unicode. */
-+    bytes = PyBytes_DecodeEscape(p, len, NULL, 0, NULL);
-+    if (bytes == NULL)
-+        return -1;
-+
-+    /* Leave the Python 2.x strings as bytes if the *encoding* given to the
-+       Unpickler was 'bytes'. Otherwise, convert them to unicode. */
-+    if (strcmp(self->encoding, "bytes") == 0) {
-+        obj = bytes;
-+    }
-+    else if (strcmp(self->errors, "bytes") == 0) {
-+        PyObject *decoded = PyUnicode_FromEncodedObject(bytes, self->encoding,
-+                                                        "strict");
-+        if (decoded == NULL) {
-+            PyErr_Clear();
-+            obj = bytes;
-+        } else {
-+            /* the decoded string carries the data now; release our
-+               reference to the intermediate bytes object to avoid a leak */
-+            Py_DECREF(bytes);
-+            obj = decoded;
-+        }
-+    }
-+    else {
-+        obj = PyUnicode_FromEncodedObject(bytes, self->encoding, self->errors);
-+        Py_DECREF(bytes);
-+        if (obj == NULL) {
-+            return -1;
-+        }
-+    }
-+
-+    PDATA_PUSH(self->stack, obj, -1);
-+    return 0;
-+}
-+
-+static int
-+load_counted_binstring(UnpicklerObject *self, int nbytes)
-+{
-+    PyObject *obj;
-+    Py_ssize_t size;
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, nbytes) < 0)
-+        return -1;
-+
-+    size = calc_binsize(s, nbytes);
-+    if (size < 0) {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        PyErr_Format(st->UnpicklingError,
-+                     "BINSTRING exceeds system's maximum size of %zd bytes",
-+                     PY_SSIZE_T_MAX);
-+        return -1;
-+    }
-+
-+    if (_Unpickler_Read(self, &s, size) < 0)
-+        return -1;
-+
-+    /* Convert Python 2.x strings to bytes if the *encoding* given to the
-+       Unpickler was 'bytes'. Otherwise, convert them to unicode. */
-+    if (strcmp(self->encoding, "bytes") == 0) {
-+        obj = PyBytes_FromStringAndSize(s, size);
-+    }
-+    else {
-+        obj = PyUnicode_Decode(s, size, self->encoding, self->errors);
-+    }
-+    if (obj == NULL) {
-+        return -1;
-+    }
-+
-+    PDATA_PUSH(self->stack, obj, -1);
-+    return 0;
-+}
-+
-+static int
-+load_counted_binbytes(UnpicklerObject *self, int nbytes)
-+{
-+    PyObject *bytes;
-+    Py_ssize_t size;
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, nbytes) < 0)
-+        return -1;
-+
-+    size = calc_binsize(s, nbytes);
-+    if (size < 0) {
-+        PyErr_Format(PyExc_OverflowError,
-+                     "BINBYTES exceeds system's maximum size of %zd bytes",
-+                     PY_SSIZE_T_MAX);
-+        return -1;
-+    }
-+
-+    if (_Unpickler_Read(self, &s, size) < 0)
-+        return -1;
-+
-+    bytes = PyBytes_FromStringAndSize(s, size);
-+    if (bytes == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, bytes, -1);
-+    return 0;
-+}
-+
-+static int
-+load_unicode(UnpicklerObject *self)
-+{
-+    PyObject *str;
-+    Py_ssize_t len;
-+    char *s;
-+
-+    if ((len = _Unpickler_Readline(self, &s)) < 0)
-+        return -1;
-+    if (len < 1)
-+        return bad_readline();
-+
-+    str = PyUnicode_DecodeRawUnicodeEscape(s, len - 1, NULL);
-+    if (str == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, str, -1);
-+    return 0;
-+}
-+
-+static int
-+load_counted_binunicode(UnpicklerObject *self, int nbytes)
-+{
-+    PyObject *str;
-+    Py_ssize_t size;
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, nbytes) < 0)
-+        return -1;
-+
-+    size = calc_binsize(s, nbytes);
-+    if (size < 0) {
-+        PyErr_Format(PyExc_OverflowError,
-+                     "BINUNICODE exceeds system's maximum size of %zd bytes",
-+                     PY_SSIZE_T_MAX);
-+        return -1;
-+    }
-+
-+    if (_Unpickler_Read(self, &s, size) < 0)
-+        return -1;
-+
-+    str = PyUnicode_DecodeUTF8(s, size, "surrogatepass");
-+    if (str == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, str, -1);
-+    return 0;
-+}
-+
-+static int
-+load_tuple(UnpicklerObject *self)
-+{
-+    PyObject *tuple;
-+    Py_ssize_t i;
-+
-+    if ((i = marker(self)) < 0)
-+        return -1;
-+
-+    tuple = Pdata_poptuple(self->stack, i);
-+    if (tuple == NULL)
-+        return -1;
-+    PDATA_PUSH(self->stack, tuple, -1);
-+    return 0;
-+}
-+
-+static int
-+load_counted_tuple(UnpicklerObject *self, int len)
-+{
-+    PyObject *tuple;
-+
-+    tuple = PyTuple_New(len);
-+    if (tuple == NULL)
-+        return -1;
-+
-+    while (--len >= 0) {
-+        PyObject *item;
-+
-+        PDATA_POP(self->stack, item);
-+        if (item == NULL)
-+            return -1;
-+        PyTuple_SET_ITEM(tuple, len, item);
-+    }
-+    PDATA_PUSH(self->stack, tuple, -1);
-+    return 0;
-+}
-+
-+static int
-+load_empty_list(UnpicklerObject *self)
-+{
-+    PyObject *list;
-+
-+    if ((list = PyList_New(0)) == NULL)
-+        return -1;
-+    PDATA_PUSH(self->stack, list, -1);
-+    return 0;
-+}
-+
-+static int
-+load_empty_dict(UnpicklerObject *self)
-+{
-+    PyObject *dict;
-+
-+    if ((dict = PyDict_New()) == NULL)
-+        return -1;
-+    PDATA_PUSH(self->stack, dict, -1);
-+    return 0;
-+}
-+
-+static int
-+load_empty_set(UnpicklerObject *self)
-+{
-+    PyObject *set;
-+
-+    if ((set = PySet_New(NULL)) == NULL)
-+        return -1;
-+    PDATA_PUSH(self->stack, set, -1);
-+    return 0;
-+}
-+
-+static int
-+load_list(UnpicklerObject *self)
-+{
-+    PyObject *list;
-+    Py_ssize_t i;
-+
-+    if ((i = marker(self)) < 0)
-+        return -1;
-+
-+    list = Pdata_poplist(self->stack, i);
-+    if (list == NULL)
-+        return -1;
-+    PDATA_PUSH(self->stack, list, -1);
-+    return 0;
-+}
-+
-+static int
-+load_dict(UnpicklerObject *self)
-+{
-+    PyObject *dict, *key, *value;
-+    Py_ssize_t i, j, k;
-+
-+    if ((i = marker(self)) < 0)
-+        return -1;
-+    j = Py_SIZE(self->stack);
-+
-+    if ((dict = PyDict_New()) == NULL)
-+        return -1;
-+
-+    for (k = i + 1; k < j; k += 2) {
-+        key = self->stack->data[k - 1];
-+        value = self->stack->data[k];
-+        if (PyDict_SetItem(dict, key, value) < 0) {
-+            Py_DECREF(dict);
-+            return -1;
-+        }
-+    }
-+    Pdata_clear(self->stack, i);
-+    PDATA_PUSH(self->stack, dict, -1);
-+    return 0;
-+}
-+
-+static int
-+load_frozenset(UnpicklerObject *self)
-+{
-+    PyObject *items;
-+    PyObject *frozenset;
-+    Py_ssize_t i;
-+
-+    if ((i = marker(self)) < 0)
-+        return -1;
-+
-+    items = Pdata_poptuple(self->stack, i);
-+    if (items == NULL)
-+        return -1;
-+
-+    frozenset = PyFrozenSet_New(items);
-+    Py_DECREF(items);
-+    if (frozenset == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, frozenset, -1);
-+    return 0;
-+}
-+
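-+/* Create an instance for the OBJ and INST opcodes.  Mirrors pickle.py: if the
-+   argument tuple is non-empty, the callable is not a type, or the class
-+   defines __getinitargs__, call it normally; otherwise create the instance
-+   with cls.__new__(cls) so that __init__ is bypassed. */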
-+static PyObject *
-+instantiate(PyObject *cls, PyObject *args)
-+{
-+    PyObject *result = NULL;
-+    _Py_IDENTIFIER(__getinitargs__);
-+    /* Caller must assure args are a tuple.  Normally, args come from
-+       Pdata_poptuple which packs objects from the top of the stack
-+       into a newly created tuple. */
-+    assert(PyTuple_Check(args));
-+    if (Py_SIZE(args) > 0 || !PyType_Check(cls) ||
-+        _PyObject_HasAttrId(cls, &PyId___getinitargs__)) {
-+        result = PyObject_CallObject(cls, args);
-+    }
-+    else {
-+        _Py_IDENTIFIER(__new__);
-+
-+        result = _PyObject_CallMethodId(cls, &PyId___new__, "O", cls);
-+    }
-+    return result;
-+}
-+
-+static int
-+load_obj(UnpicklerObject *self)
-+{
-+    PyObject *cls, *args, *obj = NULL;
-+    Py_ssize_t i;
-+
-+    if ((i = marker(self)) < 0)
-+        return -1;
-+
-+    args = Pdata_poptuple(self->stack, i + 1);
-+    if (args == NULL)
-+        return -1;
-+
-+    PDATA_POP(self->stack, cls);
-+    if (cls) {
-+        obj = instantiate(cls, args);
-+        Py_DECREF(cls);
-+    }
-+    Py_DECREF(args);
-+    if (obj == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, obj, -1);
-+    return 0;
-+}
-+
-+static int
-+load_inst(UnpicklerObject *self)
-+{
-+    PyObject *cls = NULL;
-+    PyObject *args = NULL;
-+    PyObject *obj = NULL;
-+    PyObject *module_name;
-+    PyObject *class_name;
-+    Py_ssize_t len;
-+    Py_ssize_t i;
-+    char *s;
-+
-+    if ((i = marker(self)) < 0)
-+        return -1;
-+    if ((len = _Unpickler_Readline(self, &s)) < 0)
-+        return -1;
-+    if (len < 2)
-+        return bad_readline();
-+
-+    /* Here it is safe to use PyUnicode_DecodeASCII(), even though non-ASCII
-+       identifiers are permitted in Python 3.0, since the INST opcode is only
-+       supported by older protocols on Python 2.x. */
-+    module_name = PyUnicode_DecodeASCII(s, len - 1, "strict");
-+    if (module_name == NULL)
-+        return -1;
-+
-+    if ((len = _Unpickler_Readline(self, &s)) >= 0) {
-+        if (len < 2)
-+            return bad_readline();
-+        class_name = PyUnicode_DecodeASCII(s, len - 1, "strict");
-+        if (class_name != NULL) {
-+            cls = find_class(self, module_name, class_name);
-+            Py_DECREF(class_name);
-+        }
-+    }
-+    Py_DECREF(module_name);
-+
-+    if (cls == NULL)
-+        return -1;
-+
-+    if ((args = Pdata_poptuple(self->stack, i)) != NULL) {
-+        obj = instantiate(cls, args);
-+        Py_DECREF(args);
-+    }
-+    Py_DECREF(cls);
-+
-+    if (obj == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, obj, -1);
-+    return 0;
-+}
-+
-+static int
-+load_newobj(UnpicklerObject *self)
-+{
-+    PyObject *args = NULL;
-+    PyObject *clsraw = NULL;
-+    PyTypeObject *cls;          /* clsraw cast to its true type */
-+    PyObject *obj;
-+    PickleState *st = _Pickle_GetGlobalState();
-+
-+    /* Stack is ... cls argtuple, and we want to call
-+     * cls.__new__(cls, *argtuple).
-+     */
-+    PDATA_POP(self->stack, args);
-+    if (args == NULL)
-+        goto error;
-+    if (!PyTuple_Check(args)) {
-+        PyErr_SetString(st->UnpicklingError,
-+                        "NEWOBJ expected an arg " "tuple.");
-+        goto error;
-+    }
-+
-+    PDATA_POP(self->stack, clsraw);
-+    cls = (PyTypeObject *)clsraw;
-+    if (cls == NULL)
-+        goto error;
-+    if (!PyType_Check(cls)) {
-+        PyErr_SetString(st->UnpicklingError, "NEWOBJ class argument "
-+                        "isn't a type object");
-+        goto error;
-+    }
-+    if (cls->tp_new == NULL) {
-+        PyErr_SetString(st->UnpicklingError, "NEWOBJ class argument "
-+                        "has NULL tp_new");
-+        goto error;
-+    }
-+
-+    /* Call __new__. */
-+    obj = cls->tp_new(cls, args, NULL);
-+    if (obj == NULL)
-+        goto error;
-+
-+    Py_DECREF(args);
-+    Py_DECREF(clsraw);
-+    PDATA_PUSH(self->stack, obj, -1);
-+    return 0;
-+
-+  error:
-+    Py_XDECREF(args);
-+    Py_XDECREF(clsraw);
-+    return -1;
-+}
-+
-+static int
-+load_newobj_ex(UnpicklerObject *self)
-+{
-+    PyObject *cls, *args, *kwargs;
-+    PyObject *obj;
-+    PickleState *st = _Pickle_GetGlobalState();
-+
-+    PDATA_POP(self->stack, kwargs);
-+    if (kwargs == NULL) {
-+        return -1;
-+    }
-+    PDATA_POP(self->stack, args);
-+    if (args == NULL) {
-+        Py_DECREF(kwargs);
-+        return -1;
-+    }
-+    PDATA_POP(self->stack, cls);
-+    if (cls == NULL) {
-+        Py_DECREF(kwargs);
-+        Py_DECREF(args);
-+        return -1;
-+    }
-+
-+    if (!PyType_Check(cls)) {
-+        Py_DECREF(kwargs);
-+        Py_DECREF(args);
-+        Py_DECREF(cls);
-+        PyErr_Format(st->UnpicklingError,
-+                     "NEWOBJ_EX class argument must be a type, not %.200s",
-+                     Py_TYPE(cls)->tp_name);
-+        return -1;
-+    }
-+
-+    if (((PyTypeObject *)cls)->tp_new == NULL) {
-+        Py_DECREF(kwargs);
-+        Py_DECREF(args);
-+        Py_DECREF(cls);
-+        PyErr_SetString(st->UnpicklingError,
-+                        "NEWOBJ_EX class argument doesn't have __new__");
-+        return -1;
-+    }
-+    obj = ((PyTypeObject *)cls)->tp_new((PyTypeObject *)cls, args, kwargs);
-+    Py_DECREF(kwargs);
-+    Py_DECREF(args);
-+    Py_DECREF(cls);
-+    if (obj == NULL) {
-+        return -1;
-+    }
-+    PDATA_PUSH(self->stack, obj, -1);
-+    return 0;
-+}
-+
-+static int
-+load_global(UnpicklerObject *self)
-+{
-+    PyObject *global = NULL;
-+    PyObject *module_name;
-+    PyObject *global_name;
-+    Py_ssize_t len;
-+    char *s;
-+
-+    if ((len = _Unpickler_Readline(self, &s)) < 0)
-+        return -1;
-+    if (len < 2)
-+        return bad_readline();
-+    module_name = PyUnicode_DecodeUTF8(s, len - 1, "strict");
-+    if (!module_name)
-+        return -1;
-+
-+    if ((len = _Unpickler_Readline(self, &s)) >= 0) {
-+        if (len < 2) {
-+            Py_DECREF(module_name);
-+            return bad_readline();
-+        }
-+        global_name = PyUnicode_DecodeUTF8(s, len - 1, "strict");
-+        if (global_name) {
-+            global = find_class(self, module_name, global_name);
-+            Py_DECREF(global_name);
-+        }
-+    }
-+    Py_DECREF(module_name);
-+
-+    if (global == NULL)
-+        return -1;
-+    PDATA_PUSH(self->stack, global, -1);
-+    return 0;
-+}
-+
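-+/* STACK_GLOBAL (protocol 4) takes the module name and the qualified global
-+   name as two str objects from the stack, instead of reading two
-+   newline-terminated lines the way GLOBAL does. */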
-+static int
-+load_stack_global(UnpicklerObject *self)
-+{
-+    PyObject *global;
-+    PyObject *module_name;
-+    PyObject *global_name;
-+
-+    PDATA_POP(self->stack, global_name);
-+    PDATA_POP(self->stack, module_name);
-+    if (module_name == NULL || !PyUnicode_CheckExact(module_name) ||
-+        global_name == NULL || !PyUnicode_CheckExact(global_name)) {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        PyErr_SetString(st->UnpicklingError, "STACK_GLOBAL requires str");
-+        Py_XDECREF(global_name);
-+        Py_XDECREF(module_name);
-+        return -1;
-+    }
-+    global = find_class(self, module_name, global_name);
-+    Py_DECREF(global_name);
-+    Py_DECREF(module_name);
-+    if (global == NULL)
-+        return -1;
-+    PDATA_PUSH(self->stack, global, -1);
-+    return 0;
-+}
-+
-+static int
-+load_persid(UnpicklerObject *self)
-+{
-+    PyObject *pid;
-+    Py_ssize_t len;
-+    char *s;
-+
-+    if (self->pers_func) {
-+        if ((len = _Unpickler_Readline(self, &s)) < 0)
-+            return -1;
-+        if (len < 1)
-+            return bad_readline();
-+
-+        pid = PyBytes_FromStringAndSize(s, len - 1);
-+        if (pid == NULL)
-+            return -1;
-+
-+        /* This does not leak since _Pickle_FastCall() steals the reference
-+           to pid first. */
-+        pid = _Pickle_FastCall(self->pers_func, pid);
-+        if (pid == NULL)
-+            return -1;
-+
-+        PDATA_PUSH(self->stack, pid, -1);
-+        return 0;
-+    }
-+    else {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        PyErr_SetString(st->UnpicklingError,
-+                        "A load persistent id instruction was encountered,\n"
-+                        "but no persistent_load function was specified.");
-+        return -1;
-+    }
-+}
-+
-+static int
-+load_binpersid(UnpicklerObject *self)
-+{
-+    PyObject *pid;
-+
-+    if (self->pers_func) {
-+        PDATA_POP(self->stack, pid);
-+        if (pid == NULL)
-+            return -1;
-+
-+        /* This does not leak since _Pickle_FastCall() steals the
-+           reference to pid first. */
-+        pid = _Pickle_FastCall(self->pers_func, pid);
-+        if (pid == NULL)
-+            return -1;
-+
-+        PDATA_PUSH(self->stack, pid, -1);
-+        return 0;
-+    }
-+    else {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        PyErr_SetString(st->UnpicklingError,
-+                        "A load persistent id instruction was encountered,\n"
-+                        "but no persistent_load function was specified.");
-+        return -1;
-+    }
-+}
-+
-+static int
-+load_pop(UnpicklerObject *self)
-+{
-+    Py_ssize_t len = Py_SIZE(self->stack);
-+
-+    /* Note that we split the (pickle.py) stack into two stacks,
-+     * an object stack and a mark stack. We have to be clever and
-+     * pop the right one. We do this by looking at the top of the
-+     * mark stack first, and only signalling a stack underflow if
-+     * the object stack is empty and the mark stack doesn't match
-+     * our expectations.
-+     */
-+    if (self->num_marks > 0 && self->marks[self->num_marks - 1] == len) {
-+        self->num_marks--;
-+    } else if (len > 0) {
-+        len--;
-+        Py_DECREF(self->stack->data[len]);
-+        Py_SIZE(self->stack) = len;
-+    } else {
-+        return stack_underflow();
-+    }
-+    return 0;
-+}
-+
-+static int
-+load_pop_mark(UnpicklerObject *self)
-+{
-+    Py_ssize_t i;
-+
-+    if ((i = marker(self)) < 0)
-+        return -1;
-+
-+    Pdata_clear(self->stack, i);
-+
-+    return 0;
-+}
-+
-+static int
-+load_dup(UnpicklerObject *self)
-+{
-+    PyObject *last;
-+    Py_ssize_t len;
-+
-+    if ((len = Py_SIZE(self->stack)) <= 0)
-+        return stack_underflow();
-+    last = self->stack->data[len - 1];
-+    PDATA_APPEND(self->stack, last, -1);
-+    return 0;
-+}
-+
-+static int
-+load_get(UnpicklerObject *self)
-+{
-+    PyObject *key, *value;
-+    Py_ssize_t idx;
-+    Py_ssize_t len;
-+    char *s;
-+
-+    if ((len = _Unpickler_Readline(self, &s)) < 0)
-+        return -1;
-+    if (len < 2)
-+        return bad_readline();
-+
-+    key = PyLong_FromString(s, NULL, 10);
-+    if (key == NULL)
-+        return -1;
-+    idx = PyLong_AsSsize_t(key);
-+    if (idx == -1 && PyErr_Occurred()) {
-+        Py_DECREF(key);
-+        return -1;
-+    }
-+
-+    value = _Unpickler_MemoGet(self, idx);
-+    if (value == NULL) {
-+        if (!PyErr_Occurred())
-+            PyErr_SetObject(PyExc_KeyError, key);
-+        Py_DECREF(key);
-+        return -1;
-+    }
-+    Py_DECREF(key);
-+
-+    PDATA_APPEND(self->stack, value, -1);
-+    return 0;
-+}
-+
-+static int
-+load_binget(UnpicklerObject *self)
-+{
-+    PyObject *value;
-+    Py_ssize_t idx;
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, 1) < 0)
-+        return -1;
-+
-+    idx = Py_CHARMASK(s[0]);
-+
-+    value = _Unpickler_MemoGet(self, idx);
-+    if (value == NULL) {
-+        PyObject *key = PyLong_FromSsize_t(idx);
-+        if (key != NULL) {
-+            PyErr_SetObject(PyExc_KeyError, key);
-+            Py_DECREF(key);
-+        }
-+        return -1;
-+    }
-+
-+    PDATA_APPEND(self->stack, value, -1);
-+    return 0;
-+}
-+
-+static int
-+load_long_binget(UnpicklerObject *self)
-+{
-+    PyObject *value;
-+    Py_ssize_t idx;
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, 4) < 0)
-+        return -1;
-+
-+    idx = calc_binsize(s, 4);
-+
-+    value = _Unpickler_MemoGet(self, idx);
-+    if (value == NULL) {
-+        PyObject *key = PyLong_FromSsize_t(idx);
-+        if (key != NULL) {
-+            PyErr_SetObject(PyExc_KeyError, key);
-+            Py_DECREF(key);
-+        }
-+        return -1;
-+    }
-+
-+    PDATA_APPEND(self->stack, value, -1);
-+    return 0;
-+}
-+
-+/* Push an object from the extension registry (EXT[124]).  nbytes is
-+ * the number of bytes following the opcode, holding the index (code) value.
-+ */
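-+/* The lookup goes through the copyreg extension machinery: first the
-+   code -> object cache, then the inverted registry mapping the code to a
-+   (module_name, class_name) pair, which is imported via find_class() and
-+   cached for subsequent uses of the same code. */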
-+static int
-+load_extension(UnpicklerObject *self, int nbytes)
-+{
-+    char *codebytes;            /* the nbytes bytes after the opcode */
-+    long code;                  /* calc_binint returns long */
-+    PyObject *py_code;          /* code as a Python int */
-+    PyObject *obj;              /* the object to push */
-+    PyObject *pair;             /* (module_name, class_name) */
-+    PyObject *module_name, *class_name;
-+    PickleState *st = _Pickle_GetGlobalState();
-+
-+    assert(nbytes == 1 || nbytes == 2 || nbytes == 4);
-+    if (_Unpickler_Read(self, &codebytes, nbytes) < 0)
-+        return -1;
-+    code = calc_binint(codebytes, nbytes);
-+    if (code <= 0) {            /* note that 0 is forbidden */
-+        /* Corrupt or hostile pickle. */
-+        PyErr_SetString(st->UnpicklingError, "EXT specifies code <= 0");
-+        return -1;
-+    }
-+
-+    /* Look for the code in the cache. */
-+    py_code = PyLong_FromLong(code);
-+    if (py_code == NULL)
-+        return -1;
-+    obj = PyDict_GetItemWithError(st->extension_cache, py_code);
-+    if (obj != NULL) {
-+        /* Bingo. */
-+        Py_DECREF(py_code);
-+        PDATA_APPEND(self->stack, obj, -1);
-+        return 0;
-+    }
-+    if (PyErr_Occurred()) {
-+        Py_DECREF(py_code);
-+        return -1;
-+    }
-+
-+    /* Look up the (module_name, class_name) pair. */
-+    pair = PyDict_GetItemWithError(st->inverted_registry, py_code);
-+    if (pair == NULL) {
-+        Py_DECREF(py_code);
-+        if (!PyErr_Occurred()) {
-+            PyErr_Format(PyExc_ValueError, "unregistered extension "
-+                         "code %ld", code);
-+        }
-+        return -1;
-+    }
-+    /* Since the extension registry is manipulable via Python code,
-+     * confirm that pair is really a 2-tuple of strings.
-+     */
-+    if (!PyTuple_Check(pair) || PyTuple_Size(pair) != 2 ||
-+        !PyUnicode_Check(module_name = PyTuple_GET_ITEM(pair, 0)) ||
-+        !PyUnicode_Check(class_name = PyTuple_GET_ITEM(pair, 1))) {
-+        Py_DECREF(py_code);
-+        PyErr_Format(PyExc_ValueError, "_inverted_registry[%ld] "
-+                     "isn't a 2-tuple of strings", code);
-+        return -1;
-+    }
-+    /* Load the object. */
-+    obj = find_class(self, module_name, class_name);
-+    if (obj == NULL) {
-+        Py_DECREF(py_code);
-+        return -1;
-+    }
-+    /* Cache code -> obj. */
-+    code = PyDict_SetItem(st->extension_cache, py_code, obj);
-+    Py_DECREF(py_code);
-+    if (code < 0) {
-+        Py_DECREF(obj);
-+        return -1;
-+    }
-+    PDATA_PUSH(self->stack, obj, -1);
-+    return 0;
-+}
-+
-+static int
-+load_put(UnpicklerObject *self)
-+{
-+    PyObject *key, *value;
-+    Py_ssize_t idx;
-+    Py_ssize_t len;
-+    char *s;
-+
-+    if ((len = _Unpickler_Readline(self, &s)) < 0)
-+        return -1;
-+    if (len < 2)
-+        return bad_readline();
-+    if (Py_SIZE(self->stack) <= 0)
-+        return stack_underflow();
-+    value = self->stack->data[Py_SIZE(self->stack) - 1];
-+
-+    key = PyLong_FromString(s, NULL, 10);
-+    if (key == NULL)
-+        return -1;
-+    idx = PyLong_AsSsize_t(key);
-+    Py_DECREF(key);
-+    if (idx < 0) {
-+        if (!PyErr_Occurred())
-+            PyErr_SetString(PyExc_ValueError,
-+                            "negative PUT argument");
-+        return -1;
-+    }
-+
-+    return _Unpickler_MemoPut(self, idx, value);
-+}
-+
-+static int
-+load_binput(UnpicklerObject *self)
-+{
-+    PyObject *value;
-+    Py_ssize_t idx;
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, 1) < 0)
-+        return -1;
-+
-+    if (Py_SIZE(self->stack) <= 0)
-+        return stack_underflow();
-+    value = self->stack->data[Py_SIZE(self->stack) - 1];
-+
-+    idx = Py_CHARMASK(s[0]);
-+
-+    return _Unpickler_MemoPut(self, idx, value);
-+}
-+
-+static int
-+load_long_binput(UnpicklerObject *self)
-+{
-+    PyObject *value;
-+    Py_ssize_t idx;
-+    char *s;
-+
-+    if (_Unpickler_Read(self, &s, 4) < 0)
-+        return -1;
-+
-+    if (Py_SIZE(self->stack) <= 0)
-+        return stack_underflow();
-+    value = self->stack->data[Py_SIZE(self->stack) - 1];
-+
-+    idx = calc_binsize(s, 4);
-+    if (idx < 0) {
-+        PyErr_SetString(PyExc_ValueError,
-+                        "negative LONG_BINPUT argument");
-+        return -1;
-+    }
-+
-+    return _Unpickler_MemoPut(self, idx, value);
-+}
-+
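-+/* MEMOIZE (protocol 4) stores the object on top of the stack at the next free
-+   memo index, so protocol 4 picklers do not need to emit explicit PUT/BINPUT
-+   indices. */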
-+static int
-+load_memoize(UnpicklerObject *self)
-+{
-+    PyObject *value;
-+
-+    if (Py_SIZE(self->stack) <= 0)
-+        return stack_underflow();
-+    value = self->stack->data[Py_SIZE(self->stack) - 1];
-+
-+    return _Unpickler_MemoPut(self, self->memo_len, value);
-+}
-+
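-+/* Shared implementation of APPEND and APPENDS.  x is the stack index just
-+   above the target object: everything from x to the top of the stack is
-+   appended to stack[x-1].  Real lists take a single slice assignment; any
-+   other object is fed one item at a time through its append() method. */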
-+static int
-+do_append(UnpicklerObject *self, Py_ssize_t x)
-+{
-+    PyObject *value;
-+    PyObject *list;
-+    Py_ssize_t len, i;
-+
-+    len = Py_SIZE(self->stack);
-+    if (x > len || x <= 0)
-+        return stack_underflow();
-+    if (len == x)  /* nothing to do */
-+        return 0;
-+
-+    list = self->stack->data[x - 1];
-+
-+    if (PyList_Check(list)) {
-+        PyObject *slice;
-+        Py_ssize_t list_len;
-+        int ret;
-+
-+        slice = Pdata_poplist(self->stack, x);
-+        if (!slice)
-+            return -1;
-+        list_len = PyList_GET_SIZE(list);
-+        ret = PyList_SetSlice(list, list_len, list_len, slice);
-+        Py_DECREF(slice);
-+        return ret;
-+    }
-+    else {
-+        PyObject *append_func;
-+        _Py_IDENTIFIER(append);
-+
-+        append_func = _PyObject_GetAttrId(list, &PyId_append);
-+        if (append_func == NULL)
-+            return -1;
-+        for (i = x; i < len; i++) {
-+            PyObject *result;
-+
-+            value = self->stack->data[i];
-+            result = _Pickle_FastCall(append_func, value);
-+            if (result == NULL) {
-+                Pdata_clear(self->stack, i + 1);
-+                Py_SIZE(self->stack) = x;
-+                Py_DECREF(append_func);
-+                return -1;
-+            }
-+            Py_DECREF(result);
-+        }
-+        Py_SIZE(self->stack) = x;
-+        Py_DECREF(append_func);
-+    }
-+
-+    return 0;
-+}
-+
-+static int
-+load_append(UnpicklerObject *self)
-+{
-+    return do_append(self, Py_SIZE(self->stack) - 1);
-+}
-+
-+static int
-+load_appends(UnpicklerObject *self)
-+{
-+    return do_append(self, marker(self));
-+}
-+
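-+/* Shared implementation of SETITEM and SETITEMS.  Key/value pairs between
-+   stack index x and the top of the stack are stored into stack[x-1] with
-+   PyObject_SetItem() and then popped; the target only needs to support
-+   __setitem__, it does not have to be a real dict. */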
-+static int
-+do_setitems(UnpicklerObject *self, Py_ssize_t x)
-+{
-+    PyObject *value, *key;
-+    PyObject *dict;
-+    Py_ssize_t len, i;
-+    int status = 0;
-+
-+    len = Py_SIZE(self->stack);
-+    if (x > len || x <= 0)
-+        return stack_underflow();
-+    if (len == x)  /* nothing to do */
-+        return 0;
-+    if ((len - x) % 2 != 0) {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        /* Corrupt or hostile pickle -- we never write one like this. */
-+        PyErr_SetString(st->UnpicklingError,
-+                        "odd number of items for SETITEMS");
-+        return -1;
-+    }
-+
-+    /* Here, dict does not actually need to be a PyDict; it could be anything
-+       that supports the __setitem__ attribute. */
-+    dict = self->stack->data[x - 1];
-+
-+    for (i = x + 1; i < len; i += 2) {
-+        key = self->stack->data[i - 1];
-+        value = self->stack->data[i];
-+        if (PyObject_SetItem(dict, key, value) < 0) {
-+            status = -1;
-+            break;
-+        }
-+    }
-+
-+    Pdata_clear(self->stack, x);
-+    return status;
-+}
-+
-+static int
-+load_setitem(UnpicklerObject *self)
-+{
-+    return do_setitems(self, Py_SIZE(self->stack) - 2);
-+}
-+
-+static int
-+load_setitems(UnpicklerObject *self)
-+{
-+    return do_setitems(self, marker(self));
-+}
-+
-+static int
-+load_additems(UnpicklerObject *self)
-+{
-+    PyObject *set;
-+    Py_ssize_t mark, len, i;
-+
-+    mark =  marker(self);
-+    len = Py_SIZE(self->stack);
-+    if (mark > len || mark <= 0)
-+        return stack_underflow();
-+    if (len == mark)  /* nothing to do */
-+        return 0;
-+
-+    set = self->stack->data[mark - 1];
-+
-+    if (PySet_Check(set)) {
-+        PyObject *items;
-+        int status;
-+
-+        items = Pdata_poptuple(self->stack, mark);
-+        if (items == NULL)
-+            return -1;
-+
-+        status = _PySet_Update(set, items);
-+        Py_DECREF(items);
-+        return status;
-+    }
-+    else {
-+        PyObject *add_func;
-+        _Py_IDENTIFIER(add);
-+
-+        add_func = _PyObject_GetAttrId(set, &PyId_add);
-+        if (add_func == NULL)
-+            return -1;
-+        for (i = mark; i < len; i++) {
-+            PyObject *result;
-+            PyObject *item;
-+
-+            item = self->stack->data[i];
-+            result = _Pickle_FastCall(add_func, item);
-+            if (result == NULL) {
-+                Pdata_clear(self->stack, i + 1);
-+                Py_SIZE(self->stack) = mark;
-+                return -1;
-+            }
-+            Py_DECREF(result);
-+        }
-+        Py_SIZE(self->stack) = mark;
-+    }
-+
-+    return 0;
-+}
-+
-+static int
-+load_build(UnpicklerObject *self)
-+{
-+    PyObject *state, *inst, *slotstate;
-+    PyObject *setstate;
-+    int status = 0;
-+    _Py_IDENTIFIER(__setstate__);
-+
-+    /* Stack is ... instance, state.  We want to leave instance at
-+     * the stack top, possibly mutated via instance.__setstate__(state).
-+     */
-+    if (Py_SIZE(self->stack) < 2)
-+        return stack_underflow();
-+
-+    PDATA_POP(self->stack, state);
-+    if (state == NULL)
-+        return -1;
-+
-+    inst = self->stack->data[Py_SIZE(self->stack) - 1];
-+
-+    setstate = _PyObject_GetAttrId(inst, &PyId___setstate__);
-+    if (setstate == NULL) {
-+        if (PyErr_ExceptionMatches(PyExc_AttributeError))
-+            PyErr_Clear();
-+        else {
-+            Py_DECREF(state);
-+            return -1;
-+        }
-+    }
-+    else {
-+        PyObject *result;
-+
-+        /* The explicit __setstate__ is responsible for everything. */
-+        result = _Pickle_FastCall(setstate, state);
-+        Py_DECREF(setstate);
-+        if (result == NULL)
-+            return -1;
-+        Py_DECREF(result);
-+        return 0;
-+    }
-+
-+    /* A default __setstate__.  First see whether state embeds a
-+     * slot state dict too (a proto 2 addition).
-+     */
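-+    /* For instance, an object of a class that uses __slots__ normally pickles
-+       its state as the pair (instance_dict_or_None, slots_dict): the first
-+       item populates __dict__ below, the second is applied with setattr(). */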
-+    if (PyTuple_Check(state) && Py_SIZE(state) == 2) {
-+        PyObject *tmp = state;
-+
-+        state = PyTuple_GET_ITEM(tmp, 0);
-+        slotstate = PyTuple_GET_ITEM(tmp, 1);
-+        Py_INCREF(state);
-+        Py_INCREF(slotstate);
-+        Py_DECREF(tmp);
-+    }
-+    else
-+        slotstate = NULL;
-+
-+    /* Set inst.__dict__ from the state dict (if any). */
-+    if (state != Py_None) {
-+        PyObject *dict;
-+        PyObject *d_key, *d_value;
-+        Py_ssize_t i;
-+        _Py_IDENTIFIER(__dict__);
-+
-+        if (!PyDict_Check(state)) {
-+            PickleState *st = _Pickle_GetGlobalState();
-+            PyErr_SetString(st->UnpicklingError, "state is not a dictionary");
-+            goto error;
-+        }
-+        dict = _PyObject_GetAttrId(inst, &PyId___dict__);
-+        if (dict == NULL)
-+            goto error;
-+
-+        i = 0;
-+        while (PyDict_Next(state, &i, &d_key, &d_value)) {
-+            /* normally the keys for instance attributes are
-+               interned.  we should try to do that here. */
-+            Py_INCREF(d_key);
-+            if (PyUnicode_CheckExact(d_key))
-+                PyUnicode_InternInPlace(&d_key);
-+            if (PyObject_SetItem(dict, d_key, d_value) < 0) {
-+                Py_DECREF(d_key);
-+                goto error;
-+            }
-+            Py_DECREF(d_key);
-+        }
-+        Py_DECREF(dict);
-+    }
-+
-+    /* Also set instance attributes from the slotstate dict (if any). */
-+    if (slotstate != NULL) {
-+        PyObject *d_key, *d_value;
-+        Py_ssize_t i;
-+
-+        if (!PyDict_Check(slotstate)) {
-+            PickleState *st = _Pickle_GetGlobalState();
-+            PyErr_SetString(st->UnpicklingError,
-+                            "slot state is not a dictionary");
-+            goto error;
-+        }
-+        i = 0;
-+        while (PyDict_Next(slotstate, &i, &d_key, &d_value)) {
-+            if (PyObject_SetAttr(inst, d_key, d_value) < 0)
-+                goto error;
-+        }
-+    }
-+
-+    if (0) {
-+  error:
-+        status = -1;
-+    }
-+
-+    Py_DECREF(state);
-+    Py_XDECREF(slotstate);
-+    return status;
-+}
-+
-+static int
-+load_mark(UnpicklerObject *self)
-+{
-+
-+    /* Note that we split the (pickle.py) stack into two stacks, an
-+     * object stack and a mark stack. Here we push a mark onto the
-+     * mark stack.
-+     */
-+
-+    if ((self->num_marks + 1) >= self->marks_size) {
-+        size_t alloc;
-+        Py_ssize_t *marks;
-+
-+        /* Use the size_t type to check for overflow. */
-+        alloc = ((size_t)self->num_marks << 1) + 20;
-+        if (alloc > (PY_SSIZE_T_MAX / sizeof(Py_ssize_t)) ||
-+            alloc <= ((size_t)self->num_marks + 1)) {
-+            PyErr_NoMemory();
-+            return -1;
-+        }
-+
-+        if (self->marks == NULL)
-+            marks = (Py_ssize_t *) PyMem_Malloc(alloc * sizeof(Py_ssize_t));
-+        else
-+            marks = (Py_ssize_t *) PyMem_Realloc(self->marks,
-+                                                 alloc * sizeof(Py_ssize_t));
-+        if (marks == NULL) {
-+            PyErr_NoMemory();
-+            return -1;
-+        }
-+        self->marks = marks;
-+        self->marks_size = (Py_ssize_t)alloc;
-+    }
-+
-+    self->marks[self->num_marks++] = Py_SIZE(self->stack);
-+
-+    return 0;
-+}
-+
-+static int
-+load_reduce(UnpicklerObject *self)
-+{
-+    PyObject *callable = NULL;
-+    PyObject *argtup = NULL;
-+    PyObject *obj = NULL;
-+
-+    PDATA_POP(self->stack, argtup);
-+    if (argtup == NULL)
-+        return -1;
-+    PDATA_POP(self->stack, callable);
-+    if (callable) {
-+        obj = PyObject_CallObject(callable, argtup);
-+        Py_DECREF(callable);
-+    }
-+    Py_DECREF(argtup);
-+
-+    if (obj == NULL)
-+        return -1;
-+
-+    PDATA_PUSH(self->stack, obj, -1);
-+    return 0;
-+}
-+
-+/* Just raises an error if we don't know the protocol specified.  PROTO
-+ * is the first opcode for protocols >= 2.
-+ */
-+static int
-+load_proto(UnpicklerObject *self)
-+{
-+    char *s;
-+    int i;
-+
-+    if (_Unpickler_Read(self, &s, 1) < 0)
-+        return -1;
-+
-+    i = (unsigned char)s[0];
-+    if (i <= HIGHEST_PROTOCOL) {
-+        self->proto = i;
-+        return 0;
-+    }
-+
-+    PyErr_Format(PyExc_ValueError, "unsupported pickle protocol: %d", i);
-+    return -1;
-+}
-+
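-+/* FRAME (protocol 4): an 8-byte little-endian count gives the size of the
-+   next frame.  This implementation simply prefetches that many bytes into
-+   the input buffer and rewinds next_read_idx, so the framed opcodes are then
-+   served from memory. */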
-+static int
-+load_frame(UnpicklerObject *self)
-+{
-+    char *s;
-+    Py_ssize_t frame_len;
-+
-+    if (_Unpickler_Read(self, &s, 8) < 0)
-+        return -1;
-+
-+    frame_len = calc_binsize(s, 8);
-+    if (frame_len < 0) {
-+        PyErr_Format(PyExc_OverflowError,
-+                     "FRAME length exceeds system's maximum of %zd bytes",
-+                     PY_SSIZE_T_MAX);
-+        return -1;
-+    }
-+
-+    if (_Unpickler_Read(self, &s, frame_len) < 0)
-+        return -1;
-+
-+    /* Rewind to start of frame */
-+    self->next_read_idx -= frame_len;
-+    return 0;
-+}
-+
-+static PyObject *
-+load(UnpicklerObject *self)
-+{
-+    PyObject *value = NULL;
-+    char *s = NULL;
-+
-+    self->num_marks = 0;
-+    self->proto = 0;
-+    if (Py_SIZE(self->stack))
-+        Pdata_clear(self->stack, 0);
-+
-+    /* Convenient macros for the dispatch while-switch loop just below. */
-+#define OP(opcode, load_func) \
-+    case opcode: if (load_func(self) < 0) break; continue;
-+
-+#define OP_ARG(opcode, load_func, arg) \
-+    case opcode: if (load_func(self, (arg)) < 0) break; continue;
-+
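-+    /* Each case runs its loader and jumps to the next opcode; when a loader
-+       fails, the break leaves the switch and the trailing break below exits
-+       the loop with the exception still set.  STOP breaks out the same way
-+       with no error set. */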
-+    while (1) {
-+        if (_Unpickler_Read(self, &s, 1) < 0)
-+            break;
-+
-+        switch ((enum opcode)s[0]) {
-+        OP(NONE, load_none)
-+        OP(BININT, load_binint)
-+        OP(BININT1, load_binint1)
-+        OP(BININT2, load_binint2)
-+        OP(INT, load_int)
-+        OP(LONG, load_long)
-+        OP_ARG(LONG1, load_counted_long, 1)
-+        OP_ARG(LONG4, load_counted_long, 4)
-+        OP(FLOAT, load_float)
-+        OP(BINFLOAT, load_binfloat)
-+        OP_ARG(SHORT_BINBYTES, load_counted_binbytes, 1)
-+        OP_ARG(BINBYTES, load_counted_binbytes, 4)
-+        OP_ARG(BINBYTES8, load_counted_binbytes, 8)
-+        OP_ARG(SHORT_BINSTRING, load_counted_binstring, 1)
-+        OP_ARG(BINSTRING, load_counted_binstring, 4)
-+        OP(STRING, load_string)
-+        OP(UNICODE, load_unicode)
-+        OP_ARG(SHORT_BINUNICODE, load_counted_binunicode, 1)
-+        OP_ARG(BINUNICODE, load_counted_binunicode, 4)
-+        OP_ARG(BINUNICODE8, load_counted_binunicode, 8)
-+        OP_ARG(EMPTY_TUPLE, load_counted_tuple, 0)
-+        OP_ARG(TUPLE1, load_counted_tuple, 1)
-+        OP_ARG(TUPLE2, load_counted_tuple, 2)
-+        OP_ARG(TUPLE3, load_counted_tuple, 3)
-+        OP(TUPLE, load_tuple)
-+        OP(EMPTY_LIST, load_empty_list)
-+        OP(LIST, load_list)
-+        OP(EMPTY_DICT, load_empty_dict)
-+        OP(DICT, load_dict)
-+        OP(EMPTY_SET, load_empty_set)
-+        OP(ADDITEMS, load_additems)
-+        OP(FROZENSET, load_frozenset)
-+        OP(OBJ, load_obj)
-+        OP(INST, load_inst)
-+        OP(NEWOBJ, load_newobj)
-+        OP(NEWOBJ_EX, load_newobj_ex)
-+        OP(GLOBAL, load_global)
-+        OP(STACK_GLOBAL, load_stack_global)
-+        OP(APPEND, load_append)
-+        OP(APPENDS, load_appends)
-+        OP(BUILD, load_build)
-+        OP(DUP, load_dup)
-+        OP(BINGET, load_binget)
-+        OP(LONG_BINGET, load_long_binget)
-+        OP(GET, load_get)
-+        OP(MARK, load_mark)
-+        OP(BINPUT, load_binput)
-+        OP(LONG_BINPUT, load_long_binput)
-+        OP(PUT, load_put)
-+        OP(MEMOIZE, load_memoize)
-+        OP(POP, load_pop)
-+        OP(POP_MARK, load_pop_mark)
-+        OP(SETITEM, load_setitem)
-+        OP(SETITEMS, load_setitems)
-+        OP(PERSID, load_persid)
-+        OP(BINPERSID, load_binpersid)
-+        OP(REDUCE, load_reduce)
-+        OP(PROTO, load_proto)
-+        OP(FRAME, load_frame)
-+        OP_ARG(EXT1, load_extension, 1)
-+        OP_ARG(EXT2, load_extension, 2)
-+        OP_ARG(EXT4, load_extension, 4)
-+        OP_ARG(NEWTRUE, load_bool, Py_True)
-+        OP_ARG(NEWFALSE, load_bool, Py_False)
-+
-+        case STOP:
-+            break;
-+
-+        default:
-+            if (s[0] == '\0') {
-+                PyErr_SetNone(PyExc_EOFError);
-+            }
-+            else {
-+                PickleState *st = _Pickle_GetGlobalState();
-+                PyErr_Format(st->UnpicklingError,
-+                             "invalid load key, '%c'.", s[0]);
-+            }
-+            return NULL;
-+        }
-+
-+        break;                  /* and we are done! */
-+    }
-+
-+    if (PyErr_Occurred()) {
-+        return NULL;
-+    }
-+
-+    if (_Unpickler_SkipConsumed(self) < 0)
-+        return NULL;
-+
-+    PDATA_POP(self->stack, value);
-+    return value;
-+}
-+
-+/*[clinic input]
-+
-+_pickle.Unpickler.load
-+
-+Load a pickle.
-+
-+Read a pickled object representation from the open file object given
-+in the constructor, and return the reconstituted object hierarchy
-+specified therein.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_Unpickler_load_impl(UnpicklerObject *self)
-+/*[clinic end generated code: output=fdcc488aad675b14 input=acbb91a42fa9b7b9]*/
-+{
-+    UnpicklerObject *unpickler = (UnpicklerObject*)self;
-+
-+    /* Check whether the Unpickler was initialized correctly. This prevents
-+       a segfault if a subclass has overridden __init__ with a method that
-+       does not call Unpickler.__init__(). Here, we simply ensure that
-+       self->read is not NULL. */
-+    if (unpickler->read == NULL) {
-+        PickleState *st = _Pickle_GetGlobalState();
-+        PyErr_Format(st->UnpicklingError,
-+                     "Unpickler.__init__() was not called by %s.__init__()",
-+                     Py_TYPE(unpickler)->tp_name);
-+        return NULL;
-+    }
-+
-+    return load(unpickler);
-+}
-+
-+/* No-load functions to support noload, which is used to
-+   find persistent references. */
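-+/* The noload variants are a zodbpickle addition (they are not part of
-+   CPython's _pickle): opcodes that would import or construct objects push
-+   Py_None placeholders instead, while memo, mark, and persistent-id handling
-+   still runs, so a stream can be scanned for persistent references cheaply. */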
-+
-+static int
-+noload_obj(UnpicklerObject *self)
-+{
-+    Py_ssize_t i;
-+
-+    if ((i = marker(self)) < 0) return -1;
-+    return Pdata_clear(self->stack, i+1);
-+}
-+
-+
-+static int
-+noload_inst(UnpicklerObject *self)
-+{
-+    Py_ssize_t i;
-+    char *s;
-+
-+    if ((i = marker(self)) < 0) return -1;
-+    if (_Unpickler_Readline(self, &s) < 0) return -1;
-+    if (_Unpickler_Readline(self, &s) < 0) return -1;
-+    Pdata_clear(self->stack, i);
-+    PDATA_APPEND(self->stack, Py_None, -1);
-+    return 0;
-+}
-+
-+static int
-+noload_newobj(UnpicklerObject *self)
-+{
-+    PyObject *obj;
-+
-+    PDATA_POP(self->stack, obj);        /* pop argtuple */
-+    if (obj == NULL) return -1;
-+    Py_DECREF(obj);
-+
-+    PDATA_POP(self->stack, obj);        /* pop cls */
-+    if (obj == NULL) return -1;
-+    Py_DECREF(obj);
-+
-+    PDATA_APPEND(self->stack, Py_None, -1);
-+    return 0;
-+}
-+
-+static int
-+noload_newobj_ex(UnpicklerObject *self)
-+{
-+    PyObject *obj;
-+
-+    PDATA_POP(self->stack, obj);        /* pop keyword args */
-+    if (obj == NULL) return -1;
-+    Py_DECREF(obj);
-+
-+    PDATA_POP(self->stack, obj);        /* pop argtuple */
-+    if (obj == NULL) return -1;
-+    Py_DECREF(obj);
-+
-+    PDATA_POP(self->stack, obj);        /* pop cls */
-+    if (obj == NULL) return -1;
-+    Py_DECREF(obj);
-+
-+    PDATA_APPEND(self->stack, Py_None, -1);
-+    return 0;
-+}
-+
-+static int
-+noload_global(UnpicklerObject *self)
-+{
-+    char *s;
-+
-+    if (_Unpickler_Readline(self, &s) < 0) return -1;
-+    if (_Unpickler_Readline(self, &s) < 0) return -1;
-+    PDATA_APPEND(self->stack, Py_None,-1);
-+    return 0;
-+}
-+
-+static int
-+noload_stack_global(UnpicklerObject *self)
-+{
-+    PyObject *obj;
-+
-+    PDATA_POP(self->stack, obj);        /* pop global name */
-+    if (obj == NULL) return -1;
-+    Py_DECREF(obj);
-+
-+    PDATA_POP(self->stack, obj);        /* pop module name */
-+    if (obj == NULL) return -1;
-+    Py_DECREF(obj);
-+
-+    PDATA_APPEND(self->stack, Py_None,-1);
-+    return 0;
-+}
-+
-+static int
-+noload_reduce(UnpicklerObject *self)
-+{
-+    PyObject *obj;
-+
-+    PDATA_POP(self->stack, obj);        /* pop argtup */
-+    if (obj == NULL) return -1;
-+    Py_DECREF(obj);
-+
-+    PDATA_POP(self->stack, obj);        /* pop callable */
-+    if (obj == NULL) return -1;
-+    Py_DECREF(obj);
-+
-+    PDATA_APPEND(self->stack, Py_None,-1);
-+    return 0;
-+}
-+
-+static int
-+noload_build(UnpicklerObject *self)
-+{
-+    if (Py_SIZE(self->stack) < 2) return stack_underflow();
-+    Pdata_clear(self->stack, Py_SIZE(self->stack)-1);
-+    return 0;
-+}
-+
-+static int
-+noload_extension(UnpicklerObject *self, int nbytes)
-+{
-+    char *codebytes;
-+
-+    assert(nbytes == 1 || nbytes == 2 || nbytes == 4);
-+    if (_Unpickler_Read(self, &codebytes, nbytes) < 0) return -1;
-+    PDATA_APPEND(self->stack, Py_None, -1);
-+    return 0;
-+}
-+
-+static int
-+noload_append(UnpicklerObject *self)
-+{
-+    if (Py_SIZE(self->stack) < 1) return stack_underflow();
-+    return Pdata_clear(self->stack, Py_SIZE(self->stack) - 1);
-+}
-+
-+static int
-+noload_appends(UnpicklerObject *self)
-+{
-+    Py_ssize_t i;
-+    if ((i = marker(self)) < 0) return -1;
-+    if (Py_SIZE(self->stack) < i) return stack_underflow();
-+    return Pdata_clear(self->stack, i);
-+}
-+
-+static int
-+noload_setitem(UnpicklerObject *self)
-+{
-+    if (Py_SIZE(self->stack) < 2) return stack_underflow();
-+    return Pdata_clear(self->stack, Py_SIZE(self->stack) - 2);
-+}
-+
-+static int
-+noload_setitems(UnpicklerObject *self)
-+{
-+    Py_ssize_t i;
-+    if ((i = marker(self)) < 0) return -1;
-+    if (Py_SIZE(self->stack) < i) return stack_underflow();
-+    return Pdata_clear(self->stack, i);
-+}
-+
-+static int
-+noload_additems(UnpicklerObject *self)
-+{
-+    Py_ssize_t i;
-+    if ((i = marker(self)) < 0) return -1;
-+    if (Py_SIZE(self->stack) < i) return stack_underflow();
-+    if (Py_SIZE(self->stack) == i) return 0;
-+    return Pdata_clear(self->stack, i);
-+}
-+
-+static int
-+noload_frozenset(UnpicklerObject *self)
-+{
-+    Py_ssize_t i;
-+    if ((i = marker(self)) < 0) return -1;
-+    Pdata_clear(self->stack, i);
-+    PDATA_APPEND(self->stack, Py_None, -1);
-+    return 0;
-+}
-+
-+static PyObject *
-+noload(UnpicklerObject *self)
-+{
-+    PyObject *err = NULL, *val = NULL;
-+    char *s;
-+
-+    self->num_marks = 0;
-+    Pdata_clear(self->stack, 0);
-+
-+    while (1) {
-+        if (_Unpickler_Read(self, &s, 1) < 0)
-+            break;
-+
-+        switch (s[0]) {
-+        case NONE:
-+            if (load_none(self) < 0)
-+                break;
-+            continue;
-+
-+        case BININT:
-+            if (load_binint(self) < 0)
-+                break;
-+            continue;
-+
-+        case BININT1:
-+            if (load_binint1(self) < 0)
-+                break;
-+            continue;
-+
-+        case BININT2:
-+            if (load_binint2(self) < 0)
-+                break;
-+            continue;
-+
-+        case INT:
-+            if (load_int(self) < 0)
-+                break;
-+            continue;
-+
-+        case LONG:
-+            if (load_long(self) < 0)
-+                break;
-+            continue;
-+
-+        case LONG1:
-+            if (load_counted_long(self, 1) < 0)
-+                break;
-+            continue;
-+
-+        case LONG4:
-+            if (load_counted_long(self, 4) < 0)
-+                break;
-+            continue;
-+
-+        case FLOAT:
-+            if (load_float(self) < 0)
-+                break;
-+            continue;
-+
-+        case BINFLOAT:
-+            if (load_binfloat(self) < 0)
-+                break;
-+            continue;
-+
-+        case BINSTRING:
-+            if (load_counted_binstring(self, 4) < 0)
-+                break;
-+            continue;
-+
-+        case SHORT_BINSTRING:
-+            if (load_counted_binstring(self, 1) < 0)
-+                break;
-+            continue;
-+
-+        case STRING:
-+            if (load_string(self) < 0)
-+                break;
-+            continue;
-+
-+        case UNICODE:
-+            if (load_unicode(self) < 0)
-+                break;
-+            continue;
-+
-+        case BINUNICODE:
-+            if (load_counted_binunicode(self, 4) < 0)
-+                break;
-+            continue;
-+
-+        case EMPTY_TUPLE:
-+            if (load_counted_tuple(self, 0) < 0)
-+                break;
-+            continue;
-+
-+        case TUPLE1:
-+            if (load_counted_tuple(self, 1) < 0)
-+                break;
-+            continue;
-+
-+        case TUPLE2:
-+            if (load_counted_tuple(self, 2) < 0)
-+                break;
-+            continue;
-+
-+        case TUPLE3:
-+            if (load_counted_tuple(self, 3) < 0)
-+                break;
-+            continue;
-+
-+        case TUPLE:
-+            if (load_tuple(self) < 0)
-+                break;
-+            continue;
-+
-+        case EMPTY_LIST:
-+            if (load_empty_list(self) < 0)
-+                break;
-+            continue;
-+
-+        case LIST:
-+            if (load_list(self) < 0)
-+                break;
-+            continue;
-+
-+        case EMPTY_DICT:
-+            if (load_empty_dict(self) < 0)
-+                break;
-+            continue;
-+
-+        case DICT:
-+            if (load_dict(self) < 0)
-+                break;
-+            continue;
-+
-+        case OBJ:
-+            if (noload_obj(self) < 0)
-+                break;
-+            continue;
-+
-+        case INST:
-+            if (noload_inst(self) < 0)
-+                break;
-+            continue;
-+
-+        case NEWOBJ:
-+            if (noload_newobj(self) < 0)
-+                break;
-+            continue;
-+
-+        case GLOBAL:
-+            if (noload_global(self) < 0)
-+                break;
-+            continue;
-+
-+        case APPEND:
-+            if (noload_append(self) < 0)
-+                break;
-+            continue;
-+
-+        case APPENDS:
-+            if (noload_appends(self) < 0)
-+                break;
-+            continue;
-+
-+        case BUILD:
-+            if (noload_build(self) < 0)
-+                break;
-+            continue;
-+
-+        case DUP:
-+            if (load_dup(self) < 0)
-+                break;
-+            continue;
-+
-+        case BINGET:
-+            if (load_binget(self) < 0)
-+                break;
-+            continue;
-+
-+        case LONG_BINGET:
-+            if (load_long_binget(self) < 0)
-+                break;
-+            continue;
-+
-+        case GET:
-+            if (load_get(self) < 0)
-+                break;
-+            continue;
-+
-+        case EXT1:
-+            if (noload_extension(self, 1) < 0)
-+                break;
-+            continue;
-+
-+        case EXT2:
-+            if (noload_extension(self, 2) < 0)
-+                break;
-+            continue;
-+
-+        case EXT4:
-+            if (noload_extension(self, 4) < 0)
-+                break;
-+            continue;
-+
-+        case MARK:
-+            if (load_mark(self) < 0)
-+                break;
-+            continue;
-+
-+        case BINPUT:
-+            if (load_binput(self) < 0)
-+                break;
-+            continue;
-+
-+        case LONG_BINPUT:
-+            if (load_long_binput(self) < 0)
-+                break;
-+            continue;
-+
-+        case PUT:
-+            if (load_put(self) < 0)
-+                break;
-+            continue;
-+
-+        case POP:
-+            if (load_pop(self) < 0)
-+                break;
-+            continue;
-+
-+        case POP_MARK:
-+            if (load_pop_mark(self) < 0)
-+                break;
-+            continue;
-+
-+        case SETITEM:
-+            if (noload_setitem(self) < 0)
-+                break;
-+            continue;
-+
-+        case SETITEMS:
-+            if (noload_setitems(self) < 0)
-+                break;
-+            continue;
-+
-+        case STOP:
-+            break;
-+
-+        case PERSID:
-+            if (load_persid(self) < 0)
-+                break;
-+            continue;
-+
-+        case BINPERSID:
-+            if (load_binpersid(self) < 0)
-+                break;
-+            continue;
-+
-+        case REDUCE:
-+            if (noload_reduce(self) < 0)
-+                break;
-+            continue;
-+
-+        case PROTO:
-+            if (load_proto(self) < 0)
-+                break;
-+            continue;
-+
-+        case NEWTRUE:
-+            if (load_bool(self, Py_True) < 0)
-+                break;
-+            continue;
-+
-+        case NEWFALSE:
-+            if (load_bool(self, Py_False) < 0)
-+                break;
-+            continue;
-+
-+        case BINBYTES:
-+            if (load_counted_binbytes(self, 4) < 0)
-+                break;
-+            continue;
-+
-+        case SHORT_BINBYTES:
-+            if (load_counted_binbytes(self, 1) < 0)
-+                break;
-+            continue;
-+
-+        case SHORT_BINUNICODE:
-+            if (load_counted_binunicode(self, 1) < 0)
-+                break;
-+            continue;
-+
-+        case BINUNICODE8:
-+            if (load_counted_binunicode(self, 8) < 0)
-+                break;
-+            continue;
-+
-+        case BINBYTES8:
-+            if (load_counted_binbytes(self, 8) < 0)
-+                break;
-+            continue;
-+
-+        case EMPTY_SET:
-+            if (load_empty_set(self) < 0)
-+                break;
-+            continue;
-+
-+        case ADDITEMS:
-+            if (noload_additems(self) < 0)
-+                break;
-+            continue;
-+
-+        case FROZENSET:
-+            if (noload_frozenset(self) < 0)
-+                break;
-+            continue;
-+
-+        case NEWOBJ_EX:
-+            if (noload_newobj_ex(self) < 0)
-+                break;
-+            continue;
-+
-+        case STACK_GLOBAL:
-+            if (noload_stack_global(self) < 0)
-+                break;
-+            continue;
-+
-+        case MEMOIZE:
-+            if (load_memoize(self) < 0)
-+                break;
-+            continue;
-+
-+        case FRAME:
-+            if (load_frame(self) < 0)
-+                break;
-+            continue;
-+
-+        default:
-+            PyErr_Format(_Pickle_GetGlobalState()->UnpicklingError,
-+                         "invalid load key, '%c'.", s[0]);
-+        }
-+
-+        break;
-+    }
-+
-+    if ((err = PyErr_Occurred())) {
-+        if (err == PyExc_EOFError) {
-+            PyErr_SetNone(PyExc_EOFError);
-+        }
-+        return NULL;
-+    }
-+
-+    PDATA_POP(self->stack, val);
-+    return val;
-+}
-+
-+PyDoc_STRVAR(Unpickler_noload_doc,
-+"noload() -- not load a pickle, but go through most of the motions\n"
-+"\n"
-+"This function can be used to read past a pickle without instantiating\n"
-+"any objects or importing any modules.  It can also be used to find all\n"
-+"persistent references without instantiating any objects or importing\n"
-+"any modules.\n");
-+
-+static PyObject *
-+Unpickler_noload(UnpicklerObject *self, PyObject *unused)
-+{
-+    return noload(self);
-+}
-+
-+/* The name of find_class() is misleading. In newer pickle protocols, this
-+   function is used for loading any global (i.e., functions), not just
-+   classes. The name is kept only for backward compatibility. */
-+
-+/*[clinic input]
-+
-+_pickle.Unpickler.find_class
-+
-+  module_name: object
-+  global_name: object
-+  /
-+
-+Return an object from a specified module.
-+
-+If necessary, the module will be imported. Subclasses may override
-+this method (e.g. to restrict unpickling of arbitrary classes and
-+functions).
-+
-+This method is called whenever a class or a function object is
-+needed.  Both arguments passed are str objects.
-+[clinic start generated code]*/
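-+/* Overriding find_class is the usual way to restrict which globals a pickle
-+   may load.  A minimal Python sketch (illustrative only; the ALLOWED set is
-+   an application-defined assumption, not part of this module):
-+
-+       class RestrictedUnpickler(Unpickler):
-+           def find_class(self, module, name):
-+               if (module, name) not in ALLOWED:
-+                   raise UnpicklingError("forbidden global %s.%s" % (module, name))
-+               return super().find_class(module, name)
-+*/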
-+
-+static PyObject *
-+_pickle_Unpickler_find_class_impl(UnpicklerObject *self, PyObject *module_name, PyObject *global_name)
-+/*[clinic end generated code: output=64c77437e088e188 input=e2e6a865de093ef4]*/
-+{
-+    PyObject *global;
-+    PyObject *modules_dict;
-+    PyObject *module;
-+    _Py_IDENTIFIER(modules);
-+
-+    /* Try to map the old names used in Python 2.x to the new ones used in
-+       Python 3.x.  We do this only with old pickle protocols and when the
-+       user has not disabled the feature. */
-+    if (self->proto < 3 && self->fix_imports) {
-+        PyObject *key;
-+        PyObject *item;
-+        PickleState *st = _Pickle_GetGlobalState();
-+
-+        /* Check if the global (i.e., a function or a class) was renamed
-+           or moved to another module. */
-+        key = PyTuple_Pack(2, module_name, global_name);
-+        if (key == NULL)
-+            return NULL;
-+        item = PyDict_GetItemWithError(st->name_mapping_2to3, key);
-+        Py_DECREF(key);
-+        if (item) {
-+            if (!PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 2) {
-+                PyErr_Format(PyExc_RuntimeError,
-+                             "_compat_pickle.NAME_MAPPING values should be "
-+                             "2-tuples, not %.200s", Py_TYPE(item)->tp_name);
-+                return NULL;
-+            }
-+            module_name = PyTuple_GET_ITEM(item, 0);
-+            global_name = PyTuple_GET_ITEM(item, 1);
-+            if (!PyUnicode_Check(module_name) ||
-+                !PyUnicode_Check(global_name)) {
-+                PyErr_Format(PyExc_RuntimeError,
-+                             "_compat_pickle.NAME_MAPPING values should be "
-+                             "pairs of str, not (%.200s, %.200s)",
-+                             Py_TYPE(module_name)->tp_name,
-+                             Py_TYPE(global_name)->tp_name);
-+                return NULL;
-+            }
-+        }
-+        else if (PyErr_Occurred()) {
-+            return NULL;
-+        }
-+
-+        /* Check if the module was renamed. */
-+        item = PyDict_GetItemWithError(st->import_mapping_2to3, module_name);
-+        if (item) {
-+            if (!PyUnicode_Check(item)) {
-+                PyErr_Format(PyExc_RuntimeError,
-+                             "_compat_pickle.IMPORT_MAPPING values should be "
-+                             "strings, not %.200s", Py_TYPE(item)->tp_name);
-+                return NULL;
-+            }
-+            module_name = item;
-+        }
-+        else if (PyErr_Occurred()) {
-+            return NULL;
-+        }
-+    }
-+
-+    modules_dict = _PySys_GetObjectId(&PyId_modules);
-+    if (modules_dict == NULL) {
-+        PyErr_SetString(PyExc_RuntimeError, "unable to get sys.modules");
-+        return NULL;
-+    }
-+
-+    module = PyDict_GetItemWithError(modules_dict, module_name);
-+    if (module == NULL) {
-+        if (PyErr_Occurred())
-+            return NULL;
-+        module = PyImport_Import(module_name);
-+        if (module == NULL)
-+            return NULL;
-+        global = getattribute(module, global_name, self->proto >= 4);
-+        Py_DECREF(module);
-+    }
-+    else {
-+        global = getattribute(module, global_name, self->proto >= 4);
-+    }
-+    return global;
-+}
-+
-+static struct PyMethodDef Unpickler_methods[] = {
-+    _PICKLE_UNPICKLER_LOAD_METHODDEF
-+    {"noload", (PyCFunction)Unpickler_noload, METH_NOARGS,
-+     Unpickler_noload_doc},
-+    _PICKLE_UNPICKLER_FIND_CLASS_METHODDEF
-+    {NULL, NULL}                /* sentinel */
-+};
-+
-+static void
-+Unpickler_dealloc(UnpicklerObject *self)
-+{
-+    PyObject_GC_UnTrack((PyObject *)self);
-+    Py_XDECREF(self->readline);
-+    Py_XDECREF(self->read);
-+    Py_XDECREF(self->peek);
-+    Py_XDECREF(self->stack);
-+    Py_XDECREF(self->pers_func);
-+    if (self->buffer.buf != NULL) {
-+        PyBuffer_Release(&self->buffer);
-+        self->buffer.buf = NULL;
-+    }
-+
-+    _Unpickler_MemoCleanup(self);
-+    PyMem_Free(self->marks);
-+    PyMem_Free(self->input_line);
-+    PyMem_Free(self->encoding);
-+    PyMem_Free(self->errors);
-+
-+    Py_TYPE(self)->tp_free((PyObject *)self);
-+}
-+
-+static int
-+Unpickler_traverse(UnpicklerObject *self, visitproc visit, void *arg)
-+{
-+    Py_VISIT(self->readline);
-+    Py_VISIT(self->read);
-+    Py_VISIT(self->peek);
-+    Py_VISIT(self->stack);
-+    Py_VISIT(self->pers_func);
-+    return 0;
-+}
-+
-+static int
-+Unpickler_clear(UnpicklerObject *self)
-+{
-+    Py_CLEAR(self->readline);
-+    Py_CLEAR(self->read);
-+    Py_CLEAR(self->peek);
-+    Py_CLEAR(self->stack);
-+    Py_CLEAR(self->pers_func);
-+    if (self->buffer.buf != NULL) {
-+        PyBuffer_Release(&self->buffer);
-+        self->buffer.buf = NULL;
-+    }
-+
-+    _Unpickler_MemoCleanup(self);
-+    PyMem_Free(self->marks);
-+    self->marks = NULL;
-+    PyMem_Free(self->input_line);
-+    self->input_line = NULL;
-+    PyMem_Free(self->encoding);
-+    self->encoding = NULL;
-+    PyMem_Free(self->errors);
-+    self->errors = NULL;
-+
-+    return 0;
-+}
-+
-+/*[clinic input]
-+
-+_pickle.Unpickler.__init__
-+
-+  file: object
-+  *
-+  fix_imports: bool = True
-+  encoding: str = 'ASCII'
-+  errors: str = 'strict'
-+
-+This takes a binary file for reading a pickle data stream.
-+
-+The protocol version of the pickle is detected automatically, so no
-+protocol argument is needed.  Bytes past the pickled object's
-+representation are ignored.
-+
-+The argument *file* must have two methods, a read() method that takes
-+an integer argument, and a readline() method that requires no
-+arguments.  Both methods should return bytes.  Thus *file* can be a
-+binary file object opened for reading, an io.BytesIO object, or any
-+other custom object that meets this interface.
-+
-+Optional keyword arguments are *fix_imports*, *encoding* and *errors*,
-+which are used to control compatibility support for pickle streams
-+generated by Python 2.  If *fix_imports* is True, pickle will try to
-+map the old Python 2 names to the new names used in Python 3.  The
-+*encoding* and *errors* tell pickle how to decode 8-bit string
-+instances pickled by Python 2; these default to 'ASCII' and 'strict',
-+respectively.  The *encoding* can be 'bytes' to read these 8-bit
-+string instances as bytes objects.
-+[clinic start generated code]*/
-+
-+static int
-+_pickle_Unpickler___init___impl(UnpicklerObject *self, PyObject *file, int fix_imports, const char *encoding, const char *errors)
-+/*[clinic end generated code: output=b9ed1d84d315f3b5 input=30b4dc9e976b890c]*/
-+{
-+    _Py_IDENTIFIER(persistent_load);
-+
-+    /* In case of multiple __init__() calls, clear previous content. */
-+    if (self->read != NULL)
-+        (void)Unpickler_clear(self);
-+
-+    if (_Unpickler_SetInputStream(self, file) < 0)
-+        return -1;
-+
-+    if (_Unpickler_SetInputEncoding(self, encoding, errors) < 0)
-+        return -1;
-+
-+    self->fix_imports = fix_imports;
-+    if (self->fix_imports == -1)
-+        return -1;
-+
-+    if (_PyObject_HasAttrId((PyObject *)self, &PyId_persistent_load)) {
-+        self->pers_func = _PyObject_GetAttrId((PyObject *)self,
-+                                              &PyId_persistent_load);
-+        if (self->pers_func == NULL)
-+            return 1;
-+    }
-+    else {
-+        self->pers_func = NULL;
-+    }
-+
-+    self->stack = (Pdata *)Pdata_New();
-+    if (self->stack == NULL)
-+        return 1;
-+
-+    self->memo_size = 32;
-+    self->memo = _Unpickler_NewMemo(self->memo_size);
-+    if (self->memo == NULL)
-+        return -1;
-+
-+    self->proto = 0;
-+
-+    return 0;
-+}
-+
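A rough usage sketch of the constructor documented above, assuming the zodbpickle.pickle wrapper added elsewhere in this patch is importable; encoding='bytes' is the option that lets Python 2 8-bit strings come back unchanged, and the input below is an illustrative protocol-0 stream:

    import io
    from zodbpickle import pickle

    # Illustrative bytes, as Python 2's pickle.dumps('abc') (protocol 0) would write them.
    py2_blob = b"S'abc'\np0\n."

    unpickler = pickle.Unpickler(io.BytesIO(py2_blob), encoding='bytes')
    assert unpickler.load() == b'abc'   # the 8-bit string stays bytes instead of being decoded
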
-+
-+/* Define a proxy object for the Unpickler's internal memo object. This is to
-+ * avoid breaking code like:
-+ *  unpickler.memo.clear()
-+ * and
-+ *  unpickler.memo = saved_memo
-+ * Is this a good idea? Not really, but we don't want to break code that uses
-+ * it. Note that we don't implement the entire mapping API here. This is
-+ * intentional, as these should be treated as black-box implementation details.
-+ *
-+ * We do, however, have to implement pickling/unpickling support because of
-+ * real-world code like cvs2svn.
-+ */
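
A small sketch of the round trip this proxy is meant to keep working (the pickled values are arbitrary; the point is only that copy() hands back a plain dict and that the memo setter further down accepts it again):

    import io
    from zodbpickle import pickle

    up = pickle.Unpickler(io.BytesIO(pickle.dumps(['shared', 'shared'], 2)))
    up.load()

    saved_memo = up.memo.copy()   # UnpicklerMemoProxy.copy() returns an ordinary dict
    up.memo.clear()               # drops every memo slot on the underlying unpickler

    other = pickle.Unpickler(io.BytesIO(pickle.dumps(None, 2)))
    other.memo = saved_memo       # the setter accepts a dict with non-negative int keys
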
-+
-+/*[clinic input]
-+_pickle.UnpicklerMemoProxy.clear
-+
-+Remove all items from memo.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_UnpicklerMemoProxy_clear_impl(UnpicklerMemoProxyObject *self)
-+/*[clinic end generated code: output=d20cd43f4ba1fb1f input=b1df7c52e7afd9bd]*/
-+{
-+    _Unpickler_MemoCleanup(self->unpickler);
-+    self->unpickler->memo = _Unpickler_NewMemo(self->unpickler->memo_size);
-+    if (self->unpickler->memo == NULL)
-+        return NULL;
-+    Py_RETURN_NONE;
-+}
-+
-+/*[clinic input]
-+_pickle.UnpicklerMemoProxy.copy
-+
-+Copy the memo to a new object.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_UnpicklerMemoProxy_copy_impl(UnpicklerMemoProxyObject *self)
-+/*[clinic end generated code: output=e12af7e9bc1e4c77 input=97769247ce032c1d]*/
-+{
-+    Py_ssize_t i;
-+    PyObject *new_memo = PyDict_New();
-+    if (new_memo == NULL)
-+        return NULL;
-+
-+    for (i = 0; i < self->unpickler->memo_size; i++) {
-+        int status;
-+        PyObject *key, *value;
-+
-+        value = self->unpickler->memo[i];
-+        if (value == NULL)
-+            continue;
-+
-+        key = PyLong_FromSsize_t(i);
-+        if (key == NULL)
-+            goto error;
-+        status = PyDict_SetItem(new_memo, key, value);
-+        Py_DECREF(key);
-+        if (status < 0)
-+            goto error;
-+    }
-+    return new_memo;
-+
-+error:
-+    Py_DECREF(new_memo);
-+    return NULL;
-+}
-+
-+/*[clinic input]
-+_pickle.UnpicklerMemoProxy.__reduce__
-+
-+Implement pickling support.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_UnpicklerMemoProxy___reduce___impl(UnpicklerMemoProxyObject *self)
-+/*[clinic end generated code: output=6da34ac048d94cca input=6920862413407199]*/
-+{
-+    PyObject *reduce_value;
-+    PyObject *constructor_args;
-+    PyObject *contents = _pickle_UnpicklerMemoProxy_copy_impl(self);
-+    if (contents == NULL)
-+        return NULL;
-+
-+    reduce_value = PyTuple_New(2);
-+    if (reduce_value == NULL) {
-+        Py_DECREF(contents);
-+        return NULL;
-+    }
-+    constructor_args = PyTuple_New(1);
-+    if (constructor_args == NULL) {
-+        Py_DECREF(contents);
-+        Py_DECREF(reduce_value);
-+        return NULL;
-+    }
-+    PyTuple_SET_ITEM(constructor_args, 0, contents);
-+    Py_INCREF((PyObject *)&PyDict_Type);
-+    PyTuple_SET_ITEM(reduce_value, 0, (PyObject *)&PyDict_Type);
-+    PyTuple_SET_ITEM(reduce_value, 1, constructor_args);
-+    return reduce_value;
-+}
-+
-+static PyMethodDef unpicklerproxy_methods[] = {
-+    _PICKLE_UNPICKLERMEMOPROXY_CLEAR_METHODDEF
-+    _PICKLE_UNPICKLERMEMOPROXY_COPY_METHODDEF
-+    _PICKLE_UNPICKLERMEMOPROXY___REDUCE___METHODDEF
-+    {NULL, NULL}    /* sentinel */
-+};
-+
-+static void
-+UnpicklerMemoProxy_dealloc(UnpicklerMemoProxyObject *self)
-+{
-+    PyObject_GC_UnTrack(self);
-+    Py_XDECREF(self->unpickler);
-+    PyObject_GC_Del((PyObject *)self);
-+}
-+
-+static int
-+UnpicklerMemoProxy_traverse(UnpicklerMemoProxyObject *self,
-+                            visitproc visit, void *arg)
-+{
-+    Py_VISIT(self->unpickler);
-+    return 0;
-+}
-+
-+static int
-+UnpicklerMemoProxy_clear(UnpicklerMemoProxyObject *self)
-+{
-+    Py_CLEAR(self->unpickler);
-+    return 0;
-+}
-+
-+static PyTypeObject UnpicklerMemoProxyType = {
-+    PyVarObject_HEAD_INIT(NULL, 0)
-+    "_pickle.UnpicklerMemoProxy",               /*tp_name*/
-+    sizeof(UnpicklerMemoProxyObject),           /*tp_basicsize*/
-+    0,
-+    (destructor)UnpicklerMemoProxy_dealloc,     /* tp_dealloc */
-+    0,                                          /* tp_print */
-+    0,                                          /* tp_getattr */
-+    0,                                          /* tp_setattr */
-+    0,                                          /* tp_compare */
-+    0,                                          /* tp_repr */
-+    0,                                          /* tp_as_number */
-+    0,                                          /* tp_as_sequence */
-+    0,                                          /* tp_as_mapping */
-+    PyObject_HashNotImplemented,                /* tp_hash */
-+    0,                                          /* tp_call */
-+    0,                                          /* tp_str */
-+    PyObject_GenericGetAttr,                    /* tp_getattro */
-+    PyObject_GenericSetAttr,                    /* tp_setattro */
-+    0,                                          /* tp_as_buffer */
-+    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
-+    0,                                          /* tp_doc */
-+    (traverseproc)UnpicklerMemoProxy_traverse,  /* tp_traverse */
-+    (inquiry)UnpicklerMemoProxy_clear,          /* tp_clear */
-+    0,                                          /* tp_richcompare */
-+    0,                                          /* tp_weaklistoffset */
-+    0,                                          /* tp_iter */
-+    0,                                          /* tp_iternext */
-+    unpicklerproxy_methods,                     /* tp_methods */
-+};
-+
-+static PyObject *
-+UnpicklerMemoProxy_New(UnpicklerObject *unpickler)
-+{
-+    UnpicklerMemoProxyObject *self;
-+
-+    self = PyObject_GC_New(UnpicklerMemoProxyObject,
-+                           &UnpicklerMemoProxyType);
-+    if (self == NULL)
-+        return NULL;
-+    Py_INCREF(unpickler);
-+    self->unpickler = unpickler;
-+    PyObject_GC_Track(self);
-+    return (PyObject *)self;
-+}
-+
-+/*****************************************************************************/
-+
-+
-+static PyObject *
-+Unpickler_get_memo(UnpicklerObject *self)
-+{
-+    return UnpicklerMemoProxy_New(self);
-+}
-+
-+static int
-+Unpickler_set_memo(UnpicklerObject *self, PyObject *obj)
-+{
-+    PyObject **new_memo;
-+    Py_ssize_t new_memo_size = 0;
-+    Py_ssize_t i;
-+
-+    if (obj == NULL) {
-+        PyErr_SetString(PyExc_TypeError,
-+                        "attribute deletion is not supported");
-+        return -1;
-+    }
-+
-+    if (Py_TYPE(obj) == &UnpicklerMemoProxyType) {
-+        UnpicklerObject *unpickler =
-+            ((UnpicklerMemoProxyObject *)obj)->unpickler;
-+
-+        new_memo_size = unpickler->memo_size;
-+        new_memo = _Unpickler_NewMemo(new_memo_size);
-+        if (new_memo == NULL)
-+            return -1;
-+
-+        for (i = 0; i < new_memo_size; i++) {
-+            Py_XINCREF(unpickler->memo[i]);
-+            new_memo[i] = unpickler->memo[i];
-+        }
-+    }
-+    else if (PyDict_Check(obj)) {
-+        Py_ssize_t i = 0;
-+        PyObject *key, *value;
-+
-+        new_memo_size = PyDict_Size(obj);
-+        new_memo = _Unpickler_NewMemo(new_memo_size);
-+        if (new_memo == NULL)
-+            return -1;
-+
-+        while (PyDict_Next(obj, &i, &key, &value)) {
-+            Py_ssize_t idx;
-+            if (!PyLong_Check(key)) {
-+                PyErr_SetString(PyExc_TypeError,
-+                                "memo key must be integers");
-+                goto error;
-+            }
-+            idx = PyLong_AsSsize_t(key);
-+            if (idx == -1 && PyErr_Occurred())
-+                goto error;
-+            if (idx < 0) {
-+                PyErr_SetString(PyExc_ValueError,
-+                                "memo key must be positive integers.");
-+                goto error;
-+            }
-+            if (_Unpickler_MemoPut(self, idx, value) < 0)
-+                goto error;
-+        }
-+    }
-+    else {
-+        PyErr_Format(PyExc_TypeError,
-+                     "'memo' attribute must be an UnpicklerMemoProxy object "
-+                     "or dict, not %.200s", Py_TYPE(obj)->tp_name);
-+        return -1;
-+    }
-+
-+    _Unpickler_MemoCleanup(self);
-+    self->memo_size = new_memo_size;
-+    self->memo = new_memo;
-+
-+    return 0;
-+
-+  error:
-+    if (new_memo_size) {
-+        i = new_memo_size;
-+        while (--i >= 0) {
-+            Py_XDECREF(new_memo[i]);
-+        }
-+        PyMem_FREE(new_memo);
-+    }
-+    return -1;
-+}
-+
-+static PyObject *
-+Unpickler_get_persload(UnpicklerObject *self)
-+{
-+    if (self->pers_func == NULL)
-+        PyErr_SetString(PyExc_AttributeError, "persistent_load");
-+    else
-+        Py_INCREF(self->pers_func);
-+    return self->pers_func;
-+}
-+
-+static int
-+Unpickler_set_persload(UnpicklerObject *self, PyObject *value)
-+{
-+    PyObject *tmp;
-+
-+    if (value == NULL) {
-+        PyErr_SetString(PyExc_TypeError,
-+                        "attribute deletion is not supported");
-+        return -1;
-+    }
-+    if (!PyCallable_Check(value)) {
-+        PyErr_SetString(PyExc_TypeError,
-+                        "persistent_load must be a callable taking "
-+                        "one argument");
-+        return -1;
-+    }
-+
-+    tmp = self->pers_func;
-+    Py_INCREF(value);
-+    self->pers_func = value;
-+    Py_XDECREF(tmp);      /* self->pers_func can be NULL, so be careful. */
-+
-+    return 0;
-+}
-+
-+static PyGetSetDef Unpickler_getsets[] = {
-+    {"memo", (getter)Unpickler_get_memo, (setter)Unpickler_set_memo},
-+    {"persistent_load", (getter)Unpickler_get_persload,
-+                        (setter)Unpickler_set_persload},
-+    {NULL}
-+};
-+
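A minimal persistent-reference sketch to go with the getsets above; the RefPickler subclass and the db mapping are hypothetical, and the Pickler side is assumed to honour the standard persistent_id hook:

    import io
    from zodbpickle import pickle

    db = {'oid-1': ['externally stored object']}   # hypothetical external store

    class RefPickler(pickle.Pickler):
        def persistent_id(self, obj):
            # Hypothetical rule: objects living in db are written as references.
            for oid, value in db.items():
                if value is obj:
                    return oid
            return None

    buf = io.BytesIO()
    RefPickler(buf, 2).dump(db['oid-1'])

    up = pickle.Unpickler(io.BytesIO(buf.getvalue()))
    up.persistent_load = db.__getitem__   # a one-argument callable, as the setter requires
    assert up.load() is db['oid-1']
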
-+static PyTypeObject Unpickler_Type = {
-+    PyVarObject_HEAD_INIT(NULL, 0)
-+    "_pickle.Unpickler",                /*tp_name*/
-+    sizeof(UnpicklerObject),            /*tp_basicsize*/
-+    0,                                  /*tp_itemsize*/
-+    (destructor)Unpickler_dealloc,      /*tp_dealloc*/
-+    0,                                  /*tp_print*/
-+    0,                                  /*tp_getattr*/
-+    0,                                  /*tp_setattr*/
-+    0,                                  /*tp_reserved*/
-+    0,                                  /*tp_repr*/
-+    0,                                  /*tp_as_number*/
-+    0,                                  /*tp_as_sequence*/
-+    0,                                  /*tp_as_mapping*/
-+    0,                                  /*tp_hash*/
-+    0,                                  /*tp_call*/
-+    0,                                  /*tp_str*/
-+    0,                                  /*tp_getattro*/
-+    0,                                  /*tp_setattro*/
-+    0,                                  /*tp_as_buffer*/
-+    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
-+    _pickle_Unpickler___init____doc__,  /*tp_doc*/
-+    (traverseproc)Unpickler_traverse,   /*tp_traverse*/
-+    (inquiry)Unpickler_clear,           /*tp_clear*/
-+    0,                                  /*tp_richcompare*/
-+    0,                                  /*tp_weaklistoffset*/
-+    0,                                  /*tp_iter*/
-+    0,                                  /*tp_iternext*/
-+    Unpickler_methods,                  /*tp_methods*/
-+    0,                                  /*tp_members*/
-+    Unpickler_getsets,                  /*tp_getset*/
-+    0,                                  /*tp_base*/
-+    0,                                  /*tp_dict*/
-+    0,                                  /*tp_descr_get*/
-+    0,                                  /*tp_descr_set*/
-+    0,                                  /*tp_dictoffset*/
-+    _pickle_Unpickler___init__,         /*tp_init*/
-+    PyType_GenericAlloc,                /*tp_alloc*/
-+    PyType_GenericNew,                  /*tp_new*/
-+    PyObject_GC_Del,                    /*tp_free*/
-+    0,                                  /*tp_is_gc*/
-+};
-+
-+/*[clinic input]
-+
-+_pickle.dump
-+
-+  obj: object
-+  file: object
-+  protocol: object = NULL
-+  *
-+  fix_imports: bool = True
-+
-+Write a pickled representation of obj to the open file object file.
-+
-+This is equivalent to ``Pickler(file, protocol).dump(obj)``, but may
-+be more efficient.
-+
-+The optional *protocol* argument tells the pickler to use the given
-+protocol; supported protocols are 0, 1, 2, 3 and 4.  The default
-+protocol is 3; a backward-incompatible protocol designed for Python 3.
-+
-+Specifying a negative protocol version selects the highest protocol
-+version supported.  The higher the protocol used, the more recent the
-+version of Python needed to read the pickle produced.
-+
-+The *file* argument must have a write() method that accepts a single
-+bytes argument.  It can thus be a file object opened for binary
-+writing, an io.BytesIO instance, or any other custom object that meets
-+this interface.
-+
-+If *fix_imports* is True and protocol is less than 3, pickle will try
-+to map the new Python 3 names to the old module names used in Python
-+2, so that the pickle data stream is readable with Python 2.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_dump_impl(PyModuleDef *module, PyObject *obj, PyObject *file, PyObject *protocol, int fix_imports)
-+/*[clinic end generated code: output=a606e626d553850d input=e9e5fdd48de92eae]*/
-+{
-+    PicklerObject *pickler = _Pickler_New();
-+
-+    if (pickler == NULL)
-+        return NULL;
-+
-+    if (_Pickler_SetProtocol(pickler, protocol, fix_imports) < 0)
-+        goto error;
-+
-+    if (_Pickler_SetOutputStream(pickler, file) < 0)
-+        goto error;
-+
-+    if (dump(pickler, obj) < 0)
-+        goto error;
-+
-+    if (_Pickler_FlushToFile(pickler) < 0)
-+        goto error;
-+
-+    Py_DECREF(pickler);
-+    Py_RETURN_NONE;
-+
-+  error:
-+    Py_XDECREF(pickler);
-+    return NULL;
-+}
-+
-+/*[clinic input]
-+
-+_pickle.dumps
-+
-+  obj: object
-+  protocol: object = NULL
-+  *
-+  fix_imports: bool = True
-+
-+Return the pickled representation of the object as a bytes object.
-+
-+The optional *protocol* argument tells the pickler to use the given
-+protocol; supported protocols are 0, 1, 2, 3 and 4.  The default
-+protocol is 3; a backward-incompatible protocol designed for Python 3.
-+
-+Specifying a negative protocol version selects the highest protocol
-+version supported.  The higher the protocol used, the more recent the
-+version of Python needed to read the pickle produced.
-+
-+If *fix_imports* is True and *protocol* is less than 3, pickle will
-+try to map the new Python 3 names to the old module names used in
-+Python 2, so that the pickle data stream is readable with Python 2.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_dumps_impl(PyModuleDef *module, PyObject *obj, PyObject *protocol, int fix_imports)
-+/*[clinic end generated code: output=777f0deefe5b88ee input=293dbeda181580b7]*/
-+{
-+    PyObject *result;
-+    PicklerObject *pickler = _Pickler_New();
-+
-+    if (pickler == NULL)
-+        return NULL;
-+
-+    if (_Pickler_SetProtocol(pickler, protocol, fix_imports) < 0)
-+        goto error;
-+
-+    if (dump(pickler, obj) < 0)
-+        goto error;
-+
-+    result = _Pickler_GetString(pickler);
-+    Py_DECREF(pickler);
-+    return result;
-+
-+  error:
-+    Py_XDECREF(pickler);
-+    return NULL;
-+}
-+
-+/*[clinic input]
-+
-+_pickle.load
-+
-+  file: object
-+  *
-+  fix_imports: bool = True
-+  encoding: str = 'ASCII'
-+  errors: str = 'strict'
-+
-+Read and return an object from the pickle data stored in a file.
-+
-+This is equivalent to ``Unpickler(file).load()``, but may be more
-+efficient.
-+
-+The protocol version of the pickle is detected automatically, so no
-+protocol argument is needed.  Bytes past the pickled object's
-+representation are ignored.
-+
-+The argument *file* must have two methods, a read() method that takes
-+an integer argument, and a readline() method that requires no
-+arguments.  Both methods should return bytes.  Thus *file* can be a
-+binary file object opened for reading, an io.BytesIO object, or any
-+other custom object that meets this interface.
-+
-+Optional keyword arguments are *fix_imports*, *encoding* and *errors*,
-+which are used to control compatibility support for pickle streams
-+generated by Python 2.  If *fix_imports* is True, pickle will try to
-+map the old Python 2 names to the new names used in Python 3.  The
-+*encoding* and *errors* tell pickle how to decode 8-bit string
-+instances pickled by Python 2; these default to 'ASCII' and 'strict',
-+respectively.  The *encoding* can be 'bytes' to read these 8-bit
-+string instances as bytes objects.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_load_impl(PyModuleDef *module, PyObject *file, int fix_imports, const char *encoding, const char *errors)
-+/*[clinic end generated code: output=568c61356c172654 input=da97372e38e510a6]*/
-+{
-+    PyObject *result;
-+    UnpicklerObject *unpickler = _Unpickler_New();
-+
-+    if (unpickler == NULL)
-+        return NULL;
-+
-+    if (_Unpickler_SetInputStream(unpickler, file) < 0)
-+        goto error;
-+
-+    if (_Unpickler_SetInputEncoding(unpickler, encoding, errors) < 0)
-+        goto error;
-+
-+    unpickler->fix_imports = fix_imports;
-+
-+    result = load(unpickler);
-+    Py_DECREF(unpickler);
-+    return result;
-+
-+  error:
-+    Py_XDECREF(unpickler);
-+    return NULL;
-+}
-+
-+/*[clinic input]
-+
-+_pickle.loads
-+
-+  data: object
-+  *
-+  fix_imports: bool = True
-+  encoding: str = 'ASCII'
-+  errors: str = 'strict'
-+
-+Read and return an object from the given pickle data.
-+
-+The protocol version of the pickle is detected automatically, so no
-+protocol argument is needed.  Bytes past the pickled object's
-+representation are ignored.
-+
-+Optional keyword arguments are *fix_imports*, *encoding* and *errors*,
-+which are used to control compatibility support for pickle streams
-+generated by Python 2.  If *fix_imports* is True, pickle will try to
-+map the old Python 2 names to the new names used in Python 3.  The
-+*encoding* and *errors* tell pickle how to decode 8-bit string
-+instances pickled by Python 2; these default to 'ASCII' and 'strict',
-+respectively.  The *encoding* can be 'bytes' to read these 8-bit
-+string instances as bytes objects.
-+[clinic start generated code]*/
-+
-+static PyObject *
-+_pickle_loads_impl(PyModuleDef *module, PyObject *data, int fix_imports, const char *encoding, const char *errors)
-+/*[clinic end generated code: output=0b3845ad110b2522 input=f57f0fdaa2b4cb8b]*/
-+{
-+    PyObject *result;
-+    UnpicklerObject *unpickler = _Unpickler_New();
-+
-+    if (unpickler == NULL)
-+        return NULL;
-+
-+    if (_Unpickler_SetStringInput(unpickler, data) < 0)
-+        goto error;
-+
-+    if (_Unpickler_SetInputEncoding(unpickler, encoding, errors) < 0)
-+        goto error;
-+
-+    unpickler->fix_imports = fix_imports;
-+
-+    result = load(unpickler);
-+    Py_DECREF(unpickler);
-+    return result;
-+
-+  error:
-+    Py_XDECREF(unpickler);
-+    return NULL;
-+}
-+
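Taken together, the four module-level helpers mirror the Pickler/Unpickler classes; a quick round-trip sketch, again assuming the zodbpickle.pickle wrapper:

    import io
    from zodbpickle import pickle

    data = {'answer': 42, 'payload': b'\x00\x01'}

    # Protocol 2 with fix_imports=True keeps the stream readable by Python 2.
    blob = pickle.dumps(data, 2, fix_imports=True)
    assert pickle.loads(blob) == data

    # dump()/load() do the same thing through a file-like object.
    buf = io.BytesIO()
    pickle.dump(data, buf)
    buf.seek(0)
    assert pickle.load(buf) == data
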
-+static struct PyMethodDef pickle_methods[] = {
-+    _PICKLE_DUMP_METHODDEF
-+    _PICKLE_DUMPS_METHODDEF
-+    _PICKLE_LOAD_METHODDEF
-+    _PICKLE_LOADS_METHODDEF
-+    {NULL, NULL} /* sentinel */
-+};
-+
-+static int
-+pickle_clear(PyObject *m)
-+{
-+    _Pickle_ClearState(_Pickle_GetState(m));
-+    return 0;
-+}
-+
-+static void
-+pickle_free(PyObject *m)
-+{
-+    _Pickle_ClearState(_Pickle_GetState(m));
-+}
-+
-+static int
-+pickle_traverse(PyObject *m, visitproc visit, void *arg)
-+{
-+    PickleState *st = _Pickle_GetState(m);
-+    Py_VISIT(st->PickleError);
-+    Py_VISIT(st->PicklingError);
-+    Py_VISIT(st->UnpicklingError);
-+    Py_VISIT(st->dispatch_table);
-+    Py_VISIT(st->extension_registry);
-+    Py_VISIT(st->extension_cache);
-+    Py_VISIT(st->inverted_registry);
-+    Py_VISIT(st->name_mapping_2to3);
-+    Py_VISIT(st->import_mapping_2to3);
-+    Py_VISIT(st->name_mapping_3to2);
-+    Py_VISIT(st->import_mapping_3to2);
-+    Py_VISIT(st->codecs_encode);
-+    return 0;
-+}
-+
-+static struct PyModuleDef _picklemodule = {
-+    PyModuleDef_HEAD_INIT,
-+    "_pickle",            /* m_name */
-+    pickle_module_doc,    /* m_doc */
-+    sizeof(PickleState),  /* m_size */
-+    pickle_methods,       /* m_methods */
-+    NULL,                 /* m_reload */
-+    pickle_traverse,      /* m_traverse */
-+    pickle_clear,         /* m_clear */
-+    (freefunc)pickle_free /* m_free */
-+};
-+
-+PyMODINIT_FUNC
-+PyInit__pickle(void)
-+{
-+    PyObject *m;
-+    PickleState *st;
-+
-+    m = PyState_FindModule(&_picklemodule);
-+    if (m) {
-+        Py_INCREF(m);
-+        return m;
-+    }
-+
-+    if (PyType_Ready(&Unpickler_Type) < 0)
-+        return NULL;
-+    if (PyType_Ready(&Pickler_Type) < 0)
-+        return NULL;
-+    if (PyType_Ready(&Pdata_Type) < 0)
-+        return NULL;
-+    if (PyType_Ready(&PicklerMemoProxyType) < 0)
-+        return NULL;
-+    if (PyType_Ready(&UnpicklerMemoProxyType) < 0)
-+        return NULL;
-+
-+    /* Create the module and add the functions. */
-+    m = PyModule_Create(&_picklemodule);
-+    if (m == NULL)
-+        return NULL;
-+
-+    Py_INCREF(&Pickler_Type);
-+    if (PyModule_AddObject(m, "Pickler", (PyObject *)&Pickler_Type) < 0)
-+        return NULL;
-+    Py_INCREF(&Unpickler_Type);
-+    if (PyModule_AddObject(m, "Unpickler", (PyObject *)&Unpickler_Type) < 0)
-+        return NULL;
-+
-+    st = _Pickle_GetState(m);
-+
-+    /* Initialize the exceptions. */
-+    st->PickleError = PyErr_NewException("_pickle.PickleError", NULL, NULL);
-+    if (st->PickleError == NULL)
-+        return NULL;
-+    st->PicklingError = \
-+        PyErr_NewException("_pickle.PicklingError", st->PickleError, NULL);
-+    if (st->PicklingError == NULL)
-+        return NULL;
-+    st->UnpicklingError = \
-+        PyErr_NewException("_pickle.UnpicklingError", st->PickleError, NULL);
-+    if (st->UnpicklingError == NULL)
-+        return NULL;
-+
-+    Py_INCREF(st->PickleError);
-+    if (PyModule_AddObject(m, "PickleError", st->PickleError) < 0)
-+        return NULL;
-+    Py_INCREF(st->PicklingError);
-+    if (PyModule_AddObject(m, "PicklingError", st->PicklingError) < 0)
-+        return NULL;
-+    Py_INCREF(st->UnpicklingError);
-+    if (PyModule_AddObject(m, "UnpicklingError", st->UnpicklingError) < 0)
-+        return NULL;
-+
-+    if (_Pickle_InitState(st) < 0)
-+        return NULL;
-+
-+    return m;
-+}
-diff --git a/src/zodbpickle/fastpickle.py b/src/zodbpickle/fastpickle.py
-index c052373..394b187 100644
---- a/src/zodbpickle/fastpickle.py
-+++ b/src/zodbpickle/fastpickle.py
-@@ -20,9 +20,14 @@ So this is a rare case where 'import *' is exactly the right thing to do.
- 
- # pick up all names that the module defines
- if sys.version_info[0] >= 3:
--    from .pickle_3 import *
--    # do not share the globals with a slow version
--    del sys.modules['zodbpickle.pickle_3']
-+    if sys.version_info[1] >= 4:
-+        from .pickle_34 import *
-+        # do not share the globals with a slow version
-+        del sys.modules['zodbpickle.pickle_34']
-+    else:
-+        from .pickle_3 import *
-+        # do not share the globals with a slow version
-+        del sys.modules['zodbpickle.pickle_3']
- else:
-     from .pickle_2 import *
- # also make sure that we really have the fast version, although
-diff --git a/src/zodbpickle/pickle.py b/src/zodbpickle/pickle.py
-index 1491ea6..add0944 100644
---- a/src/zodbpickle/pickle.py
-+++ b/src/zodbpickle/pickle.py
-@@ -1,7 +1,10 @@
- import sys
- 
- if sys.version_info[0] >= 3:
--    from .pickle_3 import *
-+    if sys.version_info[1] >= 4:
-+        from .pickle_34 import *
-+    else:
-+        from .pickle_3 import *
- else:
-     from .pickle_2 import *
- del sys
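
The two wrapper hunks above only pick the implementation module that matches the running interpreter, so callers see one consistent set of names; a brief sketch, assuming the package layout shown in this patch:

    from zodbpickle import pickle      # pickle_34 on Python >= 3.4, pickle_3 before, pickle_2 on Python 2
    from zodbpickle import fastpickle  # same names, with the accelerated implementation enforced

    blob = fastpickle.dumps({'x': 1}, 2)
    assert pickle.loads(blob) == {'x': 1}   # both front-ends read the same stream
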
-diff --git a/src/zodbpickle/pickle_34.py b/src/zodbpickle/pickle_34.py
-new file mode 100644
-index 0000000..aa8d153
---- /dev/null
-+++ b/src/zodbpickle/pickle_34.py
-@@ -0,0 +1,1714 @@
-+"""Create portable serialized representations of Python objects.
-+
-+See module copyreg for a mechanism for registering custom picklers.
-+See module pickletools source for extensive comments.
-+
-+Classes:
-+
-+    Pickler
-+    Unpickler
-+
-+Functions:
-+
-+    dump(object, file)
-+    dumps(object) -> string
-+    load(file) -> object
-+    loads(string) -> object
-+
-+Misc variables:
-+
-+    __version__
-+    format_version
-+    compatible_formats
-+
-+"""
-+
-+from types import FunctionType
-+from copyreg import dispatch_table
-+from copyreg import _extension_registry, _inverted_registry, _extension_cache
-+from itertools import islice
-+import sys
-+from sys import maxsize
-+from struct import pack, unpack
-+import re
-+import io
-+import codecs
-+import _compat_pickle
-+
-+__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
-+           "Unpickler", "dump", "dumps", "load", "loads"]
-+
-+# Shortcut for use in isinstance testing
-+bytes_types = (bytes, bytearray)
-+__all__.append('bytes_types')
-+
-+# These are purely informational; no code uses these.
-+format_version = "4.0"                  # File format version we write
-+compatible_formats = ["1.0",            # Original protocol 0
-+                      "1.1",            # Protocol 0 with INST added
-+                      "1.2",            # Original protocol 1
-+                      "1.3",            # Protocol 1 with BINFLOAT added
-+                      "2.0",            # Protocol 2
-+                      "3.0",            # Protocol 3
-+                      "4.0",            # Protocol 4
-+                      ]                 # Old format versions we can read
-+
-+# This is the highest protocol number we know how to read.
-+HIGHEST_PROTOCOL = 4
-+
-+# The protocol we write by default.  May be less than HIGHEST_PROTOCOL.
-+# We intentionally write a protocol that Python 2.x cannot read;
-+# there are too many issues with that.
-+DEFAULT_PROTOCOL = 3
-+
-+class PickleError(Exception):
-+    """A common base class for the other pickling exceptions."""
-+    pass
-+
-+class PicklingError(PickleError):
-+    """This exception is raised when an unpicklable object is passed to the
-+    dump() method.
-+
-+    """
-+    pass
-+
-+class UnpicklingError(PickleError):
-+    """This exception is raised when there is a problem unpickling an object,
-+    such as a security violation.
-+
-+    Note that other exceptions may also be raised during unpickling, including
-+    (but not necessarily limited to) AttributeError, EOFError, ImportError,
-+    and IndexError.
-+
-+    """
-+    pass
-+
-+# An instance of _Stop is raised by Unpickler.load_stop() in response to
-+# the STOP opcode, passing the object that is the result of unpickling.
-+class _Stop(Exception):
-+    def __init__(self, value):
-+        self.value = value
-+
-+# Jython has PyStringMap; it's a dict subclass with string keys
-+try:
-+    from org.python.core import PyStringMap
-+except ImportError:
-+    PyStringMap = None
-+
-+# Pickle opcodes.  See pickletools_34.py for extensive docs.  The listing
-+# here is in kind-of alphabetical order of 1-character pickle code.
-+# pickletools groups them by purpose.
-+
-+MARK           = b'('   # push special markobject on stack
-+STOP           = b'.'   # every pickle ends with STOP
-+POP            = b'0'   # discard topmost stack item
-+POP_MARK       = b'1'   # discard stack top through topmost markobject
-+DUP            = b'2'   # duplicate top stack item
-+FLOAT          = b'F'   # push float object; decimal string argument
-+INT            = b'I'   # push integer or bool; decimal string argument
-+BININT         = b'J'   # push four-byte signed int
-+BININT1        = b'K'   # push 1-byte unsigned int
-+LONG           = b'L'   # push long; decimal string argument
-+BININT2        = b'M'   # push 2-byte unsigned int
-+NONE           = b'N'   # push None
-+PERSID         = b'P'   # push persistent object; id is taken from string arg
-+BINPERSID      = b'Q'   #  "       "         "  ;  "  "   "     "  stack
-+REDUCE         = b'R'   # apply callable to argtuple, both on stack
-+STRING         = b'S'   # push string; NL-terminated string argument
-+BINSTRING      = b'T'   # push string; counted binary string argument
-+SHORT_BINSTRING= b'U'   #  "     "   ;    "      "       "      " < 256 bytes
-+UNICODE        = b'V'   # push Unicode string; raw-unicode-escaped'd argument
-+BINUNICODE     = b'X'   #   "     "       "  ; counted UTF-8 string argument
-+APPEND         = b'a'   # append stack top to list below it
-+BUILD          = b'b'   # call __setstate__ or __dict__.update()
-+GLOBAL         = b'c'   # push self.find_class(modname, name); 2 string args
-+DICT           = b'd'   # build a dict from stack items
-+EMPTY_DICT     = b'}'   # push empty dict
-+APPENDS        = b'e'   # extend list on stack by topmost stack slice
-+GET            = b'g'   # push item from memo on stack; index is string arg
-+BINGET         = b'h'   #   "    "    "    "   "   "  ;   "    " 1-byte arg
-+INST           = b'i'   # build & push class instance
-+LONG_BINGET    = b'j'   # push item from memo on stack; index is 4-byte arg
-+LIST           = b'l'   # build list from topmost stack items
-+EMPTY_LIST     = b']'   # push empty list
-+OBJ            = b'o'   # build & push class instance
-+PUT            = b'p'   # store stack top in memo; index is string arg
-+BINPUT         = b'q'   #   "     "    "   "   " ;   "    " 1-byte arg
-+LONG_BINPUT    = b'r'   #   "     "    "   "   " ;   "    " 4-byte arg
-+SETITEM        = b's'   # add key+value pair to dict
-+TUPLE          = b't'   # build tuple from topmost stack items
-+EMPTY_TUPLE    = b')'   # push empty tuple
-+SETITEMS       = b'u'   # modify dict by adding topmost key+value pairs
-+BINFLOAT       = b'G'   # push float; arg is 8-byte float encoding
-+
-+TRUE           = b'I01\n'  # not an opcode; see INT docs in pickletools_34.py
-+FALSE          = b'I00\n'  # not an opcode; see INT docs in pickletools_34.py
-+
-+# Protocol 2
-+
-+PROTO          = b'\x80'  # identify pickle protocol
-+NEWOBJ         = b'\x81'  # build object by applying cls.__new__ to argtuple
-+EXT1           = b'\x82'  # push object from extension registry; 1-byte index
-+EXT2           = b'\x83'  # ditto, but 2-byte index
-+EXT4           = b'\x84'  # ditto, but 4-byte index
-+TUPLE1         = b'\x85'  # build 1-tuple from stack top
-+TUPLE2         = b'\x86'  # build 2-tuple from two topmost stack items
-+TUPLE3         = b'\x87'  # build 3-tuple from three topmost stack items
-+NEWTRUE        = b'\x88'  # push True
-+NEWFALSE       = b'\x89'  # push False
-+LONG1          = b'\x8a'  # push long from < 256 bytes
-+LONG4          = b'\x8b'  # push really big long
-+
-+_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3]
-+
-+# Protocol 3 (Python 3.x)
-+
-+BINBYTES       = b'B'   # push bytes; counted binary string argument
-+SHORT_BINBYTES = b'C'   #  "     "   ;    "      "       "      " < 256 bytes
-+
-+# Protocol 4
-+SHORT_BINUNICODE = b'\x8c'  # push short string; UTF-8 length < 256 bytes
-+BINUNICODE8      = b'\x8d'  # push very long string
-+BINBYTES8        = b'\x8e'  # push very long bytes string
-+EMPTY_SET        = b'\x8f'  # push empty set on the stack
-+ADDITEMS         = b'\x90'  # modify set by adding topmost stack items
-+FROZENSET        = b'\x91'  # build frozenset from topmost stack items
-+NEWOBJ_EX        = b'\x92'  # like NEWOBJ but work with keyword only arguments
-+STACK_GLOBAL     = b'\x93'  # same as GLOBAL but using names on the stacks
-+MEMOIZE          = b'\x94'  # store top of the stack in memo
-+FRAME            = b'\x95'  # indicate the beginning of a new frame
-+
-+__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$", x)])
-+
-+
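The protocol 4 additions at the bottom of this table (FRAME, MEMOIZE, SHORT_BINUNICODE and friends) can be inspected with the standard pickletools disassembler; a small sketch:

    import pickletools
    from zodbpickle import pickle

    blob = pickle.dumps({'a': 1}, 4)
    pickletools.dis(blob)
    # For a small dict this typically prints PROTO 4, FRAME, EMPTY_DICT, MEMOIZE,
    # SHORT_BINUNICODE 'a', MEMOIZE, BININT1 1, SETITEM and STOP.
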
-+class _Framer:
-+
-+    _FRAME_SIZE_TARGET = 64 * 1024
-+
-+    def __init__(self, file_write):
-+        self.file_write = file_write
-+        self.current_frame = None
-+
-+    def start_framing(self):
-+        self.current_frame = io.BytesIO()
-+
-+    def end_framing(self):
-+        if self.current_frame and self.current_frame.tell() > 0:
-+            self.commit_frame(force=True)
-+            self.current_frame = None
-+
-+    def commit_frame(self, force=False):
-+        if self.current_frame:
-+            f = self.current_frame
-+            if f.tell() >= self._FRAME_SIZE_TARGET or force:
-+                with f.getbuffer() as data:
-+                    n = len(data)
-+                    write = self.file_write
-+                    write(FRAME)
-+                    write(pack("<Q", n))
-+                    write(data)
-+                f.seek(0)
-+                f.truncate()
-+
-+    def write(self, data):
-+        if self.current_frame:
-+            return self.current_frame.write(data)
-+        else:
-+            return self.file_write(data)
-+
-+
-+class _Unframer:
-+
-+    def __init__(self, file_read, file_readline, file_tell=None):
-+        self.file_read = file_read
-+        self.file_readline = file_readline
-+        self.current_frame = None
-+
-+    def read(self, n):
-+        if self.current_frame:
-+            data = self.current_frame.read(n)
-+            if not data and n != 0:
-+                self.current_frame = None
-+                return self.file_read(n)
-+            if len(data) < n:
-+                raise UnpicklingError(
-+                    "pickle exhausted before end of frame")
-+            return data
-+        else:
-+            return self.file_read(n)
-+
-+    def readline(self):
-+        if self.current_frame:
-+            data = self.current_frame.readline()
-+            if not data:
-+                self.current_frame = None
-+                return self.file_readline()
-+            if data[-1] != b'\n'[0]:
-+                raise UnpicklingError(
-+                    "pickle exhausted before end of frame")
-+            return data
-+        else:
-+            return self.file_readline()
-+
-+    def load_frame(self, frame_size):
-+        if self.current_frame and self.current_frame.read() != b'':
-+            raise UnpicklingError(
-+                "beginning of a new frame before end of current frame")
-+        self.current_frame = io.BytesIO(self.file_read(frame_size))
-+
-+
-+# Tools used for pickling.
-+
-+def _getattribute(obj, name, allow_qualname=False):
-+    dotted_path = name.split(".")
-+    if not allow_qualname and len(dotted_path) > 1:
-+        raise AttributeError("Can't get qualified attribute {!r} on {!r}; "
-+                             "use protocols >= 4 to enable support"
-+                             .format(name, obj))
-+    for subpath in dotted_path:
-+        if subpath == '<locals>':
-+            raise AttributeError("Can't get local attribute {!r} on {!r}"
-+                                 .format(name, obj))
-+        try:
-+            obj = getattr(obj, subpath)
-+        except AttributeError:
-+            raise AttributeError("Can't get attribute {!r} on {!r}"
-+                                 .format(name, obj))
-+    return obj
-+
-+def whichmodule(obj, name, allow_qualname=False):
-+    """Find the module an object belongs to."""
-+    module_name = getattr(obj, '__module__', None)
-+    if module_name is not None:
-+        return module_name
-+    for module_name, module in sys.modules.items():
-+        if module_name == '__main__' or module is None:
-+            continue
-+        try:
-+            if _getattribute(module, name, allow_qualname) is obj:
-+                return module_name
-+        except AttributeError:
-+            pass
-+    return '__main__'
-+
-+def encode_long(x):
-+    r"""Encode a long to a two's complement little-endian binary string.
-+    Note that 0 is a special case, returning an empty string, to save a
-+    byte in the LONG1 pickling context.
-+
-+    >>> encode_long(0)
-+    b''
-+    >>> encode_long(255)
-+    b'\xff\x00'
-+    >>> encode_long(32767)
-+    b'\xff\x7f'
-+    >>> encode_long(-256)
-+    b'\x00\xff'
-+    >>> encode_long(-32768)
-+    b'\x00\x80'
-+    >>> encode_long(-128)
-+    b'\x80'
-+    >>> encode_long(127)
-+    b'\x7f'
-+    >>>
-+    """
-+    if x == 0:
-+        return b''
-+    nbytes = (x.bit_length() >> 3) + 1
-+    result = x.to_bytes(nbytes, byteorder='little', signed=True)
-+    if x < 0 and nbytes > 1:
-+        if result[-1] == 0xff and (result[-2] & 0x80) != 0:
-+            result = result[:-1]
-+    return result
-+
-+def decode_long(data):
-+    r"""Decode a long from a two's complement little-endian binary string.
-+
-+    >>> decode_long(b'')
-+    0
-+    >>> decode_long(b"\xff\x00")
-+    255
-+    >>> decode_long(b"\xff\x7f")
-+    32767
-+    >>> decode_long(b"\x00\xff")
-+    -256
-+    >>> decode_long(b"\x00\x80")
-+    -32768
-+    >>> decode_long(b"\x80")
-+    -128
-+    >>> decode_long(b"\x7f")
-+    127
-+    """
-+    return int.from_bytes(data, byteorder='little', signed=True)
-+
-+
-+# Pickling machinery
-+
-+class _Pickler:
-+
-+    def __init__(self, file, protocol=None, *, fix_imports=True):
-+        """This takes a binary file for writing a pickle data stream.
-+
-+        The optional *protocol* argument tells the pickler to use the
-+        given protocol; supported protocols are 0, 1, 2, 3 and 4.  The
-+        default protocol is 3; a backward-incompatible protocol designed
-+        for Python 3.
-+
-+        Specifying a negative protocol version selects the highest
-+        protocol version supported.  The higher the protocol used, the
-+        more recent the version of Python needed to read the pickle
-+        produced.
-+
-+        The *file* argument must have a write() method that accepts a
-+        single bytes argument. It can thus be a file object opened for
-+        binary writing, an io.BytesIO instance, or any other custom
-+        object that meets this interface.
-+
-+        If *fix_imports* is True and *protocol* is less than 3, pickle
-+        will try to map the new Python 3 names to the old module names
-+        used in Python 2, so that the pickle data stream is readable
-+        with Python 2.
-+        """
-+        if protocol is None:
-+            protocol = DEFAULT_PROTOCOL
-+        if protocol < 0:
-+            protocol = HIGHEST_PROTOCOL
-+        elif not 0 <= protocol <= HIGHEST_PROTOCOL:
-+            raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL)
-+        try:
-+            self._file_write = file.write
-+        except AttributeError:
-+            raise TypeError("file must have a 'write' attribute")
-+        self.framer = _Framer(self._file_write)
-+        self.write = self.framer.write
-+        self.memo = {}
-+        self.proto = int(protocol)
-+        self.bin = protocol >= 1
-+        self.fast = 0
-+        self.fix_imports = fix_imports and protocol < 3
-+
-+    def clear_memo(self):
-+        """Clears the pickler's "memo".
-+
-+        The memo is the data structure that remembers which objects the
-+        pickler has already seen, so that shared or recursive objects
-+        are pickled by reference and not by value.  This method is
-+        useful when re-using picklers.
-+        """
-+        self.memo.clear()
-+
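As the docstring notes, clearing the memo is what makes a pickler safe to reuse for independent streams; a brief sketch:

    import io
    from zodbpickle import pickle

    buf = io.BytesIO()
    pickler = pickle.Pickler(buf, 2)

    record = {'n': 1}
    pickler.dump(record)
    pickler.clear_memo()   # without this, the second dump would emit memo references
    pickler.dump(record)   # that a fresh unpickler has never seen

    buf.seek(0)
    first = pickle.load(buf)
    second = pickle.load(buf)
    assert first == second and first is not second
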
-+    def dump(self, obj):
-+        """Write a pickled representation of obj to the open file."""
-+        # Check whether Pickler was initialized correctly. This is
-+        # only needed to mimic the behavior of _pickle.Pickler.dump().
-+        if not hasattr(self, "_file_write"):
-+            raise PicklingError("Pickler.__init__() was not called by "
-+                                "%s.__init__()" % (self.__class__.__name__,))
-+        if self.proto >= 2:
-+            self.write(PROTO + pack("<B", self.proto))
-+        if self.proto >= 4:
-+            self.framer.start_framing()
-+        self.save(obj)
-+        self.write(STOP)
-+        self.framer.end_framing()
-+
-+    def memoize(self, obj):
-+        """Store an object in the memo."""
-+
-+        # The Pickler memo is a dictionary mapping object ids to 2-tuples
-+        # that contain the Unpickler memo key and the object being memoized.
-+        # The memo key is written to the pickle and will become
-+        # the key in the Unpickler's memo.  The object is stored in the
-+        # Pickler memo so that transient objects are kept alive during
-+        # pickling.
-+
-+        # The use of the Unpickler memo length as the memo key is just a
-+        # convention.  The only requirement is that the memo values be unique.
-+        # But there appears to be no advantage to any other scheme, and this
-+        # scheme allows the Unpickler memo to be implemented as a plain (but
-+        # growable) array, indexed by memo key.
-+        if self.fast:
-+            return
-+        assert id(obj) not in self.memo
-+        idx = len(self.memo)
-+        self.write(self.put(idx))
-+        self.memo[id(obj)] = idx, obj
-+
-+    # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
-+    def put(self, idx):
-+        if self.proto >= 4:
-+            return MEMOIZE
-+        elif self.bin:
-+            if idx < 256:
-+                return BINPUT + pack("<B", idx)
-+            else:
-+                return LONG_BINPUT + pack("<I", idx)
-+        else:
-+            return PUT + repr(idx).encode("ascii") + b'\n'
-+
-+    # Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
-+    def get(self, i):
-+        if self.bin:
-+            if i < 256:
-+                return BINGET + pack("<B", i)
-+            else:
-+                return LONG_BINGET + pack("<I", i)
-+
-+        return GET + repr(i).encode("ascii") + b'\n'
-+
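The memo bookkeeping above (memoize/put/get) is what turns repeated occurrences of one object into cheap back-references; a tiny demonstration:

    from zodbpickle import pickle

    shared = ['payload']
    a, b = pickle.loads(pickle.dumps([shared, shared], 2))
    assert a is b   # the second occurrence was written as a BINGET into the memo
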
-+    def save(self, obj, save_persistent_id=True):
-+        self.framer.commit_frame()
-+
-+        # Check for persistent id (defined by a subclass)
-+        pid = self.persistent_id(obj)
-+        if pid is not None and save_persistent_id:
-+            self.save_pers(pid)
-+            return
-+
-+        # Check the memo
-+        x = self.memo.get(id(obj))
-+        if x is not None:
-+            self.write(self.get(x[0]))
-+            return
-+
-+        # Check the type dispatch table
-+        t = type(obj)
-+        f = self.dispatch.get(t)
-+        if f is not None:
-+            f(self, obj) # Call unbound method with explicit self
-+            return
-+
-+        # Check private dispatch table if any, or else copyreg.dispatch_table
-+        reduce = getattr(self, 'dispatch_table', dispatch_table).get(t)
-+        if reduce is not None:
-+            rv = reduce(obj)
-+        else:
-+            # Check for a class with a custom metaclass; treat as regular class
-+            try:
-+                issc = issubclass(t, type)
-+            except TypeError: # t is not a class (old Boost; see SF #502085)
-+                issc = False
-+            if issc:
-+                self.save_global(obj)
-+                return
-+
-+            # Check for a __reduce_ex__ method, fall back to __reduce__
-+            reduce = getattr(obj, "__reduce_ex__", None)
-+            if reduce is not None:
-+                rv = reduce(self.proto)
-+            else:
-+                reduce = getattr(obj, "__reduce__", None)
-+                if reduce is not None:
-+                    rv = reduce()
-+                else:
-+                    raise PicklingError("Can't pickle %r object: %r" %
-+                                        (t.__name__, obj))
-+
-+        # Check for string returned by reduce(), meaning "save as global"
-+        if isinstance(rv, str):
-+            self.save_global(obj, rv)
-+            return
-+
-+        # Assert that reduce() returned a tuple
-+        if not isinstance(rv, tuple):
-+            raise PicklingError("%s must return string or tuple" % reduce)
-+
-+        # Assert that it returned an appropriately sized tuple
-+        l = len(rv)
-+        if not (2 <= l <= 5):
-+            raise PicklingError("Tuple returned by %s must have "
-+                                "two to five elements" % reduce)
-+
-+        # Save the reduce() output and finally memoize the object
-+        self.save_reduce(obj=obj, *rv)
-+
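When none of the dispatch entries match, save() falls back to __reduce_ex__/__reduce__ as outlined above; a minimal class relying on that path (Interval is a hypothetical example, and it must live at module level so the emitted GLOBAL opcode can find it again at load time):

    from zodbpickle import pickle

    class Interval:
        def __init__(self, lo, hi):
            self.lo, self.hi = lo, hi

        def __reduce__(self):
            # (callable, args): replayed at load time as Interval(self.lo, self.hi)
            return (Interval, (self.lo, self.hi))

    iv = pickle.loads(pickle.dumps(Interval(1, 5), 2))
    assert (iv.lo, iv.hi) == (1, 5)
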
-+    def persistent_id(self, obj):
-+        # This exists so a subclass can override it
-+        return None
-+
-+    def save_pers(self, pid):
-+        # Save a persistent id reference
-+        if self.bin:
-+            self.save(pid, save_persistent_id=False)
-+            self.write(BINPERSID)
-+        else:
-+            self.write(PERSID + str(pid).encode("ascii") + b'\n')
-+
-+    def save_reduce(self, func, args, state=None, listitems=None,
-+                    dictitems=None, obj=None):
-+        # This API is called by some subclasses
-+
-+        if not isinstance(args, tuple):
-+            raise PicklingError("args from save_reduce() must be a tuple")
-+        if not callable(func):
-+            raise PicklingError("func from save_reduce() must be callable")
-+
-+        save = self.save
-+        write = self.write
-+
-+        func_name = getattr(func, "__name__", "")
-+        if self.proto >= 4 and func_name == "__newobj_ex__":
-+            cls, args, kwargs = args
-+            if not hasattr(cls, "__new__"):
-+                raise PicklingError("args[0] from {} args has no __new__"
-+                                    .format(func_name))
-+            if obj is not None and cls is not obj.__class__:
-+                raise PicklingError("args[0] from {} args has the wrong class"
-+                                    .format(func_name))
-+            save(cls)
-+            save(args)
-+            save(kwargs)
-+            write(NEWOBJ_EX)
-+        elif self.proto >= 2 and func_name == "__newobj__":
-+            # A __reduce__ implementation can direct protocol 2 or newer to
-+            # use the more efficient NEWOBJ opcode, while still
-+            # allowing protocol 0 and 1 to work normally.  For this to
-+            # work, the function returned by __reduce__ should be
-+            # called __newobj__, and its first argument should be a
-+            # class.  The implementation for __newobj__
-+            # should be as follows, although pickle has no way to
-+            # verify this:
-+            #
-+            # def __newobj__(cls, *args):
-+            #     return cls.__new__(cls, *args)
-+            #
-+            # Protocols 0 and 1 will pickle a reference to __newobj__,
-+            # while protocol 2 (and above) will pickle a reference to
-+            # cls, the remaining args tuple, and the NEWOBJ code,
-+            # which calls cls.__new__(cls, *args) at unpickling time
-+            # (see load_newobj below).  If __reduce__ returns a
-+            # three-tuple, the state from the third tuple item will be
-+            # pickled regardless of the protocol, calling __setstate__
-+            # at unpickling time (see load_build below).
-+            #
-+            # Note that no standard __newobj__ implementation exists;
-+            # you have to provide your own.  This is to enforce
-+            # compatibility with Python 2.2 (pickles written using
-+            # protocol 0 or 1 in Python 2.3 should be unpicklable by
-+            # Python 2.2).
-+            cls = args[0]
-+            if not hasattr(cls, "__new__"):
-+                raise PicklingError(
-+                    "args[0] from __newobj__ args has no __new__")
-+            if obj is not None and cls is not obj.__class__:
-+                raise PicklingError(
-+                    "args[0] from __newobj__ args has the wrong class")
-+            args = args[1:]
-+            save(cls)
-+            save(args)
-+            write(NEWOBJ)
-+        else:
-+            save(func)
-+            save(args)
-+            write(REDUCE)
-+
-+        if obj is not None:
-+            # If the object is already in the memo, this means it is
-+            # recursive. In this case, throw away everything we put on the
-+            # stack, and fetch the object back from the memo.
-+            if id(obj) in self.memo:
-+                write(POP + self.get(self.memo[id(obj)][0]))
-+            else:
-+                self.memoize(obj)
-+
-+        # More new special cases (that work with older protocols as
-+        # well): when __reduce__ returns a tuple with 4 or 5 items,
-+        # the 4th and 5th item should be iterators that provide list
-+        # items and dict items (as (key, value) tuples), or None.
-+
-+        if listitems is not None:
-+            self._batch_appends(listitems)
-+
-+        if dictitems is not None:
-+            self._batch_setitems(dictitems)
-+
-+        if state is not None:
-+            save(state)
-+            write(BUILD)
-+
-+    # Methods below this point are dispatched through the dispatch table
-+
-+    dispatch = {}
-+
-+    def save_none(self, obj):
-+        self.write(NONE)
-+    dispatch[type(None)] = save_none
-+
-+    def save_bool(self, obj):
-+        if self.proto >= 2:
-+            self.write(NEWTRUE if obj else NEWFALSE)
-+        else:
-+            self.write(TRUE if obj else FALSE)
-+    dispatch[bool] = save_bool
-+
-+    def save_long(self, obj):
-+        if self.bin:
-+            # If the int is small enough to fit in a signed 4-byte 2's-comp
-+            # format, we can store it more efficiently than the general
-+            # case.
-+            # First one- and two-byte unsigned ints:
-+            if obj >= 0:
-+                if obj <= 0xff:
-+                    self.write(BININT1 + pack("<B", obj))
-+                    return
-+                if obj <= 0xffff:
-+                    self.write(BININT2 + pack("<H", obj))
-+                    return
-+            # Next check for 4-byte signed ints:
-+            if -0x80000000 <= obj <= 0x7fffffff:
-+                self.write(BININT + pack("<i", obj))
-+                return
-+        if self.proto >= 2:
-+            encoded = encode_long(obj)
-+            n = len(encoded)
-+            if n < 256:
-+                self.write(LONG1 + pack("<B", n) + encoded)
-+            else:
-+                self.write(LONG4 + pack("<i", n) + encoded)
-+            return
-+        self.write(LONG + repr(obj).encode("ascii") + b'L\n')
-+    dispatch[int] = save_long
-+
-+    def save_float(self, obj):
-+        if self.bin:
-+            self.write(BINFLOAT + pack('>d', obj))
-+        else:
-+            self.write(FLOAT + repr(obj).encode("ascii") + b'\n')
-+    dispatch[float] = save_float
-+
-+    def save_bytes(self, obj):
-+        if self.proto < 3:
-+            if not obj: # bytes object is empty
-+                self.save_reduce(bytes, (), obj=obj)
-+            else:
-+                self.save_reduce(codecs.encode,
-+                                 (str(obj, 'latin1'), 'latin1'), obj=obj)
-+            return
-+        n = len(obj)
-+        if n <= 0xff:
-+            self.write(SHORT_BINBYTES + pack("<B", n) + obj)
-+        elif n > 0xffffffff and self.proto >= 4:
-+            self.write(BINBYTES8 + pack("<Q", n) + obj)
-+        else:
-+            self.write(BINBYTES + pack("<I", n) + obj)
-+        self.memoize(obj)
-+    dispatch[bytes] = save_bytes
-+
-+    def save_str(self, obj):
-+        if self.bin:
-+            encoded = obj.encode('utf-8', 'surrogatepass')
-+            n = len(encoded)
-+            if n <= 0xff and self.proto >= 4:
-+                self.write(SHORT_BINUNICODE + pack("<B", n) + encoded)
-+            elif n > 0xffffffff and self.proto >= 4:
-+                self.write(BINUNICODE8 + pack("<Q", n) + encoded)
-+            else:
-+                self.write(BINUNICODE + pack("<I", n) + encoded)
-+        else:
-+            obj = obj.replace("\\", "\\u005c")
-+            obj = obj.replace("\n", "\\u000a")
-+            self.write(UNICODE + obj.encode('raw-unicode-escape') +
-+                       b'\n')
-+        self.memoize(obj)
-+    dispatch[str] = save_str
-+
-+    def save_tuple(self, obj):
-+        if not obj: # tuple is empty
-+            if self.bin:
-+                self.write(EMPTY_TUPLE)
-+            else:
-+                self.write(MARK + TUPLE)
-+            return
-+
-+        n = len(obj)
-+        save = self.save
-+        memo = self.memo
-+        if n <= 3 and self.proto >= 2:
-+            for element in obj:
-+                save(element)
-+            # Subtle.  Same as in the big comment below.
-+            if id(obj) in memo:
-+                get = self.get(memo[id(obj)][0])
-+                self.write(POP * n + get)
-+            else:
-+                self.write(_tuplesize2code[n])
-+                self.memoize(obj)
-+            return
-+
-+        # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple
-+        # has more than 3 elements.
-+        write = self.write
-+        write(MARK)
-+        for element in obj:
-+            save(element)
-+
-+        if id(obj) in memo:
-+            # Subtle.  obj was not in memo when we entered save_tuple(), so
-+            # the process of saving the tuple's elements must have saved
-+            # the tuple itself:  the tuple is recursive.  The proper action
-+            # now is to throw away everything we put on the stack, and
-+            # simply GET the tuple (it's already constructed).  This check
-+            # could have been done in the "for element" loop instead, but
-+            # recursive tuples are a rare thing.
-+            get = self.get(memo[id(obj)][0])
-+            if self.bin:
-+                write(POP_MARK + get)
-+            else:   # proto 0 -- POP_MARK not available
-+                write(POP * (n+1) + get)
-+            return
-+
-+        # No recursion.
-+        write(TUPLE)
-+        self.memoize(obj)
-+
-+    dispatch[tuple] = save_tuple
-+
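The recursive-tuple case discussed in the comment above can be exercised with a tuple that reaches itself through a list; a minimal sketch with the standard-library pickle, assumed to emit the same POP/GET sequence:

    import pickle

    lst = []
    tup = (lst,)
    lst.append(tup)                  # tup -> lst -> tup -> ...
    copy = pickle.loads(pickle.dumps(tup, protocol=2))
    print(copy[0][0] is copy)        # True: the cycle is reproduced, not duplicated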
-+    def save_list(self, obj):
-+        if self.bin:
-+            self.write(EMPTY_LIST)
-+        else:   # proto 0 -- can't use EMPTY_LIST
-+            self.write(MARK + LIST)
-+
-+        self.memoize(obj)
-+        self._batch_appends(obj)
-+
-+    dispatch[list] = save_list
-+
-+    _BATCHSIZE = 1000
-+
-+    def _batch_appends(self, items):
-+        # Helper to batch up APPENDS sequences
-+        save = self.save
-+        write = self.write
-+
-+        if not self.bin:
-+            for x in items:
-+                save(x)
-+                write(APPEND)
-+            return
-+
-+        it = iter(items)
-+        while True:
-+            tmp = list(islice(it, self._BATCHSIZE))
-+            n = len(tmp)
-+            if n > 1:
-+                write(MARK)
-+                for x in tmp:
-+                    save(x)
-+                write(APPENDS)
-+            elif n:
-+                save(tmp[0])
-+                write(APPEND)
-+            # else tmp is empty, and we're done
-+            if n < self._BATCHSIZE:
-+                return
-+
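A quick way to see the 1000-item batching performed by _batch_appends is to count APPENDS opcodes in a pickle of a longer list; the standard-library pickler is assumed to use the same batch size:

    import pickle
    import pickletools
    from collections import Counter

    blob = pickle.dumps(list(range(2500)), protocol=2)
    counts = Counter(op.name for op, arg, pos in pickletools.genops(blob))
    print(counts["APPENDS"])         # expected 3: batches of 1000, 1000 and 500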
-+    def save_dict(self, obj):
-+        if self.bin:
-+            self.write(EMPTY_DICT)
-+        else:   # proto 0 -- can't use EMPTY_DICT
-+            self.write(MARK + DICT)
-+
-+        self.memoize(obj)
-+        self._batch_setitems(obj.items())
-+
-+    dispatch[dict] = save_dict
-+    if PyStringMap is not None:
-+        dispatch[PyStringMap] = save_dict
-+
-+    def _batch_setitems(self, items):
-+        # Helper to batch up SETITEMS sequences; proto >= 1 only
-+        save = self.save
-+        write = self.write
-+
-+        if not self.bin:
-+            for k, v in items:
-+                save(k)
-+                save(v)
-+                write(SETITEM)
-+            return
-+
-+        it = iter(items)
-+        while True:
-+            tmp = list(islice(it, self._BATCHSIZE))
-+            n = len(tmp)
-+            if n > 1:
-+                write(MARK)
-+                for k, v in tmp:
-+                    save(k)
-+                    save(v)
-+                write(SETITEMS)
-+            elif n:
-+                k, v = tmp[0]
-+                save(k)
-+                save(v)
-+                write(SETITEM)
-+            # else tmp is empty, and we're done
-+            if n < self._BATCHSIZE:
-+                return
-+
-+    def save_set(self, obj):
-+        save = self.save
-+        write = self.write
-+
-+        if self.proto < 4:
-+            self.save_reduce(set, (list(obj),), obj=obj)
-+            return
-+
-+        write(EMPTY_SET)
-+        self.memoize(obj)
-+
-+        it = iter(obj)
-+        while True:
-+            batch = list(islice(it, self._BATCHSIZE))
-+            n = len(batch)
-+            if n > 0:
-+                write(MARK)
-+                for item in batch:
-+                    save(item)
-+                write(ADDITEMS)
-+            if n < self._BATCHSIZE:
-+                return
-+    dispatch[set] = save_set
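save_set only uses EMPTY_SET/ADDITEMS from protocol 4 on; older protocols take the save_reduce fallback. A small sketch with the standard-library pickle, assumed to make the same choice:

    import pickle
    import pickletools

    for proto in (2, 4):
        blob = pickle.dumps({1, 2, 3}, proto)
        ops = {op.name for op, arg, pos in pickletools.genops(blob)}
        print(proto, "ADDITEMS" in ops)   # 2 -> False (REDUCE of set), 4 -> True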
-+
-+    def save_frozenset(self, obj):
-+        save = self.save
-+        write = self.write
-+
-+        if self.proto < 4:
-+            self.save_reduce(frozenset, (list(obj),), obj=obj)
-+            return
-+
-+        write(MARK)
-+        for item in obj:
-+            save(item)
-+
-+        if id(obj) in self.memo:
-+            # If the object is already in the memo, this means it is
-+            # recursive. In this case, throw away everything we put on the
-+            # stack, and fetch the object back from the memo.
-+            write(POP_MARK + self.get(self.memo[id(obj)][0]))
-+            return
-+
-+        write(FROZENSET)
-+        self.memoize(obj)
-+    dispatch[frozenset] = save_frozenset
-+
-+    def save_global(self, obj, name=None):
-+        write = self.write
-+        memo = self.memo
-+
-+        if name is None and self.proto >= 4:
-+            name = getattr(obj, '__qualname__', None)
-+        if name is None:
-+            name = obj.__name__
-+
-+        module_name = whichmodule(obj, name, allow_qualname=self.proto >= 4)
-+        try:
-+            __import__(module_name, level=0)
-+            module = sys.modules[module_name]
-+            obj2 = _getattribute(module, name, allow_qualname=self.proto >= 4)
-+        except (ImportError, KeyError, AttributeError):
-+            raise PicklingError(
-+                "Can't pickle %r: it's not found as %s.%s" %
-+                (obj, module_name, name))
-+        else:
-+            if obj2 is not obj:
-+                raise PicklingError(
-+                    "Can't pickle %r: it's not the same object as %s.%s" %
-+                    (obj, module_name, name))
-+
-+        if self.proto >= 2:
-+            code = _extension_registry.get((module_name, name))
-+            if code:
-+                assert code > 0
-+                if code <= 0xff:
-+                    write(EXT1 + pack("<B", code))
-+                elif code <= 0xffff:
-+                    write(EXT2 + pack("<H", code))
-+                else:
-+                    write(EXT4 + pack("<i", code))
-+                return
-+        # Non-ASCII identifiers are supported only with protocols >= 3.
-+        if self.proto >= 4:
-+            self.save(module_name)
-+            self.save(name)
-+            write(STACK_GLOBAL)
-+        elif self.proto >= 3:
-+            write(GLOBAL + bytes(module_name, "utf-8") + b'\n' +
-+                  bytes(name, "utf-8") + b'\n')
-+        else:
-+            if self.fix_imports:
-+                r_name_mapping = _compat_pickle.REVERSE_NAME_MAPPING
-+                r_import_mapping = _compat_pickle.REVERSE_IMPORT_MAPPING
-+                if (module_name, name) in r_name_mapping:
-+                    module_name, name = r_name_mapping[(module_name, name)]
-+                if module_name in r_import_mapping:
-+                    module_name = r_import_mapping[module_name]
-+            try:
-+                write(GLOBAL + bytes(module_name, "ascii") + b'\n' +
-+                      bytes(name, "ascii") + b'\n')
-+            except UnicodeEncodeError:
-+                raise PicklingError(
-+                    "can't pickle global identifier '%s.%s' using "
-+                    "pickle protocol %i" % (module, name, self.proto))
-+
-+        self.memoize(obj)
-+
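The lookup-and-verify step in save_global is what makes objects that cannot be found under module.name unpicklable; a lambda is the classic example (shown with the standard-library pickle, which raises the same kind of error):

    import pickle

    try:
        pickle.dumps(lambda x: x)
    except (pickle.PicklingError, AttributeError) as exc:
        print(type(exc).__name__, exc)   # "attribute lookup <lambda> ... failed"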
-+    def save_type(self, obj):
-+        if obj is type(None):
-+            return self.save_reduce(type, (None,), obj=obj)
-+        elif obj is type(NotImplemented):
-+            return self.save_reduce(type, (NotImplemented,), obj=obj)
-+        elif obj is type(...):
-+            return self.save_reduce(type, (...,), obj=obj)
-+        return self.save_global(obj)
-+
-+    dispatch[FunctionType] = save_global
-+    dispatch[type] = save_type
-+
-+
-+# Unpickling machinery
-+
-+class _Unpickler:
-+
-+    def __init__(self, file, *, fix_imports=True,
-+                 encoding="ASCII", errors="strict"):
-+        """This takes a binary file for reading a pickle data stream.
-+
-+        The protocol version of the pickle is detected automatically, so
-+        no proto argument is needed.
-+
-+        The argument *file* must have two methods, a read() method that
-+        takes an integer argument, and a readline() method that requires
-+        no arguments.  Both methods should return bytes.  Thus *file*
-+        can be a binary file object opened for reading, an io.BytesIO
-+        object, or any other custom object that meets this interface.
-+
-+        Optional keyword arguments are *fix_imports*, *encoding* and
-+        *errors*, which are used to control compatibility support for
-+        pickle streams generated by Python 2.  If *fix_imports* is True,
-+        pickle will try to map the old Python 2 names to the new names
-+        used in Python 3.  The *encoding* and *errors* arguments tell
-+        pickle how to decode 8-bit string instances pickled by Python 2;
-+        these default to 'ASCII' and 'strict', respectively.  *encoding*
-+        can be 'bytes' to read these 8-bit string instances as bytes
-+        objects.  *errors* can also be 'bytes', which means any string
-+        that can't be decoded will be left as a bytes object.
-+        """
-+        self._file_readline = file.readline
-+        self._file_read = file.read
-+        self.memo = {}
-+        self.encoding = encoding
-+        self.errors = errors
-+        self.proto = 0
-+        self.fix_imports = fix_imports
-+
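The *encoding*/*errors* options documented above matter mostly for pickles written by Python 2. A short sketch with the standard-library Unpickler (same keyword arguments), decoding a protocol-0 STRING either as text or as raw bytes:

    import pickle

    py2_blob = b"S'caf\\xe9'\np0\n."                     # Python 2: pickle.dumps('caf\xe9')
    print(pickle.loads(py2_blob, encoding="latin-1"))    # 'café'
    print(pickle.loads(py2_blob, encoding="bytes"))      # b'caf\xe9'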
-+    def load(self):
-+        """Read a pickled object representation from the open file.
-+
-+        Return the reconstituted object hierarchy specified in the file.
-+        """
-+        # Check whether Unpickler was initialized correctly. This is
-+        # only needed to mimic the behavior of _pickle.Unpickler.load().
-+        if not hasattr(self, "_file_read"):
-+            raise UnpicklingError("Unpickler.__init__() was not called by "
-+                                  "%s.__init__()" % (self.__class__.__name__,))
-+        self._unframer = _Unframer(self._file_read, self._file_readline)
-+        self.read = self._unframer.read
-+        self.readline = self._unframer.readline
-+        self.mark = object() # any new unique object
-+        self.stack = []
-+        self.append = self.stack.append
-+        self.proto = 0
-+        read = self.read
-+        dispatch = self.dispatch
-+        try:
-+            while True:
-+                key = read(1)
-+                if not key:
-+                    raise EOFError
-+                assert isinstance(key, bytes_types)
-+                dispatch[key[0]](self)
-+        except _Stop as stopinst:
-+            return stopinst.value
-+
-+    def noload(self):
-+        """Read a pickled object representation from the open file.
-+
-+        Don't return anything useful, just go through the motions.
-+        """
-+        # Check whether Unpickler was initialized correctly. This is
-+        # only needed to mimic the behavior of _pickle.Unpickler.noload().
-+        if not hasattr(self, "_file_read"):
-+            raise UnpicklingError("Unpickler.__init__() was not called by "
-+                                  "%s.__init__()" % (self.__class__.__name__,))
-+        self._unframer = _Unframer(self._file_read, self._file_readline)
-+        self.read = self._unframer.read
-+        self.readline = self._unframer.readline
-+        self.mark = object() # any new unique object
-+        self.stack = []
-+        self.append = self.stack.append
-+        self.proto = 0
-+        read = self.read
-+        dispatch = self.nl_dispatch
-+        try:
-+            while True:
-+                key = read(1)
-+                if not key:
-+                    raise EOFError
-+                assert isinstance(key, bytes_types)
-+                dispatch[key[0]](self)
-+        except _Stop as stopinst:
-+            return stopinst.value
-+
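noload() is the zodbpickle-specific entry point (the standard-library Unpickler has no such method); a hedged usage sketch follows, where the import path is an assumption about the installed package layout:

    import io

    try:
        from zodbpickle import pickle as zpickle     # assumed module name
    except ImportError:
        zpickle = None

    if zpickle is not None:
        blob = zpickle.dumps([1, 2, 3], protocol=2)
        unpickler = zpickle.Unpickler(io.BytesIO(blob))
        print(unpickler.noload())    # goes through the motions; nothing useful returned
    else:
        print("zodbpickle not installed; skipping the noload() demo")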
-+    # Return largest index k such that self.stack[k] is self.mark.
-+    # If the stack doesn't contain a mark, eventually raises IndexError.
-+    # This could be sped up by maintaining another stack of indices at which
-+    # the mark appears.  For that matter, the latter stack would suffice,
-+    # and we wouldn't need to push mark objects on self.stack at all.
-+    # Doing so is probably a good thing, though, since if the pickle is
-+    # corrupt (or hostile) we may get a clue from finding self.mark embedded
-+    # in unpickled objects.
-+    def marker(self):
-+        stack = self.stack
-+        mark = self.mark
-+        k = len(stack)-1
-+        while stack[k] is not mark: k = k-1
-+        return k
-+
-+    def persistent_load(self, pid):
-+        raise UnpicklingError("unsupported persistent id encountered")
-+
-+    dispatch = {}
-+
-+    def load_proto(self):
-+        proto = self.read(1)[0]
-+        if not 0 <= proto <= HIGHEST_PROTOCOL:
-+            raise ValueError("unsupported pickle protocol: %d" % proto)
-+        self.proto = proto
-+    dispatch[PROTO[0]] = load_proto
-+
-+    def load_frame(self):
-+        frame_size, = unpack('<Q', self.read(8))
-+        if frame_size > sys.maxsize:
-+            raise ValueError("frame size > sys.maxsize: %d" % frame_size)
-+        self._unframer.load_frame(frame_size)
-+    dispatch[FRAME[0]] = load_frame
-+
-+    def load_persid(self):
-+        pid = self.readline()[:-1].decode("ascii")
-+        self.append(self.persistent_load(pid))
-+    dispatch[PERSID[0]] = load_persid
-+
-+    def load_binpersid(self):
-+        pid = self.stack.pop()
-+        self.append(self.persistent_load(pid))
-+    dispatch[BINPERSID[0]] = load_binpersid
-+
-+    def load_none(self):
-+        self.append(None)
-+    dispatch[NONE[0]] = load_none
-+
-+    def load_false(self):
-+        self.append(False)
-+    dispatch[NEWFALSE[0]] = load_false
-+
-+    def load_true(self):
-+        self.append(True)
-+    dispatch[NEWTRUE[0]] = load_true
-+
-+    def load_int(self):
-+        data = self.readline()
-+        if data == FALSE[1:]:
-+            val = False
-+        elif data == TRUE[1:]:
-+            val = True
-+        else:
-+            val = int(data, 0)
-+        self.append(val)
-+    dispatch[INT[0]] = load_int
-+
-+    def load_binint(self):
-+        self.append(unpack('<i', self.read(4))[0])
-+    dispatch[BININT[0]] = load_binint
-+
-+    def load_binint1(self):
-+        self.append(self.read(1)[0])
-+    dispatch[BININT1[0]] = load_binint1
-+
-+    def load_binint2(self):
-+        self.append(unpack('<H', self.read(2))[0])
-+    dispatch[BININT2[0]] = load_binint2
-+
-+    def load_long(self):
-+        val = self.readline()[:-1]
-+        if val and val[-1] == b'L'[0]:
-+            val = val[:-1]
-+        self.append(int(val, 0))
-+    dispatch[LONG[0]] = load_long
-+
-+    def load_long1(self):
-+        n = self.read(1)[0]
-+        data = self.read(n)
-+        self.append(decode_long(data))
-+    dispatch[LONG1[0]] = load_long1
-+
-+    def load_long4(self):
-+        n, = unpack('<i', self.read(4))
-+        if n < 0:
-+            # Corrupt or hostile pickle -- we never write one like this
-+            raise UnpicklingError("LONG pickle has negative byte count")
-+        data = self.read(n)
-+        self.append(decode_long(data))
-+    dispatch[LONG4[0]] = load_long4
-+
-+    def load_float(self):
-+        self.append(float(self.readline()[:-1]))
-+    dispatch[FLOAT[0]] = load_float
-+
-+    def load_binfloat(self):
-+        self.append(unpack('>d', self.read(8))[0])
-+    dispatch[BINFLOAT[0]] = load_binfloat
-+
-+    def _decode_string(self, value):
-+        # Used to allow strings from Python 2 to be decoded either as
-+        # bytes or Unicode strings.  This should be used only with the
-+        # STRING, BINSTRING and SHORT_BINSTRING opcodes.
-+        if self.encoding == "bytes":
-+            return value
-+        elif self.errors == "bytes":
-+            try:
-+                return value.decode(self.encoding)
-+            except UnicodeDecodeError:
-+                return value
-+        else:
-+            return value.decode(self.encoding, self.errors)
-+
-+    def load_string(self):
-+        data = self.readline()[:-1]
-+        # Strip outermost quotes
-+        if len(data) >= 2 and data[0] == data[-1] and data[0] in b'"\'':
-+            data = data[1:-1]
-+        else:
-+            raise UnpicklingError("the STRING opcode argument must be quoted")
-+        self.append(self._decode_string(codecs.escape_decode(data)[0]))
-+    dispatch[STRING[0]] = load_string
-+
-+    def load_binstring(self):
-+        # Deprecated BINSTRING uses signed 32-bit length
-+        len, = unpack('<i', self.read(4))
-+        if len < 0:
-+            raise UnpicklingError("BINSTRING pickle has negative byte count")
-+        data = self.read(len)
-+        self.append(self._decode_string(data))
-+    dispatch[BINSTRING[0]] = load_binstring
-+
-+    def load_binbytes(self):
-+        len, = unpack('<I', self.read(4))
-+        if len > maxsize:
-+            raise UnpicklingError("BINBYTES exceeds system's maximum size "
-+                                  "of %d bytes" % maxsize)
-+        self.append(self.read(len))
-+    dispatch[BINBYTES[0]] = load_binbytes
-+
-+    def load_unicode(self):
-+        self.append(str(self.readline()[:-1], 'raw-unicode-escape'))
-+    dispatch[UNICODE[0]] = load_unicode
-+
-+    def load_binunicode(self):
-+        len, = unpack('<I', self.read(4))
-+        if len > maxsize:
-+            raise UnpicklingError("BINUNICODE exceeds system's maximum size "
-+                                  "of %d bytes" % maxsize)
-+        self.append(str(self.read(len), 'utf-8', 'surrogatepass'))
-+    dispatch[BINUNICODE[0]] = load_binunicode
-+
-+    def load_binunicode8(self):
-+        len, = unpack('<Q', self.read(8))
-+        if len > maxsize:
-+            raise UnpicklingError("BINUNICODE8 exceeds system's maximum size "
-+                                  "of %d bytes" % maxsize)
-+        self.append(str(self.read(len), 'utf-8', 'surrogatepass'))
-+    dispatch[BINUNICODE8[0]] = load_binunicode8
-+
-+    def load_short_binstring(self):
-+        len = self.read(1)[0]
-+        data = self.read(len)
-+        self.append(self._decode_string(data))
-+    dispatch[SHORT_BINSTRING[0]] = load_short_binstring
-+
-+    def load_short_binbytes(self):
-+        len = self.read(1)[0]
-+        self.append(self.read(len))
-+    dispatch[SHORT_BINBYTES[0]] = load_short_binbytes
-+
-+    def load_short_binunicode(self):
-+        len = self.read(1)[0]
-+        self.append(str(self.read(len), 'utf-8', 'surrogatepass'))
-+    dispatch[SHORT_BINUNICODE[0]] = load_short_binunicode
-+
-+    def load_tuple(self):
-+        k = self.marker()
-+        self.stack[k:] = [tuple(self.stack[k+1:])]
-+    dispatch[TUPLE[0]] = load_tuple
-+
-+    def load_empty_tuple(self):
-+        self.append(())
-+    dispatch[EMPTY_TUPLE[0]] = load_empty_tuple
-+
-+    def load_tuple1(self):
-+        self.stack[-1] = (self.stack[-1],)
-+    dispatch[TUPLE1[0]] = load_tuple1
-+
-+    def load_tuple2(self):
-+        self.stack[-2:] = [(self.stack[-2], self.stack[-1])]
-+    dispatch[TUPLE2[0]] = load_tuple2
-+
-+    def load_tuple3(self):
-+        self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])]
-+    dispatch[TUPLE3[0]] = load_tuple3
-+
-+    def load_empty_list(self):
-+        self.append([])
-+    dispatch[EMPTY_LIST[0]] = load_empty_list
-+
-+    def load_empty_dictionary(self):
-+        self.append({})
-+    dispatch[EMPTY_DICT[0]] = load_empty_dictionary
-+
-+    def load_empty_set(self):
-+        self.append(set())
-+    dispatch[EMPTY_SET[0]] = load_empty_set
-+
-+    def load_frozenset(self):
-+        k = self.marker()
-+        self.stack[k:] = [frozenset(self.stack[k+1:])]
-+    dispatch[FROZENSET[0]] = load_frozenset
-+
-+    def load_list(self):
-+        k = self.marker()
-+        self.stack[k:] = [self.stack[k+1:]]
-+    dispatch[LIST[0]] = load_list
-+
-+    def load_dict(self):
-+        k = self.marker()
-+        items = self.stack[k+1:]
-+        d = {items[i]: items[i+1]
-+             for i in range(0, len(items), 2)}
-+        self.stack[k:] = [d]
-+    dispatch[DICT[0]] = load_dict
-+
-+    # INST and OBJ differ only in how they get a class object.  It's not
-+    # only sensible to do the rest in a common routine, the two routines
-+    # previously diverged and grew different bugs.
-+    # klass is the class to instantiate, and k points to the topmost mark
-+    # object, following which are the arguments for klass.__init__.
-+    def _instantiate(self, klass, k):
-+        args = tuple(self.stack[k+1:])
-+        del self.stack[k:]
-+        if (args or not isinstance(klass, type) or
-+            hasattr(klass, "__getinitargs__")):
-+            try:
-+                value = klass(*args)
-+            except TypeError as err:
-+                raise TypeError("in constructor for %s: %s" %
-+                                (klass.__name__, str(err)), sys.exc_info()[2])
-+        else:
-+            value = klass.__new__(klass)
-+        self.append(value)
-+
-+    def load_inst(self):
-+        module = self.readline()[:-1].decode("ascii")
-+        name = self.readline()[:-1].decode("ascii")
-+        klass = self.find_class(module, name)
-+        self._instantiate(klass, self.marker())
-+    dispatch[INST[0]] = load_inst
-+
-+    def load_obj(self):
-+        # Stack is ... markobject classobject arg1 arg2 ...
-+        k = self.marker()
-+        klass = self.stack.pop(k+1)
-+        self._instantiate(klass, k)
-+    dispatch[OBJ[0]] = load_obj
-+
-+    def load_newobj(self):
-+        args = self.stack.pop()
-+        cls = self.stack.pop()
-+        obj = cls.__new__(cls, *args)
-+        self.append(obj)
-+    dispatch[NEWOBJ[0]] = load_newobj
-+
-+    def load_newobj_ex(self):
-+        kwargs = self.stack.pop()
-+        args = self.stack.pop()
-+        cls = self.stack.pop()
-+        obj = cls.__new__(cls, *args, **kwargs)
-+        self.append(obj)
-+    dispatch[NEWOBJ_EX[0]] = load_newobj_ex
-+
-+    def load_global(self):
-+        module = self.readline()[:-1].decode("utf-8")
-+        name = self.readline()[:-1].decode("utf-8")
-+        klass = self.find_class(module, name)
-+        self.append(klass)
-+    dispatch[GLOBAL[0]] = load_global
-+
-+    def load_stack_global(self):
-+        name = self.stack.pop()
-+        module = self.stack.pop()
-+        if type(name) is not str or type(module) is not str:
-+            raise UnpicklingError("STACK_GLOBAL requires str")
-+        self.append(self.find_class(module, name))
-+    dispatch[STACK_GLOBAL[0]] = load_stack_global
-+
-+    def load_ext1(self):
-+        code = self.read(1)[0]
-+        self.get_extension(code)
-+    dispatch[EXT1[0]] = load_ext1
-+
-+    def load_ext2(self):
-+        code, = unpack('<H', self.read(2))
-+        self.get_extension(code)
-+    dispatch[EXT2[0]] = load_ext2
-+
-+    def load_ext4(self):
-+        code, = unpack('<i', self.read(4))
-+        self.get_extension(code)
-+    dispatch[EXT4[0]] = load_ext4
-+
-+    def get_extension(self, code):
-+        nil = []
-+        obj = _extension_cache.get(code, nil)
-+        if obj is not nil:
-+            self.append(obj)
-+            return
-+        key = _inverted_registry.get(code)
-+        if not key:
-+            if code <= 0: # note that 0 is forbidden
-+                # Corrupt or hostile pickle.
-+                raise UnpicklingError("EXT specifies code <= 0")
-+            raise ValueError("unregistered extension code %d" % code)
-+        obj = self.find_class(*key)
-+        _extension_cache[code] = obj
-+        self.append(obj)
-+
-+    def find_class(self, module, name):
-+        # Subclasses may override this.
-+        if self.proto < 3 and self.fix_imports:
-+            if (module, name) in _compat_pickle.NAME_MAPPING:
-+                module, name = _compat_pickle.NAME_MAPPING[(module, name)]
-+            if module in _compat_pickle.IMPORT_MAPPING:
-+                module = _compat_pickle.IMPORT_MAPPING[module]
-+        __import__(module, level=0)
-+        return _getattribute(sys.modules[module], name,
-+                             allow_qualname=self.proto >= 4)
-+
-+    def load_reduce(self):
-+        stack = self.stack
-+        args = stack.pop()
-+        func = stack[-1]
-+        try:
-+            value = func(*args)
-+        except:
-+            print(sys.exc_info())
-+            print(func, args)
-+            raise
-+        stack[-1] = value
-+    dispatch[REDUCE[0]] = load_reduce
-+
-+    def load_pop(self):
-+        del self.stack[-1]
-+    dispatch[POP[0]] = load_pop
-+
-+    def load_pop_mark(self):
-+        k = self.marker()
-+        del self.stack[k:]
-+    dispatch[POP_MARK[0]] = load_pop_mark
-+
-+    def load_dup(self):
-+        self.append(self.stack[-1])
-+    dispatch[DUP[0]] = load_dup
-+
-+    def load_get(self):
-+        i = int(self.readline()[:-1])
-+        self.append(self.memo[i])
-+    dispatch[GET[0]] = load_get
-+
-+    def load_binget(self):
-+        i = self.read(1)[0]
-+        self.append(self.memo[i])
-+    dispatch[BINGET[0]] = load_binget
-+
-+    def load_long_binget(self):
-+        i, = unpack('<I', self.read(4))
-+        self.append(self.memo[i])
-+    dispatch[LONG_BINGET[0]] = load_long_binget
-+
-+    def load_put(self):
-+        i = int(self.readline()[:-1])
-+        if i < 0:
-+            raise ValueError("negative PUT argument")
-+        self.memo[i] = self.stack[-1]
-+    dispatch[PUT[0]] = load_put
-+
-+    def load_binput(self):
-+        i = self.read(1)[0]
-+        if i < 0:
-+            raise ValueError("negative BINPUT argument")
-+        self.memo[i] = self.stack[-1]
-+    dispatch[BINPUT[0]] = load_binput
-+
-+    def load_long_binput(self):
-+        i, = unpack('<I', self.read(4))
-+        if i > maxsize:
-+            raise ValueError("negative LONG_BINPUT argument")
-+        self.memo[i] = self.stack[-1]
-+    dispatch[LONG_BINPUT[0]] = load_long_binput
-+
-+    def load_memoize(self):
-+        memo = self.memo
-+        memo[len(memo)] = self.stack[-1]
-+    dispatch[MEMOIZE[0]] = load_memoize
-+
-+    def load_append(self):
-+        stack = self.stack
-+        value = stack.pop()
-+        list = stack[-1]
-+        list.append(value)
-+    dispatch[APPEND[0]] = load_append
-+
-+    def load_appends(self):
-+        stack = self.stack
-+        mark = self.marker()
-+        list_obj = stack[mark - 1]
-+        items = stack[mark + 1:]
-+        if isinstance(list_obj, list):
-+            list_obj.extend(items)
-+        else:
-+            append = list_obj.append
-+            for item in items:
-+                append(item)
-+        del stack[mark:]
-+    dispatch[APPENDS[0]] = load_appends
-+
-+    def load_setitem(self):
-+        stack = self.stack
-+        value = stack.pop()
-+        key = stack.pop()
-+        dict = stack[-1]
-+        dict[key] = value
-+    dispatch[SETITEM[0]] = load_setitem
-+
-+    def load_setitems(self):
-+        stack = self.stack
-+        mark = self.marker()
-+        dict = stack[mark - 1]
-+        for i in range(mark + 1, len(stack), 2):
-+            dict[stack[i]] = stack[i + 1]
-+
-+        del stack[mark:]
-+    dispatch[SETITEMS[0]] = load_setitems
-+
-+    def load_additems(self):
-+        stack = self.stack
-+        mark = self.marker()
-+        set_obj = stack[mark - 1]
-+        items = stack[mark + 1:]
-+        if isinstance(set_obj, set):
-+            set_obj.update(items)
-+        else:
-+            add = set_obj.add
-+            for item in items:
-+                add(item)
-+        del stack[mark:]
-+    dispatch[ADDITEMS[0]] = load_additems
-+
-+    def load_build(self):
-+        stack = self.stack
-+        state = stack.pop()
-+        inst = stack[-1]
-+        setstate = getattr(inst, "__setstate__", None)
-+        if setstate is not None:
-+            setstate(state)
-+            return
-+        slotstate = None
-+        if isinstance(state, tuple) and len(state) == 2:
-+            state, slotstate = state
-+        if state:
-+            inst_dict = inst.__dict__
-+            intern = sys.intern
-+            for k, v in state.items():
-+                if type(k) is str:
-+                    inst_dict[intern(k)] = v
-+                else:
-+                    inst_dict[k] = v
-+        if slotstate:
-+            for k, v in slotstate.items():
-+                setattr(inst, k, v)
-+    dispatch[BUILD[0]] = load_build
-+
-+    def load_mark(self):
-+        self.append(self.mark)
-+    dispatch[MARK[0]] = load_mark
-+
-+    def load_stop(self):
-+        value = self.stack.pop()
-+        raise _Stop(value)
-+    dispatch[STOP[0]] = load_stop
-+
-+    nl_dispatch = dispatch.copy()
-+
-+    def noload_obj(self):
-+        # Stack is ... markobject classobject arg1 arg2 ...
-+        k = self.marker()
-+        klass = self.stack.pop(k+1)
-+    nl_dispatch[OBJ[0]] = noload_obj
-+
-+    def noload_inst(self):
-+        self.readline() # skip module
-+        self.readline()[:-1] # skip name
-+        k = self.marker()
-+        klass = self.stack.pop(k+1)
-+        self.append(None)
-+    nl_dispatch[INST[0]] = noload_inst
-+
-+    def noload_newobj(self):
-+        self.stack.pop() # skip args
-+        self.stack.pop() # skip cls
-+        self.stack.append(None)
-+    nl_dispatch[NEWOBJ[0]] = noload_newobj
-+
-+    def noload_global(self):
-+        self.readline() # skip module
-+        self.readline()[:-1] # skip name
-+        self.append(None)
-+    nl_dispatch[GLOBAL[0]] = noload_global
-+
-+    def noload_append(self):
-+        self.stack.pop() # skip value
-+    nl_dispatch[APPEND[0]] = noload_append
-+
-+    def noload_appends(self):
-+        mark = self.marker()
-+        del self.stack[mark:]
-+    nl_dispatch[APPENDS[0]] = noload_appends
-+
-+    def noload_setitem(self):
-+        self.stack.pop() # skip value
-+        self.stack.pop() # skip key
-+    nl_dispatch[SETITEM[0]] = noload_setitem
-+
-+    def noload_setitems(self):
-+        mark = self.marker()
-+        del self.stack[mark:]
-+    nl_dispatch[SETITEMS[0]] = noload_setitems
-+
-+    def noload_reduce(self):
-+        self.stack.pop() # skip args
-+        self.stack.pop() # skip func
-+        self.stack.append(None)
-+    nl_dispatch[REDUCE[0]] = noload_reduce
-+
-+    def noload_build(self):
-+        state = self.stack.pop()
-+    nl_dispatch[BUILD[0]] = noload_build
-+
-+    def noload_ext1(self):
-+        code = ord(self.read(1))
-+        self.get_extension(code)
-+        self.stack.pop()
-+        self.stack.append(None)
-+    nl_dispatch[EXT1[0]] = noload_ext1
-+
-+    def noload_ext2(self):
-+        code = mloads(b'i' + self.read(2) + b'\000\000')
-+        self.get_extension(code)
-+        self.stack.pop()
-+        self.stack.append(None)
-+    nl_dispatch[EXT2[0]] = noload_ext2
-+
-+    def noload_ext4(self):
-+        code = mloads(b'i' + self.read(4))
-+        self.get_extension(code)
-+        self.stack.pop()
-+        self.stack.append(None)
-+    nl_dispatch[EXT4[0]] = noload_ext4
-+
-+
-+# Shorthands
-+
-+def _dump(obj, file, protocol=None, *, fix_imports=True):
-+    _Pickler(file, protocol, fix_imports=fix_imports).dump(obj)
-+
-+def _dumps(obj, protocol=None, *, fix_imports=True):
-+    f = io.BytesIO()
-+    _Pickler(f, protocol, fix_imports=fix_imports).dump(obj)
-+    res = f.getvalue()
-+    assert isinstance(res, bytes_types)
-+    return res
-+
-+def _load(file, *, fix_imports=True, encoding="ASCII", errors="strict"):
-+    return _Unpickler(file, fix_imports=fix_imports,
-+                     encoding=encoding, errors=errors).load()
-+
-+def _loads(s, *, fix_imports=True, encoding="ASCII", errors="strict"):
-+    if isinstance(s, str):
-+        raise TypeError("Can't load pickle from unicode string")
-+    file = io.BytesIO(s)
-+    return _Unpickler(file, fix_imports=fix_imports,
-+                      encoding=encoding, errors=errors).load()
-+
-+# Use the faster _pickle if possible
-+try:
-+    from zodbpickle._pickle import (
-+        PickleError,
-+        PicklingError,
-+        UnpicklingError,
-+        Pickler,
-+        Unpickler,
-+        dump,
-+        dumps,
-+        load,
-+        loads
-+    )
-+except ImportError:
-+    Pickler, Unpickler = _Pickler, _Unpickler
-+    dump, dumps, load, loads = _dump, _dumps, _load, _loads
-+
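Whichever implementation wins the import race above, the four shorthands present the usual API; a minimal round-trip sketch (shown with the standard-library pickle, which exposes the same names):

    import io
    import pickle

    obj = {"id": 42, "tags": ("a", "b")}
    blob = pickle.dumps(obj, protocol=3)
    print(pickle.loads(blob) == obj)              # True
    print(pickle.load(io.BytesIO(blob)) == obj)   # True, file-based variant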
-+# Doctest
-+def _test():
-+    import doctest
-+    return doctest.testmod()
-+
-+if __name__ == "__main__":
-+    import argparse
-+    parser = argparse.ArgumentParser(
-+        description='display contents of the pickle files')
-+    parser.add_argument(
-+        'pickle_file', type=argparse.FileType('br'),
-+        nargs='*', help='the pickle file')
-+    parser.add_argument(
-+        '-t', '--test', action='store_true',
-+        help='run self-test suite')
-+    parser.add_argument(
-+        '-v', action='store_true',
-+        help='run verbosely; only affects self-test run')
-+    args = parser.parse_args()
-+    if args.test:
-+        _test()
-+    else:
-+        if not args.pickle_file:
-+            parser.print_help()
-+        else:
-+            import pprint
-+            for f in args.pickle_file:
-+                obj = load(f)
-+                pprint.pprint(obj)
-diff --git a/src/zodbpickle/pickletools_34.py b/src/zodbpickle/pickletools_34.py
-new file mode 100644
-index 0000000..1711841
---- /dev/null
-+++ b/src/zodbpickle/pickletools_34.py
-@@ -0,0 +1,2818 @@
-+'''"Executable documentation" for the pickle module.
-+
-+Extensive comments about the pickle protocols and pickle-machine opcodes
-+can be found here.  Some functions meant for external use:
-+
-+genops(pickle)
-+   Generate all the opcodes in a pickle, as (opcode, arg, position) triples.
-+
-+dis(pickle, out=None, memo=None, indentlevel=4)
-+   Print a symbolic disassembly of a pickle.
-+'''
-+
-+import codecs
-+import io
-+import re
-+import sys
-+from zodbpickle import pickle_34 as pickle
-+
-+__all__ = ['dis', 'genops', 'optimize']
-+
-+bytes_types = pickle.bytes_types
-+
-+# Other ideas:
-+#
-+# - A pickle verifier:  read a pickle and check it exhaustively for
-+#   well-formedness.  dis() does a lot of this already.
-+#
-+# - A protocol identifier:  examine a pickle and return its protocol number
-+#   (== the highest .proto attr value among all the opcodes in the pickle).
-+#   dis() already prints this info at the end.
-+#
-+# - A pickle optimizer:  for example, tuple-building code is sometimes more
-+#   elaborate than necessary, catering for the possibility that the tuple
-+#   is recursive.  Or lots of times a PUT is generated that's never accessed
-+#   by a later GET.
-+
-+
-+# "A pickle" is a program for a virtual pickle machine (PM, but more accurately
-+# called an unpickling machine).  It's a sequence of opcodes, interpreted by the
-+# PM, building an arbitrarily complex Python object.
-+#
-+# For the most part, the PM is very simple:  there are no looping, testing, or
-+# conditional instructions, no arithmetic and no function calls.  Opcodes are
-+# executed once each, from first to last, until a STOP opcode is reached.
-+#
-+# The PM has two data areas, "the stack" and "the memo".
-+#
-+# Many opcodes push Python objects onto the stack; e.g., INT pushes a Python
-+# integer object on the stack, whose value is gotten from a decimal string
-+# literal immediately following the INT opcode in the pickle bytestream.  Other
-+# opcodes take Python objects off the stack.  The result of unpickling is
-+# whatever object is left on the stack when the final STOP opcode is executed.
-+#
-+# The memo is simply an array of objects, or it can be implemented as a dict
-+# mapping little integers to objects.  The memo serves as the PM's "long term
-+# memory", and the little integers indexing the memo are akin to variable
-+# names.  Some opcodes pop a stack object into the memo at a given index,
-+# and others push a memo object at a given index onto the stack again.
-+#
-+# At heart, that's all the PM has.  Subtleties arise for these reasons:
-+#
-+# + Object identity.  Objects can be arbitrarily complex, and subobjects
-+#   may be shared (for example, the list [a, a] refers to the same object a
-+#   twice).  It can be vital that unpickling recreate an isomorphic object
-+#   graph, faithfully reproducing sharing.
-+#
-+# + Recursive objects.  For example, after "L = []; L.append(L)", L is a
-+#   list, and L[0] is the same list.  This is related to the object identity
-+#   point, and some sequences of pickle opcodes are subtle in order to
-+#   get the right result in all cases.
-+#
-+# + Things pickle doesn't know everything about.  Examples of things pickle
-+#   does know everything about are Python's builtin scalar and container
-+#   types, like ints and tuples.  They generally have opcodes dedicated to
-+#   them.  For things like module references and instances of user-defined
-+#   classes, pickle's knowledge is limited.  Historically, many enhancements
-+#   have been made to the pickle protocol in order to do a better (faster,
-+#   and/or more compact) job on those.
-+#
-+# + Backward compatibility and micro-optimization.  As explained below,
-+#   pickle opcodes never go away, not even when better ways to do a thing
-+#   get invented.  The repertoire of the PM just keeps growing over time.
-+#   For example, protocol 0 had two opcodes for building Python integers (INT
-+#   and LONG), protocol 1 added three more for more-efficient pickling of short
-+#   integers, and protocol 2 added two more for more-efficient pickling of
-+#   long integers (before protocol 2, the only ways to pickle a Python long
-+#   took time quadratic in the number of digits, for both pickling and
-+#   unpickling).  "Opcode bloat" isn't so much a subtlety as a source of
-+#   wearying complication.
-+#
-+#
-+# Pickle protocols:
-+#
-+# For compatibility, the meaning of a pickle opcode never changes.  Instead new
-+# pickle opcodes get added, and each version's unpickler can handle all the
-+# pickle opcodes in all protocol versions to date.  So old pickles continue to
-+# be readable forever.  The pickler can generally be told to restrict itself to
-+# the subset of opcodes available under previous protocol versions too, so that
-+# users can create pickles under the current version readable by older
-+# versions.  However, a pickle does not contain its version number embedded
-+# within it.  If an older unpickler tries to read a pickle using a later
-+# protocol, the result is most likely an exception due to seeing an unknown (in
-+# the older unpickler) opcode.
-+#
-+# The original pickle used what's now called "protocol 0", and what was called
-+# "text mode" before Python 2.3.  The entire pickle bytestream is made up of
-+# printable 7-bit ASCII characters, plus the newline character, in protocol 0.
-+# That's why it was called text mode.  Protocol 0 is small and elegant, but
-+# sometimes painfully inefficient.
-+#
-+# The second major set of additions is now called "protocol 1", and was called
-+# "binary mode" before Python 2.3.  This added many opcodes with arguments
-+# consisting of arbitrary bytes, including NUL bytes and unprintable "high bit"
-+# bytes.  Binary mode pickles can be substantially smaller than equivalent
-+# text mode pickles, and sometimes faster too; e.g., BININT represents a 4-byte
-+# int as 4 bytes following the opcode, which is cheaper to unpickle than the
-+# (perhaps) 11-character decimal string attached to INT.  Protocol 1 also added
-+# a number of opcodes that operate on many stack elements at once (like APPENDS
-+# and SETITEMS), and "shortcut" opcodes (like EMPTY_DICT and EMPTY_TUPLE).
-+#
-+# The third major set of additions came in Python 2.3, and is called "protocol
-+# 2".  This added:
-+#
-+# - A better way to pickle instances of new-style classes (NEWOBJ).
-+#
-+# - A way for a pickle to identify its protocol (PROTO).
-+#
-+# - Time- and space- efficient pickling of long ints (LONG{1,4}).
-+#
-+# - Shortcuts for small tuples (TUPLE{1,2,3}).
-+#
-+# - Dedicated opcodes for bools (NEWTRUE, NEWFALSE).
-+#
-+# - The "extension registry", a vector of popular objects that can be pushed
-+#   efficiently by index (EXT{1,2,4}).  This is akin to the memo and GET, but
-+#   the registry contents are predefined (there's nothing akin to the memo's
-+#   PUT).
-+#
-+# Another independent change with Python 2.3 is the abandonment of any
-+# pretense that it might be safe to load pickles received from untrusted
-+# parties -- no sufficient security analysis has been done to guarantee
-+# this and there isn't a use case that warrants the expense of such an
-+# analysis.
-+#
-+# To this end, all tests for __safe_for_unpickling__ or for
-+# copyreg.safe_constructors are removed from the unpickling code.
-+# References to these variables in the descriptions below are to be seen
-+# as describing unpickling in Python 2.2 and before.
-+
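A concrete opcode stream makes the description of the pickle machine above easier to follow; the standard-library pickletools (assumed opcode-compatible with the copy added by this patch) can disassemble any pickle:

    import pickle
    import pickletools

    pickletools.dis(pickle.dumps({"spam": [1, 2]}, protocol=2))
    # The listing ends with "highest protocol among opcodes = 2", which is the
    # "protocol identifier" idea mentioned in the list of other ideas above.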
-+
-+# Meta-rule:  Descriptions are stored in instances of descriptor objects,
-+# with plain constructors.  No meta-language is defined from which
-+# descriptors could be constructed.  If you want, e.g., XML, write a little
-+# program to generate XML from the objects.
-+
-+##############################################################################
-+# Some pickle opcodes have an argument, following the opcode in the
-+# bytestream.  An argument is of a specific type, described by an instance
-+# of ArgumentDescriptor.  These are not to be confused with arguments taken
-+# off the stack -- ArgumentDescriptor applies only to arguments embedded in
-+# the opcode stream, immediately following an opcode.
-+
-+# Represents the number of bytes consumed by an argument delimited by the
-+# next newline character.
-+UP_TO_NEWLINE = -1
-+
-+# Represents the number of bytes consumed by a two-argument opcode where
-+# the first argument gives the number of bytes in the second argument.
-+TAKEN_FROM_ARGUMENT1  = -2   # num bytes is 1-byte unsigned int
-+TAKEN_FROM_ARGUMENT4  = -3   # num bytes is 4-byte signed little-endian int
-+TAKEN_FROM_ARGUMENT4U = -4   # num bytes is 4-byte unsigned little-endian int
-+TAKEN_FROM_ARGUMENT8U = -5   # num bytes is 8-byte unsigned little-endian int
-+
-+class ArgumentDescriptor(object):
-+    __slots__ = (
-+        # name of descriptor record, also a module global name; a string
-+        'name',
-+
-+        # length of argument, in bytes; an int; UP_TO_NEWLINE and
-+        # TAKEN_FROM_ARGUMENT{1,4,8} are negative values for variable-length
-+        # cases
-+        'n',
-+
-+        # a function taking a file-like object, reading this kind of argument
-+        # from the object at the current position, advancing the current
-+        # position by n bytes, and returning the value of the argument
-+        'reader',
-+
-+        # human-readable docs for this arg descriptor; a string
-+        'doc',
-+    )
-+
-+    def __init__(self, name, n, reader, doc):
-+        assert isinstance(name, str)
-+        self.name = name
-+
-+        assert isinstance(n, int) and (n >= 0 or
-+                                       n in (UP_TO_NEWLINE,
-+                                             TAKEN_FROM_ARGUMENT1,
-+                                             TAKEN_FROM_ARGUMENT4,
-+                                             TAKEN_FROM_ARGUMENT4U,
-+                                             TAKEN_FROM_ARGUMENT8U))
-+        self.n = n
-+
-+        self.reader = reader
-+
-+        assert isinstance(doc, str)
-+        self.doc = doc
-+
-+from struct import unpack as _unpack
-+
-+def read_uint1(f):
-+    r"""
-+    >>> import io
-+    >>> read_uint1(io.BytesIO(b'\xff'))
-+    255
-+    """
-+
-+    data = f.read(1)
-+    if data:
-+        return data[0]
-+    raise ValueError("not enough data in stream to read uint1")
-+
-+uint1 = ArgumentDescriptor(
-+            name='uint1',
-+            n=1,
-+            reader=read_uint1,
-+            doc="One-byte unsigned integer.")
-+
-+
-+def read_uint2(f):
-+    r"""
-+    >>> import io
-+    >>> read_uint2(io.BytesIO(b'\xff\x00'))
-+    255
-+    >>> read_uint2(io.BytesIO(b'\xff\xff'))
-+    65535
-+    """
-+
-+    data = f.read(2)
-+    if len(data) == 2:
-+        return _unpack("<H", data)[0]
-+    raise ValueError("not enough data in stream to read uint2")
-+
-+uint2 = ArgumentDescriptor(
-+            name='uint2',
-+            n=2,
-+            reader=read_uint2,
-+            doc="Two-byte unsigned integer, little-endian.")
-+
-+
-+def read_int4(f):
-+    r"""
-+    >>> import io
-+    >>> read_int4(io.BytesIO(b'\xff\x00\x00\x00'))
-+    255
-+    >>> read_int4(io.BytesIO(b'\x00\x00\x00\x80')) == -(2**31)
-+    True
-+    """
-+
-+    data = f.read(4)
-+    if len(data) == 4:
-+        return _unpack("<i", data)[0]
-+    raise ValueError("not enough data in stream to read int4")
-+
-+int4 = ArgumentDescriptor(
-+           name='int4',
-+           n=4,
-+           reader=read_int4,
-+           doc="Four-byte signed integer, little-endian, 2's complement.")
-+
-+
-+def read_uint4(f):
-+    r"""
-+    >>> import io
-+    >>> read_uint4(io.BytesIO(b'\xff\x00\x00\x00'))
-+    255
-+    >>> read_uint4(io.BytesIO(b'\x00\x00\x00\x80')) == 2**31
-+    True
-+    """
-+
-+    data = f.read(4)
-+    if len(data) == 4:
-+        return _unpack("<I", data)[0]
-+    raise ValueError("not enough data in stream to read uint4")
-+
-+uint4 = ArgumentDescriptor(
-+            name='uint4',
-+            n=4,
-+            reader=read_uint4,
-+            doc="Four-byte unsigned integer, little-endian.")
-+
-+
-+def read_uint8(f):
-+    r"""
-+    >>> import io
-+    >>> read_uint8(io.BytesIO(b'\xff\x00\x00\x00\x00\x00\x00\x00'))
-+    255
-+    >>> read_uint8(io.BytesIO(b'\xff' * 8)) == 2**64-1
-+    True
-+    """
-+
-+    data = f.read(8)
-+    if len(data) == 8:
-+        return _unpack("<Q", data)[0]
-+    raise ValueError("not enough data in stream to read uint8")
-+
-+uint8 = ArgumentDescriptor(
-+            name='uint8',
-+            n=8,
-+            reader=read_uint8,
-+            doc="Eight-byte unsigned integer, little-endian.")
-+
-+
-+def read_stringnl(f, decode=True, stripquotes=True):
-+    r"""
-+    >>> import io
-+    >>> read_stringnl(io.BytesIO(b"'abcd'\nefg\n"))
-+    'abcd'
-+
-+    >>> read_stringnl(io.BytesIO(b"\n"))
-+    Traceback (most recent call last):
-+    ...
-+    ValueError: no string quotes around b''
-+
-+    >>> read_stringnl(io.BytesIO(b"\n"), stripquotes=False)
-+    ''
-+
-+    >>> read_stringnl(io.BytesIO(b"''\n"))
-+    ''
-+
-+    >>> read_stringnl(io.BytesIO(b'"abcd"'))
-+    Traceback (most recent call last):
-+    ...
-+    ValueError: no newline found when trying to read stringnl
-+
-+    Embedded escapes are undone in the result.
-+    >>> read_stringnl(io.BytesIO(br"'a\n\\b\x00c\td'" + b"\n'e'"))
-+    'a\n\\b\x00c\td'
-+    """
-+
-+    data = f.readline()
-+    if not data.endswith(b'\n'):
-+        raise ValueError("no newline found when trying to read stringnl")
-+    data = data[:-1]    # lose the newline
-+
-+    if stripquotes:
-+        for q in (b'"', b"'"):
-+            if data.startswith(q):
-+                if not data.endswith(q):
-+                    raise ValueError("strinq quote %r not found at both "
-+                                     "ends of %r" % (q, data))
-+                data = data[1:-1]
-+                break
-+        else:
-+            raise ValueError("no string quotes around %r" % data)
-+
-+    if decode:
-+        data = codecs.escape_decode(data)[0].decode("ascii")
-+    return data
-+
-+stringnl = ArgumentDescriptor(
-+               name='stringnl',
-+               n=UP_TO_NEWLINE,
-+               reader=read_stringnl,
-+               doc="""A newline-terminated string.
-+
-+                   This is a repr-style string, with embedded escapes, and
-+                   bracketing quotes.
-+                   """)
-+
-+def read_stringnl_noescape(f):
-+    return read_stringnl(f, stripquotes=False)
-+
-+stringnl_noescape = ArgumentDescriptor(
-+                        name='stringnl_noescape',
-+                        n=UP_TO_NEWLINE,
-+                        reader=read_stringnl_noescape,
-+                        doc="""A newline-terminated string.
-+
-+                        This is a str-style string, without embedded escapes,
-+                        or bracketing quotes.  It should consist solely of
-+                        printable ASCII characters.
-+                        """)
-+
-+def read_stringnl_noescape_pair(f):
-+    r"""
-+    >>> import io
-+    >>> read_stringnl_noescape_pair(io.BytesIO(b"Queue\nEmpty\njunk"))
-+    'Queue Empty'
-+    """
-+
-+    return "%s %s" % (read_stringnl_noescape(f), read_stringnl_noescape(f))
-+
-+stringnl_noescape_pair = ArgumentDescriptor(
-+                             name='stringnl_noescape_pair',
-+                             n=UP_TO_NEWLINE,
-+                             reader=read_stringnl_noescape_pair,
-+                             doc="""A pair of newline-terminated strings.
-+
-+                             These are str-style strings, without embedded
-+                             escapes, or bracketing quotes.  They should
-+                             consist solely of printable ASCII characters.
-+                             The pair is returned as a single string, with
-+                             a single blank separating the two strings.
-+                             """)
-+
-+
-+def read_string1(f):
-+    r"""
-+    >>> import io
-+    >>> read_string1(io.BytesIO(b"\x00"))
-+    ''
-+    >>> read_string1(io.BytesIO(b"\x03abcdef"))
-+    'abc'
-+    """
-+
-+    n = read_uint1(f)
-+    assert n >= 0
-+    data = f.read(n)
-+    if len(data) == n:
-+        return data.decode("latin-1")
-+    raise ValueError("expected %d bytes in a string1, but only %d remain" %
-+                     (n, len(data)))
-+
-+string1 = ArgumentDescriptor(
-+              name="string1",
-+              n=TAKEN_FROM_ARGUMENT1,
-+              reader=read_string1,
-+              doc="""A counted string.
-+
-+              The first argument is a 1-byte unsigned int giving the number
-+              of bytes in the string, and the second argument is that many
-+              bytes.
-+              """)
-+
-+
-+def read_string4(f):
-+    r"""
-+    >>> import io
-+    >>> read_string4(io.BytesIO(b"\x00\x00\x00\x00abc"))
-+    ''
-+    >>> read_string4(io.BytesIO(b"\x03\x00\x00\x00abcdef"))
-+    'abc'
-+    >>> read_string4(io.BytesIO(b"\x00\x00\x00\x03abcdef"))
-+    Traceback (most recent call last):
-+    ...
-+    ValueError: expected 50331648 bytes in a string4, but only 6 remain
-+    """
-+
-+    n = read_int4(f)
-+    if n < 0:
-+        raise ValueError("string4 byte count < 0: %d" % n)
-+    data = f.read(n)
-+    if len(data) == n:
-+        return data.decode("latin-1")
-+    raise ValueError("expected %d bytes in a string4, but only %d remain" %
-+                     (n, len(data)))
-+
-+string4 = ArgumentDescriptor(
-+              name="string4",
-+              n=TAKEN_FROM_ARGUMENT4,
-+              reader=read_string4,
-+              doc="""A counted string.
-+
-+              The first argument is a 4-byte little-endian signed int giving
-+              the number of bytes in the string, and the second argument is
-+              that many bytes.
-+              """)
-+
-+
-+def read_bytes1(f):
-+    r"""
-+    >>> import io
-+    >>> read_bytes1(io.BytesIO(b"\x00"))
-+    b''
-+    >>> read_bytes1(io.BytesIO(b"\x03abcdef"))
-+    b'abc'
-+    """
-+
-+    n = read_uint1(f)
-+    assert n >= 0
-+    data = f.read(n)
-+    if len(data) == n:
-+        return data
-+    raise ValueError("expected %d bytes in a bytes1, but only %d remain" %
-+                     (n, len(data)))
-+
-+bytes1 = ArgumentDescriptor(
-+              name="bytes1",
-+              n=TAKEN_FROM_ARGUMENT1,
-+              reader=read_bytes1,
-+              doc="""A counted bytes string.
-+
-+              The first argument is a 1-byte unsigned int giving the number
-+              of bytes in the string, and the second argument is that many
-+              bytes.
-+              """)
-+
-+
-+def read_bytes4(f):
-+    r"""
-+    >>> import io
-+    >>> read_bytes4(io.BytesIO(b"\x00\x00\x00\x00abc"))
-+    b''
-+    >>> read_bytes4(io.BytesIO(b"\x03\x00\x00\x00abcdef"))
-+    b'abc'
-+    >>> read_bytes4(io.BytesIO(b"\x00\x00\x00\x03abcdef"))
-+    Traceback (most recent call last):
-+    ...
-+    ValueError: expected 50331648 bytes in a bytes4, but only 6 remain
-+    """
-+
-+    n = read_uint4(f)
-+    assert n >= 0
-+    if n > sys.maxsize:
-+        raise ValueError("bytes4 byte count > sys.maxsize: %d" % n)
-+    data = f.read(n)
-+    if len(data) == n:
-+        return data
-+    raise ValueError("expected %d bytes in a bytes4, but only %d remain" %
-+                     (n, len(data)))
-+
-+bytes4 = ArgumentDescriptor(
-+              name="bytes4",
-+              n=TAKEN_FROM_ARGUMENT4U,
-+              reader=read_bytes4,
-+              doc="""A counted bytes string.
-+
-+              The first argument is a 4-byte little-endian unsigned int giving
-+              the number of bytes, and the second argument is that many bytes.
-+              """)
-+
-+
-+def read_bytes8(f):
-+    r"""
-+    >>> import io, struct, sys
-+    >>> read_bytes8(io.BytesIO(b"\x00\x00\x00\x00\x00\x00\x00\x00abc"))
-+    b''
-+    >>> read_bytes8(io.BytesIO(b"\x03\x00\x00\x00\x00\x00\x00\x00abcdef"))
-+    b'abc'
-+    >>> bigsize8 = struct.pack("<Q", sys.maxsize//3)
-+    >>> read_bytes8(io.BytesIO(bigsize8 + b"abcdef"))  #doctest: +ELLIPSIS
-+    Traceback (most recent call last):
-+    ...
-+    ValueError: expected ... bytes in a bytes8, but only 6 remain
-+    """
-+
-+    n = read_uint8(f)
-+    assert n >= 0
-+    if n > sys.maxsize:
-+        raise ValueError("bytes8 byte count > sys.maxsize: %d" % n)
-+    data = f.read(n)
-+    if len(data) == n:
-+        return data
-+    raise ValueError("expected %d bytes in a bytes8, but only %d remain" %
-+                     (n, len(data)))
-+
-+bytes8 = ArgumentDescriptor(
-+              name="bytes8",
-+              n=TAKEN_FROM_ARGUMENT8U,
-+              reader=read_bytes8,
-+              doc="""A counted bytes string.
-+
-+              The first argument is an 8-byte little-endian unsigned int giving
-+              the number of bytes, and the second argument is that many bytes.
-+              """)
-+
-+def read_unicodestringnl(f):
-+    r"""
-+    >>> import io
-+    >>> read_unicodestringnl(io.BytesIO(b"abc\\uabcd\njunk")) == 'abc\uabcd'
-+    True
-+    """
-+
-+    data = f.readline()
-+    if not data.endswith(b'\n'):
-+        raise ValueError("no newline found when trying to read "
-+                         "unicodestringnl")
-+    data = data[:-1]    # lose the newline
-+    return str(data, 'raw-unicode-escape')
-+
-+unicodestringnl = ArgumentDescriptor(
-+                      name='unicodestringnl',
-+                      n=UP_TO_NEWLINE,
-+                      reader=read_unicodestringnl,
-+                      doc="""A newline-terminated Unicode string.
-+
-+                      This is raw-unicode-escape encoded, so consists of
-+                      printable ASCII characters, and may contain embedded
-+                      escape sequences.
-+                      """)
-+
-+
-+def read_unicodestring1(f):
-+    r"""
-+    >>> import io
-+    >>> s = 'abcd\uabcd'
-+    >>> enc = s.encode('utf-8')
-+    >>> enc
-+    b'abcd\xea\xaf\x8d'
-+    >>> n = bytes([len(enc)])  # little-endian 1-byte length
-+    >>> t = read_unicodestring1(io.BytesIO(n + enc + b'junk'))
-+    >>> s == t
-+    True
-+
-+    >>> read_unicodestring1(io.BytesIO(n + enc[:-1]))
-+    Traceback (most recent call last):
-+    ...
-+    ValueError: expected 7 bytes in a unicodestring1, but only 6 remain
-+    """
-+
-+    n = read_uint1(f)
-+    assert n >= 0
-+    data = f.read(n)
-+    if len(data) == n:
-+        return str(data, 'utf-8', 'surrogatepass')
-+    raise ValueError("expected %d bytes in a unicodestring1, but only %d "
-+                     "remain" % (n, len(data)))
-+
-+unicodestring1 = ArgumentDescriptor(
-+                    name="unicodestring1",
-+                    n=TAKEN_FROM_ARGUMENT1,
-+                    reader=read_unicodestring1,
-+                    doc="""A counted Unicode string.
-+
-+                    The first argument is a 1-byte unsigned int
-+                    giving the number of bytes in the string, and the second
-+                    argument-- the UTF-8 encoding of the Unicode string --
-+                    contains that many bytes.
-+                    """)
-+
-+
-+def read_unicodestring4(f):
-+    r"""
-+    >>> import io
-+    >>> s = 'abcd\uabcd'
-+    >>> enc = s.encode('utf-8')
-+    >>> enc
-+    b'abcd\xea\xaf\x8d'
-+    >>> n = bytes([len(enc), 0, 0, 0])  # little-endian 4-byte length
-+    >>> t = read_unicodestring4(io.BytesIO(n + enc + b'junk'))
-+    >>> s == t
-+    True
-+
-+    >>> read_unicodestring4(io.BytesIO(n + enc[:-1]))
-+    Traceback (most recent call last):
-+    ...
-+    ValueError: expected 7 bytes in a unicodestring4, but only 6 remain
-+    """
-+
-+    n = read_uint4(f)
-+    assert n >= 0
-+    if n > sys.maxsize:
-+        raise ValueError("unicodestring4 byte count > sys.maxsize: %d" % n)
-+    data = f.read(n)
-+    if len(data) == n:
-+        return str(data, 'utf-8', 'surrogatepass')
-+    raise ValueError("expected %d bytes in a unicodestring4, but only %d "
-+                     "remain" % (n, len(data)))
-+
-+unicodestring4 = ArgumentDescriptor(
-+                    name="unicodestring4",
-+                    n=TAKEN_FROM_ARGUMENT4U,
-+                    reader=read_unicodestring4,
-+                    doc="""A counted Unicode string.
-+
-+                    The first argument is a 4-byte little-endian unsigned int
-+                    giving the number of bytes in the string, and the second
-+                    argument-- the UTF-8 encoding of the Unicode string --
-+                    contains that many bytes.
-+                    """)
-+
-+
-+def read_unicodestring8(f):
-+    r"""
-+    >>> import io
-+    >>> s = 'abcd\uabcd'
-+    >>> enc = s.encode('utf-8')
-+    >>> enc
-+    b'abcd\xea\xaf\x8d'
-+    >>> n = bytes([len(enc)]) + bytes(7)  # little-endian 8-byte length
-+    >>> t = read_unicodestring8(io.BytesIO(n + enc + b'junk'))
-+    >>> s == t
-+    True
-+
-+    >>> read_unicodestring8(io.BytesIO(n + enc[:-1]))
-+    Traceback (most recent call last):
-+    ...
-+    ValueError: expected 7 bytes in a unicodestring8, but only 6 remain
-+    """
-+
-+    n = read_uint8(f)
-+    assert n >= 0
-+    if n > sys.maxsize:
-+        raise ValueError("unicodestring8 byte count > sys.maxsize: %d" % n)
-+    data = f.read(n)
-+    if len(data) == n:
-+        return str(data, 'utf-8', 'surrogatepass')
-+    raise ValueError("expected %d bytes in a unicodestring8, but only %d "
-+                     "remain" % (n, len(data)))
-+
-+unicodestring8 = ArgumentDescriptor(
-+                    name="unicodestring8",
-+                    n=TAKEN_FROM_ARGUMENT8U,
-+                    reader=read_unicodestring8,
-+                    doc="""A counted Unicode string.
-+
-+                    The first argument is an 8-byte little-endian unsigned int
-+                    giving the number of bytes in the string, and the second
-+                    argument-- the UTF-8 encoding of the Unicode string --
-+                    contains that many bytes.
-+                    """)
-+
-+
-+def read_decimalnl_short(f):
-+    r"""
-+    >>> import io
-+    >>> read_decimalnl_short(io.BytesIO(b"1234\n56"))
-+    1234
-+
-+    >>> read_decimalnl_short(io.BytesIO(b"1234L\n56"))
-+    Traceback (most recent call last):
-+    ...
-+    ValueError: invalid literal for int() with base 10: b'1234L'
-+    """
-+
-+    s = read_stringnl(f, decode=False, stripquotes=False)
-+
-+    # There's a hack for True and False here.
-+    if s == b"00":
-+        return False
-+    elif s == b"01":
-+        return True
-+
-+    return int(s)
-+
-+def read_decimalnl_long(f):
-+    r"""
-+    >>> import io
-+
-+    >>> read_decimalnl_long(io.BytesIO(b"1234L\n56"))
-+    1234
-+
-+    >>> read_decimalnl_long(io.BytesIO(b"123456789012345678901234L\n6"))
-+    123456789012345678901234
-+    """
-+
-+    s = read_stringnl(f, decode=False, stripquotes=False)
-+    if s[-1:] == b'L':
-+        s = s[:-1]
-+    return int(s)
-+
-+
-+decimalnl_short = ArgumentDescriptor(
-+                      name='decimalnl_short',
-+                      n=UP_TO_NEWLINE,
-+                      reader=read_decimalnl_short,
-+                      doc="""A newline-terminated decimal integer literal.
-+
-+                          This never has a trailing 'L', and the integer fit
-+                          in a short Python int on the box where the pickle
-+                          was written -- but there's no guarantee it will fit
-+                          in a short Python int on the box where the pickle
-+                          is read.
-+                          """)
-+
-+decimalnl_long = ArgumentDescriptor(
-+                     name='decimalnl_long',
-+                     n=UP_TO_NEWLINE,
-+                     reader=read_decimalnl_long,
-+                     doc="""A newline-terminated decimal integer literal.
-+
-+                         This has a trailing 'L', and can represent integers
-+                         of any size.
-+                         """)
-+
-+
-+def read_floatnl(f):
-+    r"""
-+    >>> import io
-+    >>> read_floatnl(io.BytesIO(b"-1.25\n6"))
-+    -1.25
-+    """
-+    s = read_stringnl(f, decode=False, stripquotes=False)
-+    return float(s)
-+
-+floatnl = ArgumentDescriptor(
-+              name='floatnl',
-+              n=UP_TO_NEWLINE,
-+              reader=read_floatnl,
-+              doc="""A newline-terminated decimal floating literal.
-+
-+              In general this requires 17 significant digits for roundtrip
-+              identity, and pickling then unpickling infinities, NaNs, and
-+              minus zero doesn't work across boxes, or on some boxes even
-+              on itself (e.g., Windows can't read the strings it produces
-+              for infinities or NaNs).
-+              """)
-+
-+def read_float8(f):
-+    r"""
-+    >>> import io, struct
-+    >>> raw = struct.pack(">d", -1.25)
-+    >>> raw
-+    b'\xbf\xf4\x00\x00\x00\x00\x00\x00'
-+    >>> read_float8(io.BytesIO(raw + b"\n"))
-+    -1.25
-+    """
-+
-+    data = f.read(8)
-+    if len(data) == 8:
-+        return _unpack(">d", data)[0]
-+    raise ValueError("not enough data in stream to read float8")
-+
-+
-+float8 = ArgumentDescriptor(
-+             name='float8',
-+             n=8,
-+             reader=read_float8,
-+             doc="""An 8-byte binary representation of a float, big-endian.
-+
-+             The format is unique to Python, and shared with the struct
-+             module (format string '>d') "in theory" (the struct and pickle
-+             implementations don't share the code -- they should).  It's
-+             strongly related to the IEEE-754 double format, and, in normal
-+             cases, is in fact identical to the big-endian 754 double format.
-+             On other boxes the dynamic range is limited to that of a 754
-+             double, and "add a half and chop" rounding is used to reduce
-+             the precision to 53 bits.  However, even on a 754 box,
-+             infinities, NaNs, and minus zero may not be handled correctly
-+             (may not survive roundtrip pickling intact).
-+             """)
-+
-+# Protocol 2 formats
-+
-+from pickle import decode_long
-+
-+def read_long1(f):
-+    r"""
-+    >>> import io
-+    >>> read_long1(io.BytesIO(b"\x00"))
-+    0
-+    >>> read_long1(io.BytesIO(b"\x02\xff\x00"))
-+    255
-+    >>> read_long1(io.BytesIO(b"\x02\xff\x7f"))
-+    32767
-+    >>> read_long1(io.BytesIO(b"\x02\x00\xff"))
-+    -256
-+    >>> read_long1(io.BytesIO(b"\x02\x00\x80"))
-+    -32768
-+    """
-+
-+    n = read_uint1(f)
-+    data = f.read(n)
-+    if len(data) != n:
-+        raise ValueError("not enough data in stream to read long1")
-+    return decode_long(data)
-+
-+long1 = ArgumentDescriptor(
-+    name="long1",
-+    n=TAKEN_FROM_ARGUMENT1,
-+    reader=read_long1,
-+    doc="""A binary long, little-endian, using 1-byte size.
-+
-+    This first reads one byte as an unsigned size, then reads that
-+    many bytes and interprets them as a little-endian 2's-complement long.
-+    If the size is 0, that's taken as a shortcut for the long 0L.
-+    """)
-+
-+def read_long4(f):
-+    r"""
-+    >>> import io
-+    >>> read_long4(io.BytesIO(b"\x02\x00\x00\x00\xff\x00"))
-+    255
-+    >>> read_long4(io.BytesIO(b"\x02\x00\x00\x00\xff\x7f"))
-+    32767
-+    >>> read_long4(io.BytesIO(b"\x02\x00\x00\x00\x00\xff"))
-+    -256
-+    >>> read_long4(io.BytesIO(b"\x02\x00\x00\x00\x00\x80"))
-+    -32768
-+    >>> read_long4(io.BytesIO(b"\x00\x00\x00\x00"))
-+    0
-+    """
-+
-+    n = read_int4(f)
-+    if n < 0:
-+        raise ValueError("long4 byte count < 0: %d" % n)
-+    data = f.read(n)
-+    if len(data) != n:
-+        raise ValueError("not enough data in stream to read long4")
-+    return decode_long(data)
-+
-+long4 = ArgumentDescriptor(
-+    name="long4",
-+    n=TAKEN_FROM_ARGUMENT4,
-+    reader=read_long4,
-+    doc="""A binary representation of a long, little-endian.
-+
-+    This first reads four bytes as a signed size (but requires the
-+    size to be >= 0), then reads that many bytes and interprets them
-+    as a little-endian 2's-complement long.  If the size is 0, that's taken
-+    as a shortcut for the int 0, although LONG1 should really be used
-+    then instead (and in any case where # of bytes < 256).
-+    """)
-+
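-+# A minimal sketch of the little-endian 2's-complement encoding consumed by
-+# long1 and long4, using the stdlib pickle helpers (decode_long is imported
-+# above; encode_long is its inverse):
-+#
-+#     >>> from pickle import encode_long, decode_long
-+#     >>> encode_long(255)
-+#     b'\xff\x00'
-+#     >>> decode_long(b'\xff\x00')
-+#     255
-+#     >>> encode_long(0)    # the empty string is the shortcut for 0
-+#     b''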
-+
-+##############################################################################
-+# Object descriptors.  The stack used by the pickle machine holds objects,
-+# and in the stack_before and stack_after attributes of OpcodeInfo
-+# descriptors we need names to describe the various types of objects that can
-+# appear on the stack.
-+
-+class StackObject(object):
-+    __slots__ = (
-+        # name of descriptor record, for info only
-+        'name',
-+
-+        # type of object, or tuple of type objects (meaning the object can
-+        # be of any type in the tuple)
-+        'obtype',
-+
-+        # human-readable docs for this kind of stack object; a string
-+        'doc',
-+    )
-+
-+    def __init__(self, name, obtype, doc):
-+        assert isinstance(name, str)
-+        self.name = name
-+
-+        assert isinstance(obtype, type) or isinstance(obtype, tuple)
-+        if isinstance(obtype, tuple):
-+            for contained in obtype:
-+                assert isinstance(contained, type)
-+        self.obtype = obtype
-+
-+        assert isinstance(doc, str)
-+        self.doc = doc
-+
-+    def __repr__(self):
-+        return self.name
-+
-+
-+pyint = pylong = StackObject(
-+    name='int',
-+    obtype=int,
-+    doc="A Python integer object.")
-+
-+pyinteger_or_bool = StackObject(
-+    name='int_or_bool',
-+    obtype=(int, bool),
-+    doc="A Python integer or boolean object.")
-+
-+pybool = StackObject(
-+    name='bool',
-+    obtype=bool,
-+    doc="A Python boolean object.")
-+
-+pyfloat = StackObject(
-+    name='float',
-+    obtype=float,
-+    doc="A Python float object.")
-+
-+pybytes_or_str = pystring = StackObject(
-+    name='bytes_or_str',
-+    obtype=(bytes, str),
-+    doc="A Python bytes or (Unicode) string object.")
-+
-+pybytes = StackObject(
-+    name='bytes',
-+    obtype=bytes,
-+    doc="A Python bytes object.")
-+
-+pyunicode = StackObject(
-+    name='str',
-+    obtype=str,
-+    doc="A Python (Unicode) string object.")
-+
-+pynone = StackObject(
-+    name="None",
-+    obtype=type(None),
-+    doc="The Python None object.")
-+
-+pytuple = StackObject(
-+    name="tuple",
-+    obtype=tuple,
-+    doc="A Python tuple object.")
-+
-+pylist = StackObject(
-+    name="list",
-+    obtype=list,
-+    doc="A Python list object.")
-+
-+pydict = StackObject(
-+    name="dict",
-+    obtype=dict,
-+    doc="A Python dict object.")
-+
-+pyset = StackObject(
-+    name="set",
-+    obtype=set,
-+    doc="A Python set object.")
-+
-+pyfrozenset = StackObject(
-+    name="frozenset",
-+    obtype=frozenset,
-+    doc="A Python frozenset object.")
-+
-+anyobject = StackObject(
-+    name='any',
-+    obtype=object,
-+    doc="Any kind of object whatsoever.")
-+
-+markobject = StackObject(
-+    name="mark",
-+    obtype=StackObject,
-+    doc="""'The mark' is a unique object.
-+
-+Opcodes that operate on a variable number of objects
-+generally don't embed the count of objects in the opcode,
-+or pull it off the stack.  Instead the MARK opcode is used
-+to push a special marker object on the stack, and then
-+some other opcodes grab all the objects from the top of
-+the stack down to (but not including) the topmost marker
-+object.
-+""")
-+
-+stackslice = StackObject(
-+    name="stackslice",
-+    obtype=StackObject,
-+    doc="""An object representing a contiguous slice of the stack.
-+
-+This is used in conjunction with markobject, to represent all
-+of the stack following the topmost markobject.  For example,
-+the POP_MARK opcode changes the stack from
-+
-+    [..., markobject, stackslice]
-+to
-+    [...]
-+
-+No matter how many objects are on the stack after the topmost
-+markobject, POP_MARK gets rid of all of them (including the
-+topmost markobject too).
-+""")
-+
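-+# A minimal sketch of markobject and stackslice in action (assumes CPython's
-+# pickler and protocol 0; genops() is defined further below):
-+#
-+#     >>> import pickle
-+#     >>> [op.name for op, arg, pos in genops(pickle.dumps((1, 2), 0))]
-+#     ['MARK', 'INT', 'INT', 'TUPLE', 'PUT', 'STOP']
-+#
-+# Here TUPLE consumes the stack slice above the topmost mark (the two ints)
-+# and replaces it, mark included, with a single tuple object.
-+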
-+##############################################################################
-+# Descriptors for pickle opcodes.
-+
-+class OpcodeInfo(object):
-+
-+    __slots__ = (
-+        # symbolic name of opcode; a string
-+        'name',
-+
-+        # the code used in a bytestream to represent the opcode; a
-+        # one-character string
-+        'code',
-+
-+        # If the opcode has an argument embedded in the byte string, an
-+        # instance of ArgumentDescriptor specifying its type.  Note that
-+        # arg.reader(s) can be used to read and decode the argument from
-+        # the bytestream s, and arg.doc documents the format of the raw
-+        # argument bytes.  If the opcode doesn't have an argument embedded
-+        # in the bytestream, arg should be None.
-+        'arg',
-+
-+        # what the stack looks like before this opcode runs; a list
-+        'stack_before',
-+
-+        # what the stack looks like after this opcode runs; a list
-+        'stack_after',
-+
-+        # the protocol number in which this opcode was introduced; an int
-+        'proto',
-+
-+        # human-readable docs for this opcode; a string
-+        'doc',
-+    )
-+
-+    def __init__(self, name, code, arg,
-+                 stack_before, stack_after, proto, doc):
-+        assert isinstance(name, str)
-+        self.name = name
-+
-+        assert isinstance(code, str)
-+        assert len(code) == 1
-+        self.code = code
-+
-+        assert arg is None or isinstance(arg, ArgumentDescriptor)
-+        self.arg = arg
-+
-+        assert isinstance(stack_before, list)
-+        for x in stack_before:
-+            assert isinstance(x, StackObject)
-+        self.stack_before = stack_before
-+
-+        assert isinstance(stack_after, list)
-+        for x in stack_after:
-+            assert isinstance(x, StackObject)
-+        self.stack_after = stack_after
-+
-+        assert isinstance(proto, int) and 0 <= proto <= pickle.HIGHEST_PROTOCOL
-+        self.proto = proto
-+
-+        assert isinstance(doc, str)
-+        self.doc = doc
-+
-+I = OpcodeInfo
-+opcodes = [
-+
-+    # Ways to spell integers.
-+
-+    I(name='INT',
-+      code='I',
-+      arg=decimalnl_short,
-+      stack_before=[],
-+      stack_after=[pyinteger_or_bool],
-+      proto=0,
-+      doc="""Push an integer or bool.
-+
-+      The argument is a newline-terminated decimal literal string.
-+
-+      The intent may have been that this always fit in a short Python int,
-+      but INT can be generated in pickles written on a 64-bit box that
-+      require a Python long on a 32-bit box.  The difference between this
-+      and LONG then is that INT skips a trailing 'L', and produces a short
-+      int whenever possible.
-+
-+      Another difference arises because, when bool was introduced as a
-+      distinct type in 2.3, builtin names True and False were also added to
-+      2.2.2, mapping to ints 1 and 0.  For compatibility in both directions,
-+      True gets pickled as INT + "I01\\n", and False as INT + "I00\\n".
-+      Leading zeroes are never produced for a genuine integer.  The 2.3
-+      (and later) unpicklers special-case these and return bool instead;
-+      earlier unpicklers ignore the leading "0" and return the int.
-+      """),
-+
-+    I(name='BININT',
-+      code='J',
-+      arg=int4,
-+      stack_before=[],
-+      stack_after=[pyint],
-+      proto=1,
-+      doc="""Push a four-byte signed integer.
-+
-+      This handles the full range of Python (short) integers on a 32-bit
-+      box, directly as binary bytes (1 for the opcode and 4 for the integer).
-+      If the integer is non-negative and fits in 1 or 2 bytes, pickling via
-+      BININT1 or BININT2 saves space.
-+      """),
-+
-+    I(name='BININT1',
-+      code='K',
-+      arg=uint1,
-+      stack_before=[],
-+      stack_after=[pyint],
-+      proto=1,
-+      doc="""Push a one-byte unsigned integer.
-+
-+      This is a space optimization for pickling very small non-negative ints,
-+      in range(256).
-+      """),
-+
-+    I(name='BININT2',
-+      code='M',
-+      arg=uint2,
-+      stack_before=[],
-+      stack_after=[pyint],
-+      proto=1,
-+      doc="""Push a two-byte unsigned integer.
-+
-+      This is a space optimization for pickling small positive ints, in
-+      range(256, 2**16).  Integers in range(256) can also be pickled via
-+      BININT2, but BININT1 instead saves a byte.
-+      """),
-+
-+    I(name='LONG',
-+      code='L',
-+      arg=decimalnl_long,
-+      stack_before=[],
-+      stack_after=[pyint],
-+      proto=0,
-+      doc="""Push a long integer.
-+
-+      The same as INT, except that the literal ends with 'L', and always
-+      unpickles to a Python long.  There doesn't seem to be a real purpose
-+      to the trailing 'L'.
-+
-+      Note that LONG takes time quadratic in the number of digits when
-+      unpickling (this is simply due to the nature of decimal->binary
-+      conversion).  Proto 2 added linear-time (in C; still quadratic-time
-+      in Python) LONG1 and LONG4 opcodes.
-+      """),
-+
-+    I(name="LONG1",
-+      code='\x8a',
-+      arg=long1,
-+      stack_before=[],
-+      stack_after=[pyint],
-+      proto=2,
-+      doc="""Long integer using one-byte length.
-+
-+      A more efficient encoding of a Python long; the long1 encoding
-+      says it all."""),
-+
-+    I(name="LONG4",
-+      code='\x8b',
-+      arg=long4,
-+      stack_before=[],
-+      stack_after=[pyint],
-+      proto=2,
-+      doc="""Long integer using four-byte length.
-+
-+      A more efficient encoding of a Python long; the long4 encoding
-+      says it all."""),
-+
-+    # Ways to spell strings (8-bit, not Unicode).
-+
-+    I(name='STRING',
-+      code='S',
-+      arg=stringnl,
-+      stack_before=[],
-+      stack_after=[pybytes_or_str],
-+      proto=0,
-+      doc="""Push a Python string object.
-+
-+      The argument is a repr-style string, with bracketing quote characters,
-+      and perhaps embedded escapes.  The argument extends until the next
-+      newline character.  These are usually decoded into a str instance
-+      using the encoding given to the Unpickler constructor, or the default,
-+      'ASCII'.  If the encoding given was 'bytes', however, they will be
-+      decoded as a bytes object instead.
-+      """),
-+
-+    I(name='BINSTRING',
-+      code='T',
-+      arg=string4,
-+      stack_before=[],
-+      stack_after=[pybytes_or_str],
-+      proto=1,
-+      doc="""Push a Python string object.
-+
-+      There are two arguments: the first is a 4-byte little-endian
-+      signed int giving the number of bytes in the string, and the
-+      second is that many bytes, which are taken literally as the string
-+      content.  These are usually decoded into a str instance using the
-+      encoding given to the Unpickler constructor, or the default,
-+      'ASCII'.  If the encoding given was 'bytes', however, they will be
-+      decoded as a bytes object instead.
-+      """),
-+
-+    I(name='SHORT_BINSTRING',
-+      code='U',
-+      arg=string1,
-+      stack_before=[],
-+      stack_after=[pybytes_or_str],
-+      proto=1,
-+      doc="""Push a Python string object.
-+
-+      There are two arguments: the first is a 1-byte unsigned int giving
-+      the number of bytes in the string, and the second is that many
-+      bytes, which are taken literally as the string content.  These are
-+      usually decoded into a str instance using the encoding given to
-+      the Unpickler constructor, or the default, 'ASCII'.  If the
-+      encoding given was 'bytes', however, they will be decoded as a
-+      bytes object instead.
-+      """),
-+
-+    # Bytes (protocol 3 only; older protocols don't support bytes at all)
-+
-+    I(name='BINBYTES',
-+      code='B',
-+      arg=bytes4,
-+      stack_before=[],
-+      stack_after=[pybytes],
-+      proto=3,
-+      doc="""Push a Python bytes object.
-+
-+      There are two arguments:  the first is a 4-byte little-endian unsigned int
-+      giving the number of bytes, and the second is that many bytes, which are
-+      taken literally as the bytes content.
-+      """),
-+
-+    I(name='SHORT_BINBYTES',
-+      code='C',
-+      arg=bytes1,
-+      stack_before=[],
-+      stack_after=[pybytes],
-+      proto=3,
-+      doc="""Push a Python bytes object.
-+
-+      There are two arguments:  the first is a 1-byte unsigned int giving
-+      the number of bytes, and the second is that many bytes, which are taken
-+      literally as the string content.
-+      """),
-+
-+    I(name='BINBYTES8',
-+      code='\x8e',
-+      arg=bytes8,
-+      stack_before=[],
-+      stack_after=[pybytes],
-+      proto=4,
-+      doc="""Push a Python bytes object.
-+
-+      There are two arguments:  the first is an 8-byte little-endian unsigned
-+      int giving the number of bytes, and the second is that many bytes,
-+      which are taken literally as the bytes content.
-+      """),
-+
-+    # Ways to spell None.
-+
-+    I(name='NONE',
-+      code='N',
-+      arg=None,
-+      stack_before=[],
-+      stack_after=[pynone],
-+      proto=0,
-+      doc="Push None on the stack."),
-+
-+    # Ways to spell bools, starting with proto 2.  See INT for how this was
-+    # done before proto 2.
-+
-+    I(name='NEWTRUE',
-+      code='\x88',
-+      arg=None,
-+      stack_before=[],
-+      stack_after=[pybool],
-+      proto=2,
-+      doc="""True.
-+
-+      Push True onto the stack."""),
-+
-+    I(name='NEWFALSE',
-+      code='\x89',
-+      arg=None,
-+      stack_before=[],
-+      stack_after=[pybool],
-+      proto=2,
-+      doc="""False.
-+
-+      Push False onto the stack."""),
-+
-+    # Ways to spell Unicode strings.
-+
-+    I(name='UNICODE',
-+      code='V',
-+      arg=unicodestringnl,
-+      stack_before=[],
-+      stack_after=[pyunicode],
-+      proto=0,  # this may be pure-text, but it's a later addition
-+      doc="""Push a Python Unicode string object.
-+
-+      The argument is a raw-unicode-escape encoding of a Unicode string,
-+      and so may contain embedded escape sequences.  The argument extends
-+      until the next newline character.
-+      """),
-+
-+    I(name='SHORT_BINUNICODE',
-+      code='\x8c',
-+      arg=unicodestring1,
-+      stack_before=[],
-+      stack_after=[pyunicode],
-+      proto=4,
-+      doc="""Push a Python Unicode string object.
-+
-+      There are two arguments:  the first is a 1-byte unsigned int
-+      giving the number of bytes in the string.  The second is that many
-+      bytes, and is the UTF-8 encoding of the Unicode string.
-+      """),
-+
-+    I(name='BINUNICODE',
-+      code='X',
-+      arg=unicodestring4,
-+      stack_before=[],
-+      stack_after=[pyunicode],
-+      proto=1,
-+      doc="""Push a Python Unicode string object.
-+
-+      There are two arguments:  the first is a 4-byte little-endian unsigned int
-+      giving the number of bytes in the string.  The second is that many
-+      bytes, and is the UTF-8 encoding of the Unicode string.
-+      """),
-+
-+    I(name='BINUNICODE8',
-+      code='\x8d',
-+      arg=unicodestring8,
-+      stack_before=[],
-+      stack_after=[pyunicode],
-+      proto=4,
-+      doc="""Push a Python Unicode string object.
-+
-+      There are two arguments:  the first is an 8-byte little-endian unsigned
-+      int giving the number of bytes in the string.  The second is that many
-+      bytes, and is the UTF-8 encoding of the Unicode string.
-+      """),
-+
-+    # Ways to spell floats.
-+
-+    I(name='FLOAT',
-+      code='F',
-+      arg=floatnl,
-+      stack_before=[],
-+      stack_after=[pyfloat],
-+      proto=0,
-+      doc="""Newline-terminated decimal float literal.
-+
-+      The argument is repr(a_float), and in general requires 17 significant
-+      digits for roundtrip conversion to be an identity (this is so for
-+      IEEE-754 double precision values, which is what Python float maps to
-+      on most boxes).
-+
-+      In general, FLOAT cannot be used to transport infinities, NaNs, or
-+      minus zero across boxes (or even on a single box, if the platform C
-+      library can't read the strings it produces for such things -- Windows
-+      is like that), but may do less damage than BINFLOAT on boxes with
-+      greater precision or dynamic range than IEEE-754 double.
-+      """),
-+
-+    I(name='BINFLOAT',
-+      code='G',
-+      arg=float8,
-+      stack_before=[],
-+      stack_after=[pyfloat],
-+      proto=1,
-+      doc="""Float stored in binary form, with 8 bytes of data.
-+
-+      This generally requires less than half the space of FLOAT encoding.
-+      In general, BINFLOAT cannot be used to transport infinities, NaNs, or
-+      minus zero, raises an exception if the exponent exceeds the range of
-+      an IEEE-754 double, and retains no more than 53 bits of precision (if
-+      there are more than that, "add a half and chop" rounding is used to
-+      cut it back to 53 significant bits).
-+      """),
-+
-+    # Ways to build lists.
-+
-+    I(name='EMPTY_LIST',
-+      code=']',
-+      arg=None,
-+      stack_before=[],
-+      stack_after=[pylist],
-+      proto=1,
-+      doc="Push an empty list."),
-+
-+    I(name='APPEND',
-+      code='a',
-+      arg=None,
-+      stack_before=[pylist, anyobject],
-+      stack_after=[pylist],
-+      proto=0,
-+      doc="""Append an object to a list.
-+
-+      Stack before:  ... pylist anyobject
-+      Stack after:   ... pylist+[anyobject]
-+
-+      although pylist is really extended in-place.
-+      """),
-+
-+    I(name='APPENDS',
-+      code='e',
-+      arg=None,
-+      stack_before=[pylist, markobject, stackslice],
-+      stack_after=[pylist],
-+      proto=1,
-+      doc="""Extend a list by a slice of stack objects.
-+
-+      Stack before:  ... pylist markobject stackslice
-+      Stack after:   ... pylist+stackslice
-+
-+      although pylist is really extended in-place.
-+      """),
-+
-+    I(name='LIST',
-+      code='l',
-+      arg=None,
-+      stack_before=[markobject, stackslice],
-+      stack_after=[pylist],
-+      proto=0,
-+      doc="""Build a list out of the topmost stack slice, after markobject.
-+
-+      All the stack entries following the topmost markobject are placed into
-+      a single Python list, which single list object replaces all of the
-+      stack from the topmost markobject onward.  For example,
-+
-+      Stack before: ... markobject 1 2 3 'abc'
-+      Stack after:  ... [1, 2, 3, 'abc']
-+      """),
-+
-+    # Ways to build tuples.
-+
-+    I(name='EMPTY_TUPLE',
-+      code=')',
-+      arg=None,
-+      stack_before=[],
-+      stack_after=[pytuple],
-+      proto=1,
-+      doc="Push an empty tuple."),
-+
-+    I(name='TUPLE',
-+      code='t',
-+      arg=None,
-+      stack_before=[markobject, stackslice],
-+      stack_after=[pytuple],
-+      proto=0,
-+      doc="""Build a tuple out of the topmost stack slice, after markobject.
-+
-+      All the stack entries following the topmost markobject are placed into
-+      a single Python tuple, which single tuple object replaces all of the
-+      stack from the topmost markobject onward.  For example,
-+
-+      Stack before: ... markobject 1 2 3 'abc'
-+      Stack after:  ... (1, 2, 3, 'abc')
-+      """),
-+
-+    I(name='TUPLE1',
-+      code='\x85',
-+      arg=None,
-+      stack_before=[anyobject],
-+      stack_after=[pytuple],
-+      proto=2,
-+      doc="""Build a one-tuple out of the topmost item on the stack.
-+
-+      This code pops one value off the stack and pushes a tuple of
-+      length 1 whose one item is that value back onto it.  In other
-+      words:
-+
-+          stack[-1] = tuple(stack[-1:])
-+      """),
-+
-+    I(name='TUPLE2',
-+      code='\x86',
-+      arg=None,
-+      stack_before=[anyobject, anyobject],
-+      stack_after=[pytuple],
-+      proto=2,
-+      doc="""Build a two-tuple out of the top two items on the stack.
-+
-+      This code pops two values off the stack and pushes a tuple of
-+      length 2 whose items are those values back onto it.  In other
-+      words:
-+
-+          stack[-2:] = [tuple(stack[-2:])]
-+      """),
-+
-+    I(name='TUPLE3',
-+      code='\x87',
-+      arg=None,
-+      stack_before=[anyobject, anyobject, anyobject],
-+      stack_after=[pytuple],
-+      proto=2,
-+      doc="""Build a three-tuple out of the top three items on the stack.
-+
-+      This code pops three values off the stack and pushes a tuple of
-+      length 3 whose items are those values back onto it.  In other
-+      words:
-+
-+          stack[-3:] = [tuple(stack[-3:])]
-+      """),
-+
-+    # Ways to build dicts.
-+
-+    I(name='EMPTY_DICT',
-+      code='}',
-+      arg=None,
-+      stack_before=[],
-+      stack_after=[pydict],
-+      proto=1,
-+      doc="Push an empty dict."),
-+
-+    I(name='DICT',
-+      code='d',
-+      arg=None,
-+      stack_before=[markobject, stackslice],
-+      stack_after=[pydict],
-+      proto=0,
-+      doc="""Build a dict out of the topmost stack slice, after markobject.
-+
-+      All the stack entries following the topmost markobject are placed into
-+      a single Python dict, which single dict object replaces all of the
-+      stack from the topmost markobject onward.  The stack slice alternates
-+      key, value, key, value, ....  For example,
-+
-+      Stack before: ... markobject 1 2 3 'abc'
-+      Stack after:  ... {1: 2, 3: 'abc'}
-+      """),
-+
-+    I(name='SETITEM',
-+      code='s',
-+      arg=None,
-+      stack_before=[pydict, anyobject, anyobject],
-+      stack_after=[pydict],
-+      proto=0,
-+      doc="""Add a key+value pair to an existing dict.
-+
-+      Stack before:  ... pydict key value
-+      Stack after:   ... pydict
-+
-+      where pydict has been modified via pydict[key] = value.
-+      """),
-+
-+    I(name='SETITEMS',
-+      code='u',
-+      arg=None,
-+      stack_before=[pydict, markobject, stackslice],
-+      stack_after=[pydict],
-+      proto=1,
-+      doc="""Add an arbitrary number of key+value pairs to an existing dict.
-+
-+      The slice of the stack following the topmost markobject is taken as
-+      an alternating sequence of keys and values, added to the dict
-+      immediately under the topmost markobject.  Everything at and after the
-+      topmost markobject is popped, leaving the mutated dict at the top
-+      of the stack.
-+
-+      Stack before:  ... pydict markobject key_1 value_1 ... key_n value_n
-+      Stack after:   ... pydict
-+
-+      where pydict has been modified via pydict[key_i] = value_i for i in
-+      1, 2, ..., n, and in that order.
-+      """),
-+
-+    # Ways to build sets
-+
-+    I(name='EMPTY_SET',
-+      code='\x8f',
-+      arg=None,
-+      stack_before=[],
-+      stack_after=[pyset],
-+      proto=4,
-+      doc="Push an empty set."),
-+
-+    I(name='ADDITEMS',
-+      code='\x90',
-+      arg=None,
-+      stack_before=[pyset, markobject, stackslice],
-+      stack_after=[pyset],
-+      proto=4,
-+      doc="""Add an arbitrary number of items to an existing set.
-+
-+      The slice of the stack following the topmost markobject is taken as
-+      a sequence of items, added to the set immediately under the topmost
-+      markobject.  Everything at and after the topmost markobject is popped,
-+      leaving the mutated set at the top of the stack.
-+
-+      Stack before:  ... pyset markobject item_1 ... item_n
-+      Stack after:   ... pyset
-+
-+      where pyset has been modified via pyset.add(item_i) for i in
-+      1, 2, ..., n, and in that order.
-+      """),
-+
-+    # Way to build frozensets
-+
-+    I(name='FROZENSET',
-+      code='\x91',
-+      arg=None,
-+      stack_before=[markobject, stackslice],
-+      stack_after=[pyfrozenset],
-+      proto=4,
-+      doc="""Build a frozenset out of the topmost slice, after markobject.
-+
-+      All the stack entries following the topmost markobject are placed into
-+      a single Python frozenset, which single frozenset object replaces all
-+      of the stack from the topmost markobject onward.  For example,
-+
-+      Stack before: ... markobject 1 2 3
-+      Stack after:  ... frozenset({1, 2, 3})
-+      """),
-+
-+    # Stack manipulation.
-+
-+    I(name='POP',
-+      code='0',
-+      arg=None,
-+      stack_before=[anyobject],
-+      stack_after=[],
-+      proto=0,
-+      doc="Discard the top stack item, shrinking the stack by one item."),
-+
-+    I(name='DUP',
-+      code='2',
-+      arg=None,
-+      stack_before=[anyobject],
-+      stack_after=[anyobject, anyobject],
-+      proto=0,
-+      doc="Push the top stack item onto the stack again, duplicating it."),
-+
-+    I(name='MARK',
-+      code='(',
-+      arg=None,
-+      stack_before=[],
-+      stack_after=[markobject],
-+      proto=0,
-+      doc="""Push markobject onto the stack.
-+
-+      markobject is a unique object, used by other opcodes to identify a
-+      region of the stack containing a variable number of objects for them
-+      to work on.  See markobject.doc for more detail.
-+      """),
-+
-+    I(name='POP_MARK',
-+      code='1',
-+      arg=None,
-+      stack_before=[markobject, stackslice],
-+      stack_after=[],
-+      proto=1,
-+      doc="""Pop all the stack objects at and above the topmost markobject.
-+
-+      When an opcode using a variable number of stack objects is done,
-+      POP_MARK is used to remove those objects, and to remove the markobject
-+      that delimited their starting position on the stack.
-+      """),
-+
-+    # Memo manipulation.  There are really only two operations (get and put),
-+    # each in all-text, "short binary", and "long binary" flavors.
-+
-+    I(name='GET',
-+      code='g',
-+      arg=decimalnl_short,
-+      stack_before=[],
-+      stack_after=[anyobject],
-+      proto=0,
-+      doc="""Read an object from the memo and push it on the stack.
-+
-+      The index of the memo object to push is given by the newline-terminated
-+      decimal string following.  BINGET and LONG_BINGET are space-optimized
-+      versions.
-+      """),
-+
-+    I(name='BINGET',
-+      code='h',
-+      arg=uint1,
-+      stack_before=[],
-+      stack_after=[anyobject],
-+      proto=1,
-+      doc="""Read an object from the memo and push it on the stack.
-+
-+      The index of the memo object to push is given by the 1-byte unsigned
-+      integer following.
-+      """),
-+
-+    I(name='LONG_BINGET',
-+      code='j',
-+      arg=uint4,
-+      stack_before=[],
-+      stack_after=[anyobject],
-+      proto=1,
-+      doc="""Read an object from the memo and push it on the stack.
-+
-+      The index of the memo object to push is given by the 4-byte unsigned
-+      little-endian integer following.
-+      """),
-+
-+    I(name='PUT',
-+      code='p',
-+      arg=decimalnl_short,
-+      stack_before=[],
-+      stack_after=[],
-+      proto=0,
-+      doc="""Store the stack top into the memo.  The stack is not popped.
-+
-+      The index of the memo location to write into is given by the newline-
-+      terminated decimal string following.  BINPUT and LONG_BINPUT are
-+      space-optimized versions.
-+      """),
-+
-+    I(name='BINPUT',
-+      code='q',
-+      arg=uint1,
-+      stack_before=[],
-+      stack_after=[],
-+      proto=1,
-+      doc="""Store the stack top into the memo.  The stack is not popped.
-+
-+      The index of the memo location to write into is given by the 1-byte
-+      unsigned integer following.
-+      """),
-+
-+    I(name='LONG_BINPUT',
-+      code='r',
-+      arg=uint4,
-+      stack_before=[],
-+      stack_after=[],
-+      proto=1,
-+      doc="""Store the stack top into the memo.  The stack is not popped.
-+
-+      The index of the memo location to write into is given by the 4-byte
-+      unsigned little-endian integer following.
-+      """),
-+
-+    I(name='MEMOIZE',
-+      code='\x94',
-+      arg=None,
-+      stack_before=[anyobject],
-+      stack_after=[anyobject],
-+      proto=4,
-+      doc="""Store the stack top into the memo.  The stack is not popped.
-+
-+      The index of the memo location to write is the number of
-+      elements currently present in the memo.
-+      """),
-+
-+    # Access the extension registry (predefined objects).  Akin to the GET
-+    # family.
-+
-+    I(name='EXT1',
-+      code='\x82',
-+      arg=uint1,
-+      stack_before=[],
-+      stack_after=[anyobject],
-+      proto=2,
-+      doc="""Extension code.
-+
-+      This code and the similar EXT2 and EXT4 allow using a registry
-+      of popular objects that are pickled by name, typically classes.
-+      It is envisioned that through a global negotiation and
-+      registration process, third parties can set up a mapping between
-+      ints and object names.
-+
-+      In order to guarantee pickle interchangeability, the extension
-+      code registry ought to be global, although a range of codes may
-+      be reserved for private use.
-+
-+      EXT1 has a 1-byte integer argument.  This is used to index into the
-+      extension registry, and the object at that index is pushed on the stack.
-+      """),
-+
-+    I(name='EXT2',
-+      code='\x83',
-+      arg=uint2,
-+      stack_before=[],
-+      stack_after=[anyobject],
-+      proto=2,
-+      doc="""Extension code.
-+
-+      See EXT1.  EXT2 has a two-byte integer argument.
-+      """),
-+
-+    I(name='EXT4',
-+      code='\x84',
-+      arg=int4,
-+      stack_before=[],
-+      stack_after=[anyobject],
-+      proto=2,
-+      doc="""Extension code.
-+
-+      See EXT1.  EXT4 has a four-byte integer argument.
-+      """),
-+
-+    # Push a class object, or module function, on the stack, via its module
-+    # and name.
-+
-+    I(name='GLOBAL',
-+      code='c',
-+      arg=stringnl_noescape_pair,
-+      stack_before=[],
-+      stack_after=[anyobject],
-+      proto=0,
-+      doc="""Push a global object (module.attr) on the stack.
-+
-+      Two newline-terminated strings follow the GLOBAL opcode.  The first is
-+      taken as a module name, and the second as a class name.  The class
-+      object module.class is pushed on the stack.  More accurately, the
-+      object returned by self.find_class(module, class) is pushed on the
-+      stack, so unpickling subclasses can override this form of lookup.
-+      """),
-+
-+    I(name='STACK_GLOBAL',
-+      code='\x93',
-+      arg=None,
-+      stack_before=[pyunicode, pyunicode],
-+      stack_after=[anyobject],
-+      proto=4,
-+      doc="""Push a global object (module.attr) on the stack.
-+
-+      Unlike GLOBAL, the module and attribute names are not embedded in the
-+      opcode stream; they are taken from the two topmost (Unicode string)
-+      stack items, as stack_before indicates.
-+      """),
-+
-+    # Ways to build objects of classes pickle doesn't know about directly
-+    # (user-defined classes).  I despair of documenting this accurately
-+    # and comprehensibly -- you really have to read the pickle code to
-+    # find all the special cases.
-+
-+    I(name='REDUCE',
-+      code='R',
-+      arg=None,
-+      stack_before=[anyobject, anyobject],
-+      stack_after=[anyobject],
-+      proto=0,
-+      doc="""Push an object built from a callable and an argument tuple.
-+
-+      The opcode is named to remind of the __reduce__() method.
-+
-+      Stack before: ... callable pytuple
-+      Stack after:  ... callable(*pytuple)
-+
-+      The callable and the argument tuple are the first two items returned
-+      by a __reduce__ method.  Applying the callable to the argtuple is
-+      supposed to reproduce the original object, or at least get it started.
-+      If the __reduce__ method returns a 3-tuple, the last component is an
-+      argument to be passed to the object's __setstate__, and then the REDUCE
-+      opcode is followed by code to create setstate's argument, and then a
-+      BUILD opcode to apply  __setstate__ to that argument.
-+
-+      If not isinstance(callable, type), REDUCE complains unless the
-+      callable has been registered with the copyreg module's
-+      safe_constructors dict, or the callable has a magic
-+      '__safe_for_unpickling__' attribute with a true value.  I'm not sure
-+      why it does this, but I've sure seen this complaint often enough when
-+      I didn't want to <wink>.
-+      """),
-+
-+    I(name='BUILD',
-+      code='b',
-+      arg=None,
-+      stack_before=[anyobject, anyobject],
-+      stack_after=[anyobject],
-+      proto=0,
-+      doc="""Finish building an object, via __setstate__ or dict update.
-+
-+      Stack before: ... anyobject argument
-+      Stack after:  ... anyobject
-+
-+      where anyobject may have been mutated, as follows:
-+
-+      If the object has a __setstate__ method,
-+
-+          anyobject.__setstate__(argument)
-+
-+      is called.
-+
-+      Else the argument must be a dict, the object must have a __dict__, and
-+      the object is updated via
-+
-+          anyobject.__dict__.update(argument)
-+      """),
-+
-+    I(name='INST',
-+      code='i',
-+      arg=stringnl_noescape_pair,
-+      stack_before=[markobject, stackslice],
-+      stack_after=[anyobject],
-+      proto=0,
-+      doc="""Build a class instance.
-+
-+      This is the protocol 0 version of protocol 1's OBJ opcode.
-+      INST is followed by two newline-terminated strings, giving a
-+      module and class name, just as for the GLOBAL opcode (and see
-+      GLOBAL for more details about that).  self.find_class(module, name)
-+      is used to get a class object.
-+
-+      In addition, all the objects on the stack following the topmost
-+      markobject are gathered into a tuple and popped (along with the
-+      topmost markobject), just as for the TUPLE opcode.
-+
-+      Now it gets complicated.  If all of these are true:
-+
-+        + The argtuple is empty (markobject was at the top of the stack
-+          at the start).
-+
-+        + The class object does not have a __getinitargs__ attribute.
-+
-+      then we want to create an old-style class instance without invoking
-+      its __init__() method (pickle has waffled on this over the years; not
-+      calling __init__() is current wisdom).  In this case, an instance of
-+      an old-style dummy class is created, and then we try to rebind its
-+      __class__ attribute to the desired class object.  If this succeeds,
-+      the new instance object is pushed on the stack, and we're done.
-+
-+      Else (the argtuple is not empty, it's not an old-style class object,
-+      or the class object does have a __getinitargs__ attribute), the code
-+      first insists that the class object have a __safe_for_unpickling__
-+      attribute.  Unlike as for the __safe_for_unpickling__ check in REDUCE,
-+      it doesn't matter whether this attribute has a true or false value, it
-+      only matters whether it exists (XXX this is a bug).  If
-+      __safe_for_unpickling__ doesn't exist, UnpicklingError is raised.
-+
-+      Else (the class object does have a __safe_for_unpickling__ attr),
-+      the class object obtained from INST's arguments is applied to the
-+      argtuple obtained from the stack, and the resulting instance object
-+      is pushed on the stack.
-+
-+      NOTE:  checks for __safe_for_unpickling__ went away in Python 2.3.
-+      NOTE:  the distinction between old-style and new-style classes does
-+             not make sense in Python 3.
-+      """),
-+
-+    I(name='OBJ',
-+      code='o',
-+      arg=None,
-+      stack_before=[markobject, anyobject, stackslice],
-+      stack_after=[anyobject],
-+      proto=1,
-+      doc="""Build a class instance.
-+
-+      This is the protocol 1 version of protocol 0's INST opcode, and is
-+      very much like it.  The major difference is that the class object
-+      is taken off the stack, allowing it to be retrieved from the memo
-+      repeatedly if several instances of the same class are created.  This
-+      can be much more efficient (in both time and space) than repeatedly
-+      embedding the module and class names in INST opcodes.
-+
-+      Unlike INST, OBJ takes no arguments from the opcode stream.  Instead
-+      the class object is taken off the stack, immediately above the
-+      topmost markobject:
-+
-+      Stack before: ... markobject classobject stackslice
-+      Stack after:  ... new_instance_object
-+
-+      As for INST, the remainder of the stack above the markobject is
-+      gathered into an argument tuple, and then the logic seems identical,
-+      except that no __safe_for_unpickling__ check is done (XXX this is
-+      a bug).  See INST for the gory details.
-+
-+      NOTE:  In Python 2.3, INST and OBJ are identical except for how they
-+      get the class object.  That was always the intent; the implementations
-+      had diverged for accidental reasons.
-+      """),
-+
-+    I(name='NEWOBJ',
-+      code='\x81',
-+      arg=None,
-+      stack_before=[anyobject, anyobject],
-+      stack_after=[anyobject],
-+      proto=2,
-+      doc="""Build an object instance.
-+
-+      The stack before should be thought of as containing a class
-+      object followed by an argument tuple (the tuple being the stack
-+      top).  Call these cls and args.  They are popped off the stack,
-+      and the value returned by cls.__new__(cls, *args) is pushed back
-+      onto the stack.
-+      """),
-+
-+    I(name='NEWOBJ_EX',
-+      code='\x92',
-+      arg=None,
-+      stack_before=[anyobject, anyobject, anyobject],
-+      stack_after=[anyobject],
-+      proto=4,
-+      doc="""Build an object instance.
-+
-+      The stack before should be thought of as containing a class
-+      object followed by an argument tuple and by a keyword argument dict
-+      (the dict being the stack top).  Call these cls, args, and kwargs.
-+      They are popped off the stack, and the value returned by
-+      cls.__new__(cls, *args, **kwargs) is pushed back onto the stack.
-+      """),
-+
-+    # Machine control.
-+
-+    I(name='PROTO',
-+      code='\x80',
-+      arg=uint1,
-+      stack_before=[],
-+      stack_after=[],
-+      proto=2,
-+      doc="""Protocol version indicator.
-+
-+      For protocol 2 and above, a pickle must start with this opcode.
-+      The argument is the protocol version, an int in range(2, 256).
-+      """),
-+
-+    I(name='STOP',
-+      code='.',
-+      arg=None,
-+      stack_before=[anyobject],
-+      stack_after=[],
-+      proto=0,
-+      doc="""Stop the unpickling machine.
-+
-+      Every pickle ends with this opcode.  The object at the top of the stack
-+      is popped, and that's the result of unpickling.  The stack should be
-+      empty then.
-+      """),
-+
-+    # Framing support.
-+
-+    I(name='FRAME',
-+      code='\x95',
-+      arg=uint8,
-+      stack_before=[],
-+      stack_after=[],
-+      proto=4,
-+      doc="""Indicate the beginning of a new frame.
-+
-+      The unpickler may use this opcode to safely prefetch data from its
-+      underlying stream.
-+      """),
-+
-+    # Ways to deal with persistent IDs.
-+
-+    I(name='PERSID',
-+      code='P',
-+      arg=stringnl_noescape,
-+      stack_before=[],
-+      stack_after=[anyobject],
-+      proto=0,
-+      doc="""Push an object identified by a persistent ID.
-+
-+      The pickle module doesn't define what a persistent ID means.  PERSID's
-+      argument is a newline-terminated str-style (no embedded escapes, no
-+      bracketing quote characters) string, which *is* "the persistent ID".
-+      The unpickler passes this string to self.persistent_load().  Whatever
-+      object that returns is pushed on the stack.  There is no implementation
-+      of persistent_load() in Python's unpickler:  it must be supplied by an
-+      unpickler subclass.
-+      """),
-+
-+    I(name='BINPERSID',
-+      code='Q',
-+      arg=None,
-+      stack_before=[anyobject],
-+      stack_after=[anyobject],
-+      proto=1,
-+      doc="""Push an object identified by a persistent ID.
-+
-+      Like PERSID, except the persistent ID is popped off the stack (instead
-+      of being a string embedded in the opcode bytestream).  The persistent
-+      ID is passed to self.persistent_load(), and whatever object that
-+      returns is pushed on the stack.  See PERSID for more detail.
-+      """),
-+]
-+del I
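-+
-+# A minimal sketch of the memo opcodes documented above (assumes CPython's
-+# pickler and protocol 0): an object reachable twice is stored once with PUT
-+# and fetched back with GET (genops() is defined further below).
-+#
-+#     >>> import pickle
-+#     >>> shared = [1]
-+#     >>> names = [op.name for op, arg, pos in genops(pickle.dumps([shared, shared], 0))]
-+#     >>> 'PUT' in names and 'GET' in names
-+#     True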
-+
-+# Verify uniqueness of .name and .code members.
-+name2i = {}
-+code2i = {}
-+
-+for i, d in enumerate(opcodes):
-+    if d.name in name2i:
-+        raise ValueError("repeated name %r at indices %d and %d" %
-+                         (d.name, name2i[d.name], i))
-+    if d.code in code2i:
-+        raise ValueError("repeated code %r at indices %d and %d" %
-+                         (d.code, code2i[d.code], i))
-+
-+    name2i[d.name] = i
-+    code2i[d.code] = i
-+
-+del name2i, code2i, i, d
-+
-+##############################################################################
-+# Build a code2op dict, mapping opcode characters to OpcodeInfo records.
-+# Also ensure we've got the same stuff as pickle.py, although the
-+# introspection here is dicey.
-+
-+code2op = {}
-+for d in opcodes:
-+    code2op[d.code] = d
-+del d
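-+
-+# A quick sketch of the mapping just built:
-+#
-+#     >>> code2op['.'].name
-+#     'STOP'
-+#     >>> code2op['('].proto, code2op['('].arg is None
-+#     (0, True)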
-+
-+def assure_pickle_consistency(verbose=False):
-+
-+    copy = code2op.copy()
-+    for name in pickle.__all__:
-+        if not re.match("[A-Z][A-Z0-9_]+$", name):
-+            if verbose:
-+                print("skipping %r: it doesn't look like an opcode name" % name)
-+            continue
-+        picklecode = getattr(pickle, name)
-+        if not isinstance(picklecode, bytes) or len(picklecode) != 1:
-+            if verbose:
-+                print(("skipping %r: value %r doesn't look like a pickle "
-+                       "code" % (name, picklecode)))
-+            continue
-+        picklecode = picklecode.decode("latin-1")
-+        if picklecode in copy:
-+            if verbose:
-+                print("checking name %r w/ code %r for consistency" % (
-+                      name, picklecode))
-+            d = copy[picklecode]
-+            if d.name != name:
-+                raise ValueError("for pickle code %r, pickle.py uses name %r "
-+                                 "but we're using name %r" % (picklecode,
-+                                                              name,
-+                                                              d.name))
-+            # Forget this one.  Any left over in copy at the end are a problem
-+            # of a different kind.
-+            del copy[picklecode]
-+        else:
-+            raise ValueError("pickle.py appears to have a pickle opcode with "
-+                             "name %r and code %r, but we don't" %
-+                             (name, picklecode))
-+    if copy:
-+        msg = ["we appear to have pickle opcodes that pickle.py doesn't have:"]
-+        for code, d in copy.items():
-+            msg.append("    name %r with code %r" % (d.name, code))
-+        raise ValueError("\n".join(msg))
-+
-+assure_pickle_consistency()
-+del assure_pickle_consistency
-+
-+##############################################################################
-+# A pickle opcode generator.
-+
-+def _genops(data, yield_end_pos=False):
-+    if isinstance(data, bytes_types):
-+        data = io.BytesIO(data)
-+
-+    if hasattr(data, "tell"):
-+        getpos = data.tell
-+    else:
-+        getpos = lambda: None
-+
-+    while True:
-+        pos = getpos()
-+        code = data.read(1)
-+        opcode = code2op.get(code.decode("latin-1"))
-+        if opcode is None:
-+            if code == b"":
-+                raise ValueError("pickle exhausted before seeing STOP")
-+            else:
-+                raise ValueError("at position %s, opcode %r unknown" % (
-+                                 "<unknown>" if pos is None else pos,
-+                                 code))
-+        if opcode.arg is None:
-+            arg = None
-+        else:
-+            arg = opcode.arg.reader(data)
-+        if yield_end_pos:
-+            yield opcode, arg, pos, getpos()
-+        else:
-+            yield opcode, arg, pos
-+        if code == b'.':
-+            assert opcode.name == 'STOP'
-+            break
-+
-+def genops(pickle):
-+    """Generate all the opcodes in a pickle.
-+
-+    'pickle' is a file-like object, or string, containing the pickle.
-+
-+    Each opcode in the pickle is generated, from the current pickle position,
-+    stopping after a STOP opcode is delivered.  A triple is generated for
-+    each opcode:
-+
-+        opcode, arg, pos
-+
-+    opcode is an OpcodeInfo record, describing the current opcode.
-+
-+    If the opcode has an argument embedded in the pickle, arg is its decoded
-+    value, as a Python object.  If the opcode doesn't have an argument, arg
-+    is None.
-+
-+    If the pickle has a tell() method, pos was the value of pickle.tell()
-+    before reading the current opcode.  If the pickle is a bytes object,
-+    it's wrapped in a BytesIO object, and the latter's tell() result is
-+    used.  Else (the pickle doesn't have a tell(), and it's not obvious how
-+    to query its current position) pos is None.
-+    """
-+    return _genops(pickle)
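-+
-+# A minimal usage sketch; b"I42\n." is a hand-written protocol-0 pickle of
-+# the int 42 (INT "42", then STOP):
-+#
-+#     >>> for opcode, arg, pos in genops(b"I42\n."):
-+#     ...     print(pos, opcode.name, arg)
-+#     0 INT 42
-+#     4 STOP None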
-+
-+##############################################################################
-+# A pickle optimizer.
-+
-+def optimize(p):
-+    'Optimize a pickle string by removing unused PUT opcodes'
-+    not_a_put = object()
-+    gets = { not_a_put }    # set of args used by a GET opcode
-+    opcodes = []            # (startpos, stoppos, putid)
-+    proto = 0
-+    for opcode, arg, pos, end_pos in _genops(p, yield_end_pos=True):
-+        if 'PUT' in opcode.name:
-+            opcodes.append((pos, end_pos, arg))
-+        elif 'FRAME' in opcode.name:
-+            pass
-+        else:
-+            if 'GET' in opcode.name:
-+                gets.add(arg)
-+            elif opcode.name == 'PROTO':
-+                assert pos == 0, pos
-+                proto = arg
-+            opcodes.append((pos, end_pos, not_a_put))
-+
-+    # Copy the opcodes except for PUTS without a corresponding GET
-+    out = io.BytesIO()
-+    opcodes = iter(opcodes)
-+    if proto >= 2:
-+        # Write the PROTO header before any framing
-+        start, stop, _ = next(opcodes)
-+        out.write(p[start:stop])
-+    buf = pickle._Framer(out.write)
-+    if proto >= 4:
-+        buf.start_framing()
-+    for start, stop, putid in opcodes:
-+        if putid in gets:
-+            buf.commit_frame()
-+            buf.write(p[start:stop])
-+    if proto >= 4:
-+        buf.end_framing()
-+    return out.getvalue()
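
A short sketch of the effect described above, under the same
zodbpickle.pickletools_34 import assumption: PUT opcodes whose memo slots are
never read back by a GET are dropped, so the optimized pickle is never larger
and still unpickles to an equal object.

    import pickle
    from zodbpickle import pickletools_34 as pickletools

    data = pickle.dumps({'a': [1, 2, 3], 'b': (4, 5)}, protocol=2)
    smaller = pickletools.optimize(data)   # unused PUTs removed
    assert pickle.loads(smaller) == pickle.loads(data)
    assert len(smaller) <= len(data)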
-+
-+##############################################################################
-+# A symbolic pickle disassembler.
-+
-+def dis(pickle, out=None, memo=None, indentlevel=4, annotate=0):
-+    """Produce a symbolic disassembly of a pickle.
-+
-+    'pickle' is a file-like object, or string, containing at least one
-+    pickle.  The pickle is disassembled from the current position, through
-+    the first STOP opcode encountered.
-+
-+    Optional arg 'out' is a file-like object to which the disassembly is
-+    printed.  It defaults to sys.stdout.
-+
-+    Optional arg 'memo' is a Python dict, used as the pickle's memo.  It
-+    may be mutated by dis(), if the pickle contains PUT or BINPUT opcodes.
-+    Passing the same memo object to another dis() call then allows disassembly
-+    to proceed across multiple pickles that were all created by the same
-+    pickler with the same memo.  Ordinarily you don't need to worry about this.
-+
-+    Optional arg 'indentlevel' is the number of blanks by which to indent
-+    a new MARK level.  It defaults to 4.
-+
-+    Optional arg 'annotate' if nonzero instructs dis() to add short
-+    description of the opcode on each line of disassembled output.
-+    The value given to 'annotate' must be an integer and is used as a
-+    hint for the column where annotation should start.  The default
-+    value is 0, meaning no annotations.
-+
-+    In addition to printing the disassembly, some sanity checks are made:
-+
-+    + All embedded opcode arguments "make sense".
-+
-+    + Explicit and implicit pop operations have enough items on the stack.
-+
-+    + When an opcode implicitly refers to a markobject, a markobject is
-+      actually on the stack.
-+
-+    + A memo entry isn't referenced before it's defined.
-+
-+    + The markobject isn't stored in the memo.
-+
-+    + A memo entry isn't redefined.
-+    """
-+
-+    # Most of the hair here is for sanity checks, but most of it is needed
-+    # anyway to detect when a protocol 0 POP takes a MARK off the stack
-+    # (which in turn is needed to indent MARK blocks correctly).
-+
-+    stack = []          # crude emulation of unpickler stack
-+    if memo is None:
-+        memo = {}       # crude emulation of unpickler memo
-+    maxproto = -1       # max protocol number seen
-+    markstack = []      # bytecode positions of MARK opcodes
-+    indentchunk = ' ' * indentlevel
-+    errormsg = None
-+    annocol = annotate  # column hint for annotations
-+    for opcode, arg, pos in genops(pickle):
-+        if pos is not None:
-+            print("%5d:" % pos, end=' ', file=out)
-+
-+        line = "%-4s %s%s" % (repr(opcode.code)[1:-1],
-+                              indentchunk * len(markstack),
-+                              opcode.name)
-+
-+        maxproto = max(maxproto, opcode.proto)
-+        before = opcode.stack_before    # don't mutate
-+        after = opcode.stack_after      # don't mutate
-+        numtopop = len(before)
-+
-+        # See whether a MARK should be popped.
-+        markmsg = None
-+        if markobject in before or (opcode.name == "POP" and
-+                                    stack and
-+                                    stack[-1] is markobject):
-+            assert markobject not in after
-+            if __debug__:
-+                if markobject in before:
-+                    assert before[-1] is stackslice
-+            if markstack:
-+                markpos = markstack.pop()
-+                if markpos is None:
-+                    markmsg = "(MARK at unknown opcode offset)"
-+                else:
-+                    markmsg = "(MARK at %d)" % markpos
-+                # Pop everything at and after the topmost markobject.
-+                while stack[-1] is not markobject:
-+                    stack.pop()
-+                stack.pop()
-+                # Stop later code from popping too much.
-+                try:
-+                    numtopop = before.index(markobject)
-+                except ValueError:
-+                    assert opcode.name == "POP"
-+                    numtopop = 0
-+            else:
-+                errormsg = markmsg = "no MARK exists on stack"
-+
-+        # Check for correct memo usage.
-+        if opcode.name in ("PUT", "BINPUT", "LONG_BINPUT", "MEMOIZE"):
-+            if opcode.name == "MEMOIZE":
-+                memo_idx = len(memo)
-+            else:
-+                assert arg is not None
-+                memo_idx = arg
-+            if memo_idx in memo:
-+                errormsg = "memo key %r already defined" % arg
-+            elif not stack:
-+                errormsg = "stack is empty -- can't store into memo"
-+            elif stack[-1] is markobject:
-+                errormsg = "can't store markobject in the memo"
-+            else:
-+                memo[memo_idx] = stack[-1]
-+        elif opcode.name in ("GET", "BINGET", "LONG_BINGET"):
-+            if arg in memo:
-+                assert len(after) == 1
-+                after = [memo[arg]]     # for better stack emulation
-+            else:
-+                errormsg = "memo key %r has never been stored into" % arg
-+
-+        if arg is not None or markmsg:
-+            # make a mild effort to align arguments
-+            line += ' ' * (10 - len(opcode.name))
-+            if arg is not None:
-+                line += ' ' + repr(arg)
-+            if markmsg:
-+                line += ' ' + markmsg
-+        if annotate:
-+            line += ' ' * (annocol - len(line))
-+            # make a mild effort to align annotations
-+            annocol = len(line)
-+            if annocol > 50:
-+                annocol = annotate
-+            line += ' ' + opcode.doc.split('\n', 1)[0]
-+        print(line, file=out)
-+
-+        if errormsg:
-+            # Note that we delayed complaining until the offending opcode
-+            # was printed.
-+            raise ValueError(errormsg)
-+
-+        # Emulate the stack effects.
-+        if len(stack) < numtopop:
-+            raise ValueError("tries to pop %d items from stack with "
-+                             "only %d items" % (numtopop, len(stack)))
-+        if numtopop:
-+            del stack[-numtopop:]
-+        if markobject in after:
-+            assert markobject not in before
-+            markstack.append(pos)
-+
-+        stack.extend(after)
-+
-+    print("highest protocol among opcodes =", maxproto, file=out)
-+    if stack:
-+        raise ValueError("stack not empty after STOP: %r" % stack)
-+
-+# For use in the doctest, simply as an example of a class to pickle.
-+class _Example:
-+    def __init__(self, value):
-+        self.value = value
-+
-+_dis_test = r"""
-+>>> import pickle
-+>>> x = [1, 2, (3, 4), {b'abc': "def"}]
-+>>> pkl0 = pickle.dumps(x, 0)
-+>>> dis(pkl0)
-+    0: (    MARK
-+    1: l        LIST       (MARK at 0)
-+    2: p    PUT        0
-+    5: L    LONG       1
-+    9: a    APPEND
-+   10: L    LONG       2
-+   14: a    APPEND
-+   15: (    MARK
-+   16: L        LONG       3
-+   20: L        LONG       4
-+   24: t        TUPLE      (MARK at 15)
-+   25: p    PUT        1
-+   28: a    APPEND
-+   29: (    MARK
-+   30: d        DICT       (MARK at 29)
-+   31: p    PUT        2
-+   34: c    GLOBAL     '_codecs encode'
-+   50: p    PUT        3
-+   53: (    MARK
-+   54: V        UNICODE    'abc'
-+   59: p        PUT        4
-+   62: V        UNICODE    'latin1'
-+   70: p        PUT        5
-+   73: t        TUPLE      (MARK at 53)
-+   74: p    PUT        6
-+   77: R    REDUCE
-+   78: p    PUT        7
-+   81: V    UNICODE    'def'
-+   86: p    PUT        8
-+   89: s    SETITEM
-+   90: a    APPEND
-+   91: .    STOP
-+highest protocol among opcodes = 0
-+
-+Try again with a "binary" pickle.
-+
-+>>> pkl1 = pickle.dumps(x, 1)
-+>>> dis(pkl1)
-+    0: ]    EMPTY_LIST
-+    1: q    BINPUT     0
-+    3: (    MARK
-+    4: K        BININT1    1
-+    6: K        BININT1    2
-+    8: (        MARK
-+    9: K            BININT1    3
-+   11: K            BININT1    4
-+   13: t            TUPLE      (MARK at 8)
-+   14: q        BINPUT     1
-+   16: }        EMPTY_DICT
-+   17: q        BINPUT     2
-+   19: c        GLOBAL     '_codecs encode'
-+   35: q        BINPUT     3
-+   37: (        MARK
-+   38: X            BINUNICODE 'abc'
-+   46: q            BINPUT     4
-+   48: X            BINUNICODE 'latin1'
-+   59: q            BINPUT     5
-+   61: t            TUPLE      (MARK at 37)
-+   62: q        BINPUT     6
-+   64: R        REDUCE
-+   65: q        BINPUT     7
-+   67: X        BINUNICODE 'def'
-+   75: q        BINPUT     8
-+   77: s        SETITEM
-+   78: e        APPENDS    (MARK at 3)
-+   79: .    STOP
-+highest protocol among opcodes = 1
-+
-+Exercise the INST/OBJ/BUILD family.
-+
-+>>> import pickletools
-+>>> dis(pickle.dumps(pickletools.dis, 0))
-+    0: c    GLOBAL     'pickletools dis'
-+   17: p    PUT        0
-+   20: .    STOP
-+highest protocol among opcodes = 0
-+
-+>>> from pickletools import _Example
-+>>> x = [_Example(42)] * 2
-+>>> dis(pickle.dumps(x, 0))
-+    0: (    MARK
-+    1: l        LIST       (MARK at 0)
-+    2: p    PUT        0
-+    5: c    GLOBAL     'copy_reg _reconstructor'
-+   30: p    PUT        1
-+   33: (    MARK
-+   34: c        GLOBAL     'pickletools _Example'
-+   56: p        PUT        2
-+   59: c        GLOBAL     '__builtin__ object'
-+   79: p        PUT        3
-+   82: N        NONE
-+   83: t        TUPLE      (MARK at 33)
-+   84: p    PUT        4
-+   87: R    REDUCE
-+   88: p    PUT        5
-+   91: (    MARK
-+   92: d        DICT       (MARK at 91)
-+   93: p    PUT        6
-+   96: V    UNICODE    'value'
-+  103: p    PUT        7
-+  106: L    LONG       42
-+  111: s    SETITEM
-+  112: b    BUILD
-+  113: a    APPEND
-+  114: g    GET        5
-+  117: a    APPEND
-+  118: .    STOP
-+highest protocol among opcodes = 0
-+
-+>>> dis(pickle.dumps(x, 1))
-+    0: ]    EMPTY_LIST
-+    1: q    BINPUT     0
-+    3: (    MARK
-+    4: c        GLOBAL     'copy_reg _reconstructor'
-+   29: q        BINPUT     1
-+   31: (        MARK
-+   32: c            GLOBAL     'pickletools _Example'
-+   54: q            BINPUT     2
-+   56: c            GLOBAL     '__builtin__ object'
-+   76: q            BINPUT     3
-+   78: N            NONE
-+   79: t            TUPLE      (MARK at 31)
-+   80: q        BINPUT     4
-+   82: R        REDUCE
-+   83: q        BINPUT     5
-+   85: }        EMPTY_DICT
-+   86: q        BINPUT     6
-+   88: X        BINUNICODE 'value'
-+   98: q        BINPUT     7
-+  100: K        BININT1    42
-+  102: s        SETITEM
-+  103: b        BUILD
-+  104: h        BINGET     5
-+  106: e        APPENDS    (MARK at 3)
-+  107: .    STOP
-+highest protocol among opcodes = 1
-+
-+Try "the canonical" recursive-object test.
-+
-+>>> L = []
-+>>> T = L,
-+>>> L.append(T)
-+>>> L[0] is T
-+True
-+>>> T[0] is L
-+True
-+>>> L[0][0] is L
-+True
-+>>> T[0][0] is T
-+True
-+>>> dis(pickle.dumps(L, 0))
-+    0: (    MARK
-+    1: l        LIST       (MARK at 0)
-+    2: p    PUT        0
-+    5: (    MARK
-+    6: g        GET        0
-+    9: t        TUPLE      (MARK at 5)
-+   10: p    PUT        1
-+   13: a    APPEND
-+   14: .    STOP
-+highest protocol among opcodes = 0
-+
-+>>> dis(pickle.dumps(L, 1))
-+    0: ]    EMPTY_LIST
-+    1: q    BINPUT     0
-+    3: (    MARK
-+    4: h        BINGET     0
-+    6: t        TUPLE      (MARK at 3)
-+    7: q    BINPUT     1
-+    9: a    APPEND
-+   10: .    STOP
-+highest protocol among opcodes = 1
-+
-+Note that, in the protocol 0 pickle of the recursive tuple, the disassembler
-+has to emulate the stack in order to realize that the POP opcode at 16 gets
-+rid of the MARK at 0.
-+
-+>>> dis(pickle.dumps(T, 0))
-+    0: (    MARK
-+    1: (        MARK
-+    2: l            LIST       (MARK at 1)
-+    3: p        PUT        0
-+    6: (        MARK
-+    7: g            GET        0
-+   10: t            TUPLE      (MARK at 6)
-+   11: p        PUT        1
-+   14: a        APPEND
-+   15: 0        POP
-+   16: 0        POP        (MARK at 0)
-+   17: g    GET        1
-+   20: .    STOP
-+highest protocol among opcodes = 0
-+
-+>>> dis(pickle.dumps(T, 1))
-+    0: (    MARK
-+    1: ]        EMPTY_LIST
-+    2: q        BINPUT     0
-+    4: (        MARK
-+    5: h            BINGET     0
-+    7: t            TUPLE      (MARK at 4)
-+    8: q        BINPUT     1
-+   10: a        APPEND
-+   11: 1        POP_MARK   (MARK at 0)
-+   12: h    BINGET     1
-+   14: .    STOP
-+highest protocol among opcodes = 1
-+
-+Try protocol 2.
-+
-+>>> dis(pickle.dumps(L, 2))
-+    0: \x80 PROTO      2
-+    2: ]    EMPTY_LIST
-+    3: q    BINPUT     0
-+    5: h    BINGET     0
-+    7: \x85 TUPLE1
-+    8: q    BINPUT     1
-+   10: a    APPEND
-+   11: .    STOP
-+highest protocol among opcodes = 2
-+
-+>>> dis(pickle.dumps(T, 2))
-+    0: \x80 PROTO      2
-+    2: ]    EMPTY_LIST
-+    3: q    BINPUT     0
-+    5: h    BINGET     0
-+    7: \x85 TUPLE1
-+    8: q    BINPUT     1
-+   10: a    APPEND
-+   11: 0    POP
-+   12: h    BINGET     1
-+   14: .    STOP
-+highest protocol among opcodes = 2
-+
-+Try protocol 3 with annotations:
-+
-+>>> dis(pickle.dumps(T, 3), annotate=1)
-+    0: \x80 PROTO      3 Protocol version indicator.
-+    2: ]    EMPTY_LIST   Push an empty list.
-+    3: q    BINPUT     0 Store the stack top into the memo.  The stack is not popped.
-+    5: h    BINGET     0 Read an object from the memo and push it on the stack.
-+    7: \x85 TUPLE1       Build a one-tuple out of the topmost item on the stack.
-+    8: q    BINPUT     1 Store the stack top into the memo.  The stack is not popped.
-+   10: a    APPEND       Append an object to a list.
-+   11: 0    POP          Discard the top stack item, shrinking the stack by one item.
-+   12: h    BINGET     1 Read an object from the memo and push it on the stack.
-+   14: .    STOP         Stop the unpickling machine.
-+highest protocol among opcodes = 2
-+
-+"""
-+
-+_memo_test = r"""
-+>>> import pickle
-+>>> import io
-+>>> f = io.BytesIO()
-+>>> p = pickle.Pickler(f, 2)
-+>>> x = [1, 2, 3]
-+>>> p.dump(x)
-+>>> p.dump(x)
-+>>> f.seek(0)
-+0
-+>>> memo = {}
-+>>> dis(f, memo=memo)
-+    0: \x80 PROTO      2
-+    2: ]    EMPTY_LIST
-+    3: q    BINPUT     0
-+    5: (    MARK
-+    6: K        BININT1    1
-+    8: K        BININT1    2
-+   10: K        BININT1    3
-+   12: e        APPENDS    (MARK at 5)
-+   13: .    STOP
-+highest protocol among opcodes = 2
-+>>> dis(f, memo=memo)
-+   14: \x80 PROTO      2
-+   16: h    BINGET     0
-+   18: .    STOP
-+highest protocol among opcodes = 2
-+"""
-+
-+__test__ = {'disassembler_test': _dis_test,
-+            'disassembler_memo_test': _memo_test,
-+           }
-+
-+def _test():
-+    import doctest
-+    return doctest.testmod()
-+
-+if __name__ == "__main__":
-+    import sys, argparse
-+    parser = argparse.ArgumentParser(
-+        description='disassemble one or more pickle files')
-+    parser.add_argument(
-+        'pickle_file', type=argparse.FileType('br'),
-+        nargs='*', help='the pickle file')
-+    parser.add_argument(
-+        '-o', '--output', default=sys.stdout, type=argparse.FileType('w'),
-+        help='the file where the output should be written')
-+    parser.add_argument(
-+        '-m', '--memo', action='store_true',
-+        help='preserve memo between disassemblies')
-+    parser.add_argument(
-+        '-l', '--indentlevel', default=4, type=int,
-+        help='the number of blanks by which to indent a new MARK level')
-+    parser.add_argument(
-+        '-a', '--annotate',  action='store_true',
-+        help='annotate each line with a short opcode description')
-+    parser.add_argument(
-+        '-p', '--preamble', default="==> {name} <==",
-+        help='if more than one pickle file is specified, print this before'
-+        ' each disassembly')
-+    parser.add_argument(
-+        '-t', '--test', action='store_true',
-+        help='run self-test suite')
-+    parser.add_argument(
-+        '-v', action='store_true',
-+        help='run verbosely; only affects self-test run')
-+    args = parser.parse_args()
-+    if args.test:
-+        _test()
-+    else:
-+        annotate = 30 if args.annotate else 0
-+        if not args.pickle_file:
-+            parser.print_help()
-+        elif len(args.pickle_file) == 1:
-+            dis(args.pickle_file[0], args.output, None,
-+                args.indentlevel, annotate)
-+        else:
-+            memo = {} if args.memo else None
-+            for f in args.pickle_file:
-+                preamble = args.preamble.format(name=f.name)
-+                args.output.write(preamble + '\n')
-+                dis(f, args.output, memo, args.indentlevel, annotate)
-diff --git a/src/zodbpickle/slowpickle.py b/src/zodbpickle/slowpickle.py
-index 4976657..bf8fedb 100644
---- a/src/zodbpickle/slowpickle.py
-+++ b/src/zodbpickle/slowpickle.py
-@@ -19,15 +19,24 @@ So this is a rare case where 'import *' is exactly the right thing to do.
- '''
- 
- if sys.version_info[0] >= 3:
--    import zodbpickle.pickle_3 as p
-+    if sys.version_info[1] >= 4:
-+        import zodbpickle.pickle_34 as p
-+    else:
-+        import zodbpickle.pickle_3 as p
-     # undo the replacement with fast versions
-     p.Pickler, p.Unpickler = p._Pickler, p._Pickler
-     p.dump, p.dumps, p.load, p.loads = p._dump, p._dumps, p._load, p._loads
-     del p
--    # pick up all names that the module defines
--    from .pickle_3 import *
--    # do not share the globals with a fast version
--    del sys.modules['zodbpickle.pickle_3']
-+    if sys.version_info[1] >= 4:
-+        # pick up all names that the module defines
-+        from .pickle_34 import *
-+        # do not share the globals with a fast version
-+        del sys.modules['zodbpickle.pickle_34']
-+    else:
-+        # pick up all names that the module defines
-+        from .pickle_3 import *
-+        # do not share the globals with a fast version
-+        del sys.modules['zodbpickle.pickle_3']
- else:
-     # pick up all names that the module defines
-     from .pickle_2 import *
-diff --git a/src/zodbpickle/tests/pickletester_34.py b/src/zodbpickle/tests/pickletester_34.py
-new file mode 100644
-index 0000000..e2032d3
---- /dev/null
-+++ b/src/zodbpickle/tests/pickletester_34.py
-@@ -0,0 +1,2388 @@
-+import copyreg
-+import io
-+import random
-+import struct
-+import sys
-+import unittest
-+import weakref
-+from http.cookies import SimpleCookie
-+from zodbpickle import pickle_34 as pickle
-+from zodbpickle import pickletools_34 as pickletools
-+
-+from test.support import (
-+    TestFailed, TESTFN, run_with_locale,
-+    _2G, _4G, bigmemtest,
-+    )
-+
-+try:
-+    from test.support import no_tracing
-+except ImportError:
-+    from functools import wraps
-+    def no_tracing(func):
-+        if not hasattr(sys, 'gettrace'):
-+            return func
-+        @wraps(func)
-+        def wrapper(*args, **kwargs):
-+            original_trace = sys.gettrace()
-+            try:
-+                sys.settrace(None)
-+                return func(*args, **kwargs)
-+            finally:
-+                sys.settrace(original_trace)
-+        return wrapper
-+
-+from zodbpickle.pickle_34 import bytes_types
-+
-+# Tests that try a number of pickle protocols should have a
-+#     for proto in protocols:
-+# kind of outer loop.
-+protocols = range(pickle.HIGHEST_PROTOCOL + 1)
-+
-+ascii_char_size = 1
-+
-+
-+# Return True if opcode code appears in the pickle, else False.
-+def opcode_in_pickle(code, pickle):
-+    for op, dummy, dummy in pickletools.genops(pickle):
-+        if op.code == code.decode("latin-1"):
-+            return True
-+    return False
-+
-+# Return the number of times opcode code appears in pickle.
-+def count_opcode(code, pickle):
-+    n = 0
-+    for op, dummy, dummy in pickletools.genops(pickle):
-+        if op.code == code.decode("latin-1"):
-+            n += 1
-+    return n
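
How these two helpers read in practice (a small sketch, assuming the
zodbpickle modules imported at the top of this file are available): a
protocol 2 pickle starts with exactly one PROTO opcode, a protocol 0 pickle
has none, and every well-formed pickle ends with STOP.

    from zodbpickle import pickle_34 as pickle

    assert count_opcode(pickle.PROTO, pickle.dumps(None, 2)) == 1
    assert count_opcode(pickle.PROTO, pickle.dumps(None, 0)) == 0
    assert opcode_in_pickle(pickle.STOP, pickle.dumps(None, 0))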
-+
-+
-+class UnseekableIO(io.BytesIO):
-+    def peek(self, *args):
-+        raise NotImplementedError
-+
-+    def seekable(self):
-+        return False
-+
-+    def seek(self, *args):
-+        raise io.UnsupportedOperation
-+
-+    def tell(self):
-+        raise io.UnsupportedOperation
-+
-+
-+# We can't very well test the extension registry without putting known stuff
-+# in it, but we have to be careful to restore its original state.  Code
-+# should do this:
-+#
-+#     e = ExtensionSaver(extension_code)
-+#     try:
-+#         fiddle w/ the extension registry's stuff for extension_code
-+#     finally:
-+#         e.restore()
-+
-+class ExtensionSaver:
-+    # Remember current registration for code (if any), and remove it (if
-+    # there is one).
-+    def __init__(self, code):
-+        self.code = code
-+        if code in copyreg._inverted_registry:
-+            self.pair = copyreg._inverted_registry[code]
-+            copyreg.remove_extension(self.pair[0], self.pair[1], code)
-+        else:
-+            self.pair = None
-+
-+    # Restore previous registration for code.
-+    def restore(self):
-+        code = self.code
-+        curpair = copyreg._inverted_registry.get(code)
-+        if curpair is not None:
-+            copyreg.remove_extension(curpair[0], curpair[1], code)
-+        pair = self.pair
-+        if pair is not None:
-+            copyreg.add_extension(pair[0], pair[1], code)
-+
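The save/modify/restore discipline sketched in the comment above reduces to
the following copyreg calls; a hedged illustration, where the extension code
0x7ffffff0 and the ('builtins', 'complex') registration are arbitrary example
values, not ones used by these tests:

    import copyreg

    EXTCODE = 0x7ffffff0
    saved = copyreg._inverted_registry.get(EXTCODE)  # remember prior binding
    if saved is not None:
        copyreg.remove_extension(saved[0], saved[1], EXTCODE)
    try:
        copyreg.add_extension('builtins', 'complex', EXTCODE)
        # ... pickle objects referencing builtins.complex with protocol 2 ...
    finally:
        if copyreg._inverted_registry.get(EXTCODE) == ('builtins', 'complex'):
            copyreg.remove_extension('builtins', 'complex', EXTCODE)
        if saved is not None:
            copyreg.add_extension(saved[0], saved[1], EXTCODE)
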
-+class C:
-+    def __eq__(self, other):
-+        return self.__dict__ == other.__dict__
-+
-+class D(C):
-+    def __init__(self, arg):
-+        pass
-+
-+class E(C):
-+    def __getinitargs__(self):
-+        return ()
-+
-+class H(object):
-+    pass
-+
-+import __main__
-+__main__.C = C
-+C.__module__ = "__main__"
-+__main__.D = D
-+D.__module__ = "__main__"
-+__main__.E = E
-+E.__module__ = "__main__"
-+__main__.H = H
-+H.__module__ = "__main__"
-+
-+class myint(int):
-+    def __init__(self, x):
-+        self.str = str(x)
-+
-+class initarg(C):
-+
-+    def __init__(self, a, b):
-+        self.a = a
-+        self.b = b
-+
-+    def __getinitargs__(self):
-+        return self.a, self.b
-+
-+class metaclass(type):
-+    pass
-+
-+class use_metaclass(object, metaclass=metaclass):
-+    pass
-+
-+class pickling_metaclass(type):
-+    def __eq__(self, other):
-+        return (type(self) == type(other) and
-+                self.reduce_args == other.reduce_args)
-+
-+    def __reduce__(self):
-+        return (create_dynamic_class, self.reduce_args)
-+
-+def create_dynamic_class(name, bases):
-+    result = pickling_metaclass(name, bases, dict())
-+    result.reduce_args = (name, bases)
-+    return result
-+
-+# DATA0 .. DATA2 are the pickles we expect under the various protocols, for
-+# the object returned by create_data().
-+
-+DATA0 = (
-+    b'(lp0\nL0L\naL1L\naF2.0\nac'
-+    b'builtins\ncomplex\n'
-+    b'p1\n(F3.0\nF0.0\ntp2\nRp'
-+    b'3\naL1L\naL-1L\naL255L\naL-'
-+    b'255L\naL-256L\naL65535L\na'
-+    b'L-65535L\naL-65536L\naL2'
-+    b'147483647L\naL-2147483'
-+    b'647L\naL-2147483648L\na('
-+    b'Vabc\np4\ng4\nccopyreg'
-+    b'\n_reconstructor\np5\n('
-+    b'c__main__\nC\np6\ncbu'
-+    b'iltins\nobject\np7\nNt'
-+    b'p8\nRp9\n(dp10\nVfoo\np1'
-+    b'1\nL1L\nsVbar\np12\nL2L\nsb'
-+    b'g9\ntp13\nag13\naL5L\na.'
-+)
-+
-+# Disassembly of DATA0
-+DATA0_DIS = """\
-+    0: (    MARK
-+    1: l        LIST       (MARK at 0)
-+    2: p    PUT        0
-+    5: L    LONG       0
-+    9: a    APPEND
-+   10: L    LONG       1
-+   14: a    APPEND
-+   15: F    FLOAT      2.0
-+   20: a    APPEND
-+   21: c    GLOBAL     'builtins complex'
-+   39: p    PUT        1
-+   42: (    MARK
-+   43: F        FLOAT      3.0
-+   48: F        FLOAT      0.0
-+   53: t        TUPLE      (MARK at 42)
-+   54: p    PUT        2
-+   57: R    REDUCE
-+   58: p    PUT        3
-+   61: a    APPEND
-+   62: L    LONG       1
-+   66: a    APPEND
-+   67: L    LONG       -1
-+   72: a    APPEND
-+   73: L    LONG       255
-+   79: a    APPEND
-+   80: L    LONG       -255
-+   87: a    APPEND
-+   88: L    LONG       -256
-+   95: a    APPEND
-+   96: L    LONG       65535
-+  104: a    APPEND
-+  105: L    LONG       -65535
-+  114: a    APPEND
-+  115: L    LONG       -65536
-+  124: a    APPEND
-+  125: L    LONG       2147483647
-+  138: a    APPEND
-+  139: L    LONG       -2147483647
-+  153: a    APPEND
-+  154: L    LONG       -2147483648
-+  168: a    APPEND
-+  169: (    MARK
-+  170: V        UNICODE    'abc'
-+  175: p        PUT        4
-+  178: g        GET        4
-+  181: c        GLOBAL     'copyreg _reconstructor'
-+  205: p        PUT        5
-+  208: (        MARK
-+  209: c            GLOBAL     '__main__ C'
-+  221: p            PUT        6
-+  224: c            GLOBAL     'builtins object'
-+  241: p            PUT        7
-+  244: N            NONE
-+  245: t            TUPLE      (MARK at 208)
-+  246: p        PUT        8
-+  249: R        REDUCE
-+  250: p        PUT        9
-+  253: (        MARK
-+  254: d            DICT       (MARK at 253)
-+  255: p        PUT        10
-+  259: V        UNICODE    'foo'
-+  264: p        PUT        11
-+  268: L        LONG       1
-+  272: s        SETITEM
-+  273: V        UNICODE    'bar'
-+  278: p        PUT        12
-+  282: L        LONG       2
-+  286: s        SETITEM
-+  287: b        BUILD
-+  288: g        GET        9
-+  291: t        TUPLE      (MARK at 169)
-+  292: p    PUT        13
-+  296: a    APPEND
-+  297: g    GET        13
-+  301: a    APPEND
-+  302: L    LONG       5
-+  306: a    APPEND
-+  307: .    STOP
-+highest protocol among opcodes = 0
-+"""
-+
-+DATA1 = (
-+    b']q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
-+    b'builtins\ncomplex\nq\x01'
-+    b'(G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00t'
-+    b'q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xffJ'
-+    b'\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff'
-+    b'\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00\x80(X\x03\x00\x00\x00ab'
-+    b'cq\x04h\x04ccopyreg\n_reco'
-+    b'nstructor\nq\x05(c__main'
-+    b'__\nC\nq\x06cbuiltins\n'
-+    b'object\nq\x07Ntq\x08Rq\t}q\n('
-+    b'X\x03\x00\x00\x00fooq\x0bK\x01X\x03\x00\x00\x00bar'
-+    b'q\x0cK\x02ubh\ttq\rh\rK\x05e.'
-+)
-+
-+# Disassembly of DATA1
-+DATA1_DIS = """\
-+    0: ]    EMPTY_LIST
-+    1: q    BINPUT     0
-+    3: (    MARK
-+    4: K        BININT1    0
-+    6: K        BININT1    1
-+    8: G        BINFLOAT   2.0
-+   17: c        GLOBAL     'builtins complex'
-+   35: q        BINPUT     1
-+   37: (        MARK
-+   38: G            BINFLOAT   3.0
-+   47: G            BINFLOAT   0.0
-+   56: t            TUPLE      (MARK at 37)
-+   57: q        BINPUT     2
-+   59: R        REDUCE
-+   60: q        BINPUT     3
-+   62: K        BININT1    1
-+   64: J        BININT     -1
-+   69: K        BININT1    255
-+   71: J        BININT     -255
-+   76: J        BININT     -256
-+   81: M        BININT2    65535
-+   84: J        BININT     -65535
-+   89: J        BININT     -65536
-+   94: J        BININT     2147483647
-+   99: J        BININT     -2147483647
-+  104: J        BININT     -2147483648
-+  109: (        MARK
-+  110: X            BINUNICODE 'abc'
-+  118: q            BINPUT     4
-+  120: h            BINGET     4
-+  122: c            GLOBAL     'copyreg _reconstructor'
-+  146: q            BINPUT     5
-+  148: (            MARK
-+  149: c                GLOBAL     '__main__ C'
-+  161: q                BINPUT     6
-+  163: c                GLOBAL     'builtins object'
-+  180: q                BINPUT     7
-+  182: N                NONE
-+  183: t                TUPLE      (MARK at 148)
-+  184: q            BINPUT     8
-+  186: R            REDUCE
-+  187: q            BINPUT     9
-+  189: }            EMPTY_DICT
-+  190: q            BINPUT     10
-+  192: (            MARK
-+  193: X                BINUNICODE 'foo'
-+  201: q                BINPUT     11
-+  203: K                BININT1    1
-+  205: X                BINUNICODE 'bar'
-+  213: q                BINPUT     12
-+  215: K                BININT1    2
-+  217: u                SETITEMS   (MARK at 192)
-+  218: b            BUILD
-+  219: h            BINGET     9
-+  221: t            TUPLE      (MARK at 109)
-+  222: q        BINPUT     13
-+  224: h        BINGET     13
-+  226: K        BININT1    5
-+  228: e        APPENDS    (MARK at 3)
-+  229: .    STOP
-+highest protocol among opcodes = 1
-+"""
-+
-+DATA2 = (
-+    b'\x80\x02]q\x00(K\x00K\x01G@\x00\x00\x00\x00\x00\x00\x00c'
-+    b'builtins\ncomplex\n'
-+    b'q\x01G@\x08\x00\x00\x00\x00\x00\x00G\x00\x00\x00\x00\x00\x00\x00\x00'
-+    b'\x86q\x02Rq\x03K\x01J\xff\xff\xff\xffK\xffJ\x01\xff\xff\xff'
-+    b'J\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff'
-+    b'\xff\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00\x80(X\x03\x00\x00\x00a'
-+    b'bcq\x04h\x04c__main__\nC\nq\x05'
-+    b')\x81q\x06}q\x07(X\x03\x00\x00\x00fooq\x08K\x01'
-+    b'X\x03\x00\x00\x00barq\tK\x02ubh\x06tq\nh'
-+    b'\nK\x05e.'
-+)
-+
-+# Disassembly of DATA2
-+DATA2_DIS = """\
-+    0: \x80 PROTO      2
-+    2: ]    EMPTY_LIST
-+    3: q    BINPUT     0
-+    5: (    MARK
-+    6: K        BININT1    0
-+    8: K        BININT1    1
-+   10: G        BINFLOAT   2.0
-+   19: c        GLOBAL     'builtins complex'
-+   37: q        BINPUT     1
-+   39: G        BINFLOAT   3.0
-+   48: G        BINFLOAT   0.0
-+   57: \x86     TUPLE2
-+   58: q        BINPUT     2
-+   60: R        REDUCE
-+   61: q        BINPUT     3
-+   63: K        BININT1    1
-+   65: J        BININT     -1
-+   70: K        BININT1    255
-+   72: J        BININT     -255
-+   77: J        BININT     -256
-+   82: M        BININT2    65535
-+   85: J        BININT     -65535
-+   90: J        BININT     -65536
-+   95: J        BININT     2147483647
-+  100: J        BININT     -2147483647
-+  105: J        BININT     -2147483648
-+  110: (        MARK
-+  111: X            BINUNICODE 'abc'
-+  119: q            BINPUT     4
-+  121: h            BINGET     4
-+  123: c            GLOBAL     '__main__ C'
-+  135: q            BINPUT     5
-+  137: )            EMPTY_TUPLE
-+  138: \x81         NEWOBJ
-+  139: q            BINPUT     6
-+  141: }            EMPTY_DICT
-+  142: q            BINPUT     7
-+  144: (            MARK
-+  145: X                BINUNICODE 'foo'
-+  153: q                BINPUT     8
-+  155: K                BININT1    1
-+  157: X                BINUNICODE 'bar'
-+  165: q                BINPUT     9
-+  167: K                BININT1    2
-+  169: u                SETITEMS   (MARK at 144)
-+  170: b            BUILD
-+  171: h            BINGET     6
-+  173: t            TUPLE      (MARK at 110)
-+  174: q        BINPUT     10
-+  176: h        BINGET     10
-+  178: K        BININT1    5
-+  180: e        APPENDS    (MARK at 5)
-+  181: .    STOP
-+highest protocol among opcodes = 2
-+"""
-+
-+# set([1,2]) pickled from 2.x with protocol 2
-+DATA3 = b'\x80\x02c__builtin__\nset\nq\x00]q\x01(K\x01K\x02e\x85q\x02Rq\x03.'
-+
-+# xrange(5) pickled from 2.x with protocol 2
-+DATA4 = b'\x80\x02c__builtin__\nxrange\nq\x00K\x00K\x05K\x01\x87q\x01Rq\x02.'
-+
-+# a SimpleCookie() object pickled from 2.x with protocol 2
-+DATA5 = (b'\x80\x02cCookie\nSimpleCookie\nq\x00)\x81q\x01U\x03key'
-+         b'q\x02cCookie\nMorsel\nq\x03)\x81q\x04(U\x07commentq\x05U'
-+         b'\x00q\x06U\x06domainq\x07h\x06U\x06secureq\x08h\x06U\x07'
-+         b'expiresq\th\x06U\x07max-ageq\nh\x06U\x07versionq\x0bh\x06U'
-+         b'\x04pathq\x0ch\x06U\x08httponlyq\rh\x06u}q\x0e(U\x0b'
-+         b'coded_valueq\x0fU\x05valueq\x10h\x10h\x10h\x02h\x02ubs}q\x11b.')
-+
-+# set([3]) pickled from 2.x with protocol 2
-+DATA6 = b'\x80\x02c__builtin__\nset\nq\x00]q\x01K\x03a\x85q\x02Rq\x03.'
-+
-+python2_exceptions_without_args = (
-+    ArithmeticError,
-+    AssertionError,
-+    AttributeError,
-+    BaseException,
-+    BufferError,
-+    BytesWarning,
-+    DeprecationWarning,
-+    EOFError,
-+    EnvironmentError,
-+    Exception,
-+    FloatingPointError,
-+    FutureWarning,
-+    GeneratorExit,
-+    IOError,
-+    ImportError,
-+    ImportWarning,
-+    IndentationError,
-+    IndexError,
-+    KeyError,
-+    KeyboardInterrupt,
-+    LookupError,
-+    MemoryError,
-+    NameError,
-+    NotImplementedError,
-+    OSError,
-+    OverflowError,
-+    PendingDeprecationWarning,
-+    ReferenceError,
-+    RuntimeError,
-+    RuntimeWarning,
-+    # StandardError is gone in Python 3, we map it to Exception
-+    StopIteration,
-+    SyntaxError,
-+    SyntaxWarning,
-+    SystemError,
-+    SystemExit,
-+    TabError,
-+    TypeError,
-+    UnboundLocalError,
-+    UnicodeError,
-+    UnicodeWarning,
-+    UserWarning,
-+    ValueError,
-+    Warning,
-+    ZeroDivisionError,
-+)
-+
-+exception_pickle = b'\x80\x02cexceptions\n?\nq\x00)Rq\x01.'
-+
-+# Exception objects without arguments pickled from 2.x with protocol 2
-+DATA7 = {
-+    exception :
-+    exception_pickle.replace(b'?', exception.__name__.encode("ascii"))
-+    for exception in python2_exceptions_without_args
-+}
-+
-+# StandardError is mapped to Exception, test that separately
-+DATA8 = exception_pickle.replace(b'?', b'StandardError')
-+
-+# UnicodeEncodeError object pickled from 2.x with protocol 2
-+DATA9 = (b'\x80\x02cexceptions\nUnicodeEncodeError\n'
-+         b'q\x00(U\x05asciiq\x01X\x03\x00\x00\x00fooq\x02K\x00K\x01'
-+         b'U\x03badq\x03tq\x04Rq\x05.')
-+
-+
-+def create_data():
-+    c = C()
-+    c.foo = 1
-+    c.bar = 2
-+    x = [0, 1, 2.0, 3.0+0j]
-+    # Append some integer test cases at cPickle.c's internal size
-+    # cutoffs.
-+    uint1max = 0xff
-+    uint2max = 0xffff
-+    int4max = 0x7fffffff
-+    x.extend([1, -1,
-+              uint1max, -uint1max, -uint1max-1,
-+              uint2max, -uint2max, -uint2max-1,
-+               int4max,  -int4max,  -int4max-1])
-+    y = ('abc', 'abc', c, c)
-+    x.append(y)
-+    x.append(y)
-+    x.append(5)
-+    return x
-+
-+
-+class AbstractPickleTests(unittest.TestCase):
-+    # Subclass must define self.dumps, self.loads.
-+
-+    optimized = False
-+
-+    _testdata = create_data()
-+
-+    def setUp(self):
-+        pass
-+
-+    def assert_is_copy(self, obj, objcopy, msg=None):
-+        """Utility method to verify if two objects are copies of each others.
-+        """
-+        if msg is None:
-+            msg = "{!r} is not a copy of {!r}".format(obj, objcopy)
-+        self.assertEqual(obj, objcopy, msg=msg)
-+        self.assertIs(type(obj), type(objcopy), msg=msg)
-+        if hasattr(obj, '__dict__'):
-+            self.assertDictEqual(obj.__dict__, objcopy.__dict__, msg=msg)
-+            self.assertIsNot(obj.__dict__, objcopy.__dict__, msg=msg)
-+        if hasattr(obj, '__slots__'):
-+            self.assertListEqual(obj.__slots__, objcopy.__slots__, msg=msg)
-+            for slot in obj.__slots__:
-+                self.assertEqual(
-+                    hasattr(obj, slot), hasattr(objcopy, slot), msg=msg)
-+                self.assertEqual(getattr(obj, slot, None),
-+                                 getattr(objcopy, slot, None), msg=msg)
-+
-+    def test_misc(self):
-+        # test various datatypes not tested by testdata
-+        for proto in protocols:
-+            x = myint(4)
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+
-+            x = (1, ())
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+
-+            x = initarg(1, x)
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+
-+        # XXX test __reduce__ protocol?
-+
-+    def test_roundtrip_equality(self):
-+        expected = self._testdata
-+        for proto in protocols:
-+            s = self.dumps(expected, proto)
-+            got = self.loads(s)
-+            self.assert_is_copy(expected, got)
-+
-+    def test_load_from_data0(self):
-+        self.assert_is_copy(self._testdata, self.loads(DATA0))
-+
-+    def test_load_from_data1(self):
-+        self.assert_is_copy(self._testdata, self.loads(DATA1))
-+
-+    def test_load_from_data2(self):
-+        self.assert_is_copy(self._testdata, self.loads(DATA2))
-+
-+    def test_load_classic_instance(self):
-+        # See issue5180.  Test loading 2.x pickles that
-+        # contain an instance of old style class.
-+        for X, args in [(C, ()), (D, ('x',)), (E, ())]:
-+            xname = X.__name__.encode('ascii')
-+            # Protocol 0 (text mode pickle):
-+            """
-+            0: (    MARK
-+            1: i        INST       '__main__ X' (MARK at 0)
-+            15: p    PUT        0
-+            18: (    MARK
-+            19: d        DICT       (MARK at 18)
-+            20: p    PUT        1
-+            23: b    BUILD
-+            24: .    STOP
-+            """
-+            pickle0 = (b"(i__main__\n"
-+                       b"X\n"
-+                       b"p0\n"
-+                       b"(dp1\nb.").replace(b'X', xname)
-+            self.assert_is_copy(X(*args), self.loads(pickle0))
-+
-+            # Protocol 1 (binary mode pickle)
-+            """
-+            0: (    MARK
-+            1: c        GLOBAL     '__main__ X'
-+            15: q        BINPUT     0
-+            17: o        OBJ        (MARK at 0)
-+            18: q    BINPUT     1
-+            20: }    EMPTY_DICT
-+            21: q    BINPUT     2
-+            23: b    BUILD
-+            24: .    STOP
-+            """
-+            pickle1 = (b'(c__main__\n'
-+                       b'X\n'
-+                       b'q\x00oq\x01}q\x02b.').replace(b'X', xname)
-+            self.assert_is_copy(X(*args), self.loads(pickle1))
-+
-+            # Protocol 2 (pickle2 = b'\x80\x02' + pickle1)
-+            """
-+            0: \x80 PROTO      2
-+            2: (    MARK
-+            3: c        GLOBAL     '__main__ X'
-+            17: q        BINPUT     0
-+            19: o        OBJ        (MARK at 2)
-+            20: q    BINPUT     1
-+            22: }    EMPTY_DICT
-+            23: q    BINPUT     2
-+            25: b    BUILD
-+            26: .    STOP
-+            """
-+            pickle2 = (b'\x80\x02(c__main__\n'
-+                       b'X\n'
-+                       b'q\x00oq\x01}q\x02b.').replace(b'X', xname)
-+            self.assert_is_copy(X(*args), self.loads(pickle2))
-+
-+    # There are gratuitous differences between pickles produced by
-+    # pickle and cPickle, largely because cPickle starts PUT indices at
-+    # 1 and pickle starts them at 0.  See XXX comment in cPickle's put2() --
-+    # there's a comment with an exclamation point there whose meaning
-+    # is a mystery.  cPickle also suppresses PUT for objects with a refcount
-+    # of 1.
-+    def dont_test_disassembly(self):
-+        from io import StringIO
-+        from pickletools import dis
-+
-+        for proto, expected in (0, DATA0_DIS), (1, DATA1_DIS):
-+            s = self.dumps(self._testdata, proto)
-+            filelike = StringIO()
-+            dis(s, out=filelike)
-+            got = filelike.getvalue()
-+            self.assertEqual(expected, got)
-+
-+    def test_recursive_list(self):
-+        l = []
-+        l.append(l)
-+        for proto in protocols:
-+            s = self.dumps(l, proto)
-+            x = self.loads(s)
-+            self.assertIsInstance(x, list)
-+            self.assertEqual(len(x), 1)
-+            self.assertTrue(x is x[0])
-+
-+    def test_recursive_tuple(self):
-+        t = ([],)
-+        t[0].append(t)
-+        for proto in protocols:
-+            s = self.dumps(t, proto)
-+            x = self.loads(s)
-+            self.assertIsInstance(x, tuple)
-+            self.assertEqual(len(x), 1)
-+            self.assertEqual(len(x[0]), 1)
-+            self.assertTrue(x is x[0][0])
-+
-+    def test_recursive_dict(self):
-+        d = {}
-+        d[1] = d
-+        for proto in protocols:
-+            s = self.dumps(d, proto)
-+            x = self.loads(s)
-+            self.assertIsInstance(x, dict)
-+            self.assertEqual(list(x.keys()), [1])
-+            self.assertTrue(x[1] is x)
-+
-+    def test_recursive_set(self):
-+        h = H()
-+        y = set({h})
-+        h.attr = y
-+        for proto in protocols:
-+            s = self.dumps(y, proto)
-+            x = self.loads(s)
-+            self.assertIsInstance(x, set)
-+            self.assertIs(list(x)[0].attr, x)
-+            self.assertEqual(len(x), 1)
-+
-+    def test_recursive_frozenset(self):
-+        h = H()
-+        y = frozenset({h})
-+        h.attr = y
-+        for proto in protocols:
-+            s = self.dumps(y, proto)
-+            x = self.loads(s)
-+            self.assertIsInstance(x, frozenset)
-+            self.assertIs(list(x)[0].attr, x)
-+            self.assertEqual(len(x), 1)
-+
-+    def test_recursive_inst(self):
-+        i = C()
-+        i.attr = i
-+        for proto in protocols:
-+            s = self.dumps(i, proto)
-+            x = self.loads(s)
-+            self.assertIsInstance(x, C)
-+            self.assertEqual(dir(x), dir(i))
-+            self.assertIs(x.attr, x)
-+
-+    def test_recursive_multi(self):
-+        l = []
-+        d = {1:l}
-+        i = C()
-+        i.attr = d
-+        l.append(i)
-+        for proto in protocols:
-+            s = self.dumps(l, proto)
-+            x = self.loads(s)
-+            self.assertIsInstance(x, list)
-+            self.assertEqual(len(x), 1)
-+            self.assertEqual(dir(x[0]), dir(i))
-+            self.assertEqual(list(x[0].attr.keys()), [1])
-+            self.assertTrue(x[0].attr[1] is x)
-+
-+    def test_get(self):
-+        self.assertRaises(KeyError, self.loads, b'g0\np0')
-+        self.assert_is_copy([(100,), (100,)],
-+                            self.loads(b'((Kdtp0\nh\x00l.))'))
-+
-+    def test_unicode(self):
-+        endcases = ['', '<\\u>', '<\\\u1234>', '<\n>',
-+                    '<\\>', '<\\\U00012345>',
-+                    # surrogates
-+                    '<\udc80>']
-+        for proto in protocols:
-+            for u in endcases:
-+                p = self.dumps(u, proto)
-+                u2 = self.loads(p)
-+                self.assert_is_copy(u, u2)
-+
-+    def test_unicode_high_plane(self):
-+        t = '\U00012345'
-+        for proto in protocols:
-+            p = self.dumps(t, proto)
-+            t2 = self.loads(p)
-+            self.assert_is_copy(t, t2)
-+
-+    def test_bytes(self):
-+        for proto in protocols:
-+            for s in b'', b'xyz', b'xyz'*100:
-+                p = self.dumps(s, proto)
-+                self.assert_is_copy(s, self.loads(p))
-+            for s in [bytes([i]) for i in range(256)]:
-+                p = self.dumps(s, proto)
-+                self.assert_is_copy(s, self.loads(p))
-+            for s in [bytes([i, i]) for i in range(256)]:
-+                p = self.dumps(s, proto)
-+                self.assert_is_copy(s, self.loads(p))
-+
-+    def test_ints(self):
-+        import sys
-+        for proto in protocols:
-+            n = sys.maxsize
-+            while n:
-+                for expected in (-n, n):
-+                    s = self.dumps(expected, proto)
-+                    n2 = self.loads(s)
-+                    self.assert_is_copy(expected, n2)
-+                n = n >> 1
-+
-+    def test_maxint64(self):
-+        maxint64 = (1 << 63) - 1
-+        data = b'I' + str(maxint64).encode("ascii") + b'\n.'
-+        got = self.loads(data)
-+        self.assert_is_copy(maxint64, got)
-+
-+        # Try too with a bogus literal.
-+        data = b'I' + str(maxint64).encode("ascii") + b'JUNK\n.'
-+        self.assertRaises(ValueError, self.loads, data)
-+
-+    def test_long(self):
-+        for proto in protocols:
-+            # 256 bytes is where LONG4 begins.
-+            for nbits in 1, 8, 8*254, 8*255, 8*256, 8*257:
-+                nbase = 1 << nbits
-+                for npos in nbase-1, nbase, nbase+1:
-+                    for n in npos, -npos:
-+                        pickle = self.dumps(n, proto)
-+                        got = self.loads(pickle)
-+                        self.assert_is_copy(n, got)
-+        # Try a monster.  This is quadratic-time in protos 0 & 1, so don't
-+        # bother with those.
-+        nbase = int("deadbeeffeedface", 16)
-+        nbase += nbase << 1000000
-+        for n in nbase, -nbase:
-+            p = self.dumps(n, 2)
-+            got = self.loads(p)
-+            # assert_is_copy is very expensive here as it precomputes
-+            # a failure message by computing the repr() of n and got,
-+            # we just do the check ourselves.
-+            self.assertIs(type(got), int)
-+            self.assertEqual(n, got)
-+
-+    def test_float(self):
-+        test_values = [0.0, 4.94e-324, 1e-310, 7e-308, 6.626e-34, 0.1, 0.5,
-+                       3.14, 263.44582062374053, 6.022e23, 1e30]
-+        test_values = test_values + [-x for x in test_values]
-+        for proto in protocols:
-+            for value in test_values:
-+                pickle = self.dumps(value, proto)
-+                got = self.loads(pickle)
-+                self.assert_is_copy(value, got)
-+
-+    @run_with_locale('LC_ALL', 'de_DE', 'fr_FR')
-+    def test_float_format(self):
-+        # make sure that floats are formatted locale independent with proto 0
-+        self.assertEqual(self.dumps(1.2, 0)[0:3], b'F1.')
-+
-+    def test_reduce(self):
-+        for proto in protocols:
-+            inst = AAA()
-+            dumped = self.dumps(inst, proto)
-+            loaded = self.loads(dumped)
-+            self.assertEqual(loaded, REDUCE_A)
-+
-+    def test_getinitargs(self):
-+        for proto in protocols:
-+            inst = initarg(1, 2)
-+            dumped = self.dumps(inst, proto)
-+            loaded = self.loads(dumped)
-+            self.assert_is_copy(inst, loaded)
-+
-+    def test_pop_empty_stack(self):
-+        # Test issue7455
-+        s = b'0'
-+        self.assertRaises((pickle.UnpicklingError, IndexError), self.loads, s)
-+
-+    def test_metaclass(self):
-+        a = use_metaclass()
-+        for proto in protocols:
-+            s = self.dumps(a, proto)
-+            b = self.loads(s)
-+            self.assertEqual(a.__class__, b.__class__)
-+
-+    def test_dynamic_class(self):
-+        a = create_dynamic_class("my_dynamic_class", (object,))
-+        copyreg.pickle(pickling_metaclass, pickling_metaclass.__reduce__)
-+        for proto in protocols:
-+            s = self.dumps(a, proto)
-+            b = self.loads(s)
-+            self.assertEqual(a, b)
-+            self.assertIs(type(a), type(b))
-+
-+    def test_structseq(self):
-+        import time
-+        import os
-+
-+        t = time.localtime()
-+        for proto in protocols:
-+            s = self.dumps(t, proto)
-+            u = self.loads(s)
-+            self.assert_is_copy(t, u)
-+            if hasattr(os, "stat"):
-+                t = os.stat(os.curdir)
-+                s = self.dumps(t, proto)
-+                u = self.loads(s)
-+                self.assert_is_copy(t, u)
-+            if hasattr(os, "statvfs"):
-+                t = os.statvfs(os.curdir)
-+                s = self.dumps(t, proto)
-+                u = self.loads(s)
-+                self.assert_is_copy(t, u)
-+
-+    def test_ellipsis(self):
-+        for proto in protocols:
-+            s = self.dumps(..., proto)
-+            u = self.loads(s)
-+            self.assertIs(..., u)
-+
-+    def test_notimplemented(self):
-+        for proto in protocols:
-+            s = self.dumps(NotImplemented, proto)
-+            u = self.loads(s)
-+            self.assertIs(NotImplemented, u)
-+
-+    def test_singleton_types(self):
-+        # Issue #6477: Test that types of built-in singletons can be pickled.
-+        singletons = [None, ..., NotImplemented]
-+        for singleton in singletons:
-+            for proto in protocols:
-+                s = self.dumps(type(singleton), proto)
-+                u = self.loads(s)
-+                self.assertIs(type(singleton), u)
-+
-+    # Tests for protocol 2
-+
-+    def test_proto(self):
-+        for proto in protocols:
-+            pickled = self.dumps(None, proto)
-+            if proto >= 2:
-+                proto_header = pickle.PROTO + bytes([proto])
-+                self.assertTrue(pickled.startswith(proto_header))
-+            else:
-+                self.assertEqual(count_opcode(pickle.PROTO, pickled), 0)
-+
-+        oob = protocols[-1] + 1     # a future protocol
-+        build_none = pickle.NONE + pickle.STOP
-+        badpickle = pickle.PROTO + bytes([oob]) + build_none
-+        try:
-+            self.loads(badpickle)
-+        except ValueError as err:
-+            self.assertIn("unsupported pickle protocol", str(err))
-+        else:
-+            self.fail("expected bad protocol number to raise ValueError")
-+
-+    def test_long1(self):
-+        x = 12345678910111213141516178920
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+            self.assertEqual(opcode_in_pickle(pickle.LONG1, s), proto >= 2)
-+
-+    def test_long4(self):
-+        x = 12345678910111213141516178920 << (256*8)
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+            self.assertEqual(opcode_in_pickle(pickle.LONG4, s), proto >= 2)
-+
-+    def test_short_tuples(self):
-+        # Map (proto, len(tuple)) to expected opcode.
-+        expected_opcode = {(0, 0): pickle.TUPLE,
-+                           (0, 1): pickle.TUPLE,
-+                           (0, 2): pickle.TUPLE,
-+                           (0, 3): pickle.TUPLE,
-+                           (0, 4): pickle.TUPLE,
-+
-+                           (1, 0): pickle.EMPTY_TUPLE,
-+                           (1, 1): pickle.TUPLE,
-+                           (1, 2): pickle.TUPLE,
-+                           (1, 3): pickle.TUPLE,
-+                           (1, 4): pickle.TUPLE,
-+
-+                           (2, 0): pickle.EMPTY_TUPLE,
-+                           (2, 1): pickle.TUPLE1,
-+                           (2, 2): pickle.TUPLE2,
-+                           (2, 3): pickle.TUPLE3,
-+                           (2, 4): pickle.TUPLE,
-+
-+                           (3, 0): pickle.EMPTY_TUPLE,
-+                           (3, 1): pickle.TUPLE1,
-+                           (3, 2): pickle.TUPLE2,
-+                           (3, 3): pickle.TUPLE3,
-+                           (3, 4): pickle.TUPLE,
-+                          }
-+        a = ()
-+        b = (1,)
-+        c = (1, 2)
-+        d = (1, 2, 3)
-+        e = (1, 2, 3, 4)
-+        for proto in protocols:
-+            for x in a, b, c, d, e:
-+                s = self.dumps(x, proto)
-+                y = self.loads(s)
-+                self.assert_is_copy(x, y)
-+                expected = expected_opcode[min(proto, 3), len(x)]
-+                self.assertTrue(opcode_in_pickle(expected, s))
-+
-+    def test_singletons(self):
-+        # Map (proto, singleton) to expected opcode.
-+        expected_opcode = {(0, None): pickle.NONE,
-+                           (1, None): pickle.NONE,
-+                           (2, None): pickle.NONE,
-+                           (3, None): pickle.NONE,
-+
-+                           (0, True): pickle.INT,
-+                           (1, True): pickle.INT,
-+                           (2, True): pickle.NEWTRUE,
-+                           (3, True): pickle.NEWTRUE,
-+
-+                           (0, False): pickle.INT,
-+                           (1, False): pickle.INT,
-+                           (2, False): pickle.NEWFALSE,
-+                           (3, False): pickle.NEWFALSE,
-+                          }
-+        for proto in protocols:
-+            for x in None, False, True:
-+                s = self.dumps(x, proto)
-+                y = self.loads(s)
-+                self.assertTrue(x is y, (proto, x, s, y))
-+                expected = expected_opcode[min(proto, 3), x]
-+                self.assertTrue(opcode_in_pickle(expected, s))
-+
-+    def test_newobj_tuple(self):
-+        x = MyTuple([1, 2, 3])
-+        x.foo = 42
-+        x.bar = "hello"
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+
-+    def test_newobj_list(self):
-+        x = MyList([1, 2, 3])
-+        x.foo = 42
-+        x.bar = "hello"
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+
-+    def test_newobj_generic(self):
-+        for proto in protocols:
-+            for C in myclasses:
-+                B = C.__base__
-+                x = C(C.sample)
-+                x.foo = 42
-+                s = self.dumps(x, proto)
-+                y = self.loads(s)
-+                detail = (proto, C, B, x, y, type(y))
-+                self.assert_is_copy(x, y) # XXX revisit
-+                self.assertEqual(B(x), B(y), detail)
-+                self.assertEqual(x.__dict__, y.__dict__, detail)
-+
-+    def test_newobj_proxies(self):
-+        # NEWOBJ should use the __class__ rather than the raw type
-+        classes = myclasses[:]
-+        # Cannot create weakproxies to these classes
-+        for c in (MyInt, MyTuple):
-+            classes.remove(c)
-+        for proto in protocols:
-+            for C in classes:
-+                B = C.__base__
-+                x = C(C.sample)
-+                x.foo = 42
-+                p = weakref.proxy(x)
-+                s = self.dumps(p, proto)
-+                y = self.loads(s)
-+                self.assertEqual(type(y), type(x))  # rather than type(p)
-+                detail = (proto, C, B, x, y, type(y))
-+                self.assertEqual(B(x), B(y), detail)
-+                self.assertEqual(x.__dict__, y.__dict__, detail)
-+
-+    # Register a type with copyreg, with extension code extcode.  Pickle
-+    # an object of that type.  Check that the resulting pickle uses opcode
-+    # (EXT[124]) under proto 2, and not in proto 1.
-+
-+    def produce_global_ext(self, extcode, opcode):
-+        e = ExtensionSaver(extcode)
-+        try:
-+            copyreg.add_extension(__name__, "MyList", extcode)
-+            x = MyList([1, 2, 3])
-+            x.foo = 42
-+            x.bar = "hello"
-+
-+            # Dump using protocol 1 for comparison.
-+            s1 = self.dumps(x, 1)
-+            self.assertIn(__name__.encode("utf-8"), s1)
-+            self.assertIn(b"MyList", s1)
-+            self.assertFalse(opcode_in_pickle(opcode, s1))
-+
-+            y = self.loads(s1)
-+            self.assert_is_copy(x, y)
-+
-+            # Dump using protocol 2 for test.
-+            s2 = self.dumps(x, 2)
-+            self.assertNotIn(__name__.encode("utf-8"), s2)
-+            self.assertNotIn(b"MyList", s2)
-+            self.assertEqual(opcode_in_pickle(opcode, s2), True, repr(s2))
-+
-+            y = self.loads(s2)
-+            self.assert_is_copy(x, y)
-+        finally:
-+            e.restore()
-+
-+    def test_global_ext1(self):
-+        self.produce_global_ext(0x00000001, pickle.EXT1)  # smallest EXT1 code
-+        self.produce_global_ext(0x000000ff, pickle.EXT1)  # largest EXT1 code
-+
-+    def test_global_ext2(self):
-+        self.produce_global_ext(0x00000100, pickle.EXT2)  # smallest EXT2 code
-+        self.produce_global_ext(0x0000ffff, pickle.EXT2)  # largest EXT2 code
-+        self.produce_global_ext(0x0000abcd, pickle.EXT2)  # check endianness
-+
-+    def test_global_ext4(self):
-+        self.produce_global_ext(0x00010000, pickle.EXT4)  # smallest EXT4 code
-+        self.produce_global_ext(0x7fffffff, pickle.EXT4)  # largest EXT4 code
-+        self.produce_global_ext(0x12abcdef, pickle.EXT4)  # check endianness
-+
-+    def test_list_chunking(self):
-+        n = 10  # too small to chunk
-+        x = list(range(n))
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+            num_appends = count_opcode(pickle.APPENDS, s)
-+            self.assertEqual(num_appends, proto > 0)
-+
-+        n = 2500  # expect at least two chunks when proto > 0
-+        x = list(range(n))
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+            num_appends = count_opcode(pickle.APPENDS, s)
-+            if proto == 0:
-+                self.assertEqual(num_appends, 0)
-+            else:
-+                self.assertTrue(num_appends >= 2)
-+
-+    def test_dict_chunking(self):
-+        n = 10  # too small to chunk
-+        x = dict.fromkeys(range(n))
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            self.assertIsInstance(s, bytes_types)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+            num_setitems = count_opcode(pickle.SETITEMS, s)
-+            self.assertEqual(num_setitems, proto > 0)
-+
-+        n = 2500  # expect at least two chunks when proto > 0
-+        x = dict.fromkeys(range(n))
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+            num_setitems = count_opcode(pickle.SETITEMS, s)
-+            if proto == 0:
-+                self.assertEqual(num_setitems, 0)
-+            else:
-+                self.assertTrue(num_setitems >= 2)
-+
-+    def test_set_chunking(self):
-+        n = 10  # too small to chunk
-+        x = set(range(n))
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+            num_additems = count_opcode(pickle.ADDITEMS, s)
-+            if proto < 4:
-+                self.assertEqual(num_additems, 0)
-+            else:
-+                self.assertEqual(num_additems, 1)
-+
-+        n = 2500  # expect at least two chunks when proto >= 4
-+        x = set(range(n))
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            self.assert_is_copy(x, y)
-+            num_additems = count_opcode(pickle.ADDITEMS, s)
-+            if proto < 4:
-+                self.assertEqual(num_additems, 0)
-+            else:
-+                self.assertGreaterEqual(num_additems, 2)
-+
-+    def test_simple_newobj(self):
-+        x = object.__new__(SimpleNewObj)  # avoid __init__
-+        x.abc = 666
-+        for proto in protocols:
-+            s = self.dumps(x, proto)
-+            self.assertEqual(opcode_in_pickle(pickle.NEWOBJ, s),
-+                             2 <= proto < 4)
-+            self.assertEqual(opcode_in_pickle(pickle.NEWOBJ_EX, s),
-+                             proto >= 4)
-+            y = self.loads(s)   # will raise TypeError if __init__ called
-+            self.assert_is_copy(x, y)
-+
-+    def test_newobj_list_slots(self):
-+        x = SlotList([1, 2, 3])
-+        x.foo = 42
-+        x.bar = "hello"
-+        s = self.dumps(x, 2)
-+        y = self.loads(s)
-+        self.assert_is_copy(x, y)
-+
-+    def test_reduce_overrides_default_reduce_ex(self):
-+        for proto in protocols:
-+            x = REX_one()
-+            self.assertEqual(x._reduce_called, 0)
-+            s = self.dumps(x, proto)
-+            self.assertEqual(x._reduce_called, 1)
-+            y = self.loads(s)
-+            self.assertEqual(y._reduce_called, 0)
-+
-+    def test_reduce_ex_called(self):
-+        for proto in protocols:
-+            x = REX_two()
-+            self.assertEqual(x._proto, None)
-+            s = self.dumps(x, proto)
-+            self.assertEqual(x._proto, proto)
-+            y = self.loads(s)
-+            self.assertEqual(y._proto, None)
-+
-+    def test_reduce_ex_overrides_reduce(self):
-+        for proto in protocols:
-+            x = REX_three()
-+            self.assertEqual(x._proto, None)
-+            s = self.dumps(x, proto)
-+            self.assertEqual(x._proto, proto)
-+            y = self.loads(s)
-+            self.assertEqual(y._proto, None)
-+
-+    def test_reduce_ex_calls_base(self):
-+        for proto in protocols:
-+            x = REX_four()
-+            self.assertEqual(x._proto, None)
-+            s = self.dumps(x, proto)
-+            self.assertEqual(x._proto, proto)
-+            y = self.loads(s)
-+            self.assertEqual(y._proto, proto)
-+
-+    def test_reduce_calls_base(self):
-+        for proto in protocols:
-+            x = REX_five()
-+            self.assertEqual(x._reduce_called, 0)
-+            s = self.dumps(x, proto)
-+            self.assertEqual(x._reduce_called, 1)
-+            y = self.loads(s)
-+            self.assertEqual(y._reduce_called, 1)
-+
-+    @no_tracing
-+    def test_bad_getattr(self):
-+        # Issue #3514: crash when there is an infinite loop in __getattr__
-+        x = BadGetattr()
-+        for proto in protocols:
-+            self.assertRaises(RuntimeError, self.dumps, x, proto)
-+
-+    def test_reduce_bad_iterator(self):
-+        # Issue4176: crash when 4th and 5th items of __reduce__()
-+        # are not iterators
-+        class C(object):
-+            def __reduce__(self):
-+                # 4th item is not an iterator
-+                return list, (), None, [], None
-+        class D(object):
-+            def __reduce__(self):
-+                # 5th item is not an iterator
-+                return dict, (), None, None, []
-+
-+        # Protocol 0 is less strict and also accepts iterables.
-+        for proto in protocols:
-+            try:
-+                self.dumps(C(), proto)
-+            except (pickle.PickleError):
-+                pass
-+            try:
-+                self.dumps(D(), proto)
-+            except (pickle.PickleError):
-+                pass
-+
-+    def test_many_puts_and_gets(self):
-+        # Test that internal data structures correctly deal with lots of
-+        # puts/gets.
-+        keys = ("aaa" + str(i) for i in range(100))
-+        large_dict = dict((k, [4, 5, 6]) for k in keys)
-+        obj = [dict(large_dict), dict(large_dict), dict(large_dict)]
-+
-+        for proto in protocols:
-+            with self.subTest(proto=proto):
-+                dumped = self.dumps(obj, proto)
-+                loaded = self.loads(dumped)
-+                self.assert_is_copy(obj, loaded)
-+
-+    def test_attribute_name_interning(self):
-+        # Test that attribute names of pickled objects are interned when
-+        # unpickling.
-+        for proto in protocols:
-+            x = C()
-+            x.foo = 42
-+            x.bar = "hello"
-+            s = self.dumps(x, proto)
-+            y = self.loads(s)
-+            x_keys = sorted(x.__dict__)
-+            y_keys = sorted(y.__dict__)
-+            for x_key, y_key in zip(x_keys, y_keys):
-+                self.assertIs(x_key, y_key)
-+
-+    def test_unpickle_from_2x(self):
-+        # Unpickle non-trivial data from Python 2.x.
-+        loaded = self.loads(DATA3)
-+        self.assertEqual(loaded, set([1, 2]))
-+        loaded = self.loads(DATA4)
-+        self.assertEqual(type(loaded), type(range(0)))
-+        self.assertEqual(list(loaded), list(range(5)))
-+        loaded = self.loads(DATA5)
-+        self.assertEqual(type(loaded), SimpleCookie)
-+        self.assertEqual(list(loaded.keys()), ["key"])
-+        self.assertEqual(loaded["key"].value, "Set-Cookie: key=value")
-+
-+        for (exc, data) in DATA7.items():
-+            loaded = self.loads(data)
-+            self.assertIs(type(loaded), exc)
-+
-+        loaded = self.loads(DATA8)
-+        self.assertIs(type(loaded), Exception)
-+
-+        loaded = self.loads(DATA9)
-+        self.assertIs(type(loaded), UnicodeEncodeError)
-+        self.assertEqual(loaded.object, "foo")
-+        self.assertEqual(loaded.encoding, "ascii")
-+        self.assertEqual(loaded.start, 0)
-+        self.assertEqual(loaded.end, 1)
-+        self.assertEqual(loaded.reason, "bad")
-+
-+    def test_pickle_to_2x(self):
-+        # Pickle non-trivial data with protocol 2, expecting that it yields
-+        # the same result as Python 2.x did.
-+        # NOTE: this test is a bit too strong since we can produce different
-+        # bytecode that 2.x will still understand.
-+        dumped = self.dumps(range(5), 2)
-+        self.assertEqual(dumped, DATA4)
-+        dumped = self.dumps(set([3]), 2)
-+        self.assertEqual(dumped, DATA6)
-+
-+    def test_load_python2_str_as_bytes(self):
-+        # From Python 2: pickle.dumps('a\x00\xa0', protocol=0)
-+        self.assertEqual(self.loads(b"S'a\\x00\\xa0'\n.",
-+                                    encoding="bytes"), b'a\x00\xa0')
-+        # From Python 2: pickle.dumps('a\x00\xa0', protocol=1)
-+        self.assertEqual(self.loads(b'U\x03a\x00\xa0.',
-+                                    encoding="bytes"), b'a\x00\xa0')
-+        # From Python 2: pickle.dumps('a\x00\xa0', protocol=2)
-+        self.assertEqual(self.loads(b'\x80\x02U\x03a\x00\xa0.',
-+                                    encoding="bytes"), b'a\x00\xa0')
-+
-+    def test_load_python2_unicode_as_str(self):
-+        # From Python 2: pickle.dumps(u'π', protocol=0)
-+        self.assertEqual(self.loads(b'V\\u03c0\n.',
-+                                    encoding='bytes'), 'π')
-+        # From Python 2: pickle.dumps(u'π', protocol=1)
-+        self.assertEqual(self.loads(b'X\x02\x00\x00\x00\xcf\x80.',
-+                                    encoding="bytes"), 'π')
-+        # From Python 2: pickle.dumps(u'π', protocol=2)
-+        self.assertEqual(self.loads(b'\x80\x02X\x02\x00\x00\x00\xcf\x80.',
-+                                    encoding="bytes"), 'π')
-+
-+    def test_load_long_python2_str_as_bytes(self):
-+        # From Python 2: pickle.dumps('x' * 300, protocol=1)
-+        self.assertEqual(self.loads(pickle.BINSTRING +
-+                                    struct.pack("<I", 300) +
-+                                    b'x' * 300 + pickle.STOP,
-+                                    encoding='bytes'), b'x' * 300)
-+
-+    def test_large_pickles(self):
-+        # Test the correctness of internal buffering routines when handling
-+        # large data.
-+        for proto in protocols:
-+            data = (1, min, b'xy' * (30 * 1024), len)
-+            dumped = self.dumps(data, proto)
-+            loaded = self.loads(dumped)
-+            self.assertEqual(len(loaded), len(data))
-+            self.assertEqual(loaded, data)
-+
-+    def test_empty_bytestring(self):
-+        # issue 11286
-+        empty = self.loads(b'\x80\x03U\x00q\x00.', encoding='koi8-r')
-+        self.assertEqual(empty, '')
-+
-+    def test_int_pickling_efficiency(self):
-+        # Test compactness of int representation (see issue #12744)
-+        for proto in protocols:
-+            with self.subTest(proto=proto):
-+                pickles = [self.dumps(2**n, proto) for n in range(70)]
-+                sizes = list(map(len, pickles))
-+                # the size function is monotonic
-+                self.assertEqual(sorted(sizes), sizes)
-+                if proto >= 2:
-+                    for p in pickles:
-+                        self.assertFalse(opcode_in_pickle(pickle.LONG, p))
-+
-+    def check_negative_32b_binXXX(self, dumped):
-+        if sys.maxsize > 2**32:
-+            self.skipTest("test is only meaningful on 32-bit builds")
-+        # XXX Pure Python pickle reads lengths as signed and passes
-+        # them directly to read() (hence the EOFError)
-+        with self.assertRaises((pickle.UnpicklingError, EOFError,
-+                                ValueError, OverflowError)):
-+            self.loads(dumped)
-+
-+    def test_negative_32b_binbytes(self):
-+        # On 32-bit builds, a BINBYTES of 2**31 or more is refused
-+        self.check_negative_32b_binXXX(b'\x80\x03B\xff\xff\xff\xffxyzq\x00.')
-+
-+    def test_negative_32b_binunicode(self):
-+        # On 32-bit builds, a BINUNICODE of 2**31 or more is refused
-+        self.check_negative_32b_binXXX(b'\x80\x03X\xff\xff\xff\xffxyzq\x00.')
-+
-+    def test_negative_put(self):
-+        # Issue #12847
-+        dumped = b'Va\np-1\n.'
-+        self.assertRaises(ValueError, self.loads, dumped)
-+
-+    def test_negative_32b_binput(self):
-+        # Issue #12847
-+        if sys.maxsize > 2**32:
-+            self.skipTest("test is only meaningful on 32-bit builds")
-+        dumped = b'\x80\x03X\x01\x00\x00\x00ar\xff\xff\xff\xff.'
-+        self.assertRaises(ValueError, self.loads, dumped)
-+
-+    def test_badly_escaped_string(self):
-+        self.assertRaises(ValueError, self.loads, b"S'\\'\n.")
-+
-+    def test_badly_quoted_string(self):
-+        # Issue #17710
-+        badpickles = [b"S'\n.",
-+                      b'S"\n.',
-+                      b'S\' \n.',
-+                      b'S" \n.',
-+                      b'S\'"\n.',
-+                      b'S"\'\n.',
-+                      b"S' ' \n.",
-+                      b'S" " \n.',
-+                      b"S ''\n.",
-+                      b'S ""\n.',
-+                      b'S \n.',
-+                      b'S\n.',
-+                      b'S.']
-+        for p in badpickles:
-+            self.assertRaises(pickle.UnpicklingError, self.loads, p)
-+
-+    def test_correctly_quoted_string(self):
-+        goodpickles = [(b"S''\n.", ''),
-+                       (b'S""\n.', ''),
-+                       (b'S"\\n"\n.', '\n'),
-+                       (b"S'\\n'\n.", '\n')]
-+        for p, expected in goodpickles:
-+            self.assertEqual(self.loads(p), expected)
-+
-+    def _check_pickling_with_opcode(self, obj, opcode, proto):
-+        pickled = self.dumps(obj, proto)
-+        self.assertTrue(opcode_in_pickle(opcode, pickled))
-+        unpickled = self.loads(pickled)
-+        self.assertEqual(obj, unpickled)
-+
-+    def test_appends_on_non_lists(self):
-+        # Issue #17720
-+        obj = REX_six([1, 2, 3])
-+        for proto in protocols:
-+            if proto == 0:
-+                self._check_pickling_with_opcode(obj, pickle.APPEND, proto)
-+            else:
-+                self._check_pickling_with_opcode(obj, pickle.APPENDS, proto)
-+
-+    def test_setitems_on_non_dicts(self):
-+        obj = REX_seven({1: -1, 2: -2, 3: -3})
-+        for proto in protocols:
-+            if proto == 0:
-+                self._check_pickling_with_opcode(obj, pickle.SETITEM, proto)
-+            else:
-+                self._check_pickling_with_opcode(obj, pickle.SETITEMS, proto)
-+
-+    # Exercise framing (proto >= 4) for significant workloads
-+
-+    FRAME_SIZE_TARGET = 64 * 1024
-+
-+    def check_frame_opcodes(self, pickled):
-+        """
-+        Check the arguments of FRAME opcodes in a protocol 4+ pickle.
-+        """
-+        frame_opcode_size = 9
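-+        # For reference, a FRAME record is one opcode byte (pickle.FRAME,
-+        # b'\x95') followed by an 8-byte little-endian length, hence the
-+        # value 9.  A hypothetical 64 KiB frame header would be
-+        # pickle.FRAME + struct.pack('<Q', 65536), i.e.
-+        # b'\x95\x00\x00\x01\x00\x00\x00\x00\x00'.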
-+        last_arg = last_pos = None
-+        for op, arg, pos in pickletools.genops(pickled):
-+            if op.name != 'FRAME':
-+                continue
-+            if last_pos is not None:
-+                # The previous frame's size should be equal to the number
-+                # of bytes up to the current frame.
-+                frame_size = pos - last_pos - frame_opcode_size
-+                self.assertEqual(frame_size, last_arg)
-+            last_arg, last_pos = arg, pos
-+        # The last frame's size should be equal to the number of bytes up
-+        # to the pickle's end.
-+        frame_size = len(pickled) - last_pos - frame_opcode_size
-+        self.assertEqual(frame_size, last_arg)
-+
-+    def test_framing_many_objects(self):
-+        obj = list(range(10**5))
-+        for proto in range(4, pickle.HIGHEST_PROTOCOL + 1):
-+            with self.subTest(proto=proto):
-+                pickled = self.dumps(obj, proto)
-+                unpickled = self.loads(pickled)
-+                self.assertEqual(obj, unpickled)
-+                bytes_per_frame = (len(pickled) /
-+                                   count_opcode(pickle.FRAME, pickled))
-+                self.assertGreater(bytes_per_frame,
-+                                   self.FRAME_SIZE_TARGET / 2)
-+                self.assertLessEqual(bytes_per_frame,
-+                                     self.FRAME_SIZE_TARGET * 1)
-+                self.check_frame_opcodes(pickled)
-+
-+    def test_framing_large_objects(self):
-+        N = 1024 * 1024
-+        obj = [b'x' * N, b'y' * N, b'z' * N]
-+        for proto in range(4, pickle.HIGHEST_PROTOCOL + 1):
-+            with self.subTest(proto=proto):
-+                pickled = self.dumps(obj, proto)
-+                unpickled = self.loads(pickled)
-+                self.assertEqual(obj, unpickled)
-+                n_frames = count_opcode(pickle.FRAME, pickled)
-+                self.assertGreaterEqual(n_frames, len(obj))
-+                self.check_frame_opcodes(pickled)
-+
-+    def test_optional_frames(self):
-+        if pickle.HIGHEST_PROTOCOL < 4:
-+            return
-+
-+        def remove_frames(pickled, keep_frame=None):
-+            """Remove frame opcodes from the given pickle."""
-+            frame_starts = []
-+            # 1 byte for the opcode and 8 for the argument
-+            frame_opcode_size = 9
-+            for opcode, _, pos in pickletools.genops(pickled):
-+                if opcode.name == 'FRAME':
-+                    frame_starts.append(pos)
-+
-+            newpickle = bytearray()
-+            last_frame_end = 0
-+            for i, pos in enumerate(frame_starts):
-+                if keep_frame and keep_frame(i):
-+                    continue
-+                newpickle += pickled[last_frame_end:pos]
-+                last_frame_end = pos + frame_opcode_size
-+            newpickle += pickled[last_frame_end:]
-+            return newpickle
-+
-+        frame_size = self.FRAME_SIZE_TARGET
-+        num_frames = 20
-+        obj = [bytes([i]) * frame_size for i in range(num_frames)]
-+
-+        for proto in range(4, pickle.HIGHEST_PROTOCOL + 1):
-+            pickled = self.dumps(obj, proto)
-+
-+            frameless_pickle = remove_frames(pickled)
-+            self.assertEqual(count_opcode(pickle.FRAME, frameless_pickle), 0)
-+            self.assertEqual(obj, self.loads(frameless_pickle))
-+
-+            some_frames_pickle = remove_frames(pickled, lambda i: i % 2)
-+            self.assertLess(count_opcode(pickle.FRAME, some_frames_pickle),
-+                            count_opcode(pickle.FRAME, pickled))
-+            self.assertEqual(obj, self.loads(some_frames_pickle))
-+
-+    def test_nested_names(self):
-+        global Nested
-+        class Nested:
-+            class A:
-+                class B:
-+                    class C:
-+                        pass
-+
-+        for proto in range(4, pickle.HIGHEST_PROTOCOL + 1):
-+            for obj in [Nested.A, Nested.A.B, Nested.A.B.C]:
-+                with self.subTest(proto=proto, obj=obj):
-+                    unpickled = self.loads(self.dumps(obj, proto))
-+                    self.assertIs(obj, unpickled)
-+
-+    def test_py_methods(self):
-+        global PyMethodsTest
-+        class PyMethodsTest:
-+            @staticmethod
-+            def cheese():
-+                return "cheese"
-+            @classmethod
-+            def wine(cls):
-+                assert cls is PyMethodsTest
-+                return "wine"
-+            def biscuits(self):
-+                assert isinstance(self, PyMethodsTest)
-+                return "biscuits"
-+            class Nested:
-+                "Nested class"
-+                @staticmethod
-+                def ketchup():
-+                    return "ketchup"
-+                @classmethod
-+                def maple(cls):
-+                    assert cls is PyMethodsTest.Nested
-+                    return "maple"
-+                def pie(self):
-+                    assert isinstance(self, PyMethodsTest.Nested)
-+                    return "pie"
-+
-+        py_methods = (
-+            PyMethodsTest.cheese,
-+            PyMethodsTest.wine,
-+            PyMethodsTest().biscuits,
-+            PyMethodsTest.Nested.ketchup,
-+            PyMethodsTest.Nested.maple,
-+            PyMethodsTest.Nested().pie
-+        )
-+        py_unbound_methods = (
-+            (PyMethodsTest.biscuits, PyMethodsTest),
-+            (PyMethodsTest.Nested.pie, PyMethodsTest.Nested)
-+        )
-+        for proto in range(4, pickle.HIGHEST_PROTOCOL + 1):
-+            for method in py_methods:
-+                with self.subTest(proto=proto, method=method):
-+                    unpickled = self.loads(self.dumps(method, proto))
-+                    self.assertEqual(method(), unpickled())
-+            for method, cls in py_unbound_methods:
-+                obj = cls()
-+                with self.subTest(proto=proto, method=method):
-+                    unpickled = self.loads(self.dumps(method, proto))
-+                    self.assertEqual(method(obj), unpickled(obj))
-+
-+    def test_c_methods(self):
-+        global Subclass
-+        class Subclass(tuple):
-+            class Nested(str):
-+                pass
-+
-+        c_methods = (
-+            # bound built-in method
-+            ("abcd".index, ("c",)),
-+            # unbound built-in method
-+            (str.index, ("abcd", "c")),
-+            # bound "slot" method
-+            ([1, 2, 3].__len__, ()),
-+            # unbound "slot" method
-+            (list.__len__, ([1, 2, 3],)),
-+            # bound "coexist" method
-+            ({1, 2}.__contains__, (2,)),
-+            # unbound "coexist" method
-+            (set.__contains__, ({1, 2}, 2)),
-+            # built-in class method
-+            (dict.fromkeys, (("a", 1), ("b", 2))),
-+            # built-in static method
-+            (bytearray.maketrans, (b"abc", b"xyz")),
-+            # subclass methods
-+            (Subclass([1,2,2]).count, (2,)),
-+            (Subclass.count, (Subclass([1,2,2]), 2)),
-+            (Subclass.Nested("sweet").count, ("e",)),
-+            (Subclass.Nested.count, (Subclass.Nested("sweet"), "e")),
-+        )
-+        for proto in range(4, pickle.HIGHEST_PROTOCOL + 1):
-+            for method, args in c_methods:
-+                with self.subTest(proto=proto, method=method):
-+                    unpickled = self.loads(self.dumps(method, proto))
-+                    self.assertEqual(method(*args), unpickled(*args))
-+
-+
-+class AbstractBytestrTests(unittest.TestCase):
-+    def unpickleEqual(self, data, unpickled):
-+        loaded = self.loads(data, encoding="bytes")
-+        self.assertEqual(loaded, unpickled)
-+
-+    def test_load_str_protocol_0(self):
-+        """ Test str from protocol=0
-+            python 2: pickle.dumps('bytestring \x00\xa0', protocol=0) """
-+        self.unpickleEqual(
-+                b"S'bytestring \\x00\\xa0'\np0\n.",
-+                b'bytestring \x00\xa0')
-+
-+    def test_load_str_protocol_1(self):
-+        """ Test str from protocol=1
-+        python 2: pickle.dumps('bytestring \x00\xa0', protocol=1) """
-+        self.unpickleEqual(
-+                b'U\rbytestring \x00\xa0q\x00.',
-+                b'bytestring \x00\xa0')
-+
-+    def test_load_str_protocol_2(self):
-+        """ Test str from protocol=2
-+        python 2: pickle.dumps('bytestring \x00\xa0', protocol=2) """
-+        self.unpickleEqual(
-+                b'\x80\x02U\rbytestring \x00\xa0q\x00.',
-+                b'bytestring \x00\xa0')
-+
-+    def test_load_unicode_protocol_0(self):
-+        """ Test unicode with protocol=0
-+        python 2: pickle.dumps(u"\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440", protocol=0) """
-+        self.unpickleEqual(
-+                b'V\\u041a\\u043e\\u043c\\u043f\\u044c\\u044e\\u0442\\u0435\\u0440\np0\n.',
-+                '\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440')
-+
-+    def test_load_unicode_protocol_1(self):
-+        """ Test unicode with protocol=1
-+        python 2: pickle.dumps(u"\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440", protocol=1) """
-+        self.unpickleEqual(
-+                b'X\x12\x00\x00\x00\xd0\x9a\xd0\xbe\xd0\xbc\xd0\xbf\xd1\x8c\xd1\x8e\xd1\x82\xd0\xb5\xd1\x80q\x00.',
-+                '\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440')
-+
-+    def test_load_unicode_protocol_2(self):
-+        """ Test unicode with protocol=2
-+        python 2: pickle.dumps(u"\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440", protocol=2) """
-+        self.unpickleEqual(
-+                b'\x80\x02X\x12\x00\x00\x00\xd0\x9a\xd0\xbe\xd0\xbc\xd0\xbf\xd1\x8c\xd1\x8e\xd1\x82\xd0\xb5\xd1\x80q\x00.',
-+                '\u041a\u043e\u043c\u043f\u044c\u044e\u0442\u0435\u0440')
-+
-+    def test_load_long_str_protocol_1(self):
-+        """ Test long str with protocol=1
-+        python 2: pickle.dumps('x'*300, protocol=1) """
-+        self.unpickleEqual(
-+                b'T,\x01\x00\x00xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxq\x00.',
-+                b'x'*300)
-+
-+class AbstractBytesFallbackTests(unittest.TestCase):
-+    def unpickleEqual(self, data, unpickled):
-+        loaded = self.loads(data, errors="bytes")
-+        self.assertEqual(loaded, unpickled)
-+
-+    def test_load_instance(self):
-+        r"""Test instance pickle.
-+
-+        Python 2: pickle.dumps({'x': 'ascii', 'y': '\xff'}) """
-+        self.unpickleEqual(
-+                b"(dp0\nS'y'\np1\nS'\\xff'\np2\nsS'x'\np3\nS'ascii'\np4\ns.",
-+                {'x': 'ascii', 'y': b'\xff'})
-+
-+
-+class BigmemPickleTests(unittest.TestCase):
-+
-+    # Binary protocols can serialize longs of up to 2GB-1
-+
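-+    # (Protocol 2+ writes big ints with the LONG1/LONG4 opcodes; LONG4
-+    # carries a 4-byte signed byte count, which is roughly where the
-+    # 2GB-1 ceiling exercised below comes from.)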
-+    @bigmemtest(size=_2G, memuse=3.6, dry_run=False)
-+    def test_huge_long_32b(self, size):
-+        data = 1 << (8 * size)
-+        try:
-+            for proto in protocols:
-+                if proto < 2:
-+                    continue
-+                with self.subTest(proto=proto):
-+                    with self.assertRaises((ValueError, OverflowError)):
-+                        self.dumps(data, protocol=proto)
-+        finally:
-+            data = None
-+
-+    # Protocol 3 can serialize up to 4GB-1 as a bytes object
-+    # (older protocols don't have a dedicated opcode for bytes and are
-+    # too inefficient)
-+
-+    @bigmemtest(size=_2G, memuse=2.5, dry_run=False)
-+    def test_huge_bytes_32b(self, size):
-+        data = b"abcd" * (size // 4)
-+        try:
-+            for proto in protocols:
-+                if proto < 3:
-+                    continue
-+                with self.subTest(proto=proto):
-+                    try:
-+                        pickled = self.dumps(data, protocol=proto)
-+                        header = (pickle.BINBYTES +
-+                                  struct.pack("<I", len(data)))
-+                        data_start = pickled.index(data)
-+                        self.assertEqual(
-+                            header,
-+                            pickled[data_start-len(header):data_start])
-+                    finally:
-+                        pickled = None
-+        finally:
-+            data = None
-+
-+    @bigmemtest(size=_4G, memuse=2.5, dry_run=False)
-+    def test_huge_bytes_64b(self, size):
-+        data = b"acbd" * (size // 4)
-+        try:
-+            for proto in protocols:
-+                if proto < 3:
-+                    continue
-+                with self.subTest(proto=proto):
-+                    if proto == 3:
-+                        # Protocol 3 does not support large bytes objects.
-+                        # Verify that we do not crash when processing one.
-+                        with self.assertRaises((ValueError, OverflowError)):
-+                            self.dumps(data, protocol=proto)
-+                        continue
-+                    try:
-+                        pickled = self.dumps(data, protocol=proto)
-+                        header = (pickle.BINBYTES8 +
-+                                  struct.pack("<Q", len(data)))
-+                        data_start = pickled.index(data)
-+                        self.assertEqual(
-+                            header,
-+                            pickled[data_start-len(header):data_start])
-+                    finally:
-+                        pickled = None
-+        finally:
-+            data = None
-+
-+    # All protocols use 1 byte per printable ASCII character; we add another
-+    # byte because the encoded form has to be copied into the internal buffer.
-+
-+    @bigmemtest(size=_2G, memuse=8, dry_run=False)
-+    def test_huge_str_32b(self, size):
-+        data = "abcd" * (size // 4)
-+        try:
-+            for proto in protocols:
-+                if proto == 0:
-+                    continue
-+                with self.subTest(proto=proto):
-+                    try:
-+                        pickled = self.dumps(data, protocol=proto)
-+                        header = (pickle.BINUNICODE +
-+                                  struct.pack("<I", len(data)))
-+                        data_start = pickled.index(b'abcd')
-+                        self.assertEqual(
-+                            header,
-+                            pickled[data_start-len(header):data_start])
-+                        self.assertEqual((pickled.rindex(b"abcd") + len(b"abcd") -
-+                                          pickled.index(b"abcd")), len(data))
-+                    finally:
-+                        pickled = None
-+        finally:
-+            data = None
-+
-+    # BINUNICODE (protocols 1, 2 and 3) cannot carry more than 2**32 - 1 bytes
-+    # of utf-8 encoded unicode. BINUNICODE8 (protocol 4) supports these huge
-+    # unicode strings however.
-+
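-+    # (Rough sketch of the headers checked below: BINUNICODE uses a 4-byte
-+    # '<I' length prefix and BINUNICODE8 an 8-byte '<Q' one, e.g.
-+    # struct.pack('<I', 4) == b'\x04\x00\x00\x00' versus
-+    # struct.pack('<Q', 4) == b'\x04\x00\x00\x00\x00\x00\x00\x00'.)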
-+    @bigmemtest(size=_4G, memuse=8, dry_run=False)
-+    def test_huge_str_64b(self, size):
-+        data = "abcd" * (size // 4)
-+        try:
-+            for proto in protocols:
-+                if proto == 0:
-+                    continue
-+                with self.subTest(proto=proto):
-+                    if proto < 4:
-+                        with self.assertRaises((ValueError, OverflowError)):
-+                            self.dumps(data, protocol=proto)
-+                        continue
-+                    try:
-+                        pickled = self.dumps(data, protocol=proto)
-+                        header = (pickle.BINUNICODE8 +
-+                                  struct.pack("<Q", len(data)))
-+                        data_start = pickled.index(b'abcd')
-+                        self.assertEqual(
-+                            header,
-+                            pickled[data_start-len(header):data_start])
-+                        self.assertEqual((pickled.rindex(b"abcd") + len(b"abcd") -
-+                                          pickled.index(b"abcd")), len(data))
-+                    finally:
-+                        pickled = None
-+        finally:
-+            data = None
-+
-+
-+# Test classes for reduce_ex
-+
-+class REX_one(object):
-+    """No __reduce_ex__ here, but inheriting it from object"""
-+    _reduce_called = 0
-+    def __reduce__(self):
-+        self._reduce_called = 1
-+        return REX_one, ()
-+
-+class REX_two(object):
-+    """No __reduce__ here, but inheriting it from object"""
-+    _proto = None
-+    def __reduce_ex__(self, proto):
-+        self._proto = proto
-+        return REX_two, ()
-+
-+class REX_three(object):
-+    _proto = None
-+    def __reduce_ex__(self, proto):
-+        self._proto = proto
-+        return REX_two, ()
-+    def __reduce__(self):
-+        raise TestFailed("This __reduce__ shouldn't be called")
-+
-+class REX_four(object):
-+    """Calling base class method should succeed"""
-+    _proto = None
-+    def __reduce_ex__(self, proto):
-+        self._proto = proto
-+        return object.__reduce_ex__(self, proto)
-+
-+class REX_five(object):
-+    """This one used to fail with infinite recursion"""
-+    _reduce_called = 0
-+    def __reduce__(self):
-+        self._reduce_called = 1
-+        return object.__reduce__(self)
-+
-+class REX_six(object):
-+    """This class is used to check the 4th argument (list iterator) of
-+    the reduce protocol.
-+    """
-+    def __init__(self, items=None):
-+        self.items = items if items is not None else []
-+    def __eq__(self, other):
-+        return type(self) is type(other) and self.items == other.items
-+    def append(self, item):
-+        self.items.append(item)
-+    def __reduce__(self):
-+        return type(self), (), None, iter(self.items), None
-+
-+class REX_seven(object):
-+    """This class is used to check the 5th argument (dict iterator) of
-+    the reduce protocol.
-+    """
-+    def __init__(self, table=None):
-+        self.table = table if table is not None else {}
-+    def __eq__(self, other):
-+        return type(self) is type(other) and self.table == other.table
-+    def __setitem__(self, key, value):
-+        self.table[key] = value
-+    def __reduce__(self):
-+        return type(self), (), None, None, iter(self.table.items())
-+
-+
-+# Test classes for newobj
-+
-+class MyInt(int):
-+    sample = 1
-+
-+class MyFloat(float):
-+    sample = 1.0
-+
-+class MyComplex(complex):
-+    sample = 1.0 + 0.0j
-+
-+class MyStr(str):
-+    sample = "hello"
-+
-+class MyUnicode(str):
-+    sample = "hello \u1234"
-+
-+class MyTuple(tuple):
-+    sample = (1, 2, 3)
-+
-+class MyList(list):
-+    sample = [1, 2, 3]
-+
-+class MyDict(dict):
-+    sample = {"a": 1, "b": 2}
-+
-+class MySet(set):
-+    sample = {"a", "b"}
-+
-+class MyFrozenSet(frozenset):
-+    sample = frozenset({"a", "b"})
-+
-+myclasses = [MyInt, MyFloat,
-+             MyComplex,
-+             MyStr, MyUnicode,
-+             MyTuple, MyList, MyDict, MySet, MyFrozenSet]
-+
-+
-+class SlotList(MyList):
-+    __slots__ = ["foo"]
-+
-+class SimpleNewObj(object):
-+    def __init__(self, a, b, c):
-+        # raise an error, to make sure this isn't called
-+        raise TypeError("SimpleNewObj.__init__() didn't expect to get called")
-+    def __eq__(self, other):
-+        return self.__dict__ == other.__dict__
-+
-+class BadGetattr:
-+    def __getattr__(self, key):
-+        self.foo
-+
-+
-+class AbstractPickleModuleTests(unittest.TestCase):
-+
-+    def test_dump_closed_file(self):
-+        import os
-+        f = open(TESTFN, "wb")
-+        try:
-+            f.close()
-+            self.assertRaises(ValueError, pickle.dump, 123, f)
-+        finally:
-+            os.remove(TESTFN)
-+
-+    def test_load_closed_file(self):
-+        import os
-+        f = open(TESTFN, "wb")
-+        try:
-+            f.close()
-+            self.assertRaises(ValueError, pickle.load, f)
-+        finally:
-+            os.remove(TESTFN)
-+
-+    def test_load_from_and_dump_to_file(self):
-+        stream = io.BytesIO()
-+        data = [123, {}, 124]
-+        pickle.dump(data, stream)
-+        stream.seek(0)
-+        unpickled = pickle.load(stream)
-+        self.assertEqual(unpickled, data)
-+
-+    def test_highest_protocol(self):
-+        # Of course this needs to be changed when HIGHEST_PROTOCOL changes.
-+        self.assertEqual(pickle.HIGHEST_PROTOCOL, 4)
-+
-+    def test_callapi(self):
-+        f = io.BytesIO()
-+        # With and without keyword arguments
-+        pickle.dump(123, f, -1)
-+        pickle.dump(123, file=f, protocol=-1)
-+        pickle.dumps(123, -1)
-+        pickle.dumps(123, protocol=-1)
-+        pickle.Pickler(f, -1)
-+        pickle.Pickler(f, protocol=-1)
-+
-+    def test_bad_init(self):
-+        # Test issue3664 (pickle can segfault from a badly initialized Pickler).
-+        # Override initialization without calling __init__() of the superclass.
-+        class BadPickler(pickle.Pickler):
-+            def __init__(self): pass
-+
-+        class BadUnpickler(pickle.Unpickler):
-+            def __init__(self): pass
-+
-+        self.assertRaises(pickle.PicklingError, BadPickler().dump, 0)
-+        self.assertRaises(pickle.UnpicklingError, BadUnpickler().load)
-+
-+    def test_bad_input(self):
-+        # Test issue4298
-+        s = bytes([0x58, 0, 0, 0, 0x54])
-+        self.assertRaises(EOFError, pickle.loads, s)
-+
-+
-+class AbstractPersistentPicklerTests(unittest.TestCase):
-+
-+    # This class defines persistent_id() and persistent_load()
-+    # functions that should be used by the pickler.  All even integers
-+    # are pickled using persistent ids.
-+
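-+    # In the stream itself a persistent id is written with the PERSID ('P')
-+    # opcode under protocol 0 and with BINPERSID ('Q') under the binary
-+    # protocols; on the way back in, persistent_load() maps the id to an
-+    # object, e.g. (hypothetical sketch):
-+    #
-+    #   class MyUnpickler(pickle.Unpickler):
-+    #       def persistent_load(self, pid):
-+    #           return cache[pid]   # 'cache': a hypothetical pid -> object map
-+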
-+    def persistent_id(self, object):
-+        if isinstance(object, int) and object % 2 == 0:
-+            self.id_count += 1
-+            return str(object)
-+        elif object == "test_false_value":
-+            self.false_count += 1
-+            return ""
-+        else:
-+            return None
-+
-+    def persistent_load(self, oid):
-+        if not oid:
-+            self.load_false_count += 1
-+            return "test_false_value"
-+        else:
-+            self.load_count += 1
-+            object = int(oid)
-+            assert object % 2 == 0
-+            return object
-+
-+    def test_persistence(self):
-+        L = list(range(10)) + ["test_false_value"]
-+        for proto in protocols:
-+            self.id_count = 0
-+            self.false_count = 0
-+            self.load_false_count = 0
-+            self.load_count = 0
-+            self.assertEqual(self.loads(self.dumps(L, proto)), L)
-+            self.assertEqual(self.id_count, 5)
-+            self.assertEqual(self.false_count, 1)
-+            self.assertEqual(self.load_count, 5)
-+            self.assertEqual(self.load_false_count, 1)
-+
-+
-+class AbstractPicklerUnpicklerObjectTests(unittest.TestCase):
-+
-+    pickler_class = None
-+    unpickler_class = None
-+
-+    def setUp(self):
-+        assert self.pickler_class
-+        assert self.unpickler_class
-+
-+    def test_clear_pickler_memo(self):
-+        # To test whether clear_memo() has any effect, we pickle an object,
-+        # then pickle it again without clearing the memo; the two serialized
-+        # forms should be different. If we clear_memo() and then pickle the
-+        # object again, the third serialized form should be identical to the
-+        # first one we obtained.
-+        data = ["abcdefg", "abcdefg", 44]
-+        f = io.BytesIO()
-+        pickler = self.pickler_class(f)
-+
-+        pickler.dump(data)
-+        first_pickled = f.getvalue()
-+
-+        # Reset BytesIO object.
-+        f.seek(0)
-+        f.truncate()
-+
-+        pickler.dump(data)
-+        second_pickled = f.getvalue()
-+
-+        # Reset the Pickler and BytesIO objects.
-+        pickler.clear_memo()
-+        f.seek(0)
-+        f.truncate()
-+
-+        pickler.dump(data)
-+        third_pickled = f.getvalue()
-+
-+        self.assertNotEqual(first_pickled, second_pickled)
-+        self.assertEqual(first_pickled, third_pickled)
-+
-+    def test_priming_pickler_memo(self):
-+        # Verify that we can set the Pickler's memo attribute.
-+        data = ["abcdefg", "abcdefg", 44]
-+        f = io.BytesIO()
-+        pickler = self.pickler_class(f)
-+
-+        pickler.dump(data)
-+        first_pickled = f.getvalue()
-+
-+        f = io.BytesIO()
-+        primed = self.pickler_class(f)
-+        primed.memo = pickler.memo
-+
-+        primed.dump(data)
-+        primed_pickled = f.getvalue()
-+
-+        self.assertNotEqual(first_pickled, primed_pickled)
-+
-+    def test_priming_unpickler_memo(self):
-+        # Verify that we can set the Unpickler's memo attribute.
-+        data = ["abcdefg", "abcdefg", 44]
-+        f = io.BytesIO()
-+        pickler = self.pickler_class(f)
-+
-+        pickler.dump(data)
-+        first_pickled = f.getvalue()
-+
-+        f = io.BytesIO()
-+        primed = self.pickler_class(f)
-+        primed.memo = pickler.memo
-+
-+        primed.dump(data)
-+        primed_pickled = f.getvalue()
-+
-+        unpickler = self.unpickler_class(io.BytesIO(first_pickled))
-+        unpickled_data1 = unpickler.load()
-+
-+        self.assertEqual(unpickled_data1, data)
-+
-+        primed = self.unpickler_class(io.BytesIO(primed_pickled))
-+        primed.memo = unpickler.memo
-+        unpickled_data2 = primed.load()
-+
-+        primed.memo.clear()
-+
-+        self.assertEqual(unpickled_data2, data)
-+        self.assertTrue(unpickled_data2 is unpickled_data1)
-+
-+    def test_reusing_unpickler_objects(self):
-+        data1 = ["abcdefg", "abcdefg", 44]
-+        f = io.BytesIO()
-+        pickler = self.pickler_class(f)
-+        pickler.dump(data1)
-+        pickled1 = f.getvalue()
-+
-+        data2 = ["abcdefg", 44, 44]
-+        f = io.BytesIO()
-+        pickler = self.pickler_class(f)
-+        pickler.dump(data2)
-+        pickled2 = f.getvalue()
-+
-+        f = io.BytesIO()
-+        f.write(pickled1)
-+        f.seek(0)
-+        unpickler = self.unpickler_class(f)
-+        self.assertEqual(unpickler.load(), data1)
-+
-+        f.seek(0)
-+        f.truncate()
-+        f.write(pickled2)
-+        f.seek(0)
-+        self.assertEqual(unpickler.load(), data2)
-+
-+    def _check_multiple_unpicklings(self, ioclass):
-+        for proto in protocols:
-+            with self.subTest(proto=proto):
-+                data1 = [(x, str(x)) for x in range(2000)] + [b"abcde", len]
-+                f = ioclass()
-+                pickler = self.pickler_class(f, protocol=proto)
-+                pickler.dump(data1)
-+                pickled = f.getvalue()
-+
-+                N = 5
-+                f = ioclass(pickled * N)
-+                unpickler = self.unpickler_class(f)
-+                for i in range(N):
-+                    if f.seekable():
-+                        pos = f.tell()
-+                    self.assertEqual(unpickler.load(), data1)
-+                    if f.seekable():
-+                        self.assertEqual(f.tell(), pos + len(pickled))
-+                self.assertRaises(EOFError, unpickler.load)
-+
-+    def test_multiple_unpicklings_seekable(self):
-+        self._check_multiple_unpicklings(io.BytesIO)
-+
-+    def test_multiple_unpicklings_unseekable(self):
-+        self._check_multiple_unpicklings(UnseekableIO)
-+
-+    def test_unpickling_buffering_readline(self):
-+        # Issue #12687: the unpickler's buffering logic could fail with
-+        # text mode opcodes.
-+        data = list(range(10))
-+        for proto in protocols:
-+            for buf_size in range(1, 11):
-+                f = io.BufferedRandom(io.BytesIO(), buffer_size=buf_size)
-+                pickler = self.pickler_class(f, protocol=proto)
-+                pickler.dump(data)
-+                f.seek(0)
-+                unpickler = self.unpickler_class(f)
-+                self.assertEqual(unpickler.load(), data)
-+
-+    def test_noload_object(self):
-+        global _NOLOAD_OBJECT
-+        after = {}
-+        _NOLOAD_OBJECT = object()
-+        aaa = AAA()
-+        bbb = BBB()
-+        ccc = 1
-+        ddd = 1.0
-+        eee = ('eee', 1)
-+        fff = ['fff']
-+        ggg = {'ggg': 0}
-+        unpickler = self.unpickler_class
-+        f = io.BytesIO()
-+        pickler = self.pickler_class(f, protocol=2)
-+        pickler.dump(_NOLOAD_OBJECT)
-+        after['_NOLOAD_OBJECT'] = f.tell()
-+        pickler.dump(aaa)
-+        after['aaa'] = f.tell()
-+        pickler.dump(bbb)
-+        after['bbb'] = f.tell()
-+        pickler.dump(ccc)
-+        after['ccc'] = f.tell()
-+        pickler.dump(ddd)
-+        after['ddd'] = f.tell()
-+        pickler.dump(eee)
-+        after['eee'] = f.tell()
-+        pickler.dump(fff)
-+        after['fff'] = f.tell()
-+        pickler.dump(ggg)
-+        after['ggg'] = f.tell()
-+        f.seek(0)
-+        unpickler = self.unpickler_class(f)
-+        unpickler.noload() # read past _NOLOAD_OBJECT
-+        self.assertEqual(f.tell(), after['_NOLOAD_OBJECT'])
-+        unpickler.noload() # read past aaa
-+        self.assertEqual(f.tell(), after['aaa'])
-+        unpickler.noload() # read past bbb
-+        self.assertEqual(f.tell(), after['bbb'])
-+        unpickler.noload() # read past ccc
-+        self.assertEqual(f.tell(), after['ccc'])
-+        unpickler.noload() # read past ddd
-+        self.assertEqual(f.tell(), after['ddd'])
-+        unpickler.noload() # read past eee
-+        self.assertEqual(f.tell(), after['eee'])
-+        unpickler.noload() # read past fff
-+        self.assertEqual(f.tell(), after['fff'])
-+        unpickler.noload() # read past ggg
-+        self.assertEqual(f.tell(), after['ggg'])
-+
-+# Tests for dispatch_table attribute
-+
-+REDUCE_A = 'reduce_A'
-+
-+class AAA(object):
-+    def __reduce__(self):
-+        return str, (REDUCE_A,)
-+
-+class BBB(object):
-+    pass
-+
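-+# A dispatch_table maps a class to a reduction callable, with the same
-+# contract as copyreg.dispatch_table; as a hypothetical example,
-+#   dt = {complex: lambda obj: (str, (REDUCE_A,))}
-+# assigned to a pickler's dispatch_table attribute makes complex numbers
-+# pickle through that callable instead of their usual copyreg reduction.
-+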
-+class AbstractDispatchTableTests(unittest.TestCase):
-+
-+    def test_default_dispatch_table(self):
-+        # No dispatch_table attribute by default
-+        f = io.BytesIO()
-+        p = self.pickler_class(f, 0)
-+        with self.assertRaises(AttributeError):
-+            p.dispatch_table
-+        self.assertFalse(hasattr(p, 'dispatch_table'))
-+
-+    def test_class_dispatch_table(self):
-+        # A dispatch_table attribute can be specified class-wide
-+        dt = self.get_dispatch_table()
-+
-+        class MyPickler(self.pickler_class):
-+            dispatch_table = dt
-+
-+        def dumps(obj, protocol=None):
-+            f = io.BytesIO()
-+            p = MyPickler(f, protocol)
-+            self.assertEqual(p.dispatch_table, dt)
-+            p.dump(obj)
-+            return f.getvalue()
-+
-+        self._test_dispatch_table(dumps, dt)
-+
-+    def test_instance_dispatch_table(self):
-+        # A dispatch_table attribute can also be specified instance-wide
-+        dt = self.get_dispatch_table()
-+
-+        def dumps(obj, protocol=None):
-+            f = io.BytesIO()
-+            p = self.pickler_class(f, protocol)
-+            p.dispatch_table = dt
-+            self.assertEqual(p.dispatch_table, dt)
-+            p.dump(obj)
-+            return f.getvalue()
-+
-+        self._test_dispatch_table(dumps, dt)
-+
-+    def _test_dispatch_table(self, dumps, dispatch_table):
-+        def custom_load_dump(obj):
-+            return pickle.loads(dumps(obj, 0))
-+
-+        def default_load_dump(obj):
-+            return pickle.loads(pickle.dumps(obj, 0))
-+
-+        # pickling complex numbers using protocol 0 relies on copyreg
-+        # so check pickling a complex number still works
-+        z = 1 + 2j
-+        self.assertEqual(custom_load_dump(z), z)
-+        self.assertEqual(default_load_dump(z), z)
-+
-+        # modify pickling of complex
-+        REDUCE_1 = 'reduce_1'
-+        def reduce_1(obj):
-+            return str, (REDUCE_1,)
-+        dispatch_table[complex] = reduce_1
-+        self.assertEqual(custom_load_dump(z), REDUCE_1)
-+        self.assertEqual(default_load_dump(z), z)
-+
-+        # check picklability of AAA and BBB
-+        a = AAA()
-+        b = BBB()
-+        self.assertEqual(custom_load_dump(a), REDUCE_A)
-+        self.assertIsInstance(custom_load_dump(b), BBB)
-+        self.assertEqual(default_load_dump(a), REDUCE_A)
-+        self.assertIsInstance(default_load_dump(b), BBB)
-+
-+        # modify pickling of BBB
-+        dispatch_table[BBB] = reduce_1
-+        self.assertEqual(custom_load_dump(a), REDUCE_A)
-+        self.assertEqual(custom_load_dump(b), REDUCE_1)
-+        self.assertEqual(default_load_dump(a), REDUCE_A)
-+        self.assertIsInstance(default_load_dump(b), BBB)
-+
-+        # revert pickling of BBB and modify pickling of AAA
-+        REDUCE_2 = 'reduce_2'
-+        def reduce_2(obj):
-+            return str, (REDUCE_2,)
-+        dispatch_table[AAA] = reduce_2
-+        del dispatch_table[BBB]
-+        self.assertEqual(custom_load_dump(a), REDUCE_2)
-+        self.assertIsInstance(custom_load_dump(b), BBB)
-+        self.assertEqual(default_load_dump(a), REDUCE_A)
-+        self.assertIsInstance(default_load_dump(b), BBB)
-+
-+
-+if __name__ == "__main__":
-+    # Print some stuff that can be used to rewrite DATA{0,1,2}
-+    from pickletools import dis
-+    x = create_data()
-+    for i in range(3):
-+        p = pickle.dumps(x, i)
-+        print("DATA{0} = (".format(i))
-+        for j in range(0, len(p), 20):
-+            b = bytes(p[j:j+20])
-+            print("    {0!r}".format(b))
-+        print(")")
-+        print()
-+        print("# Disassembly of DATA{0}".format(i))
-+        print("DATA{0}_DIS = \"\"\"\\".format(i))
-+        dis(p)
-+        print("\"\"\"")
-+        print()
-diff --git a/src/zodbpickle/tests/test_pickle.py b/src/zodbpickle/tests/test_pickle.py
-index 9f68ee4..6c3e56b 100644
---- a/src/zodbpickle/tests/test_pickle.py
-+++ b/src/zodbpickle/tests/test_pickle.py
-@@ -24,7 +24,10 @@ class TestImportability(unittest.TestCase):
- def test_suite():
-     import sys
-     if sys.version_info[0] >= 3:
--        from .test_pickle_3 import test_suite
-+        if sys.version_info[1] >= 4:
-+            from .test_pickle_34 import test_suite
-+        else:
-+            from .test_pickle_3 import test_suite
-     else:
-         from .test_pickle_2 import test_suite
-     return unittest.TestSuite((
-diff --git a/src/zodbpickle/tests/test_pickle_34.py b/src/zodbpickle/tests/test_pickle_34.py
-new file mode 100644
-index 0000000..6f8d5f8
---- /dev/null
-+++ b/src/zodbpickle/tests/test_pickle_34.py
-@@ -0,0 +1,181 @@
-+import io
-+import collections
-+import unittest
-+import doctest
-+import sys
-+
-+from .pickletester_34 import AbstractPickleTests
-+from .pickletester_34 import AbstractPickleModuleTests
-+from .pickletester_34 import AbstractPersistentPicklerTests
-+from .pickletester_34 import AbstractPicklerUnpicklerObjectTests
-+from .pickletester_34 import AbstractDispatchTableTests
-+from .pickletester_34 import BigmemPickleTests
-+from .pickletester_34 import AbstractBytestrTests
-+from .pickletester_34 import AbstractBytesFallbackTests
-+
-+from zodbpickle import pickle_34 as pickle
-+
-+try:
-+    from zodbpickle import _pickle
-+    has_c_implementation = True
-+except ImportError:
-+    has_c_implementation = False
-+
-+
-+class PickleTests(AbstractPickleModuleTests):
-+    pass
-+
-+
-+class PyPicklerBase(object):
-+
-+    pickler = pickle._Pickler
-+    unpickler = pickle._Unpickler
-+
-+    def dumps(self, arg, proto=None):
-+        f = io.BytesIO()
-+        p = self.pickler(f, proto)
-+        p.dump(arg)
-+        f.seek(0)
-+        return bytes(f.read())
-+
-+    def loads(self, buf, **kwds):
-+        f = io.BytesIO(buf)
-+        u = self.unpickler(f, **kwds)
-+        return u.load()
-+
-+class PyPicklerTests(PyPicklerBase, AbstractPickleTests):
-+    pass
-+
-+class PyPicklerBytestrTests(PyPicklerBase, AbstractBytestrTests):
-+    pass
-+
-+class PyPicklerBytesFallbackTests(PyPicklerBase, AbstractBytesFallbackTests):
-+    pass
-+
-+class InMemoryPickleTests(AbstractPickleTests, BigmemPickleTests):
-+
-+    pickler = pickle._Pickler
-+    unpickler = pickle._Unpickler
-+
-+    def dumps(self, arg, protocol=None):
-+        return pickle.dumps(arg, protocol)
-+
-+    def loads(self, buf, **kwds):
-+        return pickle.loads(buf, **kwds)
-+
-+
-+class PyPersPicklerTests(AbstractPersistentPicklerTests):
-+
-+    pickler = pickle._Pickler
-+    unpickler = pickle._Unpickler
-+
-+    def dumps(self, arg, proto=None):
-+        class PersPickler(self.pickler):
-+            def persistent_id(subself, obj):
-+                return self.persistent_id(obj)
-+        f = io.BytesIO()
-+        p = PersPickler(f, proto)
-+        p.dump(arg)
-+        f.seek(0)
-+        return f.read()
-+
-+    def loads(self, buf, **kwds):
-+        class PersUnpickler(self.unpickler):
-+            def persistent_load(subself, obj):
-+                return self.persistent_load(obj)
-+        f = io.BytesIO(buf)
-+        u = PersUnpickler(f, **kwds)
-+        return u.load()
-+
-+
-+class PyPicklerUnpicklerObjectTests(AbstractPicklerUnpicklerObjectTests):
-+
-+    pickler_class = pickle._Pickler
-+    unpickler_class = pickle._Unpickler
-+
-+
-+class PyDispatchTableTests(AbstractDispatchTableTests):
-+
-+    pickler_class = pickle._Pickler
-+
-+    def get_dispatch_table(self):
-+        return pickle.dispatch_table.copy()
-+
-+
-+class PyChainDispatchTableTests(AbstractDispatchTableTests):
-+
-+    pickler_class = pickle._Pickler
-+
-+    def get_dispatch_table(self):
-+        return collections.ChainMap({}, pickle.dispatch_table)
-+
-+
-+if has_c_implementation:
-+    class CPicklerTests(PyPicklerTests):
-+        pickler = _pickle.Pickler
-+        unpickler = _pickle.Unpickler
-+
-+    class CPicklerBytestrTests(PyPicklerBytestrTests):
-+        pickler = _pickle.Pickler
-+        unpickler = _pickle.Unpickler
-+
-+    class CPicklerBytesFallbackTests(PyPicklerBytesFallbackTests):
-+        pickler = _pickle.Pickler
-+        unpickler = _pickle.Unpickler
-+
-+    class CPersPicklerTests(PyPersPicklerTests):
-+        pickler = _pickle.Pickler
-+        unpickler = _pickle.Unpickler
-+
-+    class CDumpPickle_LoadPickle(PyPicklerTests):
-+        pickler = _pickle.Pickler
-+        unpickler = pickle._Unpickler
-+
-+    class DumpPickle_CLoadPickle(PyPicklerTests):
-+        pickler = pickle._Pickler
-+        unpickler = _pickle.Unpickler
-+
-+    class CPicklerUnpicklerObjectTests(AbstractPicklerUnpicklerObjectTests):
-+        pickler_class = _pickle.Pickler
-+        unpickler_class = _pickle.Unpickler
-+
-+        def test_issue18339(self):
-+            unpickler = self.unpickler_class(io.BytesIO())
-+            with self.assertRaises(TypeError):
-+                unpickler.memo = object
-+            # used to cause a segfault
-+            with self.assertRaises(ValueError):
-+                unpickler.memo = {-1: None}
-+            unpickler.memo = {1: None}
-+
-+    class CDispatchTableTests(AbstractDispatchTableTests):
-+        pickler_class = pickle.Pickler
-+        def get_dispatch_table(self):
-+            return pickle.dispatch_table.copy()
-+
-+    class CChainDispatchTableTests(AbstractDispatchTableTests):
-+        pickler_class = pickle.Pickler
-+        def get_dispatch_table(self):
-+            return collections.ChainMap({}, pickle.dispatch_table)
-+
-+
-+def choose_tests():
-+    tests = [PickleTests, PyPicklerTests, PyPersPicklerTests,
-+             PyPicklerBytestrTests, PyPicklerBytesFallbackTests]
-+    tests.extend([PyDispatchTableTests, PyChainDispatchTableTests])
-+    if has_c_implementation:
-+        tests.extend([CPicklerTests, CPersPicklerTests,
-+                      CPicklerBytestrTests, CPicklerBytesFallbackTests,
-+                      CDumpPickle_LoadPickle, DumpPickle_CLoadPickle,
-+                      PyPicklerUnpicklerObjectTests,
-+                      CPicklerUnpicklerObjectTests,
-+                      CDispatchTableTests, CChainDispatchTableTests,
-+                      InMemoryPickleTests])
-+    return tests
-+
-+def test_suite():
-+    return unittest.TestSuite([
-+        unittest.makeSuite(t) for t in choose_tests()
-+    ] + [
-+        doctest.DocTestSuite(pickle),
-+    ])
diff --git a/python-zodbpickle.spec b/python-zodbpickle.spec
index 4a63f58..48f9ead 100644
--- a/python-zodbpickle.spec
+++ b/python-zodbpickle.spec
@@ -5,8 +5,8 @@
 %global pkgname zodbpickle
 
 Name:           python-%{pkgname}
-Version:        0.5.2
-Release:        3%{?dist}
+Version:        0.6.0
+Release:        1%{?dist}
 Summary:        Fork of Python 2 pickle module for ZODB
 
 # Code taken from the python 3 sources is covered by the Python license.
@@ -14,9 +14,6 @@ Summary:        Fork of Python 2 pickle module for ZODB
 License:        Python and ZPLv2.1
 URL:            https://pypi.python.org/pypi/%{pkgname}
 Source0:        https://pypi.python.org/packages/source/z/%{pkgname}/%{pkgname}-%{version}.tar.gz
-# Adapt to Python 3.4; sent upstream 9 Jun 2014:
-# https://groups.google.com/forum/#!topic/zodb/kGn2x--tFGA
-Patch0:         %{name}-python34.patch
 
 BuildRequires:  python-devel
 BuildRequires:  python-nose
@@ -52,9 +49,6 @@ Summary:        Fork of Python 3 pickle module for ZODB
 
 %prep
 %setup -q -c
-pushd %{pkgname}-%{version}
-%patch0 -p1
-popd
 
 # Remove prebuilt egg
 rm -fr %{pkgname}-%{version}/src/%{pkgname}.egg-info
@@ -123,6 +117,10 @@ popd
 %endif
 
 %changelog
+* Tue Apr 14 2015 Jerry James <loganjerry at gmail.com> - 0.6.0-1
+- New upstream version
+- Drop upstreamed -python34 patch
+
 * Sat Feb 21 2015 Jerry James <loganjerry at gmail.com> - 0.5.2-3
 - Use license macro
 
diff --git a/sources b/sources
index 07898d6..184b18f 100644
--- a/sources
+++ b/sources
@@ -1 +1 @@
-d401bd89f99ec8d56c22493e6f8c0443  zodbpickle-0.5.2.tar.gz
+bd050064511eaf2ac276a21bedf9d072  zodbpickle-0.6.0.tar.gz
-- 
cgit v0.10.2


	http://pkgs.fedoraproject.org/cgit/python-zodbpickle.git/commit/?h=master&id=10b1feddd0ed0a98135ac08f9ab08c98ccc7834e

