[SPARK-50621][PYTHON] Upgrade Cloudpickle to 3.1.0
### What changes were proposed in this pull request?

This PR proposes to upgrade Cloudpickle to 3.1.0.

### Why are the changes needed?

To leverage bug fixes.

### Does this PR introduce _any_ user-facing change?

Yes, it includes several bug fixes, listed in https://github.com/cloudpipe/cloudpickle/blob/v3.1.0/CHANGES.md#310

### How was this patch tested?

Existing unit tests should cover this change.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes apache#49241 from HyukjinKwon/upgrade-cloudpickle.

Authored-by: Hyukjin Kwon <gurwls223@apache.org>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
HyukjinKwon committed Dec 20, 2024
1 parent d499400 commit 939f3df
Showing 4 changed files with 76 additions and 43 deletions.
2 changes: 1 addition & 1 deletion python/pyspark/cloudpickle/__init__.py
@@ -3,7 +3,7 @@

__doc__ = cloudpickle.__doc__

__version__ = "3.0.0"
__version__ = "3.1.0"

__all__ = [ # noqa
"__version__",
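A quick runtime check of the vendored version (a minimal sketch, assuming a PySpark build that includes this commit):

from pyspark import cloudpickle
# The vendored copy should report the upgraded version after this change.
assert cloudpickle.__version__ == "3.1.0"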
98 changes: 74 additions & 24 deletions python/pyspark/cloudpickle/cloudpickle.py
@@ -126,7 +126,7 @@ def _lookup_class_or_track(class_tracker_id, class_def):


def register_pickle_by_value(module):
"""Register a module to make it functions and classes picklable by value.
"""Register a module to make its functions and classes picklable by value.
By default, functions and classes that are attributes of an importable
module are to be pickled by reference, that is relying on re-importing
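A minimal usage sketch of this API (my_helpers and transform are hypothetical names):

import cloudpickle
import my_helpers  # hypothetical local module that remote workers cannot import

# Opt the module in: its functions and classes are embedded in the payload
# by value instead of being re-imported by name at unpickling time.
cloudpickle.register_pickle_by_value(my_helpers)
payload = cloudpickle.dumps(my_helpers.transform)  # hypothetical function
cloudpickle.unregister_pickle_by_value(my_helpers)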
@@ -213,6 +213,7 @@ def _whichmodule(obj, name):
# sys.modules
if (
module_name == "__main__"
+or module_name == "__mp_main__"
or module is None
or not isinstance(module, types.ModuleType)
):
@@ -409,7 +410,10 @@ def _walk_global_ops(code):

def _extract_class_dict(cls):
"""Retrieve a copy of the dict of a class without the inherited method."""
-clsdict = dict(cls.__dict__)  # copy dict proxy to a dict
+# Hack to circumvent non-predictable memoization caused by string interning.
+# See the inline comment in _class_setstate for details.
+clsdict = {"".join(k): cls.__dict__[k] for k in sorted(cls.__dict__)}

if len(cls.__bases__) == 1:
inherited_dict = cls.__bases__[0].__dict__
else:
@@ -533,9 +537,15 @@ class id will also reuse this class definition.
The "extra" variable is meant to be a dict (or None) that can be used for
forward compatibility shall the need arise.
"""
+# We need to intern the keys of the type_kwargs dict to avoid having
+# different pickles for the same dynamic class depending on whether it was
+# dynamically created or reconstructed from a pickled stream.
+type_kwargs = {sys.intern(k): v for k, v in type_kwargs.items()}

skeleton_class = types.new_class(
name, bases, {"metaclass": type_constructor}, lambda ns: ns.update(type_kwargs)
)

return _lookup_class_or_track(class_tracker_id, skeleton_class)
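To illustrate why interning the type_kwargs keys stabilizes the payload (CPython behavior): sys.intern maps equal strings to one canonical object, so the freshly created class and the one reconstructed from a pickled stream see identically behaving keys.

import sys
created = "slot_name"            # key as defined in the original process
restored = "".join("slot_name")  # an equal but distinct copy, as unpickling produces
assert created == restored and created is not restored
assert sys.intern(created) is sys.intern(restored)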


@@ -694,8 +704,10 @@ def _function_getstate(func):
# unpickling time by iterating over slotstate and calling setattr(func,
# slotname, slotvalue)
slotstate = {
"__name__": func.__name__,
"__qualname__": func.__qualname__,
# Hack to circumvent non-predictable memoization caused by string interning.
# See the inline comment in _class_setstate for details.
"__name__": "".join(func.__name__),
"__qualname__": "".join(func.__qualname__),
"__annotations__": func.__annotations__,
"__kwdefaults__": func.__kwdefaults__,
"__defaults__": func.__defaults__,
@@ -721,7 +733,9 @@ def _function_getstate(func):
)
slotstate["__globals__"] = f_globals

-state = func.__dict__
+# Hack to circumvent non-predictable memoization caused by string interning.
+# See the inline comment in _class_setstate for details.
+state = {"".join(k): v for k, v in func.__dict__.items()}
return state, slotstate
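The "".join trick above relies on CPython returning a brand-new string for a multi-character input, equal to the original but not interned:

import sys
name = sys.intern("my_function")
copy = "".join(name)  # builds a fresh string object in CPython
assert copy == name
assert copy is not name  # distinct identity, hence predictable memoization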


@@ -802,6 +816,19 @@ def _code_reduce(obj):
# of the specific type from types, for example:
# >>> from types import CodeType
# >>> help(CodeType)

+# Hack to circumvent non-predictable memoization caused by string interning.
+# See the inline comment in _class_setstate for details.
+co_name = "".join(obj.co_name)
+
+# Create shallow copies of these tuples to make the cloudpickle payload deterministic.
+# When creating a code object during load, copies of these four tuples are
+# created, while in the main process, these tuples can be shared.
+# By always creating copies, we make sure the resulting payload is deterministic.
+co_names = tuple(name for name in obj.co_names)
+co_varnames = tuple(name for name in obj.co_varnames)
+co_freevars = tuple(name for name in obj.co_freevars)
+co_cellvars = tuple(name for name in obj.co_cellvars)
if hasattr(obj, "co_exceptiontable"):
# Python 3.11 and later: there are some new attributes
# related to the enhanced exceptions.
@@ -814,16 +841,16 @@
obj.co_flags,
obj.co_code,
obj.co_consts,
-obj.co_names,
-obj.co_varnames,
+co_names,
+co_varnames,
obj.co_filename,
-obj.co_name,
+co_name,
obj.co_qualname,
obj.co_firstlineno,
obj.co_linetable,
obj.co_exceptiontable,
-obj.co_freevars,
-obj.co_cellvars,
+co_freevars,
+co_cellvars,
)
elif hasattr(obj, "co_linetable"):
# Python 3.10 and later: obj.co_lnotab is deprecated and constructor
@@ -837,14 +864,14 @@
obj.co_flags,
obj.co_code,
obj.co_consts,
-obj.co_names,
-obj.co_varnames,
+co_names,
+co_varnames,
obj.co_filename,
-obj.co_name,
+co_name,
obj.co_firstlineno,
obj.co_linetable,
-obj.co_freevars,
-obj.co_cellvars,
+co_freevars,
+co_cellvars,
)
elif hasattr(obj, "co_nmeta"): # pragma: no cover
# "nogil" Python: modified attributes from 3.9
@@ -859,15 +886,15 @@
obj.co_flags,
obj.co_code,
obj.co_consts,
-obj.co_varnames,
+co_varnames,
obj.co_filename,
-obj.co_name,
+co_name,
obj.co_firstlineno,
obj.co_lnotab,
obj.co_exc_handlers,
obj.co_jump_table,
-obj.co_freevars,
-obj.co_cellvars,
+co_freevars,
+co_cellvars,
obj.co_free2reg,
obj.co_cell2reg,
)
@@ -882,14 +909,14 @@
obj.co_flags,
obj.co_code,
obj.co_consts,
-obj.co_names,
-obj.co_varnames,
+co_names,
+co_varnames,
obj.co_filename,
-obj.co_name,
+co_name,
obj.co_firstlineno,
obj.co_lnotab,
-obj.co_freevars,
-obj.co_cellvars,
+co_freevars,
+co_cellvars,
)
return types.CodeType, args
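The tuple copies above matter because of a CPython detail: tuple() applied to an exact tuple returns the very same object, while building from a generator always allocates a fresh one.

names = ("a", "b", "c")
assert tuple(names) is names                 # no copy: the same shared object
assert tuple(n for n in names) is not names  # a generator forces a fresh tuple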

@@ -1127,7 +1154,30 @@ def _class_setstate(obj, state):
if attrname == "_abc_impl":
registry = attr
else:
+# Note: setting attribute names on a class automatically triggers their
+# interning in CPython:
+# https://github.com/python/cpython/blob/v3.12.0/Objects/object.c#L957
+#
+# This means that to get deterministic pickling for a dynamic class that
+# was initially defined in a different Python process, the pickler
+# needs to ensure that dynamic class and function attribute names are
+# systematically copied into a non-interned version to avoid
+# unpredictable pickle payloads.
+#
+# Indeed the Pickler's memoizer relies on physical object identity to break
+# cycles in the reference graph of the object being serialized.
setattr(obj, attrname, attr)

+if sys.version_info >= (3, 13) and "__firstlineno__" in state:
+    # Set the Python 3.13+ only __firstlineno__ attribute one more time, as it
+    # will be automatically deleted by the `setattr(obj, attrname, attr)` call
+    # above when `attrname` is "__firstlineno__". We assume that preserving this
+    # information might be important for some users and that it is not stale in the
+    # context of cloudpickle usage, hence legitimate to propagate. Furthermore it
+    # is necessary to do so to keep deterministic chained pickling as tested in
+    # test_deterministic_str_interning_for_chained_dynamic_class_pickling.
+    obj.__firstlineno__ = state["__firstlineno__"]

if registry is not None:
for subclass in registry:
obj.register(subclass)
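The memoizer behavior referenced in the comment above can be observed directly with the standard pickle module (CPython):

import pickle
a = "dynamic_attr"
b = "".join(a)  # equal value, distinct object identity
# The memo is keyed on id(): the second occurrence of `a` becomes a memo
# reference, while `b` is serialized again in full, so the payloads differ.
assert pickle.dumps((a, a)) != pickle.dumps((a, b))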
1 change: 1 addition & 0 deletions python/pyspark/cloudpickle/cloudpickle_fast.py
@@ -6,6 +6,7 @@
See: tests/test_backward_compat.py
"""

+from . import cloudpickle


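This shim only needs to keep the old module path importable so that payloads written by older cloudpickle releases, which referenced it by name, still resolve; a sketch of the idea:

# Older pickles may reference pyspark.cloudpickle.cloudpickle_fast explicitly;
# keeping the import working lets pickle resolve those references.
from pyspark.cloudpickle import cloudpickle_fast  # noqa: F401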
18 changes: 0 additions & 18 deletions python/pyspark/cloudpickle/compat.py

This file was deleted.
