# uncompyle6 version 3.9.2
# Python bytecode version base 3.7.0 (3394)
# Decompiled from: Python 3.8.19 (default, Mar 20 2024, 15:27:52)
# [Clang 14.0.6 ]
# Embedded file name: /var/user/app/device_supervisorbak/device_supervisor/lib/Pyro4/util.py
# Compiled at: 2024-04-18 03:12:55
# Size of source mod 2**32: 41036 bytes
"""
Miscellaneous utilities, and serializers.

Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net).
"""
import array, sys, zlib, uuid, logging, linecache, traceback, inspect, struct, datetime, decimal, numbers
from Pyro4 import errors
from Pyro4.configuration import config
try:
    import copyreg
except ImportError:
    import copy_reg as copyreg

log = logging.getLogger("Pyro4.util")


def getPyroTraceback(ex_type=None, ex_value=None, ex_tb=None):
    """Returns a list of strings that form the traceback information of a
    Pyro exception. Any remote Pyro exception information is included.
    Traceback information is automatically obtained via ``sys.exc_info()`` if
    you do not supply the objects yourself."""

    def formatRemoteTraceback(remote_tb_lines):
        result = [" +--- This exception occurred remotely (Pyro) - Remote traceback:"]
        for line in remote_tb_lines:
            if line.endswith("\n"):
                line = line[:-1]
            lines = line.split("\n")
            for line2 in lines:
                result.append("\n | ")
                result.append(line2)
        result.append("\n +--- End of remote traceback\n")
        return result

    try:
        if ex_type is not None and ex_value is None and ex_tb is None:
            # guard against being handed an exception instance instead of its type
            if type(ex_type) is not type:
                raise TypeError("invalid argument: ex_type should be an exception type, or just supply no arguments at all")
        if ex_type is None and ex_tb is None:
            ex_type, ex_value, ex_tb = sys.exc_info()
        remote_tb = getattr(ex_value, "_pyroTraceback", None)
        local_tb = formatTraceback(ex_type, ex_value, ex_tb, config.DETAILED_TRACEBACK)
        if remote_tb:
            remote_tb = formatRemoteTraceback(remote_tb)
            return local_tb + remote_tb
        return local_tb
    finally:
        # break the reference cycle with the traceback objects
        del ex_type
        del ex_value
        del ex_tb
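
# Illustrative usage sketch (not part of the original module): how a Pyro4 client would
# typically use getPyroTraceback() to see the remote part of a failure. The proxy name
# below is a made-up placeholder.
#
#   import Pyro4, Pyro4.util
#   proxy = Pyro4.Proxy("PYRONAME:example.service")    # hypothetical registered name
#   try:
#       proxy.some_remote_method()
#   except Exception:
#       print("".join(Pyro4.util.getPyroTraceback()))  # local + remote traceback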


def formatTraceback(ex_type=None, ex_value=None, ex_tb=None, detailed=False):
    """Formats an exception traceback. If you ask for detailed formatting,
    the result will contain info on the variables in each stack frame.
    You don't have to provide the exception info objects, if you omit them,
    this function will obtain them itself using ``sys.exc_info()``."""
    if ex_type is not None and ex_value is None and ex_tb is None:
        # guard against being handed an exception instance instead of its type
        if type(ex_type) is not type:
            raise TypeError("invalid argument: ex_type should be an exception type, or just supply no arguments at all")
    if ex_type is None and ex_tb is None:
        ex_type, ex_value, ex_tb = sys.exc_info()
    if detailed and sys.platform != "cli":

        def makeStrValue(value):
            try:
                return repr(value)
            except:
                try:
                    return str(value)
                except:
                    return "<ERROR>"

        try:
            result = ["----------------------------------------------------\n"]
            result.append(" EXCEPTION %s: %s\n" % (ex_type, ex_value))
            result.append(" Extended stacktrace follows (most recent call last)\n")
            skipLocals = True
            while ex_tb:
                frame = ex_tb.tb_frame
                sourceFileName = frame.f_code.co_filename
                if "self" in frame.f_locals:
                    location = "%s.%s" % (frame.f_locals["self"].__class__.__name__, frame.f_code.co_name)
                else:
                    location = frame.f_code.co_name
                result.append("----------------------------------------------------\n")
                result.append('File "%s", line %d, in %s\n' % (sourceFileName, ex_tb.tb_lineno, location))
                result.append("Source code:\n")
                result.append("    " + linecache.getline(sourceFileName, ex_tb.tb_lineno).strip() + "\n")
                if not skipLocals:
                    names = set()
                    names.update(getattr(frame.f_code, "co_varnames", ()))
                    names.update(getattr(frame.f_code, "co_names", ()))
                    names.update(getattr(frame.f_code, "co_cellvars", ()))
                    names.update(getattr(frame.f_code, "co_freevars", ()))
                    result.append("Local values:\n")
                    for name2 in sorted(names):
                        if name2 in frame.f_locals:
                            value = frame.f_locals[name2]
                            result.append("    %s = %s\n" % (name2, makeStrValue(value)))
                            if name2 == "self":
                                # also print the attributes of the instance itself
                                for name3, value in vars(value).items():
                                    result.append("        self.%s = %s\n" % (name3, makeStrValue(value)))

                skipLocals = False
                ex_tb = ex_tb.tb_next

            result.append("----------------------------------------------------\n")
            result.append(" EXCEPTION %s: %s\n" % (ex_type, ex_value))
            result.append("----------------------------------------------------\n")
            return result
        except Exception:
            return ["----------------------------------------------------\nError building extended traceback!!! :\n",
                    "".join(traceback.format_exception(*sys.exc_info())) + "----------------------------------------------------" + "\n",
                    "Original Exception follows:\n",
                    "".join(traceback.format_exception(ex_type, ex_value, ex_tb))]

    else:
        return traceback.format_exception(ex_type, ex_value, ex_tb)


all_exceptions = {}
if sys.version_info < (3, 0):
    import exceptions
    for name, t in vars(exceptions).items():
        if type(t) is type and issubclass(t, BaseException):
            all_exceptions[name] = t
else:
    import builtins
    for name, t in vars(builtins).items():
        if type(t) is type and issubclass(t, BaseException):
            all_exceptions[name] = t

buffer = bytearray
for name, t in vars(errors).items():
    if type(t) is type and issubclass(t, errors.PyroError):
        all_exceptions[name] = t


class SerializerBase(object):
    """Base class for (de)serializer implementations (which must be thread safe)"""
    __custom_class_to_dict_registry = {}
    __custom_dict_to_class_registry = {}

    def serializeData(self, data, compress=False):
        """Serialize the given data object, try to compress if told so.
        Returns a tuple of the serialized data (bytes) and a bool indicating if it is compressed or not."""
        data = self.dumps(data)
        return self.__compressdata(data, compress)

    def deserializeData(self, data, compressed=False):
        """Deserializes the given data (bytes). Set compressed to True to decompress the data first."""
        if compressed:
            if sys.version_info < (3, 0):
                data = self._convertToBytes(data)
            data = zlib.decompress(data)
        return self.loads(data)

    def serializeCall(self, obj, method, vargs, kwargs, compress=False):
        """Serialize the given method call parameters, try to compress if told so.
        Returns a tuple of the serialized data and a bool indicating if it is compressed or not."""
        data = self.dumpsCall(obj, method, vargs, kwargs)
        return self.__compressdata(data, compress)

    def deserializeCall(self, data, compressed=False):
        """Deserializes the given call data back to (object, method, vargs, kwargs) tuple.
        Set compressed to True to decompress the data first."""
        if compressed:
            if sys.version_info < (3, 0):
                data = self._convertToBytes(data)
            data = zlib.decompress(data)
        return self.loadsCall(data)
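
    # Illustrative usage sketch (not part of the original module): round-tripping a value
    # through a registered serializer. get_serializer() and the serializer instances are
    # set up near the end of this module; "serpent" is only available if the serpent
    # package is installed.
    #
    #   ser = get_serializer("serpent")
    #   blob, compressed = ser.serializeData({"answer": 42}, compress=True)
    #   assert ser.deserializeData(blob, compressed=compressed) == {"answer": 42}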

    def loads(self, data):
        raise NotImplementedError("implement in subclass")

    def loadsCall(self, data):
        raise NotImplementedError("implement in subclass")

    def dumps(self, data):
        raise NotImplementedError("implement in subclass")

    def dumpsCall(self, obj, method, vargs, kwargs):
        raise NotImplementedError("implement in subclass")

    def _convertToBytes(self, data):
        t = type(data)
        if t is not bytes:
            if t in (bytearray, buffer):
                return bytes(data)
            if t is memoryview:
                return data.tobytes()
        return data

    def __compressdata(self, data, compress):
        if not compress or len(data) < 200:
            return (data, False)  # don't waste time compressing small messages
        compressed = zlib.compress(data)
        if len(compressed) < len(data):
            return (compressed, True)
        return (data, False)

    @classmethod
    def register_type_replacement(cls, object_type, replacement_function):
        raise NotImplementedError("implement in subclass")

    @classmethod
    def register_class_to_dict(cls, clazz, converter, serpent_too=True):
        """Registers a custom function that returns a dict representation of objects of the given class.
        The function is called with a single parameter; the object to be converted to a dict."""
        cls.__custom_class_to_dict_registry[clazz] = converter
        if serpent_too:
            try:
                get_serializer_by_id(SerpentSerializer.serializer_id)
                import serpent

                def serpent_converter(obj, serializer, stream, level):
                    d = converter(obj)
                    serializer.ser_builtins_dict(d, stream, level)

                serpent.register_class(clazz, serpent_converter)
            except errors.ProtocolError:
                pass

    @classmethod
    def unregister_class_to_dict(cls, clazz):
        """Removes the to-dict conversion function registered for the given class. Objects of the class
        will be serialized by the default mechanism again."""
        if clazz in cls.__custom_class_to_dict_registry:
            del cls.__custom_class_to_dict_registry[clazz]
        try:
            get_serializer_by_id(SerpentSerializer.serializer_id)
            import serpent
            serpent.unregister_class(clazz)
        except errors.ProtocolError:
            pass

    @classmethod
    def register_dict_to_class(cls, classname, converter):
        """
        Registers a custom converter function that creates objects from a dict with the given classname tag in it.
        The function is called with two parameters: the classname and the dictionary to convert to an instance of the class.

        This mechanism is not used for the pickle serializer.
        """
        cls.__custom_dict_to_class_registry[classname] = converter

    @classmethod
    def unregister_dict_to_class(cls, classname):
        """
        Removes the converter registered for the given classname. Dicts with that classname tag
        will be deserialized by the default mechanism again.

        This mechanism is not used for the pickle serializer.
        """
        if classname in cls.__custom_dict_to_class_registry:
            del cls.__custom_dict_to_class_registry[classname]
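
    # Illustrative usage sketch (not part of the original module): wiring up custom dict
    # converters for a user-defined class. "Thermometer" is a hypothetical class, not
    # something provided by Pyro4.
    #
    #   class Thermometer(object):
    #       def __init__(self, celsius):
    #           self.celsius = celsius
    #
    #   SerializerBase.register_class_to_dict(
    #       Thermometer, lambda t: {"__class__": "Thermometer", "celsius": t.celsius})
    #   SerializerBase.register_dict_to_class(
    #       "Thermometer", lambda classname, d: Thermometer(d["celsius"]))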

    @classmethod
    def class_to_dict(cls, obj):
        """
        Convert a non-serializable object to a dict. Partly borrowed from serpent.
        Not used for the pickle serializer.
        """
        for clazz in cls.__custom_class_to_dict_registry:
            if isinstance(obj, clazz):
                return cls.__custom_class_to_dict_registry[clazz](obj)

        if type(obj) in (set, dict, tuple, list):
            raise ValueError("can't serialize type " + str(obj.__class__) + " into a dict")
        if hasattr(obj, "_pyroDaemon"):
            obj._pyroDaemon = None
        if isinstance(obj, BaseException):
            return {"__class__": obj.__class__.__module__ + "." + obj.__class__.__name__,
                    "__exception__": True,
                    "args": obj.args,
                    "attributes": vars(obj)}
        try:
            value = obj.__getstate__()
        except AttributeError:
            pass
        else:
            if isinstance(value, dict):
                return value
        try:
            value = dict(vars(obj))
            value["__class__"] = obj.__class__.__module__ + "." + obj.__class__.__name__
            return value
        except TypeError:
            if hasattr(obj, "__slots__"):
                # no vars() dict available; use the __slots__ instead
                value = {}
                for slot in obj.__slots__:
                    value[slot] = getattr(obj, slot)
                value["__class__"] = obj.__class__.__module__ + "." + obj.__class__.__name__
                return value
            raise errors.SerializeError("don't know how to serialize class " + str(obj.__class__) +
                                        " using serializer " + str(cls.__name__) +
                                        ". Give it vars() or an appropriate __getstate__")

    @classmethod
    def dict_to_class(cls, data):
        """
        Recreate an object out of a dict containing the class name and the attributes.
        Only a fixed set of classes are recognized.
        Not used for the pickle serializer.
        """
        from Pyro4 import core, futures
        classname = data.get("__class__", "<unknown>")
        if isinstance(classname, bytes):
            classname = classname.decode("utf-8")
        if classname in cls.__custom_dict_to_class_registry:
            converter = cls.__custom_dict_to_class_registry[classname]
            return converter(classname, data)
        if "__" in classname:
            raise errors.SecurityError("refused to deserialize types with double underscores in their name: " + classname)
        if classname.startswith("Pyro4.core."):
            if classname == "Pyro4.core.URI":
                uri = core.URI.__new__(core.URI)
                uri.__setstate_from_dict__(data["state"])
                return uri
            if classname == "Pyro4.core.Proxy":
                proxy = core.Proxy.__new__(core.Proxy)
                proxy.__setstate_from_dict__(data["state"])
                return proxy
            if classname == "Pyro4.core.Daemon":
                daemon = core.Daemon.__new__(core.Daemon)
                daemon.__setstate_from_dict__(data["state"])
                return daemon
        elif classname.startswith("Pyro4.util."):
            if classname == "Pyro4.util.SerpentSerializer":
                return SerpentSerializer()
            if classname == "Pyro4.util.PickleSerializer":
                return PickleSerializer()
            if classname == "Pyro4.util.MarshalSerializer":
                return MarshalSerializer()
            if classname == "Pyro4.util.JsonSerializer":
                return JsonSerializer()
            if classname == "Pyro4.util.MsgpackSerializer":
                return MsgpackSerializer()
            if classname == "Pyro4.util.CloudpickleSerializer":
                return CloudpickleSerializer()
            if classname == "Pyro4.util.DillSerializer":
                return DillSerializer()
        elif classname.startswith("Pyro4.errors."):
            errortype = getattr(errors, classname.split(".", 2)[2])
            if issubclass(errortype, errors.PyroError):
                return SerializerBase.make_exception(errortype, data)
        elif classname == "Pyro4.futures._ExceptionWrapper":
            ex = data["exception"]
            if isinstance(ex, dict) and "__class__" in ex:
                ex = SerializerBase.dict_to_class(ex)
            return futures._ExceptionWrapper(ex)
        if data.get("__exception__", False):
            if classname in all_exceptions:
                return SerializerBase.make_exception(all_exceptions[classname], data)
            namespace, short_classname = classname.split(".", 1)
            if namespace in ("builtins", "exceptions"):
                if sys.version_info < (3, 0):
                    exceptiontype = getattr(exceptions, short_classname)
                    if issubclass(exceptiontype, BaseException):
                        return SerializerBase.make_exception(exceptiontype, data)
                else:
                    exceptiontype = getattr(builtins, short_classname)
                    if issubclass(exceptiontype, BaseException):
                        return SerializerBase.make_exception(exceptiontype, data)
            elif namespace == "sqlite3" and short_classname.endswith("Error"):
                import sqlite3
                exceptiontype = getattr(sqlite3, short_classname)
                if issubclass(exceptiontype, BaseException):
                    return SerializerBase.make_exception(exceptiontype, data)
        log.warning("unsupported serialized class: " + classname)
        raise errors.SerializeError("unsupported serialized class: " + classname)

    @staticmethod
    def make_exception(exceptiontype, data):
        ex = exceptiontype(*data["args"])
        if "attributes" in data:
            # restore the custom attributes on the exception object
            for attr, value in data["attributes"].items():
                setattr(ex, attr, value)
        return ex
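
    # Illustrative usage sketch (not part of the original module): the dict shape that
    # class_to_dict() produces for exceptions and that dict_to_class()/make_exception()
    # turn back into exception objects.
    #
    #   d = SerializerBase.class_to_dict(ValueError("boom"))
    #   # d == {"__class__": "builtins.ValueError", "__exception__": True,
    #   #       "args": ("boom",), "attributes": {}}
    #   ex = SerializerBase.dict_to_class(d)
    #   assert isinstance(ex, ValueError) and ex.args == ("boom",)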

    def recreate_classes(self, literal):
        # (uncompyle6 could not decompile this method - "Parse error at or near LOAD_SETCOMP";
        # the body below is restored from the published Pyro4 source.)
        # Recursively walk the deserialized data and convert any dict tagged with a
        # "__class__" key back into an object via dict_to_class().
        t = type(literal)
        if t is set:
            return {self.recreate_classes(x) for x in literal}
        if t is tuple:
            return tuple(self.recreate_classes(x) for x in literal)
        if t is list:
            return [self.recreate_classes(x) for x in literal]
        if t is dict:
            result = {}
            for key, value in literal.items():
                result[key] = self.recreate_classes(value)
            if "__class__" in result:
                return self.dict_to_class(result)
            return result
        return literal

    def __eq__(self, other):
        """this equality method is only to support the unit tests of this class"""
        return isinstance(other, SerializerBase) and vars(self) == vars(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    __hash__ = object.__hash__


class PickleSerializer(SerializerBase):
    """
    A (de)serializer that wraps the Pickle serialization protocol.
    It can optionally compress the serialized data, and is thread safe.
    """
    serializer_id = 4

    def dumpsCall(self, obj, method, vargs, kwargs):
        return pickle.dumps((obj, method, vargs, kwargs), config.PICKLE_PROTOCOL_VERSION)

    def dumps(self, data):
        return pickle.dumps(data, config.PICKLE_PROTOCOL_VERSION)

    def loadsCall(self, data):
        data = self._convertToBytes(data)
        return pickle.loads(data)

    def loads(self, data):
        data = self._convertToBytes(data)
        return pickle.loads(data)

    @classmethod
    def register_type_replacement(cls, object_type, replacement_function):

        def copyreg_function(obj):
            return replacement_function(obj).__reduce__()

        if object_type is type or not inspect.isclass(object_type):
            raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
        try:
            copyreg.pickle(object_type, copyreg_function)
        except TypeError:
            pass
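
# Illustrative usage sketch (not part of the original module): swapping instances of a
# hypothetical type for a picklable stand-in before they are pickled. Both classes below
# are made up for the example.
#
#   class DatabaseHandle(object):          # hypothetical type you don't want pickled directly
#       def __init__(self, dsn):
#           self.dsn = dsn
#
#   class DatabaseHandleRef(object):       # hypothetical picklable stand-in
#       def __init__(self, dsn):
#           self.dsn = dsn
#
#   PickleSerializer.register_type_replacement(
#       DatabaseHandle, lambda handle: DatabaseHandleRef(handle.dsn))
#   # pickling data that contains a DatabaseHandle now stores the stand-in instead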


class CloudpickleSerializer(SerializerBase):
    """
    A (de)serializer that wraps the Cloudpickle serialization protocol.
    It can optionally compress the serialized data, and is thread safe.
    """
    serializer_id = 7

    def dumpsCall(self, obj, method, vargs, kwargs):
        return cloudpickle.dumps((obj, method, vargs, kwargs), config.PICKLE_PROTOCOL_VERSION)

    def dumps(self, data):
        return cloudpickle.dumps(data, config.PICKLE_PROTOCOL_VERSION)

    def loadsCall(self, data):
        return cloudpickle.loads(data)

    def loads(self, data):
        return cloudpickle.loads(data)

    @classmethod
    def register_type_replacement(cls, object_type, replacement_function):

        def copyreg_function(obj):
            return replacement_function(obj).__reduce__()

        if object_type is type or not inspect.isclass(object_type):
            raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
        try:
            copyreg.pickle(object_type, copyreg_function)
        except TypeError:
            pass


class DillSerializer(SerializerBase):
    """
    A (de)serializer that wraps the Dill serialization protocol.
    It can optionally compress the serialized data, and is thread safe.
    """
    serializer_id = 5

    def dumpsCall(self, obj, method, vargs, kwargs):
        return dill.dumps((obj, method, vargs, kwargs), config.DILL_PROTOCOL_VERSION)

    def dumps(self, data):
        return dill.dumps(data, config.DILL_PROTOCOL_VERSION)

    def loadsCall(self, data):
        return dill.loads(data)

    def loads(self, data):
        return dill.loads(data)

    @classmethod
    def register_type_replacement(cls, object_type, replacement_function):

        def copyreg_function(obj):
            return replacement_function(obj).__reduce__()

        if object_type is type or not inspect.isclass(object_type):
            raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
        try:
            copyreg.pickle(object_type, copyreg_function)
        except TypeError:
            pass


class MarshalSerializer(SerializerBase):
    """(de)serializer that wraps the marshal serialization protocol."""
    serializer_id = 3

    def dumpsCall(self, obj, method, vargs, kwargs):
        # (uncompyle6 could not decompile this method - "Parse error at or near LOAD_DICTCOMP";
        # the body below is restored from the published Pyro4 source.)
        vargs = [self.convert_obj_into_marshallable(value) for value in vargs]
        kwargs = {key: self.convert_obj_into_marshallable(value) for key, value in kwargs.items()}
        return marshal.dumps((obj, method, vargs, kwargs))

    def dumps(self, data):
        return marshal.dumps(self.convert_obj_into_marshallable(data))

    if sys.platform == "cli":

        def loadsCall(self, data):
            if type(data) is not str:
                data = str(data)
            obj, method, vargs, kwargs = marshal.loads(data)
            vargs = self.recreate_classes(vargs)
            kwargs = self.recreate_classes(kwargs)
            return (obj, method, vargs, kwargs)

        def loads(self, data):
            if type(data) is not str:
                data = str(data)
            return self.recreate_classes(marshal.loads(data))

    else:

        def loadsCall(self, data):
            data = self._convertToBytes(data)
            obj, method, vargs, kwargs = marshal.loads(data)
            vargs = self.recreate_classes(vargs)
            kwargs = self.recreate_classes(kwargs)
            return (obj, method, vargs, kwargs)

        def loads(self, data):
            data = self._convertToBytes(data)
            return self.recreate_classes(marshal.loads(data))

    marshalable_types = (str, int, float, type(None), bool, complex, bytes, bytearray,
                         tuple, set, frozenset, list, dict)
    if sys.version_info < (3, 0):
        marshalable_types += (unicode,)

    def convert_obj_into_marshallable(self, obj):
        if isinstance(obj, self.marshalable_types):
            return obj
        if isinstance(obj, array.array):
            if obj.typecode == "c":
                return obj.tostring()
            if obj.typecode == "u":
                return obj.tounicode()
            return obj.tolist()
        return self.class_to_dict(obj)

    @classmethod
    def class_to_dict(cls, obj):
        if isinstance(obj, uuid.UUID):
            return str(obj)
        return super(MarshalSerializer, cls).class_to_dict(obj)

    @classmethod
    def register_type_replacement(cls, object_type, replacement_function):
        pass
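
# Illustrative usage sketch (not part of the original module): marshal only handles the
# primitive types listed in marshalable_types, so a top-level object of another type is
# first converted via convert_obj_into_marshallable() / class_to_dict().
#
#   ser = MarshalSerializer()
#   ser.dumps(uuid.uuid4())   # the UUID is marshalled as its string form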


class SerpentSerializer(SerializerBase):
    """(de)serializer that wraps the serpent serialization protocol."""
    serializer_id = 1

    def dumpsCall(self, obj, method, vargs, kwargs):
        return serpent.dumps((obj, method, vargs, kwargs), module_in_classname=True)

    def dumps(self, data):
        return serpent.dumps(data, module_in_classname=True)

    def loadsCall(self, data):
        obj, method, vargs, kwargs = serpent.loads(data)
        vargs = self.recreate_classes(vargs)
        kwargs = self.recreate_classes(kwargs)
        return (obj, method, vargs, kwargs)

    def loads(self, data):
        return self.recreate_classes(serpent.loads(data))

    @classmethod
    def register_type_replacement(cls, object_type, replacement_function):

        def custom_serializer(object, serpent_serializer, outputstream, indentlevel):
            replaced = replacement_function(object)
            if replaced is object:
                serpent_serializer.ser_default_class(replaced, outputstream, indentlevel)
            else:
                serpent_serializer._serialize(replaced, outputstream, indentlevel)

        if object_type is type or not inspect.isclass(object_type):
            raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
        serpent.register_class(object_type, custom_serializer)

    @classmethod
    def dict_to_class(cls, data):
        if data.get("__class__") == "float":
            return float(data["value"])
        return super(SerpentSerializer, cls).dict_to_class(data)


class JsonSerializer(SerializerBase):
    """(de)serializer that wraps the json serialization protocol."""
    serializer_id = 2
    __type_replacements = {}

    def dumpsCall(self, obj, method, vargs, kwargs):
        data = {"object": obj, "method": method, "params": vargs, "kwargs": kwargs}
        data = json.dumps(data, ensure_ascii=False, default=self.default)
        return data.encode("utf-8")

    def dumps(self, data):
        data = json.dumps(data, ensure_ascii=False, default=self.default)
        return data.encode("utf-8")

    def loadsCall(self, data):
        data = self._convertToBytes(data).decode("utf-8")
        data = json.loads(data)
        vargs = self.recreate_classes(data["params"])
        kwargs = self.recreate_classes(data["kwargs"])
        return (data["object"], data["method"], vargs, kwargs)

    def loads(self, data):
        data = self._convertToBytes(data).decode("utf-8")
        return self.recreate_classes(json.loads(data))

    def default(self, obj):
        replacer = self.__type_replacements.get(type(obj), None)
        if replacer:
            obj = replacer(obj)
        if isinstance(obj, set):
            return tuple(obj)  # json doesn't have a set type
        if isinstance(obj, uuid.UUID):
            return str(obj)
        if isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()
        if isinstance(obj, decimal.Decimal):
            return str(obj)
        if isinstance(obj, array.array):
            if obj.typecode == "c":
                return obj.tostring()
            if obj.typecode == "u":
                return obj.tounicode()
            return obj.tolist()
        return self.class_to_dict(obj)

    @classmethod
    def register_type_replacement(cls, object_type, replacement_function):
        if object_type is type or not inspect.isclass(object_type):
            raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
        cls.__type_replacements[object_type] = replacement_function
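
# Illustrative usage sketch (not part of the original module): types json itself cannot
# encode (sets, UUIDs, datetimes, Decimals, ...) go through JsonSerializer.default()
# above; other objects fall back to class_to_dict() dicts.
#
#   ser = JsonSerializer()
#   ser.dumps({"when": datetime.datetime(2024, 4, 18, 3, 12, 55)})
#   # -> b'{"when": "2024-04-18T03:12:55"}'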


class MsgpackSerializer(SerializerBase):
    """(de)serializer that wraps the msgpack serialization protocol."""
    serializer_id = 6
    __type_replacements = {}

    def dumpsCall(self, obj, method, vargs, kwargs):
        return msgpack.packb((obj, method, vargs, kwargs), use_bin_type=True, default=self.default)

    def dumps(self, data):
        return msgpack.packb(data, use_bin_type=True, default=self.default)

    def loadsCall(self, data):
        data = self._convertToBytes(data)
        obj, method, vargs, kwargs = msgpack.unpackb(data, raw=False, object_hook=self.object_hook)
        return (obj, method, vargs, kwargs)

    def loads(self, data):
        data = self._convertToBytes(data)
        return msgpack.unpackb(data, raw=False, object_hook=self.object_hook, ext_hook=self.ext_hook)

    def default(self, obj):
        replacer = self.__type_replacements.get(type(obj), None)
        if replacer:
            obj = replacer(obj)
        if isinstance(obj, set):
            return tuple(obj)
        if isinstance(obj, uuid.UUID):
            return str(obj)
        if isinstance(obj, bytearray):
            return bytes(obj)
        if isinstance(obj, complex):
            return msgpack.ExtType(48, struct.pack("dd", obj.real, obj.imag))
        if isinstance(obj, datetime.datetime):
            if obj.tzinfo:
                raise errors.SerializeError("msgpack cannot serialize datetime with timezone info")
            return msgpack.ExtType(50, struct.pack("d", obj.timestamp()))
        if isinstance(obj, datetime.date):
            return msgpack.ExtType(51, struct.pack("l", obj.toordinal()))
        if isinstance(obj, decimal.Decimal):
            return str(obj)
        if isinstance(obj, numbers.Number):
            return msgpack.ExtType(49, str(obj).encode("ascii"))
        if isinstance(obj, array.array):
            if obj.typecode == "c":
                return obj.tostring()
            if obj.typecode == "u":
                return obj.tounicode()
            return obj.tolist()
        return self.class_to_dict(obj)

    def object_hook(self, obj):
        if "__class__" in obj:
            return self.dict_to_class(obj)
        return obj

    def ext_hook(self, code, data):
        if code == 48:
            real, imag = struct.unpack("dd", data)
            return complex(real, imag)
        if code == 49:
            return int(data)
        if code == 50:
            return datetime.datetime.fromtimestamp(struct.unpack("d", data)[0])
        if code == 51:
            return datetime.date.fromordinal(struct.unpack("l", data)[0])
        raise errors.SerializeError("invalid ext code for msgpack: " + str(code))

    @classmethod
    def register_type_replacement(cls, object_type, replacement_function):
        if object_type is type or not inspect.isclass(object_type):
            raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
        cls.__type_replacements[object_type] = replacement_function
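
# Illustrative usage sketch (not part of the original module): complex numbers, big
# integers, datetimes and dates travel over msgpack as ExtType codes 48/49/50/51
# (see default() and ext_hook() above).
#
#   ser = MsgpackSerializer()
#   assert ser.loads(ser.dumps(3 + 4j)) == 3 + 4j
#   assert ser.loads(ser.dumps(datetime.date(2024, 4, 18))) == datetime.date(2024, 4, 18)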


_serializers = {}
_serializers_by_id = {}


def get_serializer(name):
    try:
        return _serializers[name]
    except KeyError:
        raise errors.SerializeError("serializer '%s' is unknown or not available" % name)


def get_serializer_by_id(sid):
    try:
        return _serializers_by_id[sid]
    except KeyError:
        raise errors.SerializeError("no serializer available for id %d" % sid)
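
# Illustrative usage sketch (not part of the original module): serializers are looked up
# by name or by wire-protocol id; which ones exist depends on the optional imports below.
#
#   get_serializer("marshal").serializer_id     # -> 3
#   get_serializer_by_id(1)                     # -> the SerpentSerializer, if serpent is installed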


try:
    import cPickle as pickle
except ImportError:
    import pickle

assert config.PICKLE_PROTOCOL_VERSION >= 2, "pickle protocol needs to be 2 or higher"
_ser = PickleSerializer()
_serializers["pickle"] = _ser
_serializers_by_id[_ser.serializer_id] = _ser
import marshal
_ser = MarshalSerializer()
_serializers["marshal"] = _ser
_serializers_by_id[_ser.serializer_id] = _ser
try:
    import cloudpickle
    _ser = CloudpickleSerializer()
    _serializers["cloudpickle"] = _ser
    _serializers_by_id[_ser.serializer_id] = _ser
except ImportError:
    pass

try:
    import dill
    _ser = DillSerializer()
    _serializers["dill"] = _ser
    _serializers_by_id[_ser.serializer_id] = _ser
except ImportError:
    pass

try:
    try:
        import importlib
        json = importlib.import_module(config.JSON_MODULE)
    except ImportError:
        json = __import__(config.JSON_MODULE)

    _ser = JsonSerializer()
    _serializers["json"] = _ser
    _serializers_by_id[_ser.serializer_id] = _ser
except ImportError:
    pass

try:
    import serpent
    _ser = SerpentSerializer()
    _serializers["serpent"] = _ser
    _serializers_by_id[_ser.serializer_id] = _ser
except ImportError:
    log.warning("serpent serializer is not available")

try:
    import msgpack
    _ser = MsgpackSerializer()
    _serializers["msgpack"] = _ser
    _serializers_by_id[_ser.serializer_id] = _ser
except ImportError:
    pass

del _ser


def getAttribute(obj, attr):
    """
    Resolves an attribute name to an object. Raises
    an AttributeError if any attribute in the chain starts with a '``_``'.
    Doesn't resolve a dotted name, because that is a security vulnerability.
    It treats it as a single attribute name (and the lookup will likely fail).
    """
    if is_private_attribute(attr):
        raise AttributeError("attempt to access private attribute '%s'" % attr)
    else:
        obj = getattr(obj, attr)
        if not config.REQUIRE_EXPOSE or getattr(obj, "_pyroExposed", False):
            return obj
        raise AttributeError("attempt to access unexposed attribute '%s'" % attr)


def excepthook(ex_type, ex_value, ex_tb):
    """An exception hook you can use for ``sys.excepthook``, to automatically print remote Pyro tracebacks"""
    traceback = "".join(getPyroTraceback(ex_type, ex_value, ex_tb))
    sys.stderr.write(traceback)
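
# Illustrative usage sketch (not part of the original module): installing the hook so
# that uncaught Pyro errors automatically print their remote traceback as well.
#
#   import sys
#   import Pyro4.util
#   sys.excepthook = Pyro4.util.excepthook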


def fixIronPythonExceptionForPickle(exceptionObject, addAttributes):
    """
    Function to hack around a bug in IronPython where it doesn't pickle
    exception attributes. We piggyback them into the exception's args.
    Bug report is at https://github.com/IronLanguages/main/issues/943
    Bug is still present in Ironpython 2.7.7
    """
    if hasattr(exceptionObject, "args"):
        if addAttributes:
            # piggyback the attributes on the exception's args
            ironpythonArgs = vars(exceptionObject)
            ironpythonArgs["__ironpythonargs__"] = True
            exceptionObject.args += (ironpythonArgs,)
        else:
            # check if there is a piggybacked dict in the args;
            # if so, extract the exception attributes from it
            if len(exceptionObject.args) > 0:
                piggyback = exceptionObject.args[-1]
                if type(piggyback) is dict:
                    if piggyback.get("__ironpythonargs__"):
                        del piggyback["__ironpythonargs__"]
                        exceptionObject.args = exceptionObject.args[:-1]
                        exceptionObject.__dict__.update(piggyback)


__exposed_member_cache = {}


def reset_exposed_members(obj, only_exposed=True, as_lists=False):
    """Delete any cached exposed members forcing recalculation on next request"""
    if not inspect.isclass(obj):
        obj = obj.__class__
    cache_key = (obj, only_exposed, as_lists)
    __exposed_member_cache.pop(cache_key, None)


def get_exposed_members(obj, only_exposed=True, as_lists=False, use_cache=True):
    """
    Return public and exposed members of the given object's class.
    You can also provide a class directly.
    Private members are ignored no matter what (names starting with underscore).
    If only_exposed is True, only members tagged with the @expose decorator are
    returned. If it is False, all public members are returned.
    The return value consists of the exposed methods, exposed attributes, and methods
    tagged as @oneway.
    (All this is used as meta data that Pyro sends to the proxy if it asks for it)
    as_lists is meant for python 2 compatibility.
    """
    if not inspect.isclass(obj):
        obj = obj.__class__
    cache_key = (obj, only_exposed, as_lists)
    if use_cache and cache_key in __exposed_member_cache:
        return __exposed_member_cache[cache_key]
    methods = set()
    oneway = set()
    attrs = set()
    for m in dir(obj):
        if is_private_attribute(m):
            continue
        v = getattr(obj, m)
        if inspect.ismethod(v) or inspect.isfunction(v) or inspect.ismethoddescriptor(v):
            if getattr(v, "_pyroExposed", not only_exposed):
                methods.add(m)
                if getattr(v, "_pyroOneway", False):
                    oneway.add(m)
        elif inspect.isdatadescriptor(v):
            func = getattr(v, "fget", None) or getattr(v, "fset", None) or getattr(v, "fdel", None)
            if func is not None:
                if getattr(func, "_pyroExposed", not only_exposed):
                    attrs.add(m)

    if as_lists:
        methods = list(methods)
        oneway = list(oneway)
        attrs = list(attrs)
    result = {"methods": methods, "oneway": oneway, "attrs": attrs}
    __exposed_member_cache[cache_key] = result
    return result
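
# Illustrative usage sketch (not part of the original module): what get_exposed_members()
# reports for a hypothetical exposed class.
#
#   import Pyro4
#
#   @Pyro4.expose
#   class Calculator(object):
#       def add(self, a, b):
#           return a + b
#       @property
#       def precision(self):
#           return 10
#
#   get_exposed_members(Calculator)
#   # -> {"methods": {"add"}, "oneway": set(), "attrs": {"precision"}}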


def get_exposed_property_value(obj, propname, only_exposed=True):
    """
    Return the value of an @exposed @property.
    If the requested property is not a @property or not exposed,
    an AttributeError is raised instead.
    """
    v = getattr(obj.__class__, propname)
    if inspect.isdatadescriptor(v):
        if v.fget and getattr(v.fget, "_pyroExposed", not only_exposed):
            return v.fget(obj)
    raise AttributeError("attempt to access unexposed or unknown remote attribute '%s'" % propname)


def set_exposed_property_value(obj, propname, value, only_exposed=True):
    """
    Sets the value of an @exposed @property.
    If the requested property is not a @property or not exposed,
    an AttributeError is raised instead.
    """
    v = getattr(obj.__class__, propname)
    if inspect.isdatadescriptor(v):
        pfunc = v.fget or v.fset or v.fdel
        if v.fset and getattr(pfunc, "_pyroExposed", not only_exposed):
            return v.fset(obj, value)
    raise AttributeError("attempt to access unexposed or unknown remote attribute '%s'" % propname)


_private_dunder_methods = frozenset([
    '__init__', '__init_subclass__', '__class__', '__module__', '__weakref__',
    '__call__', '__new__', '__del__', '__repr__', '__unicode__',
    '__str__', '__format__', '__nonzero__', '__bool__', '__coerce__',
    '__cmp__', '__eq__', '__ne__', '__hash__', '__ge__', '__gt__', '__le__', '__lt__',
    '__dir__', '__enter__', '__exit__', '__copy__', '__deepcopy__', '__sizeof__',
    '__getattr__', '__setattr__', '__hasattr__', '__getattribute__', '__delattr__',
    '__instancecheck__', '__subclasscheck__', '__getinitargs__', '__getnewargs__',
    '__getstate__', '__setstate__', '__reduce__', '__reduce_ex__',
    '__getstate_for_dict__', '__setstate_from_dict__', '__subclasshook__'])


def is_private_attribute(attr_name):
    """returns if the attribute name is to be considered private or not."""
    if attr_name in _private_dunder_methods:
        return True
    if not attr_name.startswith("_"):
        return False
    if len(attr_name) > 4 and attr_name.startswith("__") and attr_name.endswith("__"):
        return False
    return True
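
# Illustrative usage sketch (not part of the original module): which names the attribute
# access guard lets through.
#
#   is_private_attribute("value")        # False - ordinary public name
#   is_private_attribute("_secret")      # True  - single leading underscore
#   is_private_attribute("__init__")     # True  - listed in _private_dunder_methods
#   is_private_attribute("__custom__")   # False - other dunder names are allowed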