@classmethod
def tearDownClass(cls):
    # Unpickle every object the tests stashed in Redis under cls.r
    cloudpickle.loads(cls.r.get('env'))
    cloudpickle.loads(cls.r.get('traj'))
    cloudpickle.loads(cls.r.get('gpol'))
    cloudpickle.loads(cls.r.get('dpol'))
    cloudpickle.loads(cls.r.get('mpcpol'))
    cloudpickle.loads(cls.r.get('qfunc'))
    cloudpickle.loads(cls.r.get('aqpol'))
    cloudpickle.loads(cls.r.get('vfunc'))
    cloudpickle.loads(cls.r.get('mcpol'))
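For context, the keys above could have been populated by a setup step that stores cloudpickle payloads in Redis. A minimal round-trip sketch, assuming the redis-py client, a local Redis server, and a stand-in dict in place of the real fixtures:

import cloudpickle
import redis

r = redis.Redis()                                  # assumed local Redis instance
env = {"name": "CartPole-v1", "max_steps": 200}    # stand-in for a real environment fixture
r.set('env', cloudpickle.dumps(env))               # store the pickled bytes under the key
assert cloudpickle.loads(r.get('env')) == env      # mirrors the tearDownClass calls above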
import bz2

import cloudpickle

def zloads(pickled_data):
    """
    Load a pickled object from a bz2-compressed byte string.

    :param pickled_data: BZ2-compressed byte sequence
    :type pickled_data: bytes
    :returns: The unpickled object recovered from the compressed byte sequence.
    """
    return cloudpickle.loads(bz2.decompress(pickled_data))
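The inverse direction simply pairs cloudpickle.dumps with bz2.compress; the zdumps name below is a hypothetical counterpart for illustration:

import bz2
import cloudpickle

def zdumps(obj):
    # Compress the cloudpickle payload so it travels as a compact byte string
    return bz2.compress(cloudpickle.dumps(obj))

assert zloads(zdumps({"a": 1})) == {"a": 1}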
def mpistart(f, i):
    # f arrives as cloudpickle bytes; deserialize it and call it with a fresh MPI communicator
    c = MPIComm()
    return cloudpickle.loads(f)(c)
def logic(self):
    return cloudpickle.loads(self._logic)
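A minimal sketch of how such a lazily-deserialized attribute might be wired up; the Task class, the @property decorator, and the constructor are assumptions for illustration:

import cloudpickle

class Task:
    def __init__(self, logic_fn):
        # Keep only the serialized form so the object stays cheap to ship between processes
        self._logic = cloudpickle.dumps(logic_fn)

    @property
    def logic(self):
        return cloudpickle.loads(self._logic)

assert Task(lambda x: x + 1).logic(2) == 3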
def deserialize_flow_from_bytes(self, serialized_flow: bytes) -> "prefect.Flow":
    """
    Deserializes a Flow from bytes.

    Args:
        - serialized_flow (bytes): the serialized Flow to deserialize

    Returns:
        - Flow: the deserialized Flow
    """
    decoded_pickle = base64.b64decode(serialized_flow)
    decrypted_pickle = Fernet(self.encryption_key).decrypt(decoded_pickle)
    flow = cloudpickle.loads(decrypted_pickle)
    return flow
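Mirroring those three steps in reverse gives a plausible serializer; serialize_flow_to_bytes is a hypothetical name, and the dict stands in for a real Flow:

import base64
import cloudpickle
from cryptography.fernet import Fernet

def serialize_flow_to_bytes(flow, encryption_key):
    pickled = cloudpickle.dumps(flow)                    # object -> pickle bytes
    encrypted = Fernet(encryption_key).encrypt(pickled)  # pickle bytes -> Fernet token
    return base64.b64encode(encrypted)                   # token -> transport-safe bytes

key = Fernet.generate_key()
payload = serialize_flow_to_bytes({"tasks": []}, key)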
def loads(obj):
    return pickle.loads(obj)
    Read a result from the given URI location.

    Args:
        - uri (str): the path to the location of a result

    Returns:
        - the deserialized result from the provided URI
    """
    self._initialize_client()
    self.logger.debug("Starting to read result from {}...".format(uri))
    res = self._client.get(  # type: ignore
        "/", server=self.result_handler_service, **{"uri": uri}
    )
    try:
        return_val = cloudpickle.loads(base64.b64decode(res.get("result", "")))
    except EOFError:
        return_val = None
    self.logger.debug("Finished reading result from {}...".format(uri))
    return return_val
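The service above evidently returns the result as a base64-encoded cloudpickle payload; the helper names below are hypothetical, but they show the encode/decode pair that read path implies:

import base64
import cloudpickle

def encode_result(value):
    # What a writer would store: pickle, then base64-encode for JSON-safe transport
    return base64.b64encode(cloudpickle.dumps(value)).decode("ascii")

def decode_result(payload):
    # Mirror of the read path above
    return cloudpickle.loads(base64.b64decode(payload))

assert decode_result(encode_result([1, 2, 3])) == [1, 2, 3]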
if flow_location not in self.flows.values():
    raise ValueError("Flow is not contained in this Storage")

bucket = self._gcs_client.get_bucket(self.bucket)

self.logger.info("Downloading {} from {}".format(flow_location, self.bucket))

blob = bucket.get_blob(flow_location)
if not blob:
    raise StorageError(
        "Flow not found in bucket: flow={} bucket={}".format(
            flow_location, self.bucket
        )
    )

content = blob.download_as_string()
return cloudpickle.loads(content)
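The upload side would simply store the cloudpickle bytes under the same blob name. A sketch assuming the google-cloud-storage client; upload_flow and its parameters are illustrative:

import cloudpickle
from google.cloud import storage

def upload_flow(bucket_name, blob_name, flow):
    # Write the pickled bytes so the download path above can loads() them back
    bucket = storage.Client().get_bucket(bucket_name)
    bucket.blob(blob_name).upload_from_string(cloudpickle.dumps(flow))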
    Returns:
        - the read result from the provided file
    """
    # based on the path given, this may never work?!? that is, if the abs path is given then what's the point?
    # FIX: this should be based on a key within the stored constructor dir
    # if self.accumulate:
    #     sequence = '0'
    #     for dir in sorted(glob.glob(os.path.join(self.dir, "*") + os.path.sep), reverse=True):
    #         sequence = dir
    #         break
    self.logger.debug("Starting to read result from {}...".format(fpath))
    with open(fpath, "rb") as f:
        val = cloudpickle.loads(f.read())
    self.logger.debug("Finished reading result from {}...".format(fpath))
    return val
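The matching writer just dumps the cloudpickle bytes to the same path; write_result is a hypothetical name for illustration:

import cloudpickle

def write_result(fpath, value):
    # Produce exactly the bytes the read above expects
    with open(fpath, "wb") as f:
        f.write(cloudpickle.dumps(value))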
def launch_job(f):
    # Ship the pickled callable to the worker over ZeroMQ and block for the pickled reply
    with ctx.socket(zmq.REQ) as send_socket:
        send_socket.connect(bind_addr)
        task_bytes = cloudpickle.dumps(f)
        send_socket.send(task_bytes)
        out_bytes = send_socket.recv()
        out = cloudpickle.loads(out_bytes)
    return out
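On the other end of that REQ/REP pair, a worker loop might look like the following sketch; serve_jobs and the single-threaded loop are assumptions for illustration:

import cloudpickle
import zmq

def serve_jobs(bind_addr):
    ctx = zmq.Context()
    with ctx.socket(zmq.REP) as recv_socket:
        recv_socket.bind(bind_addr)
        while True:
            task = cloudpickle.loads(recv_socket.recv())   # unpickle the callable
            recv_socket.send(cloudpickle.dumps(task()))    # run it and reply with the pickled result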