Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def load_bytes(fname):
    """Deserialize a callable from *fname* and invoke it with (1, 2, 3).

    SECURITY NOTE: unpickling executes arbitrary code embedded in the file —
    only ever call this on trusted input.
    """
    import cloudpickle

    with open(fname, "rb") as handle:
        loaded = cloudpickle.load(handle)
    # The deserialized object is expected to be callable with three args.
    return loaded(1, 2, 3)
# NOTE(review): fragment of an agent/environment run method — the enclosing
# `def` is not visible in this chunk, so only comments are added here.
self.on_start()
try:
# Deferred import so the runner/executor classes reflect the prefect
# configuration active at run time, not at module-import time.
from prefect.engine import (
get_default_flow_runner_class,
get_default_executor_class,
)
# Load serialized flow from file and run it with the executor
# SECURITY NOTE: cloudpickle.load executes arbitrary code from the file;
# the flow file path must be trusted.
with open(
prefect.context.get(
"flow_file_path", "/root/.prefect/flow_env.prefect"
),
"rb",
) as f:
flow = cloudpickle.load(f)
runner_cls = get_default_flow_runner_class()
# NOTE: the double call instantiates the executor — `executor_cls` is
# actually an executor *instance* despite its name.
executor_cls = get_default_executor_class()()
runner_cls(flow=flow).run(executor=executor_cls)
except Exception as exc:
# Log with traceback, then re-raise so the caller still sees the failure.
self.logger.exception(
"Unexpected error raised during flow run: {}".format(exc)
)
raise exc
finally:
# Call on_exit callback if specified
if self.on_exit:
self.on_exit()
def _load_dumped_plugins():
    """Restore the dumped command objects and parser from disk.

    Repopulates ``CmdManager.cmd_obj_list`` and ``CmdManager.parser`` from the
    cloudpickle dump at ``CmdManager.dumped_plugin_path``, then repairs the
    argparse defaults that could not be pickled.

    SECURITY NOTE: cloudpickle.load executes arbitrary code from the dump
    file — the path must be trusted.
    """
    # Context manager guarantees the handle is closed even if unpickling
    # raises (the original leaked the handle on error).
    with open(CmdManager.dumped_plugin_path, "rb") as f_handler:
        CmdManager.cmd_obj_list, CmdManager.parser = cloudpickle.load(f_handler)
    for cur_cmd in sorted(CmdManager.cmd_obj_list):
        # fix some issues related to dumping of the parser
        actions = CmdManager.cmd_obj_list[cur_cmd].parser._option_string_actions
        for obj in actions.values():
            # Dumping replaced the unpicklable stdin / SUPPRESS sentinels
            # with marker strings; restore the real objects.
            if obj.default == '==stdin==':
                obj.default = sys.stdin
            if obj.default == '==SUPPRESS==':
                obj.default = argparse.SUPPRESS
def _calc_Mq_g(self, lambdify=True):
    """ Uses Sympy to generate the force of gravity in
    joint space for the ur5

    lambdify boolean: if True returns a function to calculate
                      the Jacobian. If False returns the Sympy
                      matrix
    """
    cache_path = '%s/Mq_g' % self.config_folder
    # check to see if we have our gravity term saved in file
    if os.path.isfile(cache_path):
        # Close the file deterministically (the original leaked the handle).
        with open(cache_path, 'rb') as f:
            Mq_g = cloudpickle.load(f)
    else:
        # get the Jacobians for each link's COM
        J = [self._calc_J('link%s' % ii, x=[0, 0, 0], lambdify=False)
             for ii in range(self.num_links)]
        # transform each inertia matrix into joint space and
        # sum together the effects of arm segments' inertia on each motor
        Mq_g = sp.zeros(self.num_joints, 1)
        for ii in range(self.num_joints):
            Mq_g += J[ii].T * self._M[ii] * self.gravity
        Mq_g = sp.Matrix(Mq_g)
        # save to file
        with open(cache_path, 'wb') as f:
            cloudpickle.dump(Mq_g, f)
    # NOTE(review): the visible snippet neither returns Mq_g nor applies
    # `lambdify` — the tail of this function appears truncated here; confirm
    # against the full source before relying on the return value.
def load(cls, model_dir=None, model_metadata=None, cached_component=None, **kwargs):
    """Load a persisted two-stage intent classifier, or build a fresh one.

    model_dir: directory containing the persisted classifier file.
    model_metadata: metadata object whose "intent_2_stage" entry names the
        classifier file; may be None.
    Returns the unpickled classifier, or a new ``Intent2Stage`` when no
    persisted model is available.
    """
    import cloudpickle

    # Guard against model_metadata being None (its default) — the original
    # `model_metadata.get(...)` raised AttributeError in that case.
    if model_dir and model_metadata and model_metadata.get("intent_2_stage"):
        classifier_file = os.path.join(model_dir, model_metadata.get("intent_2_stage"))
        # SECURITY NOTE: unpickling executes arbitrary code; the model dir
        # must be trusted.
        with io.open(classifier_file, 'rb') as f:  # pragma: no test
            if PY3:
                # latin-1 lets Python 3 read pickles produced under Python 2.
                return cloudpickle.load(f, encoding="latin-1")
            else:
                return cloudpickle.load(f)
    else:
        return Intent2Stage()
def _calc_J(self, name, x, lambdify=True):
""" Uses Sympy to generate the Jacobian for a joint or link

name string: name of the joint or link, or end-effector
lambdify boolean: if True returns a function to calculate
the Jacobian. If False returns the Sympy
matrix
"""
# check to see if we have our Jacobian saved in file
# NOTE(review): the handle from open() is never closed here — relies on GC.
if os.path.isfile('%s/%s.J' % (self.config_folder, name)):
J = cloudpickle.load(open('%s/%s.J' %
(self.config_folder, name), 'rb'))
else:
# Transform matrix for the point of interest; kept symbolic so we can
# differentiate it below.
Tx = self._calc_Tx(name, x=x, lambdify=False)
J = []
# calculate derivative of (x,y,z) wrt to each joint
for ii in range(self.num_joints):
J.append([])
J[ii].append(Tx[0].diff(self.q[ii]))  # dx/dq[ii]
J[ii].append(Tx[1].diff(self.q[ii]))  # dy/dq[ii]
J[ii].append(Tx[2].diff(self.q[ii]))  # dz/dq[ii]
# 'linkN' / 'jointN' -> 'N'; 'EE' means the end-effector itself.
end_point = name.strip('link').strip('joint')
if end_point != 'EE':
end_point = min(int(end_point) + 1, self.num_joints)
# add on the orientation information up to the last joint
# NOTE(review): the body of this loop is missing — the snippet is
# truncated here; consult the full source for the orientation rows.
for ii in range(end_point):
def load(self):
    """Load solution data from ``self.filename`` using pickle, if not
    already loaded.

    Populates ``self._data`` and sets ``self.is_loaded``. Raises
    ``RuntimeError`` (after logging) if the 'solution' or 'problem_data'
    key is missing from the file.

    NOTE: the original docstring said "dill", but the code uses ``pickle``.
    """
    if self.is_loaded:
        return
    with open(self.filename, 'rb') as f:
        # Lazy %-style args avoid building the message when INFO is disabled.
        logging.info("Loading datafile %s...", self.filename)
        self._data = pickle.load(f)
    # Validate both required keys with a single code path.
    for key, label in (('solution', 'Solution'), ('problem_data', 'Problem data')):
        if key not in self._data:
            self.is_loaded = False
            message = label + " missing in data file :" + self.filename
            logging.error(message)
            raise RuntimeError(message)
    logging.info("Loaded %d solution sets from %s",
                 len(self._data['solution']), self.filename)
    self.is_loaded = True
def load(path):
    """Restore saved model state from the cloudpickled archive at *path*.

    The file holds raw zip bytes; they are unpacked into a temporary
    directory and the contained "model" checkpoint is loaded into the
    current TF session via ``U.load_state``.

    SECURITY NOTE: cloudpickle.load executes arbitrary code; *path* must
    be trusted.
    """
    # Separate names for the two file handles (the original shadowed `f`).
    with open(path, "rb") as infile:
        model_data = cloudpickle.load(infile)
    sess = U.get_session()
    sess.__enter__()
    with tempfile.TemporaryDirectory() as td:
        arc_path = os.path.join(td, "packed.zip")
        with open(arc_path, "wb") as outfile:
            outfile.write(model_data)
        # Close the archive deterministically — the original leaked the
        # ZipFile handle.
        with zipfile.ZipFile(arc_path, 'r', zipfile.ZIP_DEFLATED) as archive:
            archive.extractall(td)
        U.load_state(os.path.join(td, "model"))
    # NOTE(review): upstream versions returned an ActWrapper here; the
    # return was commented out in this snippet, so this returns None.
def pickle_load(f):
    r"""Read pickled data from a file.

    Args:
        f (str/Path): file path
    """
    # Normalize Path objects to their POSIX string form before opening.
    target = f.as_posix() if isinstance(f, Path) else f
    with open(target, 'rb') as stream:
        return cloudpickle.load(stream)