flow_runner = MagicMock()
monkeypatch.setattr(
    "prefect.engine.get_default_flow_runner_class",
    MagicMock(return_value=flow_runner),
)
kube_cluster = MagicMock()
monkeypatch.setattr("dask_kubernetes.KubeCluster", kube_cluster)

with tempfile.TemporaryDirectory() as directory:
    # serialize the flow once, in binary mode (the original opened the
    # file twice, once redundantly in "w+" text mode)
    flow = prefect.Flow("test")
    flow_path = os.path.join(directory, "flow_env.prefect")
    with open(flow_path, "wb") as f:
        cloudpickle.dump(flow, f)

    with set_temporary_config({"cloud.auth_token": "test"}):
        with prefect.context(flow_file_path=flow_path):
            environment.run_flow()

        assert flow_runner.call_args[1]["flow"].name == "test"

assert start_func.called
assert exit_func.called
def test_pickle_english(EN):
    file_ = io.BytesIO()
    cloudpickle.dump(EN, file_)
    file_.seek(0)
    loaded = pickle.load(file_)
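Note that this test (and several snippets below) writes with cloudpickle.dump but reads back with the standard pickle module. That works because cloudpickle emits ordinary pickle byte streams; it only extends what can be *serialized* (lambdas, nested functions, interactively defined classes). A minimal round-trip sketch, with a lambda as an illustrative payload:

import io
import pickle
import cloudpickle

buf = io.BytesIO()
cloudpickle.dump(lambda x: x + 1, buf)  # plain pickle would reject a lambda
buf.seek(0)
fn = pickle.load(buf)  # the resulting stream is plain-pickle readable
assert fn(1) == 2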
def _dump_pickle(path, data):
    path = str(path)
    with open(path, 'wb') as f, Timer(f"Dumped pickle: {path}"):
        cloudpickle.dump(data, f)
def persist(self, model_dir):
    # type: (Text) -> Dict[Text, Any]
    """Persist this model into the passed directory.

    Returns the metadata necessary to load the model again."""
    import cloudpickle

    classifier_file = os.path.join(model_dir, "intent_classifier.pkl")
    with io.open(classifier_file, 'wb') as f:
        cloudpickle.dump(self, f)
    return {
        "intent_classifier_sklearn": "intent_classifier.pkl"
    }
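The metadata returned by persist() names the pickle file relative to model_dir; a matching loader resolves that name and unpickles the classifier. A sketch of the counterpart (load_classifier and its meta parameter are hypothetical, not the library's actual API; the metadata key comes from the snippet above):

import os
import io
import cloudpickle

def load_classifier(model_dir, meta):
    # hypothetical inverse of persist(): look up the file named in the
    # returned metadata and unpickle the classifier object
    classifier_file = os.path.join(model_dir, meta["intent_classifier_sklearn"])
    with io.open(classifier_file, 'rb') as f:
        return cloudpickle.load(f)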
if bvp is not None:
    save_dict['bvp'] = bvp

if bvp_solver is not None:
    assert issubclass(bvp_solver.__class__, beluga.bvpsol.BaseAlgorithm), \
        'bvp_solver should be a subclass of beluga.bvpsol.BaseAlgorithm'
    save_dict['bvp solver'] = bvp_solver

if sol_set is not None:
    assert all(sol.__class__ is beluga.ivpsol.ivpsol.Trajectory
               for cont_set in sol_set for sol in cont_set), \
        'all solutions in sol_set should be of class beluga.ivpsol.ivpsol.Trajectory'
    save_dict['solutions'] = sol_set

with open(filename, 'wb') as file:
    pickle.dump(save_dict, file)
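Reading such a save file back is symmetric: unpickle the dict and pull results out by the keys written above. A short sketch (the filename is illustrative; the key names are taken from the snippet):

import pickle

with open('saved_run.beluga', 'rb') as file:  # hypothetical path
    save_dict = pickle.load(file)

sol_set = save_dict.get('solutions')      # list of continuation sets
bvp_solver = save_dict.get('bvp solver')  # solver instance, if it was saved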
Args:
    - fpath (str, optional): the filepath where your Flow will be saved; defaults to
        `~/.prefect/flows/FLOW-NAME.prefect`

Returns:
    - str: the full location the Flow was saved to
"""
if fpath is None:
    path = "{home}/flows".format(home=prefect.context.config.home_dir)
    fpath = Path(os.path.expanduser(path)) / "{}.prefect".format(  # type: ignore
        slugify(self.name)
    )
assert fpath is not None  # mypy assert
fpath.parent.mkdir(exist_ok=True, parents=True)
with open(str(fpath), "wb") as f:
    cloudpickle.dump(self, f)
return str(fpath)
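Calling this method and restoring the flow later is straightforward; a minimal usage sketch (the reload via cloudpickle.load is an assumption based on how save() writes the file, not documented API):

import cloudpickle
import prefect

flow = prefect.Flow("etl")
saved_path = flow.save()  # e.g. ~/.prefect/flows/etl.prefect
with open(saved_path, "rb") as f:
    restored = cloudpickle.load(f)
assert restored.name == "etl"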
if os.path.isfile('%s/Mq' % self.config_folder):
    # load the inertia matrix precomputed on an earlier run
    with open('%s/Mq' % self.config_folder, 'rb') as f:
        Mq = cloudpickle.load(f)
else:
    # get the Jacobians for each link's COM
    J = [self._calc_J('link%s' % ii, x=[0, 0, 0], lambdify=False)
         for ii in range(self.num_links)]
    # transform each inertia matrix into joint space and sum together
    # the effects of the arm segments' inertia on each motor
    Mq = sp.zeros(self.num_joints)
    for ii in range(self.num_links):
        Mq += J[ii].T * self._M[ii] * J[ii]
    Mq = sp.Matrix(Mq)
    # save to file
    with open('%s/Mq' % self.config_folder, 'wb') as f:
        cloudpickle.dump(Mq, f)

if lambdify is False:
    return Mq
return sp.lambdify(self.q + self.x, Mq)
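This is a compute-once, cache-to-disk pattern: the expensive symbolic derivation runs the first time, and later calls just cloudpickle.load the result. The same idea as a standalone helper (a sketch; the name cached and its signature are illustrative, not from the library):

import os
import cloudpickle

def cached(path, compute):
    # return a previously cloudpickled result, or compute and cache it
    if os.path.isfile(path):
        with open(path, 'rb') as f:
            return cloudpickle.load(f)
    value = compute()
    with open(path, 'wb') as f:
        cloudpickle.dump(value, f)
    return value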
try:
    with _joblib_para_backend(_backend):
        result = func()
except Exception:
    print("Error! Attempting to record exception.")
    # Wrap the exception in joblib's TransportableException
    # so that joblib can properly display the results.
    e_type, e_value, e_tb = sys.exc_info()
    text = format_exc(e_type, e_value, e_tb, context=10, tb_offset=1)
    result = TransportableException(text, e_type)
    raise
finally:
    # Serialize the result and upload it to the Files API.
    # result may still be None if the wrapped call never produced a value.
    if result is not None:
        result_buffer = BytesIO()
        cloudpickle.dump(result, result_buffer, pickle.HIGHEST_PROTOCOL)
        result_buffer.seek(0)
        output_name = "Results from Joblib job {} / run {}".format(job_id,
                                                                   run_id)
        output_file_id = _robust_file_to_civis(result_buffer, output_name,
                                               n_retries=5, delay=0.5,
                                               expires_at=expires_at,
                                               client=client)
        client.scripts.post_containers_runs_outputs(job_id, run_id,
                                                    'File', output_file_id)
        print("Results output to file ID: {}".format(output_file_id))
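A consumer can download that file and unpickle the result. A sketch assuming civis.io.civis_to_file is available to fetch a file by ID (verify against the civis client version you use; fetch_result is a hypothetical helper):

import pickle
from io import BytesIO
import civis

def fetch_result(output_file_id, client=None):
    # download the cloudpickled result; the stream is readable
    # with the standard pickle module
    buf = BytesIO()
    civis.io.civis_to_file(output_file_id, buf, client=client)
    buf.seek(0)
    return pickle.load(buf)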
def 存檔(self, 路徑, 存檔信息=None):  # save(self, path, save_info=None)
    # snapshot the virtual machine state
    虛擬機狀態 = {
        '讀者狀態': self.狀態,      # reader state
        '角色表': 角色.角色表,      # character table
        '鏡頭對應': 鏡頭.鏡頭對應,  # shot mapping
        '劇本棧': self.劇本棧,      # script stack
        '箱庭': self.箱庭,          # sandbox (world state)
    }
    with open(路徑, 'wb') as f:
        pickle.dump({
            '虛擬機狀態': 虛擬機狀態,  # 'VM state'
            '存檔信息': 存檔信息       # 'save metadata'
        }, f)
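A matching load would unpickle the same structure and push the state back onto the VM. A sketch assuming the dictionary layout written above (the method name 讀檔 / "load" is hypothetical, and the module-level tables are left out for brevity):

def 讀檔(self, 路徑):  # load(self, path)
    with open(路徑, 'rb') as f:
        存檔 = pickle.load(f)
    虛擬機狀態 = 存檔['虛擬機狀態']    # restore the VM state snapshot
    self.狀態 = 虛擬機狀態['讀者狀態']
    self.劇本棧 = 虛擬機狀態['劇本棧']
    self.箱庭 = 虛擬機狀態['箱庭']
    return 存檔['存檔信息']            # hand back the save metadata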
"""
# check to see if we have our transformation saved in file
if (os.path.isfile('%s/%s.T_inv' % (self.config_folder,
name))):
T_inv = cloudpickle.load(open('%s/%s.T_inv' %
(self.config_folder, name), 'rb'))
else:
T = self._calc_T(name=name)
rotation_inv = T[:3, :3].T
translation_inv = -rotation_inv * T[:3, 3]
T_inv = rotation_inv.row_join(translation_inv).col_join(
sp.Matrix([[0, 0, 0, 1]]))
# save to file
cloudpickle.dump(T_inv, open('%s/%s.T_inv' %
(self.config_folder, name), 'wb'))
if lambdify is False:
return T_inv
return sp.lambdify(self.q + self.x, T_inv)
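Both robotics snippets end with sp.lambdify, which turns the cached symbolic matrix into a fast numeric function. A tiny standalone sketch of that final step (the symbols and expression are illustrative):

import sympy as sp

q0, q1 = sp.symbols('q0 q1')
expr = sp.Matrix([[sp.cos(q0), -sp.sin(q1)]])
f = sp.lambdify((q0, q1), expr)
print(f(0.0, 0.5))  # evaluates the symbolic matrix numerically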