def pytest_configure_node(node):
    """
    Configure a node of tests.

    Pickles the ReportPortal service and stores it on the node so that
    xdist worker processes can recreate it.

    :param node: _pytest.nodes.Node
    """
    if node.config._reportportal_configured is False:
        # Stop now if the plugin is not properly configured
        return
    node.slaveinput['py_test_service'] = pickle.dumps(node.config.py_test_service)
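A hedged sketch of the worker-side counterpart: under pytest-xdist, each worker process would unpickle the service from its `slaveinput` (newer xdist releases name this `workerinput`); the exact hook the plugin uses for this is an assumption, not shown in the snippet.

import pickle

def pytest_configure(config):
    # Only xdist worker processes carry slaveinput
    if hasattr(config, 'slaveinput'):
        config.py_test_service = pickle.loads(
            config.slaveinput['py_test_service'])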
def _add_item(self, key, item):
    '''Add an item to its queue list according to its key type.
    The operations are not performed in a transaction because the initial `incr` is atomic and
    reserves an id, after which the order of the following operations is neither important nor
    required to be atomic. The only benefit of a transaction would be to improve performance;
    however, it would imply `watch`-ing the item_id operation and retrying until everything is
    atomic, which could actually result in a loss of performance.
    '''
# Get new item id as incremental integer
item_id = self._store.incr(self._make_key(key, self.K_COUNT))
# Add pickled item to relevant list of items
self._store.set(self._make_key(key, str(item_id)), dumps(item, byref=True))
# Set jobs and results status in the stats
if key in (self.K_JOBS, self.K_RESULTS):
self._store.hset(self._make_stats_key(key, str(item_id)),
self.K_STATS_STATUS, JobStatuses.QUEUED.value)
# For a job, also add it to the queued list
if key == self.K_JOBS:
self._store.rpush(self._make_list_key(key, JobStatuses.QUEUED), item_id)
return item_id
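A minimal standalone sketch of the reserve-then-write pattern the docstring describes, using redis-py directly; the key names and the 'QUEUED' status value are placeholders, not the class's real schema.

import dill
import redis

store = redis.Redis()

def add_job(job):
    # INCR is atomic, so this reserves a unique id even under concurrency.
    item_id = store.incr('jobs:count')
    # The writes that follow need no transaction: the id is already ours.
    store.set(f'jobs:{item_id}', dill.dumps(job))
    store.hset(f'jobs:{item_id}:stats', 'status', 'QUEUED')
    store.rpush('jobs:queued', item_id)
    return item_id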
def encode(self, sample):
return dill.dumps(sample)
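For context, a hedged illustration of why `dill` is chosen here over the standard `pickle`: dill can serialize constructs pickle rejects, such as lambdas. The example below is independent of the class above.

import dill

sample = lambda x: x * 2   # pickle.dumps would raise PicklingError for a lambda
payload = dill.dumps(sample)
assert dill.loads(payload)(21) == 42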
"""Pickle the functions that produce generator and steps and dump to disk
Args:
filename (str): Filename on disk to dump the function data
train_func (function): Function to return train generator and #mini-batches
train_data: Input to train_func
val_func (function): Function to return val generator and #mini-batches
val_data: Input to val_func
test_func (function): Function to return test generator and #mini-batches
test_data: Input to test_func
Return:
"""
if not os.path.exists(filename):
data = {'train_func': dill.dumps(train_func, recurse=True),
'train_data': train_data,
'val_func': dill.dumps(val_func, recurse=True) if val_func is not None else None,
'val_data': val_data,
'test_func': dill.dumps(test_func, recurse=True) if test_func is not None else None,
'test_data': test_data,
'train_func_path': os.path.dirname(inspect.getabsfile(train_func)),
'val_func_path': os.path.dirname(inspect.getabsfile(val_func)) if val_func is not None else None,
'test_func_path': os.path.dirname(inspect.getabsfile(test_func)) if test_func is not None else None}
with open(filename, 'wb') as fid:
pickle.dump(data, fid)
return
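A sketch of the matching load side, assuming the file layout written above; `load_data` is a hypothetical name not present in the snippet.

import pickle
import dill

def load_data(filename):
    # The outer container was written with pickle, the functions inside with dill.
    with open(filename, 'rb') as fid:
        data = pickle.load(fid)
    train_func = dill.loads(data['train_func'])
    val_func = dill.loads(data['val_func']) if data['val_func'] is not None else None
    test_func = dill.loads(data['test_func']) if data['test_func'] is not None else None
    return train_func, val_func, test_func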
def send_to_scheduler(self, header, payload):
    log(self.address, 'Send to scheduler', header)
    if 'address' not in header:
        header['address'] = self.address
    header['timestamp'] = datetime.utcnow()
    # Ship the matching deserializer so the receiver knows how to decode the payload
    header['loads'] = dill.loads
    self.socket.send_multipart([pickle.dumps(header), dill.dumps(payload)])
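A hedged sketch of the receiving end of this two-frame message; the snippet does not show the scheduler's handler, so the names here are assumptions.

import pickle

def recv_from_worker(socket):
    header_frame, payload_frame = socket.recv_multipart()
    header = pickle.loads(header_frame)
    # The sender embedded the right deserializer in the header
    payload = header['loads'](payload_frame)
    return header, payload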
def __cache_save__(self, item): # save 1 item to reduce overhead
self.cache_fd[item] = pickle.dumps(self.lockerbox[item])
self.cache_reopen()
return True
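For context, a minimal sketch of the save/load pairing this method implies, assuming `cache_fd` is a dict-like mapping of pickled blobs; all names here are placeholders.

import pickle

class LockerCache:
    def __init__(self):
        self.lockerbox = {}
        self.cache_fd = {}          # stand-in for the on-disk mapping

    def save_item(self, item):
        self.cache_fd[item] = pickle.dumps(self.lockerbox[item])

    def load_item(self, item):
        self.lockerbox[item] = pickle.loads(self.cache_fd[item])
        return self.lockerbox[item]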
def add_function(self, cond, name, func):
"""Add a new function that will be executed as a when intercepting
packets.
Parameters
----------
cond : :obj:`str`
Name of the condition set (preconditions, postconditions,
executions).
name : :obj:`str`
Name to identify the function in the `Template`.
func : :obj:`function`
Pointer to a function.
"""
fdump = dill.dumps(func)
self._functions[cond][name] = fdump.hex()
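The hex step makes the dill payload safe to store as text (for example, inside JSON). A sketch of the inverse lookup, assuming a `get_function` counterpart the snippet does not show.

import dill

def get_function(self, cond, name):
    # Reverse the hex encoding, then let dill rebuild the function object
    fdump = bytes.fromhex(self._functions[cond][name])
    return dill.loads(fdump)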
def cudaworker(devices: str, exchange: str):
os.environ['CUDA_VISIBLE_DEVICES'] = devices
with open(exchange, 'rb') as buffer:
package = buffer.read()
# decode the package
init_name, init_path, func, args, kwargs = dill.loads(package)
# import stuff from init
if init_name and init_path:
init_spec = importlib.util.spec_from_file_location(init_name, init_path)
init_module = importlib.util.module_from_spec(init_spec)
init_spec.loader.exec_module(init_module)
globals()[init_name] = init_module
    # run the function, encode the output, and write it back to the exchange file
output = func(*args, **kwargs)
output_encoded = dill.dumps(output)
with open(exchange, 'wb') as buffer:
buffer.write(output_encoded)
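A hedged sketch of the parent-process side that could pair with `cudaworker`: it dills the work package into the exchange file, launches the worker in a fresh process, and reads the result back. `run_on_gpu` is hypothetical; the snippet does not show how the worker is started.

import multiprocessing
import tempfile

import dill

def run_on_gpu(devices, func, *args, **kwargs):
    with tempfile.NamedTemporaryFile(suffix='.dill', delete=False) as tmp:
        exchange = tmp.name
        # No init module in this sketch, hence the two leading None values
        tmp.write(dill.dumps((None, None, func, args, kwargs)))
    # Run the worker in a fresh process so CUDA_VISIBLE_DEVICES takes effect
    ctx = multiprocessing.get_context('spawn')
    proc = ctx.Process(target=cudaworker, args=(devices, exchange))
    proc.start()
    proc.join()
    with open(exchange, 'rb') as buffer:
        return dill.loads(buffer.read())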
def _dumpval(self, value): # serialize a value for storage in the archive
    'convert to a value stored in the HDF file'
if self.__state__['serialized']:
protocol = self.__state__['protocol'] #XXX: fix at 0?
if type(protocol) is str:
value = json.dumps(value).encode()
else:
value = dill.dumps(value, protocol=protocol)
return value if self.__state__['meta'] else [value]
return value #XXX: or [value]? (so no scalars)
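A sketch of the symmetric read path, mirroring the branches above; `_loadval` is hypothetical, as the snippet shows only the dump side.

import json
import dill

def _loadval(self, value):
    'convert a value stored in the HDF file back to a python object'
    if self.__state__['serialized']:
        value = value if self.__state__['meta'] else value[0]
        if type(self.__state__['protocol']) is str:
            return json.loads(value.decode())
        return dill.loads(value)
    return value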
else:
chats.append(entity.id)
if users and whitelistedUsers:
count = 0
usertext = ''
for user in users:
if user in whitelistedUsers:
whitelistedUsers.remove(user)
usertext += f" `{user}`"
count = 1
else:
skipped.append(f"`{user}`")
if count:
if whitelistedUsers:
redis.set('whitelist:users', dill.dumps(whitelistedUsers))
else:
redis.delete('whitelist:users')
text += "**Un-whitelisted users:**\n" + usertext
log += text
await event.answer(
text, log=None if chats else ("whitelist", text)
)
if chats and whitelistedChats:
count = 0
chattext = ''
for chat in chats:
if chat in whitelistedChats:
whitelistedChats.remove(chat)
chattext += f" `{chat}`"
count = 1
            else:
                skipped.append(f"`{chat}`")
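The handler above stores each whitelist as a single dill blob per Redis key. A minimal sketch of reading one back, with the key name taken from the snippet and the empty-list default an assumption.

import dill

def load_whitelisted_users(redis):
    raw = redis.get('whitelist:users')
    # A missing key means nothing has been whitelisted yet
    return dill.loads(raw) if raw else []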