def _make_fake_socket(self, family, type):
    udp_socket = self.mox.CreateMockAnything()
    # Record the expected setup calls on the mocked UDP socket.
    udp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    udp_socket.bind((self.CONF.collector.udp_address,
                     self.CONF.collector.udp_port))

    def stop_udp(anything):
        # Make the receive loop stop after the first datagram.
        self.srv.stop()

    # recvfrom() yields one msgpack-encoded counter, then stops the server.
    udp_socket.recvfrom(64 * 1024).WithSideEffects(
        stop_udp).AndReturn(
            (msgpack.dumps(self.counter),
             ('127.0.0.1', 12345)))

    self.mox.ReplayAll()
    return udp_socket
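A sketch of how a test might drive this helper; mox's VerifyAll() is real API, but the injection point and the server entry point below are assumptions about the code under test:

def test_udp_receive(self):
    # Hypothetical driver: hand the fake socket to the server, run the
    # UDP loop once (stop_udp ends it), then verify the recorded calls.
    self.srv._make_socket = self._make_fake_socket  # assumed injection point
    self.srv.start_udp()                            # assumed entry point
    self.mox.VerifyAll()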
path, fname = os.path.split(path_fname)
split_path = path.split('/')
# Walk the ancestor directories and report the first one that is missing.
for i in range(len(split_path) + 1):
    test_path = '/'.join(split_path[:i])
    if not test_path:
        # Skip the empty prefix produced by i == 0 or a leading '/'.
        continue
    if not os.path.exists(test_path):
        msg = ('Directory does not exist: "%s" while trying to'
               ' write DB history to "%s"')
        raise IOError(msg % (test_path, path_fname))
# Every path component exists; re-raise the original exception (this
# fragment runs inside an except handler, so the bare raise is valid).
raise
# Serialize the request/response pair with a trailing canary so a
# reader can detect truncated or corrupt files.
data = (self.request.to_dict(),
        self.response.to_dict(),
        self._MSGPACK_CANARY)
msgpack_data = msgpack.dumps(data)
req_res.write(msgpack_data)
req_res.close()

response_id = resp.get_id()
self._queue_compression_requests(response_id)

pending_compression = self._get_pending_compression_job()
if pending_compression is not None:
    self._process_pending_compression(pending_compression)

return True
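The matching read path is not shown; a minimal sketch, assuming the same (request, response, canary) layout and a hypothetical canary constant:

_MSGPACK_CANARY = b'canary'  # hypothetical sentinel; the real value is not in the source

def load_request_response(filename):
    # Unpack the triple written above; the trailing canary flags
    # truncated or corrupt files.
    with open(filename, 'rb') as fh:
        request, response, canary = msgpack.loads(fh.read(), raw=False)
    if canary != _MSGPACK_CANARY:
        raise ValueError('corrupt or truncated request/response file: %s' % filename)
    return request, response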
def int_out(self, invocation):
    log.debug("EncoderInterceptor.int_out: %s", invocation)
    log.debug("Pre-transform: %s", invocation.message)
    invocation.message = msgpack.dumps(invocation.message)
    log.debug("Post-transform: %s", invocation.message)
    return invocation
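The inbound hook is not part of the snippet; a sketch of the symmetric decoder, assuming the interceptor API mirrors int_out (the int_in name is an assumption):

def int_in(self, invocation):
    # Hypothetical inverse of int_out: decode the msgpack payload on
    # the way in so downstream code sees plain Python objects.
    log.debug("EncoderInterceptor.int_in: %s", invocation)
    invocation.message = msgpack.loads(invocation.message, raw=False)
    return invocation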
"""
cid = client.add_contents(
policy_pubkey=policy_pub_key
)
"""
policy_pubkey = alicia.get_policy_pubkey_from_label(my_label)
data_source = Enrico(policy_encrypting_key=policy_pubkey)
data_source_public_key = bytes(data_source.stamp)

now = time.time()
kits = list()
heart_rate = contents  # the reading supplied by the caller
now += 3  # offset the timestamp, as in the heartbeat demo's 3-second sample spacing

heart_rate_data = {'heart_rate': heart_rate, 'timestamp': now}
plaintext = msgpack.dumps(heart_rate_data, use_bin_type=True)

message_kit, _signature = data_source.encrypt_message(plaintext)
kit_bytes = message_kit.to_bytes()
kits.append(kit_bytes)

data = {'data_source': data_source_public_key, 'kits': kits}

# print("🚀 ADDING TO IPFS D-STORAGE NETWORK 🚀")
### NETWORK ERROR OUT ON FALLBACK
d = msgpack.dumps(data, use_bin_type=True)
cid = self.ipfs_gateway_api.add_bytes(d)
# print("File Address:\t%s" % cid)
return cid
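For completeness, a sketch of the consumer side pulling the kits back out of IPFS; the dict layout mirrors the writer above, while the cat() call assumes an ipfshttpclient-style gateway API:

def fetch_kits(self, cid):
    # Fetch the msgpack blob stored under `cid` and recover the data
    # source public key plus the list of encrypted message kits.
    raw = self.ipfs_gateway_api.cat(cid)  # assumed ipfshttpclient-style API
    data = msgpack.loads(raw, raw=False)
    return data['data_source'], data['kits']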
"""
Compact a bucket: load its update records, insert a summarized
bucket record in the proper place, and then remove the outdated
updates.

:param db: A database handle for the Redis database.
:param buck_key: A turnstile.limits.BucketKey instance containing
                 the bucket key.
:param limit: The turnstile.limits.Limit object corresponding to
              the bucket.
"""

# Suck in the bucket records and generate our bucket
records = db.lrange(str(buck_key), 0, -1)
loader = limits.BucketLoader(limit.bucket_class, db, limit,
                             str(buck_key), records, stop_summarize=True)

# We now have the bucket loaded in; generate a 'bucket' record
buck_record = msgpack.dumps(dict(bucket=loader.bucket.dehydrate(),
                                 uuid=str(uuid.uuid4())))

# Now we need to insert it into the record list
result = db.linsert(str(buck_key), 'after', loader.last_summarize_rec,
                    buck_record)

# Were we successful?
if result < 0:
    # Insert failed; we'll try again when max_age is hit
    LOG.warning("Bucket compaction on %s failed; will retry" % buck_key)
    return

# OK, we have confirmed that the compacted bucket record has been
# inserted correctly; now all we need to do is trim off the
# outdated update records
db.ltrim(str(buck_key), loader.last_summarize_idx + 1, -1)
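Nothing in the snippet shows how a reader tells the two record kinds apart; a guess grounded only in the dict built above (that plain update records carry no 'bucket' key is an assumption):

def is_compacted_record(raw_record):
    # Records in the Redis list are msgpack-encoded dicts; the compacted
    # record built above carries the dehydrated bucket under 'bucket'.
    record = msgpack.loads(raw_record, raw=False)
    return 'bucket' in record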
async def main(self):
    context = zmq.asyncio.Context()
    socket = context.socket(zmq.REP)
    socket.bind('tcp://*:8120')
    while True:
        print(await socket.poll(10))
        # Note: indexing with b'i' relies on msgpack < 1.0, where loads()
        # returned bytes keys by default; msgpack >= 1.0 needs raw=True.
        msg = msgpack.loads(await socket.recv())
        print(msg)
        await socket.send(msgpack.dumps({'i': msg[b'i'] + 1}))
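A minimal REQ client sketch for driving this server; the port matches the snippet, everything else is illustrative:

import zmq
import msgpack

def run_client(rounds=3):
    context = zmq.Context()
    socket = context.socket(zmq.REQ)
    socket.connect('tcp://127.0.0.1:8120')
    i = 0
    for _ in range(rounds):
        socket.send(msgpack.dumps({'i': i}))
        # raw=True keeps the keys as bytes, matching the server's msg[b'i'].
        reply = msgpack.loads(socket.recv(), raw=True)
        i = reply[b'i']
        print('server replied:', i)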
def sendline(self, data):
    packed = msgpack.dumps(data)
    # Publish the packed payload under the instance's topic frame.
    self.zmqsocket.send_multipart([self.TOPIC, packed])

def close(self):
    # Body cut off in the source; closing the socket is the obvious completion.
    self.zmqsocket.close()
def default(obj):
    # Reconstructed opening (the source fragment began mid-function):
    # wrap a nested payload in an application-specific ExtType so the
    # matching ext_hook below can recognise and decode it.
    data = msgpack_serializer.dumps(obj)
    return msgpack.ExtType(EMBEDDED_MSGPACK_TYPE, data)

def ext_hook(code, data):
    if code == EMBEDDED_MSGPACK_TYPE:
        # Embedded payloads are themselves msgpack; decode them recursively.
        return msgpack_serializer.loads(data)
    return msgpack.ExtType(code, data)

def _msgpack_load(stream, *args, **kwargs):
    # Temporary workaround for https://github.com/msgpack/msgpack-python/pull/143
    return msgpack.loads(stream.read(), *args, **kwargs)

msgpack_serializer = BaseSerializer(
    dumps=functools.partial(msgpack.dumps, use_bin_type=True),
    loads=functools.partial(msgpack.loads, encoding='utf-8', ext_hook=ext_hook),
    dump=functools.partial(msgpack.dump, use_bin_type=True),
    load=functools.partial(_msgpack_load, encoding='utf-8', ext_hook=ext_hook),
)

json_serializer = BaseSerializer(dumps=json.dumps, loads=json.loads, dump=json.dump, load=json.load)
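A quick round trip showing how the pair composes, assuming the msgpack < 1.0 API implied by the encoding= keyword (the payload is made up):

payload = {'name': 'sensor-1', 'readings': [1, 2, 3]}
blob = msgpack_serializer.dumps(payload)
assert msgpack_serializer.loads(blob) == payload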
def msgpack_encode(data_type, obj):
    return msgpack.dumps(
        msgpack_compat_obj_encode(data_type, obj), encoding='utf-8')
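Both this snippet and the serializer above rely on the encoding= keyword, which msgpack-python removed in 1.0; a sketch of the modern equivalent, reusing the names already defined here:

# msgpack >= 1.0 packs str as UTF-8 by default, and raw=False asks the
# unpacker to decode bin/str data back to str on the way out.
blob = msgpack.dumps(msgpack_compat_obj_encode(data_type, obj))
decoded = msgpack.loads(blob, raw=False, ext_hook=ext_hook)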
data[base][fn]['blake'] = hashlib.blake2b(raw).hexdigest()
data[base][fn]['sha512'] = hashlib.sha512(raw).hexdigest()
data[base][fn]['sha3_512'] = hashlib.sha3_512(raw).hexdigest()

for base, rels in data.items():
    names = []
    for fn, rel in rels.items():
        names.append(rel['version'])
    names = sorted(names, key=StrictVersion)
    # Repoint the 'salt' symlink at the newest release.
    latest = os.path.join(f'salt-{names[-1]}')
    latest_lk = os.path.join('dist', base, 'salt')
    if os.path.exists(latest_lk):
        os.remove(latest_lk)
    os.symlink(latest, latest_lk)

# Write the repo index twice: msgpack for machines, JSON for humans.
with open('dist/repo.mp', 'wb+') as wfh:
    wfh.write(msgpack.dumps(data))
with open('dist/repo.json', 'w+') as wfh:
    wfh.write(json.dumps(data))
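Reading the binary index back is the mirror image; a minimal sketch that assumes only the layout written above:

with open('dist/repo.mp', 'rb') as rfh:
    repo = msgpack.loads(rfh.read(), raw=False)
# e.g. repo[base][fn]['sha512'] now holds the hash recorded earlier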