Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def string_hash(value, length=11):
    """Derive a fixed-width string token of *length* characters from *value*.

    Concatenates xxHash-64 hex digests of ``value + str(i)`` for
    ``i in range(0, length, 11)``, re-encodes the combined hex value via the
    module's ``encode_hash``, truncates to *length*, and right-pads with
    ``"A"`` if the encoding came up short.
    """
    chunks = [
        xxhash.xxh64(value + str(offset)).hexdigest()
        for offset in range(0, length, 11)
    ]
    token = encode_hash(int("".join(chunks), 16))[:length]
    # encode_hash may yield fewer than `length` characters; pad to width.
    shortfall = length - len(token)
    if shortfall > 0:
        token += "A" * shortfall
    return token
def _hash_xxhash(buf):
    """Return the 8-byte xxHash-64 digest of *buf*."""
    hasher = xxhash.xxh64(buf)
    return hasher.digest()
def select_action(self, state_new):
    """Choose the current player's next move from board state *state_new*.

    Looks the state up in the MCTS tree memory, keyed by an xxHash-64 of
    the raw state bytes.  Known node: sample from the visit-count policy
    (stochastically for the first ``self.tau`` moves, greedily after).
    Unknown node: pick a uniformly random empty cell.

    NOTE(review): the visible span assembles ``action`` but never returns
    it — the method presumably continues past this chunk; confirm.
    """
    user = {0: 'Player', 1: 'Opponent'}
    print("{}'s Turn!".format(user[self.user_type]))
    self.action_count += 1
    # assumes state_new flattens to 9 stacked 3x3 planes — TODO confirm layout
    state_reshape = state_new.reshape(9, 3, 3)
    # planes 0 and 4 appear to hold the two players' stones; their sum is
    # the occupancy board — verify against the state encoder
    board = state_reshape[0] + state_reshape[4]
    empty_loc = np.argwhere(board == 0)
    # hash the raw state bytes to key into the tree memory
    # (np.ndarray.tostring is deprecated in favor of tobytes)
    node = xxhash.xxh64(state_new.tostring()).hexdigest()
    if node in self.tree_memory:
        edge = self.tree_memory[node]
        pi_memory = self._get_pi(edge)
        if self.action_count <= self.tau:
            # opening moves: sample proportionally to the visit policy
            print('"stochastic"')
            choice = np.random.choice(9, p=pi_memory)
        else:
            # later moves: play a maximally-weighted action, uniform tie-break
            print('"deterministic"')
            pi_max_idx = [i for i, v in enumerate(
                pi_memory) if v == max(pi_memory)]
            choice = np.random.choice(pi_max_idx)
        move_target = self.action_space[choice]
    else:
        # unseen state: fall back to a random empty cell
        print('"random"')
        move_target = empty_loc[np.random.choice(len(empty_loc))]
    # action = [player id, row, col]
    action = np.r_[self.user_type, move_target]
def hash_object(obj, as_int=False):
    """Hash *obj* by xxHash-64 of its JSON-pickled representation.

    Returns the digest as an int when *as_int* is true, otherwise as a
    hex string.
    """
    payload = jsonpickle.encode(obj, unpicklable=False).encode()
    digest = xxhash.xxh64(payload)
    return digest.intdigest() if as_int else digest.hexdigest()
# Scan the source rows, hash each one, and stage new/changed features.
# NOTE(review): this span is the interior of a larger function — crate,
# changes, fields, hash_field, total_rows, attribute_hashes and log are
# all defined outside the visible chunk, and the trailing `if` body is
# cut off by chunking.
has_dups = False
with arcpy.da.SearchCursor(crate.source, [field for field in fields if field != hash_field]) as cursor, \
        arcpy.da.InsertCursor(changes.table, changes.fields) as insert_cursor:
    for row in cursor:
        total_rows += 1
        if not crate.is_table():
            #: skip features with empty geometry
            if row[-1] is None:
                log.warning('empty geometry found in %s', row)
                total_rows -= 1
                continue
            #: do this in two parts to prevent creating an unnecessary copy of the WKT
            #: (hash attributes first, then fold in the geometry separately)
            row_hash = xxh64(str(row[:-1]))
            row_hash.update(row[-1])
        else:
            row_hash = xxh64(str(row))
        digest = row_hash.hexdigest()
        #: check for duplicate hashes — re-hash until the digest is unique
        #: among this run's adds/unchanged sets
        while digest in changes.adds or digest in changes.unchanged:
            has_dups = True
            row_hash.update(digest)
            digest = row_hash.hexdigest()
        #: check for new feature
        if digest not in attribute_hashes:
            #: update or add
            #: insert into temp table
def fileMD5(filename, partial=True):
    '''Calculate the MD5 signature of *filename*.

    With ``partial=True`` and a file of 16M or more, hash only the first
    and last 8M.  This significantly reduces the time spent creating and
    comparing file signatures for large bioinformatics datasets.

    NOTE(review): the partial branch is truncated in this chunk — the
    tail-8M logic and the matching ``except`` continue past the visible
    lines.
    '''
    filesize = os.path.getsize(filename)
    # calculate md5 for specified file
    # hash_md5 is a project helper; presumably returns a hashlib-style
    # md5 object with update()/hexdigest() — TODO confirm
    md5 = hash_md5()
    block_size = 2**20 # buffer of 1M
    try:
        # 2**24 = 16M
        if (not partial) or filesize < 2**24:
            # small file, or a full hash was requested: stream everything
            with open(filename, 'rb') as f:
                while True:
                    data = f.read(block_size)
                    if not data:
                        break
                    md5.update(data)
        else:
            # 16 blocks of 1M; presumably split 8M head + 8M tail — confirm
            count = 16
            # otherwise, use the first and last 8M
            with open(filename, 'rb') as f:
                while True:
                    data = f.read(block_size)
def _hash(value: str) -> bytes:
    """Return the 8-byte xxHash-64 digest of *value*."""
    hasher = xxhash.xxh64(value)
    return hasher.digest()
def play(self, tau):
    """Select the final move at the MCTS root from edge visit counts.

    Builds the normalized visit-count policy ``pi`` over the 3x3 board;
    with ``tau == 0`` plays greedily (uniform tie-break), otherwise
    samples an action index from ``pi``.

    NOTE(review): in the visible span the stochastic choice is computed
    but unused and no return appears — the method presumably continues
    past this chunk; confirm.
    """
    # key the tree by the xxHash-64 of the raw root-state bytes
    # (np.ndarray.tostring is deprecated in favor of tobytes)
    root_node = xxhash.xxh64(self.root.tostring()).hexdigest()
    edge = self.tree[root_node]
    pi = np.zeros((3, 3), 'float')
    total_visit = 0
    action_space = []
    # accumulate total visits over all 9 edges
    # (N is presumably the visit-count index into an edge record — confirm)
    for i in range(3):
        for j in range(3):
            total_visit += edge[i, j][N]
            action_space.append([i, j])
    # normalize visit counts into a probability distribution
    for i in range(3):
        for j in range(3):
            pi[i, j] = edge[i, j][N] / total_visit
    if tau == 0:
        # deterministic: pick among the most-visited cells at random
        deterministic = np.argwhere(pi == pi.max())
        final_move = deterministic[np.random.choice(len(deterministic))]
    else:
        # stochastic: sample a flat action index proportionally to pi
        stochactic = np.random.choice(9, p=pi.flatten())