sess.run(tf.assign(model.is_train, tf.constant(False, dtype=tf.bool)))
losses = []
answer_dict = {}
remapped_dict = {}
for step in tqdm(range(total // config.batch_size + 1)):
    qa_id, loss, yp1, yp2 = sess.run(
        [model.qa_id, model.loss, model.yp1, model.yp2])
    answer_dict_, remapped_dict_ = convert_tokens(
        eval_file, qa_id.tolist(), yp1.tolist(), yp2.tolist())
    answer_dict.update(answer_dict_)
    remapped_dict.update(remapped_dict_)
    losses.append(loss)
loss = np.mean(losses)
metrics = evaluate(eval_file, answer_dict)
with open(config.answer_file, "w") as fh:
    json.dump(remapped_dict, fh)
print("Exact Match: {}, F1: {}".format(
    metrics['exact_match'], metrics['f1']))
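# The evaluate() call above appears to follow the official SQuAD evaluation
# script (see the evaluate-v1.1.py reference in the next snippet). A minimal
# sketch of the two metrics for a single prediction/ground-truth pair; the
# official script also lowercases and strips punctuation and articles before
# comparing:
from collections import Counter

def exact_match_score(prediction, ground_truth):
    return float(prediction == ground_truth)

def f1_score(prediction, ground_truth):
    pred_tokens = prediction.split()
    gt_tokens = ground_truth.split()
    common = Counter(pred_tokens) & Counter(gt_tokens)
    num_same = sum(common.values())
    if num_same == 0:
        return 0.0
    precision = num_same / len(pred_tokens)
    recall = num_same / len(gt_tokens)
    return 2 * precision * recall / (precision + recall)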
losses = []
answer_dict = {}
remapped_dict = {}
for step in tqdm(range(total // config.batch_size + 1)):
    qa_id, loss, yp1, yp2 = sess.run(
        [model.qa_id, model.loss, model.yp1, model.yp2])
    answer_dict_, remapped_dict_, outlier = convert_tokens(
        eval_file, qa_id.tolist(), yp1.tolist(), yp2.tolist())
    answer_dict.update(answer_dict_)
    remapped_dict.update(remapped_dict_)
    losses.append(loss)
loss = np.mean(losses)
# Evaluate with answer_dict here; evaluate-v1.1.py evaluates with remapped_dict
# instead, since only that dict is saved. The two dicts differ slightly; see
# evaluate-v1.1.py for details.
metrics = evaluate(eval_file, answer_dict)
with open(config.answer_file, "w") as fh:
    json.dump(remapped_dict, fh)
print("Exact Match: {}, F1: {}, Rouge-L-f: {}, Rouge-L-p: {}, Rouge-L-r: {}".format(
    metrics['exact_match'], metrics['f1'], metrics['rouge-l-f'], metrics['rouge-l-p'],
    metrics['rouge-l-r']))
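# Rouge-L is based on the longest common subsequence (LCS) between prediction
# and reference. A minimal sketch; the beta weighting of 1.2 is an assumption
# (a common choice), not taken from this code base:
def rouge_l(prediction, reference, beta=1.2):
    p_tokens, r_tokens = prediction.split(), reference.split()
    # LCS length via dynamic programming
    dp = [[0] * (len(r_tokens) + 1) for _ in range(len(p_tokens) + 1)]
    for i in range(1, len(p_tokens) + 1):
        for j in range(1, len(r_tokens) + 1):
            if p_tokens[i - 1] == r_tokens[j - 1]:
                dp[i][j] = dp[i - 1][j - 1] + 1
            else:
                dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
    lcs = dp[-1][-1]
    if lcs == 0:
        return 0.0, 0.0, 0.0
    precision = lcs / len(p_tokens)
    recall = lcs / len(r_tokens)
    f = (1 + beta ** 2) * precision * recall / (recall + beta ** 2 * precision)
    return f, precision, recall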
vo_desc_file = vo_desc['id'] + '.json'
if os.path.isfile(vo_desc_file):
    raise FileExistsError('verification object description file {!r} already exists'.format(vo_desc_file))
self.logger.debug('Dump verification object description {!r} to file {!r}'.format(vo_desc['id'], vo_desc_file))
dir_path = os.path.dirname(vo_desc_file)
if dir_path:
    os.makedirs(dir_path, exist_ok=True)
# Exclude this directory from cleaning by lkvog
root_dir_id = vo_desc_file.split('/')[0]
if root_dir_id not in self.dynamic_excluded_clean:
    self.logger.debug("Do not clean dir {!r} on component termination".format(root_dir_id))
    self.dynamic_excluded_clean.append(root_dir_id)
with open(vo_desc_file, 'w', encoding='utf8') as fp:
    ujson.dump(vo_desc, fp, sort_keys=True, indent=4, ensure_ascii=False, escape_forward_slashes=False)
return vo_desc_file
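# Note on escape_forward_slashes above: unlike the stdlib json module, ujson
# escapes forward slashes by default, which makes dumped file paths harder to
# read. Illustrative values only:
#
#   ujson.dumps({'path': 'dir/file.json'})
#   # -> '{"path":"dir\\/file.json"}'
#   ujson.dumps({'path': 'dir/file.json'}, escape_forward_slashes=False)
#   # -> '{"path":"dir/file.json"}'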
created = incident['created']
incident_dir = os.path.join(archive_path, str(created.year), str(created.month), str(created.day), str(incident['incident_id']))
try:
    os.makedirs(incident_dir)
except OSError as e:
    if e.errno != errno.EEXIST:
        logger.exception('Failed creating %s DIR', incident_dir)
        return
incident_file = os.path.join(incident_dir, 'incident_data.json')
try:
    with open(incident_file, 'w') as handle:
        ujson.dump(incident, handle, indent=2)
except IOError:
    logger.exception('Failed writing incident metadata to %s', incident_file)
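# On Python 3 the errno.EEXIST check above can be replaced with the exist_ok
# flag (as in the earlier verification-object snippet); only the
# already-exists case is suppressed, other OSErrors still raise:
#
#   os.makedirs(incident_dir, exist_ok=True)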
def save_points(self):
    # Use a context manager so the file handle is closed even if json.dump raises.
    with open(self.points_path, 'w') as f:
        json.dump(self.points, f)
def attributes(self):
    data = dict()
    data['target fragments'] = [f.name for f in self.target_fragments]
    data['fragments'] = {f.name: list(f.in_files) for f in self.fragments}
    with open(self.DESC_FILE, 'w', encoding='utf8') as fp:
        ujson.dump(data, fp, sort_keys=True, indent=4, ensure_ascii=False, escape_forward_slashes=False)
    return [
        {
            'name': 'Fragmentation strategy',
            'value': [
                {'name': 'name', 'value': self.conf['Fragmentation strategy']['name'],
                 'data': self.DESC_FILE}
            ]
        },
    ], [self.DESC_FILE]
with meta_path.open(mode="r", encoding="utf-8") as f:
    meta_dicts[meta_path] = json.load(f)
_LOG.info("Modifying {}".format(gulp_dir))
for meta_path, meta_dict in meta_dicts.items():
    segment_ids = set(meta_dict.keys())
    for segment_id in segment_ids:
        _update_metadata(
            segment_id, meta_dict, transform_func, drop_nones=drop_nones
        )
    if not skip_backup:
        _backup_meta_data(meta_path)
    with meta_path.open(mode="w", encoding="utf-8") as f:
        json.dump(meta_dict, f)
def dump_data(data=None, file=None):
    """
    Dump data in JSON format to a file.
    :param data: data to dump (must be JSON-serializable)
    :param file: path of the file to which data is dumped
    :return: None
    """
    with open(file, 'w') as f:
        json.dump(data, f)
    print("Results written to {}".format(file))
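# Hypothetical usage of dump_data; the payload and file name are illustrative
# only, not taken from the surrounding code:
dump_data(data={'accuracy': 0.93, 'loss': 0.21}, file='results.json')
# prints: Results written to results.json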
progress['categorias'] = True
saveConfig(progress, configFile)
# Fetch the stores ("comercios") whose prices have not been collected yet
comCursor = dbLayer.conn.cursor()
comercios = comCursor.execute("SELECT * FROM comercios WHERE pendiente = 1").fetchall()
if os.path.isfile('_cantarticulos.json'):
    with open('_cantarticulos.json', mode='r') as cantArtFile:
        cantArticulos = ujson.load(cantArtFile)
else:
    cantArticulos = getCantArticulos(stemUrl, comercios)
    with open('_cantarticulos.json', 'w') as outfile:
        ujson.dump(cantArticulos, outfile)
if len(comercios) == 0:
    progress['productos'] = True
    with open(configFile, 'w') as outfile:
        ujson.dump(progress, outfile)
    return
for comercio in comercios:
    for item in cantArticulos:
        if comercio['id'] == item['id']:
            cantProductos = item['total']
            maxPermitido = item['maxLimitPermitido']
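# The nested loop above is O(len(comercios) * len(cantArticulos)). A sketch of
# an equivalent single-pass lookup, assuming 'id' values are unique within
# cantArticulos:
articulosPorId = {item['id']: item for item in cantArticulos}
for comercio in comercios:
    item = articulosPorId.get(comercio['id'])
    if item is not None:
        cantProductos = item['total']
        maxPermitido = item['maxLimitPermitido']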
def update_status(self, x):
    self.status.update(x)
    with open_atomic(str(self.status_file)) as f:
        json.dump(self.status, f)
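# open_atomic is not a standard-library function and its implementation is not
# shown here. A minimal sketch of the usual pattern such a helper implements,
# assuming the goal is that readers never observe a partially written status
# file: write to a temporary file in the same directory, then atomically
# replace the target (os.replace is atomic when both paths are on the same
# filesystem).
import contextlib
import os
import tempfile

@contextlib.contextmanager
def open_atomic(path, mode='w'):
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
    try:
        with os.fdopen(fd, mode) as f:
            yield f
        os.replace(tmp_path, path)
    except BaseException:
        os.unlink(tmp_path)
        raise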