# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
'min_recent_requests': min_recent_requests,
'min_popularity': min_popularity
}
instance_id = str(uuid4()).split('-')[0]
elastic_url = config_get('c3po', 'elastic_url')
elastic_index = config_get('c3po', 'elastic_index')
ca_cert = False
if 'ca_cert' in c3po_options:
ca_cert = config_get('c3po', 'ca_cert')
auth = False
if ('elastic_user' in c3po_options) and ('elastic_pass' in c3po_options):
auth = HTTPBasicAuth(config_get('c3po', 'elastic_user'), config_get('c3po', 'elastic_pass'))
w = waiting_time
while not GRACEFUL_STOP.is_set():
if w < waiting_time:
w += 10
sleep(10)
continue
len_dids = did_queue.qsize()
if len_dids > 0:
logging.debug('(%s) %d did(s) in queue' % (instance_id, len_dids))
else:
logging.debug('(%s) no dids in queue' % (instance_id))
for _ in range(0, len_dids):
did = did_queue.get()
"""
src_rse = f_request.get('src_rse')
dst_rse = f_request.get('dst_rse')
src_site = f_request.get('src_site')
dst_site = f_request.get('dst_site')
request_states = f_request.get('request_states')
if not request_states:
return generate_http_error_flask(400, 'MissingParameter', 'Request state is missing')
if src_rse and not dst_rse:
return generate_http_error_flask(400, 'MissingParameter', 'Destination RSE is missing')
elif dst_rse and not src_rse:
return generate_http_error_flask(400, 'MissingParameter', 'Source RSE is missing')
elif src_site and not dst_site:
return generate_http_error_flask(400, 'MissingParameter', 'Destination site is missing')
elif dst_site and not src_site:
return generate_http_error_flask(400, 'MissingParameter', 'Source site is missing')
try:
states = [RequestState.from_string(state) for state in request_states.split(',')]
except ValueError:
return generate_http_error_flask(400, 'Invalid', 'Request state value is invalid')
src_rses = []
dst_rses = []
if src_site:
src_rses = get_rses_with_attribute_value(key='site', value=src_site, lookup_key='site')
if not src_rses:
return generate_http_error_flask(404, 'NotFound', 'Could not resolve site name %s to RSE' % src_site)
src_rses = [get_rse_name(rse['rse_id']) for rse in src_rses]
dst_rses = get_rses_with_attribute_value(key='site', value=dst_site, lookup_key='site')
:param account: Account name.
:param rse: RSE name.
:status 200: Successfully deleted.
:status 401: Invalid auth token.
:status 404: RSE not found.
:status 404: Account not found
"""
try:
delete_local_account_limit(account=account, rse=rse, issuer=request.environ.get('issuer'))
except AccessDenied as exception:
return generate_http_error_flask(401, 'AccessDenied', exception.args[0])
except AccountNotFound as exception:
return generate_http_error_flask(404, 'AccountNotFound', exception.args[0])
except RSENotFound as exception:
return generate_http_error_flask(404, 'RSENotFound', exception.args[0])
except Exception as exception:
print(format_exc())
return exception, 500
return "OK", 200
:param account: The account name.
:param rse_expression: The rse expression.
:param issuer: The issuer account_core.
:returns: True if successful; False otherwise.
"""
kwargs = {'account': account, 'rse_expression': rse_expression}
if not rucio.api.permission.has_permission(issuer=issuer, action='delete_global_account_limit', kwargs=kwargs):
raise rucio.common.exception.AccessDenied('Account %s can not delete global account limits.' % (issuer))
account = InternalAccount(account)
if not account_exists(account=account):
raise rucio.common.exception.AccountNotFound('Account %s does not exist' % (account))
return account_limit_core.delete_global_account_limit(account=account, rse_expression=rse_expression)
fname = generate_didname(metadata=metadata, dsn=dsn, did_type='file')
lfns.append(fname)
logging.info('%s Generating file %s in dataset %s', prepend_str, fname, dsn)
physical_fname = '%s/%s' % (tmpdir, "".join(fname.split('/')))
physical_fnames.append(physical_fname)
generate_file(physical_fname, filesize)
fnames.append(fname)
logging.info('%s Upload %s to %s', prepend_str, dsn, site)
dsn = '%s:%s' % (scope, dsn)
status = upload(files=lfns, scope=scope, metadata=metadata, rse=site, account=account, source_dir=tmpdir, worker_number=worker_number, total_workers=total_workers, dataset_lifetime=dataset_lifetime, did=dsn, set_metadata=set_metadata)
for physical_fname in physical_fnames:
remove(physical_fname)
rmdir(tmpdir)
if status:
monitor.record_counter(counters='automatix.addnewdataset.done', delta=1)
monitor.record_counter(counters='automatix.addnewfile.done', delta=nbfiles)
monitor.record_timer('automatix.datasetinjection', (time() - start_time) * 1000)
break
else:
logging.info('%s Failed to upload files. Will retry another time (attempt %s/%s)', prepend_str, str(retry + 1), str(totretries))
if once is True:
logging.info('%s Run with once mode. Exiting', prepend_str)
break
tottime = time() - starttime
if status:
logging.info('%s It took %s seconds to upload one dataset on %s', prepend_str, str(tottime), str(sites))
if sleep_time > tottime:
logging.info('%s Will sleep for %s seconds', prepend_str, str(sleep_time - tottime))
sleep(sleep_time - tottime)
else:
logging.info('%s Retrying a new upload', prepend_str)
heartbeat.die(executable, hostname, pid, hb_thread)
ssl_key_file=config_get('messaging-fts3', 'ssl_key_file'),
ssl_cert_file=config_get('messaging-fts3', 'ssl_cert_file'),
vhost=config_get('messaging-fts3', 'broker_virtual_host', raise_exception=False),
reconnect_attempts_max=999))
logging.info('receiver started')
while not graceful_stop.is_set():
heartbeat.live(executable, hostname, pid, hb_thread)
for conn in conns:
if not conn.is_connected():
logging.info('connecting to %s' % conn.transport._Transport__host_and_ports[0][0])
record_counter('daemons.messaging.fts3.reconnect.%s' % conn.transport._Transport__host_and_ports[0][0].split('.')[0])
conn.set_listener('rucio-messaging-fts3', Receiver(broker=conn.transport._Transport__host_and_ports[0], id=id, total_threads=total_threads, full_mode=full_mode))
conn.start()
conn.connect()
conn.subscribe(destination=config_get('messaging-fts3', 'destination'),
id='rucio-messaging-fts3',
ack='auto')
time.sleep(1)
logging.info('receiver graceful stop requested')
for conn in conns:
try:
conn.disconnect()
except Exception:
def get_global_account_limit(self, account, rse_expression):
    """
    List the account limit for the specific RSE expression.

    :param account: The account name.
    :param rse_expression: The rse expression.
    """
    # Build the REST path; the RSE expression may contain characters that
    # need URL-encoding, hence quote_plus.
    segments = [self.ACCOUNTS_BASEURL, account, 'limits', 'global', quote_plus(rse_expression)]
    request_url = build_url(choice(self.list_hosts), path='/'.join(segments))
    response = self._send_request(request_url, type='GET')
    # Anything other than 200 OK is translated into the matching client exception.
    if response.status_code != codes.ok:
        error_cls, error_msg = self._get_exception(headers=response.headers, status_code=response.status_code, data=response.content)
        raise error_cls(error_msg)
    return next(self._load_json_data(response))
json_data = data()
try:
parameter = loads(json_data)
except ValueError:
raise generate_http_error(400, 'ValueError', 'cannot decode json parameter dictionary')
try:
bytes = parameter['bytes']
except KeyError as exception:
if exception.args[0] == 'type':
raise generate_http_error(400, 'KeyError', '%s not defined' % str(exception))
except TypeError:
raise generate_http_error(400, 'TypeError', 'body must be a json dictionary')
try:
set_local_account_limit(account=account, rse=rse, bytes=bytes, issuer=ctx.env.get('issuer'))
except AccessDenied as exception:
raise generate_http_error(401, 'AccessDenied', exception.args[0])
except RSENotFound as exception:
raise generate_http_error(404, 'RSENotFound', exception.args[0])
except AccountNotFound as exception:
raise generate_http_error(404, 'AccountNotFound', exception.args[0])
except Exception as exception:
print(format_exc())
raise InternalError(exception)
raise Created()
# Parse the command-line options; `argparser` is assumed to be built earlier
# in this script (with --all / --fraction / --num options) -- TODO confirm.
args = argparser.parse_args()
# if args.all:
# print 'all'
# elif args.fraction is not None:
# print 'fraction'
# elif args.num is not None:
# print 'num'
session = get_session()
# Count every replication rule currently registered.
total_cnt = session.query(models.ReplicationRule).count()
# NOTE(review): Python 2 print statements -- this fragment predates Python 3.
print "There are currently %d replication rules registered in Rucio" % total_cnt
# Randomize the row order so the sample below is unbiased.
# NOTE(review): 'dbms_random.value' is Oracle-specific syntax but is applied
# to every non-sqlite dialect here; confirm this works on all supported
# backends (e.g. MySQL/PostgreSQL).
if session.bind.dialect.name != 'sqlite':
query = session.query(models.ReplicationRule).order_by('dbms_random.value')
else:
query = session.query(models.ReplicationRule).order_by('RANDOM()')
# Select how many rules to read: a fraction of the total, a fixed number,
# or all of them. Exactly one of the three modes is expected.
if args.fraction is not None:
print 'Reading up to %d rules (fraction=%f)' % (int(total_cnt * args.fraction), args.fraction)
# NOTE(review): the bounds check happens after the message is printed;
# an out-of-range fraction still prints first, then raises.
if args.fraction > 1 or args.fraction <= 0:
raise ValueError('The fraction value must be between 0 and 1')
query = query.limit(int(total_cnt * args.fraction))
elif args.num is not None:
print 'Reading up to %d rules (num)' % args.num
if args.num <= 0:
raise ValueError('The num value must be bigger than 0')
query = query.limit(args.num)
elif args.all:
print 'Reading all rules'
def re_evaluator(once=False):
"""
Main loop to check the re-evaluation of dids.
"""
hostname = socket.gethostname()
pid = os.getpid()
current_thread = threading.current_thread()
paused_dids = {} # {(scope, name): datetime}
# Make an initial heartbeat so that all judge-evaluators have the correct worker number on the next try
live(executable='rucio-judge-evaluator', hostname=hostname, pid=pid, thread=current_thread, older_than=60 * 30)
graceful_stop.wait(1)
while not graceful_stop.is_set():
try:
# heartbeat
heartbeat = live(executable='rucio-judge-evaluator', hostname=hostname, pid=pid, thread=current_thread, older_than=60 * 30)
start = time.time() # NOQA
# Refresh paused dids
paused_dids = dict((k, v) for k, v in iteritems(paused_dids) if datetime.utcnow() < v)
# Select a bunch of dids for re evaluation for this worker
dids = get_updated_dids(total_workers=heartbeat['nr_threads'] - 1,
worker_number=heartbeat['assign_thread'],
limit=100,