Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def get_account_limits(self, account, rse_expression, locality):
    """
    Dispatch to the local or global account-limit lookup.

    :param account: The account name.
    :param rse_expression: Valid RSE expression
    :param locality: The scope of the account limit. 'local' or 'global'.
    :raises UnsupportedOperation: if locality is neither 'local' nor 'global'.
    """
    # Guard-clause style: handle each recognised locality and bail out early.
    if locality == 'global':
        return self.get_global_account_limit(account, rse_expression)
    if locality == 'local':
        return self.get_local_account_limit(account, rse_expression)
    # Deferred import keeps the exception module off the hot path.
    from rucio.common.exception import UnsupportedOperation
    raise UnsupportedOperation('The provided locality (%s) for the account limit was invalid' % locality)
# NOTE(review): fragment — the enclosing 'def' (which provides session,
# condition, scope, name, collections and account) lies outside this chunk,
# and the trailing add_message(...) call is cut off at the chunk boundary.
# Collect the DID type of every matching child, rejecting files and mixed
# collection types, then record each parent/child association.
available_dids = {}
child_type = None
for row in session.query(models.DataIdentifier.scope,
                         models.DataIdentifier.name,
                         models.DataIdentifier.did_type).with_hint(models.DataIdentifier, "INDEX(DIDS DIDS_PK)", 'oracle').filter(condition):
    # Containers may only hold datasets or containers, never bare files.
    if row.did_type == DIDType.FILE:
        raise exception.UnsupportedOperation("Adding a file (%s:%s) to a container (%s:%s) is forbidden" % (row.scope, row.name, scope, name))
    # First row seen fixes the expected child type for all others.
    if not child_type:
        child_type = row.did_type
    available_dids['%s:%s' % (row.scope.internal, row.name)] = row.did_type
    # Every child must share one DID type (all datasets or all containers).
    if child_type != row.did_type:
        raise exception.UnsupportedOperation("Mixed collection is not allowed: '%s:%s' is a %s(expected type: %s)" % (row.scope, row.name, row.did_type, child_type))
for c in collections:
    # Record the association; flush is deferred to the enclosing transaction.
    did_asso = models.DataIdentifierAssociation(scope=scope, name=name, child_scope=c['scope'], child_name=c['name'],
                                                did_type=DIDType.CONTAINER, child_type=available_dids.get('%s:%s' % (c['scope'].internal, c['name'])), rule_evaluation=True)
    did_asso.save(session=session, flush=False)
    # Send AMI messages
    if child_type == DIDType.CONTAINER:
        chld_type = 'CONTAINER'
    elif child_type == DIDType.DATASET:
        chld_type = 'DATASET'
    else:
        chld_type = 'UNKNOWN'
    # NOTE(review): the message payload below is truncated by the chunk boundary.
    add_message('REGISTER_CNT', {'account': account.external,
                                 'scope': scope.external,
                                 'name': name,
                                 'childscope': c['scope'].external,
    :status 500: Database Exception
"""
# NOTE(review): fragment — the 'def' line and the head of this docstring are
# outside this chunk; 'scope' and 'name' are presumably route parameters —
# confirm against the enclosing handler.
# Decode the JSON payload and delegate to set_status(), mapping each domain
# exception onto its HTTP status code (Flask style: errors are returned).
json_data = request.data
try:
    kwargs = loads(json_data)
except ValueError:
    return generate_http_error_flask(400, 'ValueError', 'Cannot decode json data parameter')
try:
    set_status(scope=scope, name=name, issuer=request.environ.get('issuer'), **kwargs)
except DataIdentifierNotFound as error:
    return generate_http_error_flask(404, 'DataIdentifierNotFound', error.args[0])
except UnsupportedStatus as error:
    return generate_http_error_flask(409, 'UnsupportedStatus', error.args[0])
except UnsupportedOperation as error:
    return generate_http_error_flask(409, 'UnsupportedOperation', error.args[0])
except AccessDenied as error:
    return generate_http_error_flask(401, 'AccessDenied', error.args[0])
except RucioException as error:
    # Any other Rucio-specific failure surfaces as a 500 with its class name.
    return generate_http_error_flask(500, error.__class__.__name__, error.args[0])
except Exception as error:
    # Unexpected failure: dump the traceback and return the raw error.
    print(format_exc())
    return error, 500
return "Ok", 200
# NOTE(review): fragment — the enclosing 'def' (which provides session and
# files) is outside this chunk, and the final 'if not found:' body is cut off
# at the chunk boundary.
# Look up every file to be attached and validate availability and metadata.
files_query = session.query(models.DataIdentifier.scope, models.DataIdentifier.name,
                            models.DataIdentifier.bytes, models.DataIdentifier.guid,
                            models.DataIdentifier.events, models.DataIdentifier.availability,
                            models.DataIdentifier.adler32, models.DataIdentifier.md5).\
    filter(models.DataIdentifier.did_type == DIDType.FILE).\
    with_hint(models.DataIdentifier, "INDEX(DIDS DIDS_PK)", 'oracle')
# One scope/name equality pair per requested file, OR-ed into a single query.
file_condition = []
for file in files:
    file_condition.append(and_(models.DataIdentifier.scope == file['scope'], models.DataIdentifier.name == file['name']))
rows = []
for row in files_query.filter(or_(*file_condition)):
    file = row._asdict()
    rows.append(file)
    # Lost files can never be (re)attached.
    if file['availability'] == DIDAvailability.LOST:
        raise exception.UnsupportedOperation('File %s:%s is LOST and cannot be attached' % (file['scope'], file['name']))
    # Check meta-data, if provided
    for f in files:
        if f['name'] == file['name'] and f['scope'] == file['scope']:
            # Caller-supplied size/checksums must match the catalogue values.
            for key in ['bytes', 'adler32', 'md5']:
                if key in f and str(f.get(key)) != str(file[key]):
                    raise exception.FileConsistencyMismatch(key + " mismatch for '%(scope)s:%(name)s': " % file + str(f.get(key)) + '!=' + str(file[key]))
            break
# Fewer rows than requested files means at least one file is unknown.
if len(rows) != len(files):
    for file in files:
        found = False
        for row in rows:
            if row['scope'] == file['scope'] and row['name'] == file['name']:
                found = True
                break
        if not found:
def __add_collections_to_container(scope, name, collections, account, session):
    """
    Add collections (datasets or containers) to container.

    NOTE(review): this definition is truncated at the chunk boundary — only the
    self-append check and the start of the child-type scan are visible here.

    :param scope: The scope name.
    :param name: The container name.
    :param collections: .
    :param account: The account owner.
    :param session: The database session in use.
    """
    # Build one OR-ed scope/name condition covering all requested children.
    condition = or_()
    for cond in collections:
        # A container cannot contain itself.
        if (scope == cond['scope']) and (name == cond['name']):
            raise exception.UnsupportedOperation('Self-append is not valid!')
        condition.append(and_(models.DataIdentifier.scope == cond['scope'],
                              models.DataIdentifier.name == cond['name']))
    available_dids = {}
    child_type = None
    for row in session.query(models.DataIdentifier.scope,
                             models.DataIdentifier.name,
                             models.DataIdentifier.did_type).with_hint(models.DataIdentifier, "INDEX(DIDS DIDS_PK)", 'oracle').filter(condition):
        # Containers may only hold datasets or containers, never bare files.
        if row.did_type == DIDType.FILE:
            raise exception.UnsupportedOperation("Adding a file (%s:%s) to a container (%s:%s) is forbidden" % (row.scope, row.name, scope, name))
        # First row seen fixes the expected child type for all others.
        if not child_type:
            child_type = row.did_type
"""
# NOTE(review): fragment — the 'def' line and the docstring head are outside
# this chunk; 'scope', 'name' and data() come from the enclosing handler.
# Decode the JSON metadata and delegate to add_did_meta(), translating each
# domain exception into an HTTP error (web.py style: errors are raised).
json_data = data()
try:
    meta = loads(json_data)
except ValueError:
    raise generate_http_error(400, 'ValueError', 'Cannot decode json parameter list')
try:
    add_did_meta(scope=scope, name=name, meta=meta)
except DataIdentifierNotFound as error:
    raise generate_http_error(404, 'DataIdentifierNotFound', error.args[0])
except DataIdentifierAlreadyExists as error:
    raise generate_http_error(409, 'DataIdentifierAlreadyExists', error.args[0])
except AccessDenied as error:
    raise generate_http_error(401, 'AccessDenied', error.args[0])
except UnsupportedOperation as error:
    raise generate_http_error(409, 'UnsupportedOperation', error.args[0])
except NotImplementedError:
    raise generate_http_error(409, 'NotImplementedError', 'Feature not in current database')
except DatabaseException as error:
    raise generate_http_error(500, 'DatabaseException', error.args[0])
except RucioException as error:
    # Any other Rucio-specific failure surfaces as a 500 with its class name.
    raise generate_http_error(500, error.__class__.__name__, error.args[0])
except Exception as error:
    # Unexpected failure: dump the traceback and wrap as an internal error.
    print(format_exc())
    raise InternalError(error)
# Success is signalled by raising the 201 Created response.
raise Created()
@transactional_session
def update_request_state(response, session=None):
    """
    Used by poller and consumer to update the internal state of requests,
    after the response by the external transfertool.

    NOTE(review): this definition is truncated at the chunk boundary — the
    trailing set_request_state(...) call is cut off mid-argument-list.

    :param response: The transfertool response dictionary, retrieved via request.query_request().
    :param session: The database session to use.
    :returns commit_or_rollback: Boolean.
    """
    try:
        request.touch_request(response['request_id'], session=session)
    # NOTE(review): Python 2 'except X, e' syntax below — this is a
    # SyntaxError on Python 3 and needs 'except ... as e' when ported.
    except exception.UnsupportedOperation, e:
        logging.warning("Request %s doesn't exist anymore, should not be updated again. Error: %s" % (response['request_id'], str(e).replace('\n', '')))
        return False
    # Only proceed when a meaningful new state was reported.
    if not response['new_state']:
        return False
    elif response['new_state'] == RequestState.LOST:
        logging.debug('UPDATING REQUEST %s FOR STATE %s' % (str(response['request_id']), str(response['new_state'])))
    elif response['new_state'] and 'job_state' in response and response['job_state']:
        logging.debug('UPDATING REQUEST %s FOR TRANSFER(%s) STATE %s' % (str(response['request_id']), str(response['transfer_id']), str(response['job_state'])))
    else:
        return False
    if response['new_state']:
        request.set_request_state(response['request_id'],
                                  response['new_state'],
# NOTE(review): fragment — the enclosing 'def' (detach logic; provides
# session, scope, name and dids) is outside this chunk.
# Resolve the parent DID (must be a dataset or container) and detach each
# requested child, keeping the parent's cached length/bytes/events counters
# in step.
query = session.query(models.DataIdentifier).filter_by(scope=scope, name=name).\
    filter(or_(models.DataIdentifier.did_type == DIDType.CONTAINER, models.DataIdentifier.did_type == DIDType.DATASET))
try:
    did = query.one()
    # Mark for rule re-evaluation
    models.UpdatedDID(scope=scope, name=name, rule_evaluation_action=DIDReEvaluation.DETACH).save(session=session, flush=False)
except NoResultFound:
    raise exception.DataIdentifierNotFound("Data identifier '%(scope)s:%(name)s' not found" % locals())
# TODO: should judge target did's status: open, monotonic, close.
query_all = session.query(models.DataIdentifierAssociation).filter_by(scope=scope, name=name)
if query_all.first() is None:
    raise exception.DataIdentifierNotFound("Data identifier '%(scope)s:%(name)s' has no child data identifiers." % locals())
for source in dids:
    # A DID cannot be detached from itself.
    if (scope == source['scope']) and (name == source['name']):
        raise exception.UnsupportedOperation('Self-detach is not valid.')
    child_scope = source['scope']
    child_name = source['name']
    associ_did = query_all.filter_by(child_scope=child_scope, child_name=child_name).first()
    if associ_did is None:
        raise exception.DataIdentifierNotFound("Data identifier '%(child_scope)s:%(child_name)s' not found under '%(scope)s:%(name)s'" % locals())
    child_type = associ_did.child_type
    child_size = associ_did.bytes
    child_events = associ_did.events
    # Decrement the parent's cached aggregates only where they are tracked.
    if did.length:
        did.length -= 1
    if did.bytes and child_size:
        did.bytes -= child_size
    if did.events and child_events:
        did.events -= child_events
    associ_did.delete(session=session)
    :status 401: Invalid Auth Token.
    :status 404: Lifetime Exception Not Found.
    :status 500: Internal Error.
"""
# NOTE(review): fragment — the 'def' line and docstring head are outside this
# chunk; 'exception_id' is presumably a route parameter — confirm against the
# enclosing handler.
# Decode the JSON payload (state is optional) and delegate to
# update_exception(), mapping each domain exception to an HTTP status code.
json_data = request.data
try:
    params = loads(json_data)
except ValueError:
    return generate_http_error_flask(400, 'ValueError', 'Cannot decode json parameter list')
try:
    state = params['state']
except KeyError:
    # Missing 'state' is tolerated; downstream decides the default.
    state = None
try:
    update_exception(exception_id=exception_id, state=state, issuer=request.environ.get('issuer'))
except UnsupportedOperation as error:
    return generate_http_error_flask(400, 'UnsupportedOperation', error.args[0])
except AccessDenied as error:
    return generate_http_error_flask(401, 'AccessDenied', error.args[0])
except LifetimeExceptionNotFound as error:
    return generate_http_error_flask(404, 'LifetimeExceptionNotFound', error.args[0])
except RucioException as error:
    # Any other Rucio-specific failure surfaces as a 500 with its class name.
    return generate_http_error_flask(500, error.__class__.__name__, error.args[0])
except Exception as error:
    return error, 500
return "Created", 201
# NOTE(review): fragment — several disjoint pieces of an attach routine are
# spliced together at chunk boundaries: the call whose keyword arguments open
# this chunk, the code following 'return', and the condition list at the end
# all belong to code outside this view.
                       files=attachment['dids'],
                       account=account,
                       ignore_duplicate=ignore_duplicate,
                       session=session)
# mark parent dataset as is_archive
session.query(models.DataIdentifier).\
    filter(models.DataIdentifier.did_type == DIDType.FILE).\
    filter(models.DataIdentifier.scope == attachment['scope']).\
    filter(models.DataIdentifier.name == attachment['name']).\
    update({'is_archive': True})
return
# NOTE(review): the 'if' heading this elif-chain is outside this chunk.
raise exception.UnsupportedOperation("Data identifier '%(scope)s:%(name)s' is a file" % attachment)
elif not parent_did.is_open:
    raise exception.UnsupportedOperation("Data identifier '%(scope)s:%(name)s' is closed" % attachment)
elif parent_did.did_type == DIDType.DATASET:
    # Datasets take files.
    __add_files_to_dataset(scope=attachment['scope'], name=attachment['name'],
                           files=attachment['dids'], account=account,
                           ignore_duplicate=ignore_duplicate,
                           rse_id=attachment.get('rse_id'),
                           session=session)
elif parent_did.did_type == DIDType.CONTAINER:
    # Containers take collections (datasets or containers).
    __add_collections_to_container(scope=attachment['scope'],
                                   name=attachment['name'],
                                   collections=attachment['dids'],
                                   account=account, session=session)
parent_did_condition.append(and_(models.DataIdentifier.scope == parent_did.scope,
                                 models.DataIdentifier.name == parent_did.name))