Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
try:
process = process_query.latest()
except Process.DoesNotExist:
return Response({'process': ['Invalid process slug "{}" - object does not exist.'.format(process_slug)]},
status=status.HTTP_400_BAD_REQUEST)
request.data['process'] = process.pk
# perform "get_or_create" if requested - return existing object
# if found
if kwargs.pop('get_or_create', False):
process_input = request.data.get('input', {})
# use default values if they are not given
for field_schema, fields, path in iterate_schema(process_input, process.input_schema):
if 'default' in field_schema and field_schema['name'] not in fields:
dict_dot(process_input, path, field_schema['default'])
checksum = get_data_checksum(process_input, process.slug, process.version)
data_qs = Data.objects.filter(
checksum=checksum,
process__persistence__in=[Process.PERSISTENCE_CACHED, Process.PERSISTENCE_TEMP],
)
data_qs = get_objects_for_user(request.user, 'view_data', data_qs)
if data_qs.exists():
data = data_qs.order_by('created').last()
serializer = self.get_serializer(data)
return Response(serializer.data)
# create the objects
resp = super(ResolweCreateDataModelMixin, self).create(request, *args, **kwargs)
# run manager
# Trim process_* fields to not exceed max length of the database field.
for i, entry in enumerate(val):
max_length = Data._meta.get_field(key).base_field.max_length
if len(entry) > max_length:
val[i] = entry[: max_length - 3] + "..."
getattr(d, key).extend(val)
elif key != "output":
setattr(d, key, val)
if "output" in changeset:
if not isinstance(d.output, dict):
d.output = {}
for key, val in changeset["output"].items():
dict_dot(d.output, key, val)
try:
d.save(update_fields=list(changeset.keys()))
except ValidationError as exc:
logger.error(
__(
"Validation error when saving Data object of process '{}' (handle_update):\n\n{}",
d.process.slug,
traceback.format_exc(),
),
extra={"data_id": data_id},
)
d.refresh_from_db()
d.process_error.append(exc.message)
d.status = Data.STATUS_ERROR
with suppress(Exception):
process.type.replace(":", ".") + "__schema__",
process.output_schema,
)
errors = []
for path, key, value in iterate_dict(
processes, exclude=lambda key, value: key == "__schema__"
):
if "__schema__" not in value:
continue
# Validate with any parent types.
for length in range(len(path), 0, -1):
parent_type = ".".join(path[:length] + ["__schema__"])
try:
parent_schema = dict_dot(processes, parent_type)
except KeyError:
continue
errors += validate_process_subtype(
supertype_name=":".join(path[:length]),
supertype=parent_schema,
subtype_name=":".join(path + [key]),
subtype=value["__schema__"],
)
return errors
p.setdefault("output_schema", []).extend(extra_output_schema)
except InvalidEngineError:
self.stderr.write(
"Skip processor {}: execution engine '{}' not supported".format(
slug, p["run"]["language"]
)
)
continue
# Validate if container image is allowed based on the configured pattern.
# NOTE: This validation happens here and is not deferred to executors because the idea
# is that this will be moved to a "container" requirement independent of the
# executor.
if hasattr(settings, "FLOW_CONTAINER_VALIDATE_IMAGE"):
try:
container_image = dict_dot(p, "requirements.executor.docker.image")
if not re.match(
settings.FLOW_CONTAINER_VALIDATE_IMAGE, container_image
):
self.stderr.write(
"Skip processor {}: container image does not match '{}'".format(
slug, settings.FLOW_CONTAINER_VALIDATE_IMAGE,
)
)
continue
except KeyError:
pass
version = p["version"]
int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)
# `latest version` is returned as `int` so it has to be compared to `int_version`
def descriptor(obj, path=""):
    """Return the descriptor of the given object.

    If ``path`` is specified, only the content on that path is
    returned.

    :param obj: either a hydrated ``dict`` representation of the object
        (carrying its descriptor under the ``"__descriptor"`` key) or an
        object exposing a ``descriptor`` attribute
    :param path: dotted path into the descriptor; an empty string selects
        the whole descriptor
    :return: the value at ``path``; ``list`` and ``dict`` values are
        serialized to a JSON string, other values are returned as-is
    :raises KeyError: if ``obj`` is a ``dict`` without a
        ``"__descriptor"`` key
    """
    if isinstance(obj, dict):
        # Current object is hydrated, so we need to get descriptor from
        # dict representation.
        desc = obj["__descriptor"]
    else:
        desc = obj.descriptor

    resp = dict_dot(desc, path)
    if isinstance(resp, (list, dict)):
        # Containers are serialized so the caller always receives a scalar.
        return json.dumps(resp)
    return resp
:param data: Queryset containing all data objects that need
to be migrated
:param from_state: Database model state
"""
if not self.default:
return
self.default.prepare(data, from_state)
for instance in data:
value = self.default.get_default_for(instance, from_state)
if not value and not self.schema.get("required", True):
continue
# Set default value.
container = getattr(instance, self.schema_type, {})
dict_dot(container, ".".join(self.field), value)
setattr(instance, self.schema_type, container)
instance.save()
try:
with open(file_path) as file_handler:
value = json.load(file_handler)
except json.JSONDecodeError:
with open(file_path) as file_handler:
content = file_handler.read()
content = content.rstrip()
raise ValidationError(
"Value of '{}' must be a valid JSON, current: {}".format(
name, content
)
)
existing_storage_pk = None
with suppress(KeyError):
existing_storage_pk = dict_dot(self._original_output, path)
if isinstance(existing_storage_pk, int):
self.storages.filter(pk=existing_storage_pk).update(json=value)
fields[name] = existing_storage_pk
else:
storage = self.storages.create(
name="Storage for data id {}".format(self.pk),
contributor=self.contributor,
json=value,
)
fields[name] = storage.pk
try:
# use get_X_value function
get_value_function = getattr(self, "get_{}_value".format(field), None)
if get_value_function:
setattr(document, field, get_value_function(obj))
continue
# use `mapping` dict
if field in self.mapping:
if callable(self.mapping[field]):
setattr(document, field, self.mapping[field](obj))
continue
try:
object_attr = dict_dot(obj, self.mapping[field])
except (KeyError, AttributeError):
object_attr = None
if callable(object_attr):
# use method on object
setattr(document, field, object_attr(obj))
else:
# use attribute on object
setattr(document, field, object_attr)
continue
# get value from the object
try:
object_value = dict_dot(obj, field)
setattr(document, field, object_value)
continue
)
d.process_error.append(
"No entity to annotate for process '{}' (handle_annotate)".format(
d.process.slug
)
)
d.status = Data.STATUS_ERROR
with suppress(Exception):
d.save(update_fields=["process_error", "status"])
report_failure()
return
for key, val in annotations.items():
logger.debug(__("Annotating entity {}: {} -> {}", d.entity, key, val))
dict_dot(d.entity.descriptor, key, val)
try:
d.entity.save()
async_to_sync(self._send_reply)(
obj, {ExecutorProtocol.RESULT: ExecutorProtocol.RESULT_OK}
)
except ValidationError as exc:
logger.error(
__(
"Validation error when saving Data object of process '{}' (handle_annotate):\n\n{}",
d.process.slug,
traceback.format_exc(),
),
extra={"data_id": data_id},
)
d.refresh_from_db()