pass

version = p["version"]
int_version = convert_version_string_to_int(version, VERSION_NUMBER_BITS)

# `latest_version` is returned as an `int`, so it has to be compared to `int_version`
latest_version = Process.objects.filter(slug=slug).aggregate(
    Max("version")
)["version__max"]
if latest_version is not None and latest_version > int_version:
    self.stderr.write(
        "Skip processor {}: newer version installed".format(slug)
    )
    continue

previous_process_qs = Process.objects.filter(slug=slug)
if previous_process_qs.exists():
    previous_process = previous_process_qs.latest()
else:
    previous_process = None

process_query = Process.objects.filter(slug=slug, version=version)
if process_query.exists():
    if not force:
        if verbosity > 0:
            self.stdout.write(
                "Skip processor {}: same version installed".format(slug)
            )
        continue

    process_query.update(**p)
    log_processors.append("Updated {}".format(slug))
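
# NOTE (illustrative sketch, not the project's actual helper): the comparison
# above works because `convert_version_string_to_int` packs a dotted version
# string into a single integer that is stored in the database. One plausible
# packing scheme, assuming VERSION_NUMBER_BITS lists the bit width reserved for
# each version component, is:
def convert_version_string_to_int(version, bits):
    """Pack a dotted version such as "1.2.3" into one comparable integer."""
    result = 0
    for part, width in zip(version.split("."), bits):
        result = (result << width) | int(part)
    return result

# Example: with bits = (8, 10, 14), "1.0.2" packs to (1 << 24) | 2 = 16777218,
# so newer versions always compare greater as plain integers.
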
        data_id,
    ),
    extra={"data_id": data_id},
)

try:
    # This transaction is needed because we're running
    # asynchronously with respect to the main Django code
    # here; the manager can get nudged from elsewhere.
    with transaction.atomic():
        parent_data = Data.objects.get(pk=data_id)

        # Spawn processes.
        for d in obj[ExecutorProtocol.FINISH_SPAWN_PROCESSES]:
            d["contributor"] = parent_data.contributor
            d["process"] = Process.objects.filter(
                slug=d["process"]
            ).latest()
            d["tags"] = parent_data.tags
            d["collection"] = parent_data.collection
            d["subprocess_parent"] = parent_data

            for field_schema, fields in iterate_fields(
                d.get("input", {}), d["process"].input_schema
            ):
                type_ = field_schema["type"]
                name = field_schema["name"]
                value = fields[name]

                if type_ == "basic:file:":
                    fields[name] = self.hydrate_spawned_files(
                        exported_files_mapper, value, data_id
                    )
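
# NOTE (simplified sketch, not the project's helper): the loop above assumes
# `iterate_fields` pairs each submitted input value with its schema entry and
# yields `(field_schema, fields)` tuples, so that file values can be rewritten
# in place. A stand-in that captures only that contract:
def iterate_fields_sketch(values, schema):
    """Yield (field_schema, container) for every schema field present in values."""
    schema_by_name = {entry["name"]: entry for entry in schema}
    for name in values:
        if name in schema_by_name:
            yield schema_by_name[name], values

# Replacing `fields[name]` therefore mutates the spawned process's input
# dictionary directly before the Data object is created.
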
for collection_id in collections:
    try:
        collection = Collection.objects.get(pk=collection_id)
    except Collection.DoesNotExist:
        return Response(
            {'collections': ['Invalid pk "{}" - object does not exist.'.format(collection_id)]},
            status=status.HTTP_400_BAD_REQUEST)

    if not request.user.has_perm('add_collection', obj=collection):
        if request.user.is_authenticated():
            raise exceptions.PermissionDenied
        else:
            raise exceptions.NotFound

# translate the process's slug to its id
process_slug = request.data.get('process', None)
process_query = Process.objects.filter(slug=process_slug)
process_query = get_objects_for_user(request.user, 'view_process', process_query)
try:
    process = process_query.latest()
except Process.DoesNotExist:
    return Response(
        {'process': ['Invalid process slug "{}" - object does not exist.'.format(process_slug)]},
        status=status.HTTP_400_BAD_REQUEST)

request.data['process'] = process.pk

# perform "get_or_create" if requested - return existing object if found
if kwargs.pop('get_or_create', False):
    process_input = request.data.get('input', {})

    # use default values if they are not given
    for field_schema, fields, path in iterate_schema(process_input, process.input_schema):
        if 'default' in field_schema and field_schema['name'] not in fields:
def retire(self, process_schemas):
    """Retire obsolete processes.

    Remove old process versions without data. Find processes that have been
    registered but do not exist in the code anymore, then:

    - If they do not have data: remove them
    - If they have data: flag them not active (``is_active=False``)

    """
    process_slugs = set(ps["slug"] for ps in process_schemas)

    # Processes that are in DB but not in the code
    retired_processes = Process.objects.filter(~Q(slug__in=process_slugs))

    # Remove retired processes which do not have data
    retired_processes.filter(data__exact=None).delete()

    # Remove non-latest processes which do not have data
    latest_version_processes = Process.objects.order_by(
        "slug", "-version"
    ).distinct("slug")
    Process.objects.filter(data__exact=None).difference(
        latest_version_processes
    ).delete()

    # Deactivate retired processes which have data
    retired_processes.update(is_active=False)
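
# NOTE (worked example, the rows are hypothetical): the "non-latest" query above
# relies on PostgreSQL's DISTINCT ON -- ordering by ("slug", "-version") and then
# calling .distinct("slug") keeps only the first, i.e. highest-version, row per
# slug:
#
#   rows after order_by("slug", "-version"):
#       ("alignment-bwa", 3), ("alignment-bwa", 2), ("fastqc", 1)
#   rows after distinct("slug"):
#       ("alignment-bwa", 3), ("fastqc", 1)
#
# Subtracting that set with .difference() leaves every superseded version, and
# only those without Data objects are deleted.
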
relation = self.get_object()

for entity_id in request.data['ids']:
    PositionInRelation.objects.filter(relation=relation, entity=entity_id).delete()

return Response()
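
# NOTE (assumed usage, not from the original file): the handler above reads
# `request.data['ids']`, so the client is expected to send the primary keys of
# the entities to detach from the relation, e.g. a payload of the shape
#
#   {"ids": [12, 15]}
#
# The concrete endpoint URL is not shown in this snippet; only the payload shape
# is implied by the code.
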
class ProcessViewSet(ResolweCreateModelMixin,
                     mixins.RetrieveModelMixin,
                     mixins.ListModelMixin,
                     ResolweProcessPermissionsMixin,
                     ResolweCheckSlugMixin,
                     viewsets.GenericViewSet):
    """API view for :class:`Process` objects."""

    queryset = Process.objects.all().prefetch_related('contributor')
    serializer_class = ProcessSerializer
    permission_classes = (permissions_cls,)
    filter_class = ProcessFilter
    ordering_fields = ('id', 'created', 'modified', 'name', 'version')
    ordering = ('id',)

class DataViewSet(ResolweCreateDataModelMixin,
                  mixins.RetrieveModelMixin,
                  ResolweUpdateModelMixin,
                  mixins.DestroyModelMixin,
                  mixins.ListModelMixin,
                  ResolwePermissionsMixin,
                  ResolweCheckSlugMixin,
                  viewsets.GenericViewSet):
    """API view for :class:`Data` objects."""

    # process.modified =
    process.save()

    # copy permissions from the latest process
    for user, perms in six.iteritems(get_users_with_perms(latest, attach_perms=True)):
        for perm in perms:
            assign_perm(perm, user, process)
    for group, perms in six.iteritems(get_groups_with_perms(latest, attach_perms=True)):
        for perm in perms:
            assign_perm(perm, group, process)
else:
    # Create a dummy processor if there is no other version
    dummy_name = 'Dummy processor of type {}'.format(data[u'type'])
    try:
        process = Process.objects.get(name=dummy_name)
    except Process.DoesNotExist:
        process = Process.objects.create(
            name=dummy_name,
            slug='non-existent',
            contributor=get_user_model().objects.filter(is_superuser=True).first(),
            type=data[u'type'],
            category='data:non-existent',
            run={'script': 'gen-require common\ngen-error "This processor is not intended to be run."'},
        )

# DATA #########################################################
new = Data()
new.name = data.get(u'static', {}).get(u'name', '')
if len(new.name) > 100:
    self.long_names.append(new.name)
    new.name = new.name[:97] + '...'
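
# NOTE: the truncation above keeps names within the 100-character limit -- 97
# retained characters plus the three-character "..." suffix give exactly 100.
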
from resolwe.permissions.mixins import ResolwePermissionsMixin

from .mixins import ResolweCheckSlugMixin, ResolweCreateModelMixin


class ProcessViewSet(
    ResolweCreateModelMixin,
    mixins.RetrieveModelMixin,
    mixins.ListModelMixin,
    ResolwePermissionsMixin,
    ResolweCheckSlugMixin,
    viewsets.GenericViewSet,
):
    """API view for :class:`Process` objects."""

    queryset = Process.objects.all().select_related("contributor")
    serializer_class = ProcessSerializer
    permission_classes = (get_permissions_class(),)
    filterset_class = ProcessFilter
    ordering_fields = ("id", "created", "modified", "name", "version")
    ordering = ("id",)

    def create(self, request, *args, **kwargs):
        """Only superusers can create new processes."""
        if not request.user.is_superuser:
            raise exceptions.NotFound

        return super().create(request, *args, **kwargs)
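
# NOTE (illustrative test sketch, not from the original file): the override above
# makes process creation superuser-only and deliberately hides the endpoint from
# everyone else (404 instead of 403). Assuming standard DRF routing under
# /api/process/, a test of that contract could look roughly like this:
from django.contrib.auth import get_user_model
from rest_framework import status
from rest_framework.test import APITestCase


class ProcessCreatePermissionTest(APITestCase):
    def setUp(self):
        # Hypothetical non-superuser fixture.
        self.user = get_user_model().objects.create_user(username="ordinary")

    def test_regular_user_gets_404(self):
        self.client.force_authenticate(self.user)
        response = self.client.post("/api/process/", {"name": "Test", "slug": "test"})
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
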
qs_descriptor_schema = DescriptorSchema.objects.select_related("contributor")

qs_entity_col_ds = DescriptorSchema.objects.select_related("contributor")
qs_entity_col = Collection.objects.select_related("contributor")
qs_entity_col = qs_entity_col.prefetch_related(
    "data", "entity_set", Prefetch("descriptor_schema", queryset=qs_entity_col_ds),
)

qs_entity_ds = DescriptorSchema.objects.select_related("contributor")
qs_entity = Entity.objects.select_related("contributor")
qs_entity = qs_entity.prefetch_related(
    "data",
    Prefetch("collection", queryset=qs_entity_col),
    Prefetch("descriptor_schema", queryset=qs_entity_ds),
)

qs_process = Process.objects.select_related("contributor")

# `qs_collection` is defined earlier and is not shown in this snippet.
queryset = Data.objects.select_related("contributor").prefetch_related(
    Prefetch("collection", queryset=qs_collection),
    Prefetch("descriptor_schema", queryset=qs_descriptor_schema),
    Prefetch("entity", queryset=qs_entity),
    Prefetch("process", queryset=qs_process),
)

serializer_class = DataSerializer
filter_class = DataFilter
permission_classes = (get_permissions_class(),)
ordering_fields = (
    "contributor",
    "contributor__first_name",
    "contributor__last_name",
    "created",