Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
o.parseOptions(argv[1:])
except UsageError as e:
raise SystemExit(e)
docs = freeze(list(safe_load_all(stdin)))
if o["git-tag"] is not None:
tag = check_output(["git", "rev-parse", "--short", o["git-tag"]]).strip()
docs = rewrite_tags(docs, tag)
elif o["tag"] is not None:
docs = rewrite_tags(docs, o["tag"])
if o["no-volumes"]:
docs = stub_all_volumes(docs)
stdout.write(safe_dump_all(thaw(docs)))
"""
Updates the cache, adding servers, with a flag if autoscale is active on
each one. All arguments after ``now`` are resources specific to
``launch_server`` config that are used by that planner. Here we only cache
servers that are in desired LBs since as it is needed by REST API and
ignore ``lbs``.
:param group: scaling group
:param list servers: list of NovaServer objects
:param list lb_nodes: list of CLBNode objects
:param dict lbs: load balancer objects keyed on ID (currently ignored)
:param include_deleted: Include deleted servers in cache. Defaults to True.
"""
server_dicts = []
for server in servers:
sd = thaw(server.json)
if is_autoscale_active(server, lb_nodes):
sd["_is_as_active"] = True
if server.state != ServerState.DELETED or include_deleted:
server_dicts.append(sd)
return Effect(
UpdateServersCache(group.tenant_id, group.uuid, now, server_dicts))
"""
Return a dataset dict which conforms to
``/v1/endpoints.json#/definitions/configuration_datasets_array``
:param Dataset dataset: A dataset present in the cluster.
:param UUID node_uuid: UUID of the primary node for the
`dataset`.
:return: A ``dict`` containing the dataset information and the
hostname of the primary node, conforming to
``/v1/endpoints.json#/definitions/configuration_datasets_array``.
"""
result = dict(
dataset_id=dataset.dataset_id,
deleted=dataset.deleted,
primary=unicode(node_uuid),
metadata=thaw(dataset.metadata)
)
if dataset.maximum_size is not None:
result[u'maximum_size'] = dataset.maximum_size
return result
def write(self, event):
    """Append *event* to ``self.output_file`` as a single JSON line.

    The (pyrsistent) event is thawed to plain Python containers first;
    ``json_serializer`` is used as the fallback encoder for values the
    JSON module cannot serialize natively.
    """
    line = json.dumps(thaw(event), default=json_serializer)
    with open(self.output_file, 'a+') as out:
        out.write(line + "\n")
name='disk',
type='SCALAR',
role=role,
scalar=addict.Dict(value=task_config.disk)
),
addict.Dict(
name='gpus',
type='SCALAR',
role=role,
scalar=addict.Dict(value=task_config.gpus)
),
addict.Dict(
name='ports',
type='RANGES',
role=role,
ranges=addict.Dict(range=thaw(task_config.ports)),
),
def to_document(self):
    """Serialize this specification to a JSON-compatible object representing a
    Swagger specification.

    Each pyrsistent attribute is thawed back into builtin containers so
    the result can be handed directly to a JSON encoder.
    """
    field_names = (
        'info', 'paths', 'definitions',
        'securityDefinitions', 'security', 'swagger',
    )
    return {name: thaw(getattr(self, name)) for name in field_names}
def make_mesos_container_info(task_config: MesosTaskConfig) -> addict.Dict:
    """Build the Mesos ContainerInfo structure for *task_config*.

    Fills in containerizer-specific fields depending on whether the task
    uses the Docker containerizer ('DOCKER') or the UCR ('MESOS'), and maps
    the task's first configured host port onto container port 8888.

    NOTE(review): within this chunk the function never returns
    ``container_info`` and the 'MESOS' branch sets no image — the image
    setup and the ``return`` presumably follow below this view; confirm
    against the full file.
    """
    container_info = addict.Dict(
        type=task_config.containerizer,
        volumes=thaw(task_config.volumes),
    )
    # Expose container port 8888 on the first offered host port.
    # assumes task_config.ports is a non-empty sequence of range dicts
    # with a 'begin' key — TODO confirm against the offer-matching code.
    port_mappings = [addict.Dict(
        host_port=task_config.ports[0]['begin'], container_port=8888)]
    if container_info.type == 'DOCKER':
        container_info.docker = addict.Dict(
            image=task_config.image,
            network='BRIDGE',
            port_mappings=port_mappings,
            parameters=thaw(task_config.docker_parameters),
            # Re-pull the image unless the task opted into the cached copy.
            force_pull_image=(not task_config.use_cached_image),
        )
    elif container_info.type == 'MESOS':
        container_info.network_infos = addict.Dict(port_mappings=port_mappings)
        # For this to work, image_providers needs to be set to 'docker' on mesos agents (as opposed
        # to 'appc' or 'oci'; we're still running docker images, we're just
        # using the UCR to do it).
def to_document(self):
    """Serialize this specification to a JSON-compatible object representing a
    Swagger specification.
    """
    document = {}
    document['info'] = thaw(self.info)
    document['paths'] = thaw(self.paths)
    document['definitions'] = thaw(self.definitions)
    document['securityDefinitions'] = thaw(self.securityDefinitions)
    document['security'] = thaw(self.security)
    document['swagger'] = thaw(self.swagger)
    return document
url = "/studip/dispatch.php/"
if self._file:
url += "file/details/%s?cid=%s" % (self._file["id"], self._course["course_id"])
elif self._folder:
url += "course/files/index/%s?cid=%s" % (self._folder["id"], self._course["course_id"])
elif self._course:
url += "course/files?cid=%s" % (self._course["course_id"])
elif self._semester:
url += "my_courses/set_semester?sem_select=%s" % (self._semester["id"])
else:
url += "my_courses"
xattrs["url"] = self.session.studip_url(url)
xattrs["json"] = json.dumps({
"semester": thaw(self._semester),
"course": thaw(self._course),
"folder": thaw(self._folder),
"file": thaw(self._file)
})
return xattrs