blobs = layer.blobs
param = layer.convolution_param
ksize = _get_ksize(param)
stride = _get_stride(param)
pad = _get_pad(param)
num = _get_num(blobs[0])
channels = _get_channels(blobs[0])
n_in = channels * param.group
n_out = num
func = convolution_2d.Convolution2D(n_in, n_out, ksize, stride, pad,
                                    nobias=not param.bias_term)
func.W.data[...] = 0
# Copy the Caffe weights group by group: each group connects only a slice
# of the input channels to a slice of the output channels, so the blocks
# outside the diagonal stay zero.
part_size = len(blobs[0].data) // param.group
for i in six.moves.range(param.group):
    in_slice = slice(i * n_in // param.group,
                     (i + 1) * n_in // param.group)
    out_slice = slice(i * n_out // param.group,
                      (i + 1) * n_out // param.group)
    w = func.W.data[out_slice, in_slice]
    data = numpy.array(
        blobs[0].data[i * part_size:(i + 1) * part_size])
    w[:] = data.reshape(w.shape)
if param.bias_term:
    func.b.data[:] = blobs[1].data
with self.init_scope():
    setattr(self, layer.name, func)
self.forwards[layer.name] = _CallChildLink(self, layer.name)
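# A minimal, self-contained sketch of the grouped-weight copy above in plain
# NumPy (group, n_in, n_out, and the kernel sizes are made-up stand-ins for
# the Caffe layer parameters):
import numpy
import six

group, n_in, n_out, kh, kw = 2, 4, 6, 3, 3
flat = numpy.arange(n_out * (n_in // group) * kh * kw, dtype=numpy.float32)
W = numpy.zeros((n_out, n_in, kh, kw), dtype=numpy.float32)
part_size = len(flat) // group
for i in six.moves.range(group):
    in_slice = slice(i * n_in // group, (i + 1) * n_in // group)
    out_slice = slice(i * n_out // group, (i + 1) * n_out // group)
    w = W[out_slice, in_slice]          # a view into W, so writes stick
    w[:] = flat[i * part_size:(i + 1) * part_size].reshape(w.shape)
# Off-group blocks remain zero, matching func.W.data[...] = 0 above.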
# The DIA SDK isn't necessary for normal use, but can be used when e.g.
# compiling LLVM.
mergeTrees(os.path.join(unpack, "DIA SDK"), os.path.join(dest, "DIA SDK"))
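# mergeTrees is a helper defined elsewhere in this script; a hedged sketch of
# what such a recursive directory merge could look like (not the script's
# actual implementation):
import os
import shutil

def mergeTrees(src, dest):
    # Copy src into dest, merging with whatever is already there and
    # leaving existing files untouched.
    if not os.path.isdir(src):
        return
    os.makedirs(dest, exist_ok=True)
    for entry in os.listdir(src):
        s, d = os.path.join(src, entry), os.path.join(dest, entry)
        if os.path.isdir(s):
            mergeTrees(s, d)
        elif not os.path.exists(d):
            shutil.copy2(s, d)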
if __name__ == "__main__":
    parser = getArgsParser()
    args = parser.parse_args()
    lowercaseIgnores(args)
    packages = getPackages(getManifest(args))
    if args.print_version:
        sys.exit(0)
    if not args.accept_license:
        response = six.moves.input(
            "Do you accept the license at "
            + findPackage(packages, "Microsoft.VisualStudio.Product.BuildTools",
                          None)["localizedResources"][0]["license"]
            + " (yes/no)? ")
        while response != "yes" and response != "no":
            response = six.moves.input(
                "Do you accept the license? Answer \"yes\" or \"no\": ")
        if response == "no":
            sys.exit(0)
    setPackageSelection(args, packages)
    if args.list_components or args.list_workloads or args.list_packages:
        if args.list_components:
            listPackageType(packages, "Component")
        if args.list_workloads:
            listPackageType(packages, "Workload")
        if args.list_packages:
            listPackageType(packages, None)
        sys.exit(0)
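# six.moves.input maps to raw_input on Python 2 and input on Python 3, so the
# prompt loop above runs unchanged on both. A tiny standalone version of that
# pattern (the helper name is ours, not the script's):
import six

def ask_yes_no(prompt):
    response = six.moves.input(prompt + " (yes/no)? ")
    while response not in ("yes", "no"):
        response = six.moves.input("Please answer \"yes\" or \"no\": ")
    return response == "yes"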
"""
if ((not queues) or (not isinstance(queues, list)) or
        (not all(isinstance(x, QueueBase) for x in queues))):
    raise TypeError("A list of queues expected")
dtypes = queues[0].dtypes
if not all(dtypes == q.dtypes for q in queues[1:]):
    raise TypeError("Queues do not have matching component dtypes.")
names = queues[0].names
if not all(names == q.names for q in queues[1:]):
    raise TypeError("Queues do not have matching component names.")
queue_shapes = [q.shapes for q in queues]
reduced_shapes = [
    six.moves.reduce(_shape_common, s) for s in zip(*queue_shapes)
]
queue_refs = array_ops.stack([x.queue_ref for x in queues])
selected_queue = array_ops.gather(queue_refs, index)
return QueueBase(
    dtypes=dtypes,
    shapes=reduced_shapes,
    names=names,
    queue_ref=selected_queue)
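# six.moves.reduce is functools.reduce on Python 3 (and the builtin on
# Python 2). A minimal illustration of folding per-component shapes the way
# the code above does, with an invented stand-in for the private
# _shape_common helper:
import six

def shape_common(a, b):
    # Keep a dimension only where the queues agree; None means "unknown".
    return tuple(x if x == y else None for x, y in zip(a, b))

queue_shapes = [[(32, 4)], [(32, 8)]]   # one component per queue
reduced = [six.moves.reduce(shape_common, s) for s in zip(*queue_shapes)]
print(reduced)  # [(32, None)]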
def compress(path, output):
    with np.load(path, mmap_mode="r") as data:
        images = data["images"]
        array = []
        for ii in tqdm(six.moves.xrange(images.shape[0]), desc='compress'):
            im = images[ii]
            im_str = cv2.imencode('.png', im)[1]
            array.append(im_str)
    with open(output, 'wb') as f:
        pkl.dump(array, f, protocol=pkl.HIGHEST_PROTOCOL)
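# The encoded buffers can be read back with cv2.imdecode. A hypothetical
# counterpart to compress() above (the decompress name is ours; the import
# aliases mirror the snippet's):
import pickle as pkl
import cv2

def decompress(path):
    with open(path, 'rb') as f:
        array = pkl.load(f)
    # cv2.imencode returns a 1-D uint8 buffer; imdecode reverses it.
    return [cv2.imdecode(buf, cv2.IMREAD_UNCHANGED) for buf in array]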
def update_trackerlist_from_url(self):
    if self.config["dynamic_trackerlist_url"]:
        now = datetime.datetime.utcnow()
        last_update = datetime.datetime.utcfromtimestamp(
            self.config["last_dynamic_trackers_update"])
        if now - last_update > datetime.timedelta(
                days=self.config["dynamic_trackers_update_interval"]):
            try:
                headers = {
                    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:62.0) Gecko/20100101 Firefox/62.0',
                    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
                    'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
                    'Accept-Encoding': 'none',
                    'Accept-Language': 'en-US,en;q=0.8',
                }
                req = six.moves.urllib.request.Request(
                    self.config["dynamic_trackerlist_url"], headers=headers)
                try:
                    page = six.moves.urllib.request.urlopen(
                        req, context=ssl._create_unverified_context()).read()
                except TypeError:
                    # Older Python versions lack the "context" argument.
                    page = six.moves.urllib.request.urlopen(req).read()
                # Raw bytes pattern avoids invalid-escape warnings for \w.
                new_trackers = [
                    six.ensure_str(url)
                    for url in re.findall(br'\w+://[\w\-.:/]+', page)
                    if is_url(six.ensure_text(url))
                ]
                if new_trackers:
                    # Replace all existing trackers.
                    self.config["trackers"] = []
                    for new_tracker in new_trackers:
                        self.config["trackers"].append({"url": new_tracker})
                    self.config["last_dynamic_trackers_update"] = time.mktime(
                        now.timetuple())
            except Exception:
                traceback.print_exc()
    return self.config.config
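# six.moves.urllib mirrors the Python 3 urllib package layout (request,
# parse, error) on both majors. A compact standalone version of the fetch
# pattern above, with a placeholder URL:
import six

req = six.moves.urllib.request.Request(
    "http://example.com/trackers.txt",
    headers={"User-Agent": "Mozilla/5.0"})
# page = six.moves.urllib.request.urlopen(req, timeout=10).read()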
def uri_to_file(uri):
    """
    >>> print(uri_to_file('file:///home/user/proj/%40types.js'))
    /home/user/proj/@types.js
    >>> print(uri_to_file('http://example.com'))
    None
    """
    if uri.startswith('file://'):
        f = drop_prefix(uri, 'file://')
        return six.moves.urllib.parse.unquote(f)
    else:
        return None
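# drop_prefix is assumed to be a project helper; a one-liner that would
# satisfy the doctest above:
def drop_prefix(s, prefix):
    # Strip prefix from the front of s; the caller has already checked it.
    return s[len(prefix):] if s.startswith(prefix) else s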
if args.initmodel:
    print('Load model from', args.initmodel)
    serializers.load_hdf5(args.initmodel, model)
if args.resume:
    print('Load optimizer state from', args.resume)
    serializers.load_hdf5(args.resume, optimizer)

# Learning loop
for epoch in six.moves.range(1, n_epoch + 1):
    print('epoch', epoch)

    # Training: visit the data in a fresh random order each epoch.
    perm = np.random.permutation(N)
    sum_accuracy = 0
    sum_loss = 0
    for i in six.moves.range(0, N, batchsize):
        x = chainer.Variable(xp.asarray(x_train[perm[i:i + batchsize]]))
        t = chainer.Variable(xp.asarray(y_train[perm[i:i + batchsize]]))

        # Pass the loss function (Classifier defines it) and its arguments
        optimizer.update(model, x, t)

        if epoch == 1 and i == 0:
            with open('graph.dot', 'w') as o:
                g = computational_graph.build_computational_graph(
                    (model.loss, ), remove_split=True)
                o.write(g.dump())
            print('graph generated')

        sum_loss += float(model.loss.data) * len(t.data)
        sum_accuracy += float(model.accuracy.data) * len(t.data)
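
    # Hedged sketch, not part of the original excerpt: the matching
    # evaluation pass would mirror the loop above without optimizer.update;
    # x_test, y_test, and N_test are assumed counterparts of the training
    # arrays.
    print('train mean loss={}, accuracy={}'.format(
        sum_loss / N, sum_accuracy / N))
    for i in six.moves.range(0, N_test, batchsize):
        x = chainer.Variable(xp.asarray(x_test[i:i + batchsize]))
        t = chainer.Variable(xp.asarray(y_test[i:i + batchsize]))
        model(x, t)  # forward only: fills model.loss and model.accuracy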
if out_h is None:
    out_h = get_conv_outsize(h, kh, sy, ph, cover_all, dy)
assert out_h > 0, 'Height in the output should be positive.'
if out_w is None:
    out_w = get_conv_outsize(w, kw, sx, pw, cover_all, dx)
assert out_w > 0, 'Width in the output should be positive.'

img = numpy.pad(img,
                ((0, 0), (0, 0), (ph, ph + sy - 1), (pw, pw + sx - 1)),
                mode='constant', constant_values=(pval,))
col = numpy.ndarray((n, c, kh, kw, out_h, out_w), dtype=img.dtype)

for j in six.moves.range(kh):
    jdy = j * dy
    j_lim = jdy + sy * out_h
    for i in six.moves.range(kw):
        idx = i * dx
        i_lim = idx + sx * out_w
        col[:, :, j, i, :, :] = img[:, :, jdy:j_lim:sy, idx:i_lim:sx]
return col
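# Quick shape check for the im2col routine above, assuming Chainer's
# im2col_cpu signature (values invented for illustration): a single 5x5
# image with a 3x3 kernel, stride 1, and no padding yields 3x3 patch
# positions.
import numpy

img = numpy.arange(25, dtype=numpy.float32).reshape(1, 1, 5, 5)
col = im2col_cpu(img, 3, 3, 1, 1, 0, 0)
print(col.shape)  # (1, 1, 3, 3, 3, 3) -> (n, c, kh, kw, out_h, out_w)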
def _parseConfigFile(path):
    config = six.moves.configparser.ConfigParser(allow_no_value=True)
    params = ['username', 'password', 'api_key', 'api_url', 'scheme', 'host',
              'port', 'api_root', 'no_ssl_verify', 'certificate']
    data = {}
    try:
        with open(path, 'r') as configFile:
            config.readfp(configFile)  # readfp keeps Python 2 compatibility
        for key in params:
            try:
                value = config.get('CLI', key)
                if value:
                    data[key] = value
            except six.moves.configparser.NoOptionError:
                continue
        return data
    except six.moves.configparser.NoSectionError:
        # No [CLI] section at all: nothing to report.
        return []
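# Hedged usage sketch: write a minimal [CLI] section and parse it back (key
# names come from the params list above; values are invented).
import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.ini', delete=False) as tmp:
    tmp.write('[CLI]\nusername = alice\nport = 8080\n')
print(_parseConfigFile(tmp.name))  # {'username': 'alice', 'port': '8080'}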
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import six
http_client = six.moves.http_client
__all__ = ['HTTP_SUCCESS', 'parse_content_type_header']
HTTP_SUCCESS = [
http_client.OK,
http_client.CREATED,
http_client.ACCEPTED,
http_client.NON_AUTHORITATIVE_INFORMATION,
http_client.NO_CONTENT,
http_client.RESET_CONTENT,
http_client.PARTIAL_CONTENT,
http_client.MULTI_STATUS,
http_client.IM_USED,
]
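# parse_content_type_header is exported in __all__ above but not shown in
# this excerpt; a plausible sketch of such a helper (not the library's
# actual code):
def parse_content_type_header(value):
    # Split "text/html; charset=utf-8" into the media type and a dict of
    # parameters.
    parts = value.split(';')
    media_type = parts[0].strip().lower()
    params = {}
    for item in parts[1:]:
        if '=' in item:
            key, _, val = item.partition('=')
            params[key.strip().lower()] = val.strip().strip('"')
    return media_type, params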