Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
timeout=config.DEFAULT_TIMEOUT,
headers={},
):
if not endpoint:
endpoint = config.HUB_REST_ENDPOINT
request_url = urljoin(endpoint, relative_url)
headers["hub-cli-version"] = get_cli_version()
if (
"Authorization" not in headers
or headers["Authorization"] != self.auth_header
):
headers["Authorization"] = self.auth_header
try:
logger.debug("Sending: Headers {}, Json: {}".format(headers, json))
response = requests.request(
method,
request_url,
params=params,
data=data,
json=json,
headers=headers,
files=files,
timeout=timeout,
)
except requests.exceptions.ConnectionError as e:
logger.debug("Exception: {}".format(e, exc_info=True))
sys.exit("Connection error. Please retry or check your internet connection")
except requests.exceptions.Timeout as e:
logger.debug("Exception: {}".format(e, exc_info=True))
def chunkify(self, cloudpaths, requested_bbox, item):
    """Split *item* into one array per storage chunk.

    For each path in *cloudpaths*, intersect that chunk's bounding box
    with *requested_bbox* and copy the overlapping region of *item*
    into a chunk-shaped buffer.  Returns an iterator of
    (cloudpath, chunk) pairs, in the same order as *cloudpaths*.
    """
    prepared = []
    for cloudpath in cloudpaths:
        stored_bbox = Bbox.from_filename(cloudpath)
        overlap = Bbox.intersection(stored_bbox, requested_bbox)
        # Slices into the chunk buffer and into the source item, respectively.
        dest_slices = (overlap - stored_bbox.minpt).to_slices()
        src_slices = (overlap - requested_bbox.minpt).to_slices()
        if np.any(np.array(overlap.to_shape()) != np.array(self.chunk_shape)):
            # Write covers only part of the chunk: fetch the stored chunk
            # first so the voxels outside the overlap are preserved.
            logger.debug('Non aligned write')
            buffer_, _ = self.download_chunk(cloudpath)
        else:
            # Fully covered chunk — start from zeros, no download needed.
            buffer_ = np.zeros(shape=self.chunk_shape, dtype=self.dtype)
        buffer_.setflags(write=1)  # downloaded arrays may arrive read-only
        buffer_[dest_slices] = item[src_slices]
        prepared.append(buffer_)
    return zip(cloudpaths, prepared)
tensor_paths = [os.path.join(path, t) for t in self._tensors]
for tensor_path in tensor_paths:
fs.makedir(tensor_path)
tensor_meta = {
name: _preprocess_meta_before_save(t._meta)
for name, t in self._tensors.items()
}
count = self.count
try:
if count == -1:
count = self._store_unknown_sized_ds(fs, path)
else:
self._store_known_sized_ds(fs, path)
except Exception as e:
logger.debug(e)
raise PermissionException(tag)
for _, el in tensor_meta.items():
el["shape"] = (count,) + tuple(el["shape"][1:])
ds_meta = {"tensors": tensor_meta, "len": count}
with fs.open(os.path.join(path, "meta.json"), "w") as f:
f.write(json.dumps(ds_meta, indent=2, sort_keys=True))
return load(tag, creds)
def get(self, path):
    """Read the file at *path* (relative to self.bucket) as bytes.

    Returns the file contents, or None when the file cannot be read
    (missing file, permission error, ...).
    """
    full_path = os.path.join(self.bucket, path)
    try:
        with open(full_path, 'rb') as source:
            return source.read()
    except IOError as err:
        # Best-effort read: log at debug level and signal absence with None.
        logger.debug(err)
        return None
def set_token(cls, token):
    """Persist *token* to the token file at config.TOKEN_FILE_PATH."""
    message = "Putting the key {} into {}.".format(token, config.TOKEN_FILE_PATH)
    logger.debug(message)
    # Overwrite any previously stored token.
    with open(config.TOKEN_FILE_PATH, "w") as token_file:
        token_file.write(token)
def get_token(cls):
    """Return the token stored at config.TOKEN_FILE_PATH, or None if absent."""
    logger.debug("Getting token...")
    token_path = config.TOKEN_FILE_PATH
    if not os.path.exists(token_path):
        # No token has been saved yet.
        return None
    with open(token_path, "r") as token_file:
        token = token_file.read()
    logger.debug("Got the key {} from {}.".format(token, config.TOKEN_FILE_PATH))
    return token
access_key = username.strip()
if not password:
logger.debug("Prompting for Secret Key")
password = click.prompt('AWS Secret Access Key', type=str, hide_input=False)
secret_key = password.strip()
if not bucket:
logger.debug("Prompting for bucket name")
bucket = click.prompt('Bucket Name (e.g. company-name)', type=str, hide_input=False)
bucket = bucket.strip()
success, creds = Verify(access_key, secret_key).verify_aws(bucket)
if success:
StoreControlClient().save_config(creds)
logger.info("Login Successful.")
else:
logger.error("Login error, please try again")
"""
fs, path = _load_fs_and_path(tag, creds, session_creds=session_creds)
fs: fsspec.AbstractFileSystem = fs
path_2 = os.path.join(path, "meta.json")
if not fs.exists(path):
from hub.exceptions import DatasetNotFound
raise DatasetNotFound(tag)
with fs.open(path_2, "r") as f:
ds_meta = json.loads(f.read())
for name in ds_meta["tensors"]:
assert fs.exists(os.path.join(path, name))
if ds_meta["len"] == 0:
logger.warning("The dataset is empty (has 0 samples)")
return Dataset(
{
name: Tensor(
tmeta,
dask.array.from_array(
np.empty(shape=(0,) + tuple(tmeta["shape"][1:]), dtype="uint8"),
),
)
for name, tmeta in ds_meta["tensors"].items()
}
)
len_ = ds_meta["len"]
return Dataset(
{
name: Tensor(
tmeta,
def configure(username, password, bucket):
""" Logs in to Hub"""
logger.info("Please log in using your AWS credentials.")
if not username:
logger.debug("Prompting for Access Key")
username = click.prompt('AWS Access Key ID', type=str, hide_input=False)
access_key = username.strip()
if not password:
logger.debug("Prompting for Secret Key")
password = click.prompt('AWS Secret Access Key', type=str, hide_input=False)
secret_key = password.strip()
if not bucket:
logger.debug("Prompting for bucket name")
bucket = click.prompt('Bucket Name (e.g. company-name)', type=str, hide_input=False)
bucket = bucket.strip()
success, creds = Verify(access_key, secret_key).verify_aws(bucket)