# h5py filters.py: validating the 'szip' compression options. The snippet
# begins mid-chain; the truncated range check is restored from the h5py source.
elif compression == 'szip':
    szmethod, szpix = compression_opts
    err = "SZIP options must be a 2-tuple ('ec'|'nn', even integer 0-32)"
    if szmethod not in ('ec', 'nn'):
        raise ValueError(err)
    if not (0 < szpix <= 32 and szpix % 2 == 0):
        raise ValueError(err)
elif compression_opts is not None:
    # Can't specify just compression_opts by itself.
    raise TypeError("Compression method must be specified")
# End argument validation
# Chunking is required for all filtered datasets; guess a shape if needed
if (chunks is True) or \
   (chunks is None and any((shuffle, fletcher32, compression, maxshape))):
    chunks = guess_chunk(shape, maxshape, dtype.itemsize)
if maxshape is True:
    maxshape = (None,)*len(shape)

plist = h5p.create(h5p.DATASET_CREATE)
if chunks is not None:
    plist.set_chunk(chunks)
    plist.set_fill_time(h5d.FILL_TIME_ALLOC)  # prevent resize glitch

# MUST be first, to prevent 1.6/1.8 compatibility glitch
if fletcher32:
    plist.set_fletcher32()
if shuffle:
    plist.set_shuffle()

if compression == 'gzip':
    plist.set_deflate(gzip_level)
elif compression == 'lzf':
    plist.set_filter(h5z.FILTER_LZF, h5z.FLAG_OPTIONAL)
elif compression == 'szip':
    # Branch body restored from the h5py source; the snippet was cut here
    opts = {'ec': h5z.SZIP_EC_OPTION_MASK, 'nn': h5z.SZIP_NN_OPTION_MASK}
    plist.set_szip(opts[szmethod], szpix)
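
For context, a minimal sketch of the high-level h5py call that this DCPL-building logic backs; the file name, dataset name, and parameter values are assumptions, not part of the snippet above.

import h5py
import numpy as np

with h5py.File("example.h5", "w") as f:
    dset = f.create_dataset(
        "data", shape=(1000, 1000), dtype="f4",
        chunks=True,          # auto-chunked, as with guess_chunk above
        compression="gzip",   # -> plist.set_deflate(gzip_level)
        compression_opts=4,   # gzip level 0-9
        shuffle=True,         # -> plist.set_shuffle()
        fletcher32=True,      # -> plist.set_fletcher32()
    )
    dset[:] = np.random.rand(1000, 1000).astype("f4")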
def default_lapl():
    """ Default link access property list """
    lapl = h5p.create(h5p.LINK_ACCESS)
    fapl = h5p.create(h5p.FILE_ACCESS)
    fapl.set_fclose_degree(h5f.CLOSE_STRONG)
    lapl.set_elink_fapl(fapl)
    return lapl
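
CLOSE_STRONG keeps a file alive until every object opened through it is closed, and set_elink_fapl applies that policy to files reached via external links. A minimal usage sketch with the low-level API; the file and link names here are made up:

import h5py
from h5py import h5f, h5o

fid = h5f.open(b"parent.h5", h5f.ACC_RDONLY)   # hypothetical file
lapl = default_lapl()
# Objects reached through external links are opened with the strong-close FAPL
obj = h5o.open(fid, b"linked_group", lapl=lapl)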
class Group(HLObject, MutableMappingHDF5):

    """ Represents an HDF5 group.
    """

    def __init__(self, bind):
        """ Create a new Group object by binding to a low-level GroupID.
        """
        with phil:
            if not isinstance(bind, h5g.GroupID):
                raise ValueError("%s is not a GroupID" % bind)
            super(Group, self).__init__(bind)

_gcpl_crt_order = h5p.create(h5p.GROUP_CREATE)
_gcpl_crt_order.set_link_creation_order(
    h5p.CRT_ORDER_TRACKED | h5p.CRT_ORDER_INDEXED)
_gcpl_crt_order.set_attr_creation_order(
    h5p.CRT_ORDER_TRACKED | h5p.CRT_ORDER_INDEXED)
def create_group(self, name, track_order=None):
    """ Create and return a new subgroup.

    Name may be absolute or relative.  Fails if the target name already
    exists.

    track_order
        Track dataset/group/attribute creation order under this group
        if True. If None use global default h5.get_config().track_order.
    """
    # Body restored from the h5py source; the snippet was cut after the
    # docstring.
    if track_order is None:
        track_order = h5.get_config().track_order
    with phil:
        name, lcpl = self._e(name, lcpl=True)
        gcpl = Group._gcpl_crt_order if track_order else None
        gid = h5g.create(self.id, name, lcpl=lcpl, gcpl=gcpl)
        return Group(gid)
def make_fapl(driver, libver, rdcc_nslots, rdcc_nbytes, rdcc_w0, **kwds):
    """ Set up a file access property list """
    plist = h5p.create(h5p.FILE_ACCESS)

    if libver is not None:
        if libver in libver_dict:
            low = libver_dict[libver]
            high = h5f.LIBVER_LATEST
        else:
            low, high = (libver_dict[x] for x in libver)
    else:
        # we default to earliest
        low, high = h5f.LIBVER_EARLIEST, h5f.LIBVER_LATEST
    plist.set_libver_bounds(low, high)

    cache_settings = list(plist.get_cache())
    if rdcc_nslots is not None:
        cache_settings[1] = rdcc_nslots
    if rdcc_nbytes is not None:
        # Remaining cache handling restored from the h5py source; the
        # snippet was cut mid-branch.
        cache_settings[2] = rdcc_nbytes
    if rdcc_w0 is not None:
        cache_settings[3] = rdcc_w0
    plist.set_cache(*cache_settings)
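
These rdcc_* values are normally passed straight through h5py.File; a minimal sketch, with file name and values assumed:

import h5py

f = h5py.File("big.h5", "r",
              rdcc_nbytes=64 * 1024**2,  # 64 MiB raw-data chunk cache
              rdcc_nslots=1000003,       # a prime number of hash slots
              rdcc_w0=0.75)              # chunk eviction policy weight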
# The next snippet begins mid-docstring; the enclosing signature below is
# inferred from the body and is an assumption, not the original code.
def ensure_hdf5_dataset(fname, dsetname, shape, dtype, create=False):
    """Open (or create) an HDF5 dataset and return its (offset, size).

    Returns
    -------
    state : (int, int)
        Offset of the dataset into the file, and
        size of the dataset in bytes.
    """
    # Create/open file
    with h5py.File(fname, 'a' if create else 'r') as f:

        # If the dataset does not exist, create it (when asked to)
        if dsetname not in f:
            if create:
                # Create dataspace and HDF5 datatype
                sp = h5py.h5s.create_simple(shape, shape)
                tp = h5py.h5t.py_create(dtype)

                # Create a new plist and tell it to allocate the space for the
                # dataset immediately, but don't fill the file with zeros.
                plist = h5py.h5p.create(h5py.h5p.DATASET_CREATE)
                plist.set_alloc_time(h5py.h5d.ALLOC_TIME_EARLY)
                plist.set_fill_time(h5py.h5d.FILL_TIME_NEVER)

                # Create the dataset (the low-level API expects a bytes name
                # on Python 3)
                dset = h5py.h5d.create(f.id, dsetname, tp, sp, plist)

                # Get the offset of the dataset into the file.
                state = dset.get_offset(), dset.get_storage_size()
            else:
                raise core.ScalapackException("Dataset does not exist.")

        # If the dataset does exist, check that it is suitable, and return
        # its info
        else:
            dataset = f[dsetname]
            dset = dataset.id

            # Check to ensure the dataset is not chunked (chunked storage
            # has no single contiguous offset). The lines below are a sketch
            # restoring the point where the snippet is truncated.
            if dataset.chunks is not None:
                raise core.ScalapackException("Dataset is chunked.")
            state = dset.get_offset(), dset.get_storage_size()
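
One plausible use of the returned (offset, size), and the reason the code insists on contiguous, early-allocated storage, is mapping the raw dataset bytes directly, bypassing the HDF5 library. A sketch under those assumptions:

import numpy as np

# offset/size as returned by the function above, for the same fname/shape/dtype
offset, size = state
data = np.memmap(fname, dtype=dtype, mode='r', offset=offset, shape=shape)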
def default_lcpl():
    """ Default link creation property list """
    lcpl = h5p.create(h5p.LINK_CREATE)
    lcpl.set_create_intermediate_group(True)
    return lcpl
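
set_create_intermediate_group(True) is what lets a single call create a whole path of nested groups; a quick high-level sketch (file name assumed):

import h5py

with h5py.File("nested.h5", "w") as f:
    # Creates 'a' and 'a/b' automatically on the way to 'a/b/c'
    grp = f.create_group("a/b/c")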
def __init__(self, filename, X=None, topo_view=None, y=None,
             load_all=False, cache_size=None, **kwargs):
    self.load_all = load_all
    if h5py is None:
        raise RuntimeError("Could not import h5py.")

    if cache_size:
        # Enlarge the raw-data chunk cache (slot 2 of the cache tuple is
        # rdcc_nbytes) before opening the file through the low-level API.
        propfaid = h5py.h5p.create(h5py.h5p.FILE_ACCESS)
        settings = list(propfaid.get_cache())
        settings[2] = cache_size
        propfaid.set_cache(*settings)
        fid = h5py.h5f.open(filename, fapl=propfaid)
        self._file = h5py.File(fid)
    else:
        self._file = h5py.File(filename)

    if X is not None:
        X = self.get_dataset(X, load_all)
    if topo_view is not None:
        topo_view = self.get_dataset(topo_view, load_all)
    if y is not None:
        y = self.get_dataset(y, load_all)

    super(HDF5Dataset, self).__init__(X=X, topo_view=topo_view, y=y,
                                      **kwargs)
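
A minimal usage sketch, assuming a pylearn2-style HDF5 file whose design matrix lives under 'X' and labels under 'y'; the file name, keys, and cache size are made up:

dataset = HDF5Dataset('train.h5', X='X', y='y',
                      cache_size=64 * 1024**2)   # 64 MiB chunk cache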