import os
from tempfile import TemporaryDirectory

import intake
import pandas as pd
import xarray as xr


def test_contains(key, expected):
    col = intake.open_esm_datastore(cdf_col_sample_cmip6)
    actual = key in col
    assert actual == expected
def test_search(esmcol_path, query):
    col = intake.open_esm_datastore(esmcol_path)
    cat = col.search(**query)
    assert len(cat.df) > 0
    assert len(col.df.columns) == len(cat.df.columns)
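# The ``query`` fixture is a mapping of catalog column names to desired values that
# ``search`` expands as keyword arguments. A minimal sketch of such a query, with
# assumed (illustrative) CMIP6-style column names and values, not taken from the source:
sample_query = {
    'experiment_id': 'historical',  # single value: exact match on this column
    'variable_id': ['tasmax'],      # list: match any of the listed values
}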
def test_opendap_endpoint():
    col = intake.open_esm_datastore('http://haden.ldeo.columbia.edu/catalogs/hyrax_cmip6.json')
    cat = col.search(
        source_id='CAMS-CSM1-0',
        experiment_id='historical',
        member_id='r1i1p1f1',
        table_id='Amon',
        grid_label='gn',
        version='v1',
    )
    dsets = cat.to_dataset_dict(cdf_kwargs={'chunks': {'time': 36}})
    _, ds = dsets.popitem()
    assert isinstance(ds, xr.Dataset)
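# A hedged sketch of checking the dask chunking requested above through ``cdf_kwargs``;
# ``chunks_within_limit`` is a hypothetical helper, not part of the original tests.
def chunks_within_limit(ds, dim='time', limit=36):
    # xarray exposes per-dimension dask chunk sizes via Dataset.chunks.
    return all(size <= limit for size in ds.chunks.get(dim, ()))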
def test_serialize_to_json():
    with TemporaryDirectory() as local_store:
        col = intake.open_esm_datastore(catalog_dict_records)
        name = 'test_serialize_dict'
        col.serialize(name=name, directory=local_store, catalog_type='dict')
        output_catalog = os.path.join(local_store, name + '.json')
        col2 = intake.open_esm_datastore(output_catalog)
        pd.testing.assert_frame_equal(col.df, col2.df)
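# A hedged sketch (not part of the original test): with catalog_type='dict' the intent
# is for the records to be embedded in the serialized JSON itself, so the output file
# can be inspected directly; the helper below is illustrative only.
def read_serialized_catalog(path):
    import json
    with open(path) as f:
        return json.load(f)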
def test_to_dataset_dict_aggfalse(esmcol_path, query):
    col = intake.open_esm_datastore(esmcol_path)
    cat = col.search(**query)
    nds = len(cat.df)
    dsets = cat.to_dataset_dict(zarr_kwargs={'consolidated': True}, aggregate=False)
    assert len(dsets.keys()) == nds
    key, ds = dsets.popitem()
    assert 'tasmax' in key
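# With aggregate=False, each catalog row is loaded as its own dataset, which is why the
# number of datasets equals len(cat.df) and keys carry per-asset details such as the
# variable name. A hypothetical helper illustrating that relationship:
def keys_mentioning_variable(dsets, variable='tasmax'):
    return [key for key in dsets if variable in key]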
def test_read_catalog_dict():
    col = intake.open_esm_datastore(catalog_dict_records)
    assert isinstance(col.df, pd.DataFrame)
    assert col.catalog_file is None
def zarr_aws_cesmle_col():
    url = 'https://raw.githubusercontent.com/NCAR/cesm-lens-aws/master/intake-catalogs/aws-cesm1-le.json'
    return intake.open_esm_datastore(url)
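# zarr_aws_cesmle_col() opens the NCAR CESM1 Large Ensemble catalog on AWS. A hedged
# usage sketch; the search column names ('component', 'frequency', 'variable') and the
# anonymous-access storage option are assumptions about that catalog, not from the source.
def open_cesmle_reference_height_temperature():
    col = zarr_aws_cesmle_col()
    cat = col.search(component='atm', frequency='monthly', variable='TREFHT')
    return cat.to_dataset_dict(zarr_kwargs={'consolidated': True}, storage_options={'anon': True})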
def test_progressbar(progressbar):
    c = intake.open_esm_datastore(cdf_col_sample_cmip5)
    cat = c.search(variable=['hfls'], frequency='mon', modeling_realm='atmos', model=['CNRM-CM5'])
    _ = cat.to_dataset_dict(cdf_kwargs=dict(chunks={}), progressbar=progressbar)
def test_df_property():
    col = intake.open_esm_datastore(catalog_dict_records)
    assert len(col.df) == 5
    col.df = col.df.iloc[0:2, :]
    assert isinstance(col.df, pd.DataFrame)
    # len(col) counts aggregation groups (keys), while len(col.df) counts rows,
    # so slicing the dataframe down to two rows can still leave a single group.
    assert len(col) == 1
    assert len(col.df) == 2
def test_to_dataset_dict(esmcol_path, query):
    col = intake.open_esm_datastore(esmcol_path)
    cat = col.search(**query)
    _, ds = cat.to_dataset_dict(
        zarr_kwargs={'consolidated': True},
        cdf_kwargs={'chunks': {'time': 1}},
        storage_options={'token': 'anon'},
    ).popitem()
    assert 'member_id' in ds.dims
    assert len(ds.__dask_keys__()) > 0
    assert ds.time.encoding
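# A hedged sketch of iterating over the full mapping returned by to_dataset_dict();
# ``summarize_dimensions`` is a hypothetical helper, not part of the original tests.
def summarize_dimensions(dsets):
    # Map each aggregation key to its dataset's dimension sizes.
    return {key: dict(ds.sizes) for key, ds in dsets.items()}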