# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_odd_bounds():
    """Preparing a cutout with slightly perturbed bounds should yield the
    same 100m wind data as the reference cutout (within tolerance)."""
    odd = Cutout(
        path=tmp_dir / (path + '_odd_bounds'),
        module="era5",
        time=time,
        bounds=(x0 - 0.1, y0 - 0.02, x1 + 0.03, y1 + 0.13),
    )
    odd.prepare()
    assert_allclose(odd.data.wnd100m, ref.data.wnd100m, atol=1e-5, rtol=1e-5)
def test_available_features(ref):
    """The reference cutout exposes exactly one module ('era5'); a combined
    sarah+era5 cutout exposes two modules and strictly more features."""
    ref_modules = ref.available_features.index.unique('module')
    assert list(ref_modules) == ['era5']
    combined = Cutout(
        path="sarah_first",
        module=['sarah', 'era5'],
        time=slice('2013-01-01', '2013-01-01'),
        x=slice(X0, X1),
        y=slice(Y0, Y1),
    )
    combined_modules = combined.available_features.index.unique('module')
    assert len(combined_modules) == 2
    assert len(combined.available_features) > len(ref.available_features)
from pathlib import Path  # was missing: Path is used below but never imported

import atlite
from atlite import Cutout
from xarray.testing import assert_allclose, assert_equal

# Reference cutout parameters: a small lon/lat box for a single day of ERA5.
time = '2013-01-01'
x0 = -4
y0 = 56
x1 = 1.5
y1 = 61
path = "era5_test"

tmp_dir = Path('tmp_files_test')
# exist_ok=True so re-running the test module does not crash with
# FileExistsError when the directory is left over from a previous run.
tmp_dir.mkdir(exist_ok=True)

# Reference cutout shared by the tests in this module; prepare() fetches
# and stores the actual ERA5 data.
ref = Cutout(path=tmp_dir / path, module="era5", bounds=(x0, y0, x1, y1), time=time)
ref.prepare()
# Backwards compatibility with name and cutout_dir
def test_old_style_loading_args():
    """Loading via the deprecated name/cutout_dir keywords must produce the
    same coordinates as the reference cutout."""
    legacy = Cutout(name=path, cutout_dir=tmp_dir)
    expected = ref.data.coords.to_dataset()
    assert_equal(legacy.data.coords.to_dataset(), expected)
def test_odd_bounds():
    """Bounds nudged off the exact grid must still resolve to the same wind
    data as the reference cutout (within tolerance)."""
    perturbed = (x0 - 0.1, y0 - 0.02, x1 + 0.03, y1 + 0.13)
    cutout = Cutout(
        path=tmp_dir / (path + '_odd_bounds'),
        module="era5",
        time=time,
        bounds=perturbed,
    )
    cutout.prepare()
    assert_allclose(cutout.data.wnd100m, ref.data.wnd100m, atol=1e-5, rtol=1e-5)
def cutout_era5(tmp_path_factory):
    """Build and prepare a small ERA5 cutout in a fresh temp directory.

    NOTE(review): takes pytest's tmp_path_factory, so this is presumably a
    fixture decorated with @pytest.fixture elsewhere — confirm.
    """
    work_dir = tmp_path_factory.mktemp("era5")
    era5 = Cutout(path=work_dir / "era5", module="era5", bounds=BOUNDS, time=TIME)
    era5.prepare()
    return era5
def test_odd_bounds_coords(ref):
    """A cutout built from slightly perturbed bounds must have exactly the
    reference coordinates (no data preparation needed for this check)."""
    nudged = (X0 - 0.1, Y0 - 0.02, X1 + 0.03, Y1 + 0.13)
    cutout = Cutout(path="odd_bounds", module="era5", time=TIME, bounds=nudged)
    assert_equal(cutout.coords.to_dataset(), ref.coords.to_dataset())
def cutout_sarah(tmp_path_factory):
    """Build and prepare a combined SARAH+ERA5 cutout in a temp directory.

    NOTE(review): like cutout_era5, presumably a pytest fixture — confirm.
    """
    work_dir = tmp_path_factory.mktemp("sarah")
    combined = Cutout(
        path=work_dir / "sarah",
        module=["sarah", "era5"],
        bounds=BOUNDS,
        time=TIME,
        sarah_dir=SARAH_DIR,
    )
    combined.prepare()
    return combined
def test_time_sclice_coords(ref):
    # NOTE(review): "sclice" is a typo for "slice"; the name is kept
    # unchanged to preserve the public test identifier.
    """A cutout defined via time/x/y slices must reproduce the reference
    coordinates exactly."""
    sliced = Cutout(
        path="time_slice",
        module="era5",
        time=slice('2013-01-01', '2013-01-01'),
        x=slice(X0, X1),
        y=slice(Y0, Y1),
    )
    assert_equal(sliced.coords.to_dataset(), ref.coords.to_dataset())
# (truncated comment — the original text, apparently about the cutout
# resolution, was lost in extraction)
import sys, os
from vresutils import shapes as vshapes, mapping as vmapping, transfer as vtransfer
from load import europe
import pandas as pd
import numpy as np
import scipy as sp
import atlite
# Example script: load an existing Europe cutout and gather the NUTS3
# mappings and GDP/population series used to weight it by region.
# NOTE(review): depends on the project-local `vresutils` and `load`
# packages imported above; not runnable without them.
cutout = atlite.Cutout('europe-2011-2014')
# list of grid cells of the cutout
grid_cells = cutout.grid_cells()
print(len(grid_cells))
# pd.Series: nuts3 code -> 2-letter country codes
mapping = vmapping.countries_to_nuts3()
countries = mapping.value_counts().index.sort_values()
# pd.Series gdp and pop, each indexed by nuts3 code
gdp,pop = europe.gdppop_nuts3()
# pd.Series nuts3 code -> polygon
# In this example we assume you have set in config.py
# ncep_dir = '/path/to/weather_data/'
# where the files have format e.g.
# 'ncep_dir/{year}{month:0>2}/tmp2m.*.grb2'
# Example script: create and prepare a one-month NCEP cutout for Europe
# (January 2011). Debug logging shows per-file progress during prepare().
import logging
logging.basicConfig(level=logging.DEBUG)
import atlite
# xs is a west-to-east longitude slice; ys is written as a descending
# slice (71.66 down to 33.56), i.e. north to south.
cutout = atlite.Cutout(name="europe-2011-01",
module="ncep",
xs=slice(-12.18798349, 41.56244222),
ys=slice(71.65648314, 33.56459975),
years=slice(2011, 2011),
months=slice(1,1))
#this is where all the work happens - it took 105 minutes on FIAS'
#beast resi, with 16 cores; the resulting cutout takes 57 GB
cutout.prepare()
# It is assumed the city is half on grid cell 43 and half on grid cell
# 44
# It assumes you have already created a cutout for Europe called
# "europe-2011-2014" in the directory "/home/vres/data/cutouts"
from scipy import sparse
import atlite
# Example script: compute heat demand for a city spread over grid cells
# 43 and 44 of an existing Europe cutout, with weight 0.5 on each.
cutout = atlite.Cutout('europe-2011-2014')
# A sparse matrix describing the city relative to the grid coordinates:
# shape (2, n_grid_cells) with 0.5 at (0, 43) and 0.5 at (1, 44).
# NOTE(review): the header comment says the city is half on each cell, yet
# the two weights land in different rows — confirm whether a single-row
# matrix with both weights was intended.
matrix = sparse.csr_matrix(([0.5,0.5], ([0,1],[43,44])), shape=(2, len(cutout.grid_coordinates())))
print(matrix)
hd = cutout.heat_demand(matrix)
print(hd)