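# These snippets are excerpted from the ecl2df test suite and command line
# modules. The imports below are a best-effort reconstruction; names such as
# DATAFILE, AGGREGATORS, logger and the module-local df() helpers are defined
# in the surrounding files and are not shown here.
import logging

import pandas as pd
import pytest

import ecl2df
from ecl2df import EclFiles, compdat

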
def test_userapi():
"""Test that we can act as human API user
Functionality should be extensively tested in other code, but this is here
to illustrate how a user could work, and ensure that it works.
To the user reading the source: Skip all 'assert' lines, read the rest.
"""
eclfiles = ecl2df.EclFiles(DATAFILE)
compdatdf = ecl2df.compdat.df(eclfiles)
equil = ecl2df.equil.df(eclfiles)
faults = ecl2df.faults.df(eclfiles)
fipreports = ecl2df.fipreports.df(eclfiles)
grid_df = ecl2df.grid.df(eclfiles)
grst_df = ecl2df.grid.df(eclfiles, rstdates="last")
gruptree = ecl2df.gruptree.df(eclfiles)
nnc = ecl2df.nnc.df(eclfiles)
pillars = ecl2df.pillars.df(eclfiles)
rft = ecl2df.rft.df(eclfiles)
satfund = ecl2df.satfunc.df(eclfiles)
smry = ecl2df.summary.df(eclfiles)
trans = ecl2df.trans.df(eclfiles)
wcon = ecl2df.wcon.df(eclfiles)
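
    # Each call above returns an ordinary pandas DataFrame, so downstream use
    # is plain pandas; a minimal, illustrative check (not in the original test):
    assert isinstance(smry, pd.DataFrame)
    assert isinstance(grid_df, pd.DataFrame)
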
"""Test unrolling of k1-k2 ranges in COMPDAT"""
schstr = """
COMPDAT
-- K1 to K2 is a range of 11 layers, should be automatically
-- unrolled to 11 rows.
'OP1' 33 44 10 20 /
/
"""
df = compdat.deck2dfs(EclFiles.str2deck(schstr))["COMPDAT"]
assert df["I"].unique() == 33
assert df["J"].unique() == 44
assert (df["K1"].values == range(10, 20 + 1)).all()
assert (df["K2"].values == range(10, 20 + 1)).all()
# Check that we can read withoug unrolling:
df_noroll = compdat.deck2dfs(EclFiles.str2deck(schstr), unroll=False)["COMPDAT"]
assert len(df_noroll) == 1
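
    # Illustrative follow-up, grounded in the deck comment above: unrolling a
    # K range of 10..20 gives one row per layer, i.e. 11 rows, versus the
    # single row when unroll=False.
    assert len(df) == 11
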
def test_df():
"""Test main dataframe API, only testing that something comes out"""
eclfiles = EclFiles(DATAFILE)
compdat_df = compdat.df(eclfiles)
assert not compdat_df.empty
assert "ZONE" in compdat_df
assert "K1" in compdat_df
assert "WELL" in compdat_df
def test_nonstandardzones(tmpdir):
"""Test that we can read zones from a specific filename"""
zonefile = tmpdir / "formations.lyr"
zonefilecontent = """
-- foo
# foo
'Eiriksson' 1-10
Raude 20-30
# Difficult quote parsing above, might not run in ResInsight.
"""
zonefile.write(zonefilecontent)
eclfiles = ecl2df.EclFiles(DATAFILE)
zonemap = eclfiles.get_zonemap(str(zonefile))
assert zonemap[1] == "Eiriksson"
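
    # Illustrative extra check, assuming the lyr layer ranges above are
    # inclusive, so layer 10 is the last 'Eiriksson' layer:
    assert zonemap[10] == "Eiriksson"
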
def test_stdzoneslyr():
"""Test that we can read zones if the zonemap is in a standard location"""
eclfiles = ecl2df.EclFiles(DATAFILE)
zonemap = eclfiles.get_zonemap()
assert isinstance(zonemap, dict)
assert zonemap[3] == "UpperReek"
assert zonemap[10] == "MidReek"
assert zonemap[11] == "LowerReek"
with pytest.raises(KeyError):
assert zonemap[0]
with pytest.raises(KeyError):
assert zonemap["foo"]
with pytest.raises(KeyError):
assert zonemap[-10]
assert len(zonemap) == 15
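
    # Illustrative only: the zonemap is a plain dict from layer index to zone
    # name, so it can be applied to any layer column with standard pandas, e.g.
    #     some_df["ZONE"] = some_df["K1"].map(zonemap)
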
def pillars_main(args):
"""This is the command line API"""
if args.verbose:
logger.setLevel(logging.INFO)
eclfiles = ecl2df.EclFiles(args.DATAFILE)
dframe = df(
eclfiles,
region=args.region,
rstdates=args.rstdates,
soilcutoff=args.soilcutoff,
sgascutoff=args.sgascutoff,
swatcutoff=args.swatcutoff,
stackdates=args.stackdates,
)
groupbies = []
aggregators = {
key: AGGREGATORS[key.split("@")[0]]
for key in dframe
if key.split("@")[0] in AGGREGATORS
}
if args.region and args.group:
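    # A hedged sketch, not the library's actual code: the groupbies and
    # aggregators collected above would typically feed a pandas aggregation
    # along the lines of dframe.groupby(groupbies).agg(aggregators).
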
def nnc_main(args):
"""Command line access point from main() or from ecl2csv via subparser"""
if args.verbose:
logger.setLevel(logging.INFO)
eclfiles = EclFiles(args.DATAFILE)
nncdf = df(eclfiles, coords=args.coords, pillars=args.pillars)
if nncdf.empty:
logger.warning("Empty NNC dataframe being written to disk!")
nncdf.to_csv(args.output, index=False)
print("Wrote to " + args.output)