@pytest.mark.parametrize('spec_file,spec_name', [
    ('experimental/sil/ppo_sil_cartpole.json', 'ppo_sil_shared_cartpole'),
    ('experimental/sil/ppo_sil_cartpole.json', 'ppo_sil_separate_cartpole'),
    ('experimental/sil/ppo_sil_cartpole.json', 'ppo_sil_rnn_shared_cartpole'),
    ('experimental/sil/ppo_sil_cartpole.json', 'ppo_sil_rnn_separate_cartpole'),
])
def test_ppo_sil(spec_file, spec_name):
    run_trial_test(spec_file, spec_name)
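For reference, the same pattern reduced to a self-contained sketch (names and values below are illustrative, not from the snippet above); each tuple in the parametrize list expands into its own test case:

import pytest

@pytest.mark.parametrize('left,right,total', [
    (1, 2, 3),
    (2, 3, 5),
])
def test_addition(left, right, total):
    # one test is generated and reported per (left, right, total) tuple
    assert left + right == total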
pytest.param("fd", marks=pytest.mark.xfail(reason="capture cleanup needed"))])
def test_functional_boxed_capturing(testdir, capmode):
p1 = testdir.makepyfile("""
import os
import sys
def test_function():
sys.stdout.write("hello\\n")
sys.stderr.write("world\\n")
os.kill(os.getpid(), 15)
""")
result = testdir.runpytest(p1, "--forked", "--capture=%s" % capmode)
result.stdout.fnmatch_lines("""
*CRASHED*
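The *CRASHED* pattern above is an fnmatch-style glob that fnmatch_lines applies to the captured output. A small, self-contained illustration (the report lines here are made up, not taken from the test) of how such patterns match:

import fnmatch

report_lines = [
    "test_module.py::test_function CRASHED",
    "1 failed in 0.12s",
]
# '*CRASHED*' matches any line containing the word CRASHED
assert any(fnmatch.fnmatch(line, "*CRASHED*") for line in report_lines)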
import os
from subprocess import Popen, PIPE

import pytest

# Detect whether CUDA GPUs are available by probing nvidia-smi.
noCuda = 0
try:
    p = Popen(["nvidia-smi", "--query-gpu=index,utilization.gpu,memory.total,memory.used,memory.free,driver_version,name,gpu_serial,display_active,display_mode", "--format=csv,noheader,nounits"], stdout=PIPE)
except OSError:
    noCuda = 1

# Count the GPUs reported by nvidia-smi; the last non-empty line holds the highest index.
maxGPU = 0
if noCuda == 0:
    try:
        p = os.popen('nvidia-smi --query-gpu=index --format=csv,noheader,nounits')
        i = p.read().split('\n')
        maxGPU = int(i[-2]) + 1
    except OSError:
        noCuda = 1

skipIfNoCuda = pytest.mark.skipif(noCuda == 1, reason="No CUDA installation found through nvidia-smi")
skipIfOnlyOneGPU = pytest.mark.skipif(maxGPU < 2, reason="Only one GPU")
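The two marks defined above can then decorate GPU-dependent tests. A hypothetical sketch of how they would be applied (test names and bodies are placeholders, not from the original source):

@skipIfNoCuda
def test_requires_cuda():
    # body omitted: runs only when nvidia-smi was found
    pass

@skipIfOnlyOneGPU
def test_requires_multiple_gpus():
    # body omitted: runs only when two or more GPUs were detected
    pass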
def test_template_invertibleMLP():
    print("test mlp")
    gaussian = Gaussian([2])
    sList = [MLP(2, 10), MLP(2, 10), MLP(2, 10), MLP(2, 10)]
    tList = [MLP(2, 10), MLP(2, 10), MLP(2, 10), MLP(2, 10)]
    realNVP = RealNVP([2], sList, tList, gaussian)
    x = realNVP.prior(10)
    mask = realNVP.createMask(["channel"] * 4, ifByte=0)
    print("original")
    #print(x)
def test_gdaltransform_ct_4D():
    if test_cli_utilities.get_gdaltransform_path() is None:
        pytest.skip()

    ret = gdaltest.runexternal(test_cli_utilities.get_gdaltransform_path() + ' -ct "+proj=pipeline +step +proj=unitconvert +xy_in=deg +xy_out=rad +step +proj=cart +step +proj=helmert +convention=position_vector +x=0.0127 +dx=-0.0029 +rx=-0.00039 +drx=-0.00011 +y=0.0065 +dy=-0.0002 +ry=0.00080 +dry=-0.00019 +z=-0.0209 +dz=-0.0006 +rz=-0.00114 +drz=0.00007 +s=0.00195 +ds=0.00001 +t_epoch=1988.0 +step +proj=cart +inv +step +proj=unitconvert +xy_in=rad +xy_out=deg" -coord 2 49 0 2000')

    values = [float(x) for x in ret.split(' ')]
    assert len(values) == 3, ret
    assert values[0] == pytest.approx(2.0000005420366, abs=1e-10), ret
    assert values[1] == pytest.approx(49.0000003766711, abs=1e-10), ret
    assert values[2] == pytest.approx(-0.0222802283242345, abs=1e-8), ret
def test_gdaltransform_6():
    if test_cli_utilities.get_gdaltransform_path() is None:
        pytest.skip()

    strin = '440720 3751320\n'
    ret = gdaltest.runexternal(test_cli_utilities.get_gdaltransform_path() + ' ../gcore/data/byte.tif ../gcore/data/byte.tif', strin)

    text_split = ret.split(' ')
    x = float(text_split[0])
    y = float(text_split[1])

    assert x == pytest.approx(440720, abs=1e-4) and y == pytest.approx(3751320, abs=1e-4), ret
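Both tests above compare coordinates with pytest.approx using an absolute tolerance. A small self-contained illustration (the values are made up) of why abs= is preferable to the default relative tolerance when expected values sit near zero:

import pytest

def test_approx_with_absolute_tolerance():
    # pytest.approx(expected, abs=...) compares within an absolute tolerance;
    # a relative tolerance against an expected value of 0.0 would be far stricter
    assert 2.00000054 == pytest.approx(2.0000005, abs=1e-6)
    assert -0.0000001 == pytest.approx(0.0, abs=1e-6)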
def run_pytest(argv):
    # Install the stubs package first, then hand the remaining arguments to pytest.
    subprocess.run(
        [sys.executable, "-m", "pip", "install", STUBS_ROOT],
        capture_output=True,
        check=True,
    )
    return pytest.main([STUBS_ROOT] + argv)
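A likely entry-point usage for the helper above (hypothetical, not shown in the original source):

if __name__ == "__main__":
    # pass through command-line arguments and exit with pytest's status code
    raise SystemExit(run_pytest(sys.argv[1:]))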
@pytest.mark.xfail(reason="FATS say must be 2/pi, but actual is -0.20")
def test_FATS_doc_StetsonK():
random = np.random.RandomState(42)
ext = extractors.StetsonK()
values = np.empty(1000)
for idx in range(values.size):
mags = random.normal(size=1000)
errors = random.normal(scale=0.001, size=1000)
values[idx] = ext.fit(magnitude=mags, error=errors)["StetsonK"]
np.testing.assert_allclose(values.mean(), 0.798)
@pytest.mark.xfail(strict=True)
def test_shared_static_init(d):
    d.cmds(install_cmds(init='--shared --static', url=get_exists_path('libsimple'), lib='simple'))
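With strict=True, an unexpected pass (XPASS) fails the run instead of being reported quietly, unlike the non-strict xfail used earlier. A minimal illustrative sketch (not from the original source) of that behaviour:

import pytest

@pytest.mark.xfail(strict=True, reason="illustration of strict xfail")
def test_strict_xfail_sketch():
    # this body deliberately fails; if it ever passed, strict=True would
    # turn the XPASS into a test failure
    assert False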
try:
    patch_virtual_server_from_yaml(kube_apis.custom_objects, virtual_server_setup.vs_name, vs_file,
                                   virtual_server_setup.namespace)
except ApiException as ex:
    assert ex.status == 422 \
        and "spec.routes.errorPages.codes" in ex.body \
        and "spec.routes.errorPages.redirect.code" in ex.body \
        and "spec.routes.errorPages.redirect.url" in ex.body \
        and "spec.routes.errorPages.return.code" in ex.body \
        and "spec.routes.errorPages.return.type" in ex.body \
        and "spec.routes.errorPages.return.body" in ex.body \
        and "spec.routes.errorPages.return.headers.name" in ex.body \
        and "spec.routes.errorPages.return.headers.value" in ex.body
except Exception as ex:
    pytest.fail(f"An unexpected exception is raised: {ex}")
else:
    pytest.fail("Expected an exception but there was none")

wait_before_test(1)
config_new = get_vs_nginx_template_conf(kube_apis.v1,
                                        virtual_server_setup.namespace,
                                        virtual_server_setup.vs_name,
                                        ic_pod_name,
                                        ingress_controller_prerequisites.namespace)
assert config_old == config_new, "Expected: config doesn't change"
def verify_checked_items_compared(self, checkedList, view):
    # The first and last header items do not contain template names, so are not iterated upon.
    for header_with_template in view.comparison_table.headers[1:-1]:
        try:
            # Split and slice are used to remove extra characters in the header item
            checkedList.remove(header_with_template.split(' ')[0])
        except ValueError:
            pytest.fail(f"Entity {header_with_template.split(' ')[0]} is in compare view, "
                        f"but was not checked.")
        except TypeError:
            pytest.fail('No entities found in compare view.')
    if len(checkedList) > 0:
        pytest.fail(f'Some checked items did not appear in the compare view: {checkedList}.')
    return True