try:
    patch_virtual_server_from_yaml(kube_apis.custom_objects, virtual_server_setup.vs_name, vs_file,
                                   virtual_server_setup.namespace)
except ApiException as ex:
    # The API server should reject the invalid manifest with 422 and name every bad errorPages field.
    assert ex.status == 422 \
        and "spec.routes.errorPages.codes" in ex.body \
        and "spec.routes.errorPages.redirect.code" in ex.body \
        and "spec.routes.errorPages.redirect.url" in ex.body \
        and "spec.routes.errorPages.return.code" in ex.body \
        and "spec.routes.errorPages.return.type" in ex.body \
        and "spec.routes.errorPages.return.body" in ex.body \
        and "spec.routes.errorPages.return.headers.name" in ex.body \
        and "spec.routes.errorPages.return.headers.value" in ex.body
except Exception as ex:
    pytest.fail(f"An unexpected exception is raised: {ex}")
else:
    pytest.fail("Expected an exception but there was none")

wait_before_test(1)
config_new = get_vs_nginx_template_conf(kube_apis.v1,
                                        virtual_server_setup.namespace,
                                        virtual_server_setup.vs_name,
                                        ic_pod_name,
                                        ingress_controller_prerequisites.namespace)
assert config_old == config_new, "Expected: config doesn't change"
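The try/except/else arrangement above (assert on the ApiException, fail on any other exception, fail if none is raised) can also be written with pytest.raises, which fails the test automatically when the expected exception never occurs. A minimal, self-contained sketch of that pattern; the stand-in function and ValueError replace the Kubernetes client call and ApiException for illustration only:

import pytest

def apply_invalid_manifest():
    # Stand-in for patch_virtual_server_from_yaml(); a real call would be rejected
    # by the API server's schema validation.
    raise ValueError("spec.routes.errorPages.codes: Invalid value")

def test_invalid_manifest_is_rejected():
    with pytest.raises(ValueError) as excinfo:
        apply_invalid_manifest()
    # excinfo.value is the caught exception; assert on its contents the same way the
    # snippet above asserts on ex.status and ex.body.
    assert "spec.routes.errorPages.codes" in str(excinfo.value)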
def verify_checked_items_compared(self, checkedList, view):
    # The first and last header items do not contain template names, so they are not iterated over.
    for header_with_template in view.comparison_table.headers[1:-1]:
        try:
            # Split is used to remove extra characters in the header item
            checkedList.remove(header_with_template.split(' ')[0])
        except ValueError:
            pytest.fail(f"Entity {header_with_template.split(' ')[0]} is in compare view, "
                        f"but was not checked.")
        except TypeError:
            pytest.fail('No entities found in compare view.')
    if len(checkedList) > 0:
        pytest.fail(f'Some checked items did not appear in the compare view: {checkedList}.')
    return True
def test_parscit_train_write_file_works(self):
    parscit_train_path = pathlib.Path(DATA_DIR, "test_parscit_conll_write.txt")
    try:
        write_parscit_to_conll_file(parscit_train_path)
    except Exception:
        pytest.fail("Failed to write parscit train conll format file")
    else:
        # Clean up the file only if it was written successfully.
        parscit_train_path.unlink()
# test that the is_public_* flags exist for the remaining dataset types
try:
    getattr(department, "is_public_{}".format(ois_lookup["var_suffix"]))
except AttributeError:
    pytest.fail("Unexpected AttributeError")
try:
    getattr(department, "is_public_{}".format(assaults_lookup["var_suffix"]))
except AttributeError:
    pytest.fail("Unexpected AttributeError")

# test that the class prefixes are valid
try:
    getattr(importlib.import_module("comport.data.models"), "{}{}".format(complaints_lookup["class_prefix"], department.short_name))
except AttributeError:
    pytest.fail("Unexpected AttributeError")
try:
    getattr(importlib.import_module("comport.data.models"), "{}{}".format(uof_lookup["class_prefix"], department.short_name))
except AttributeError:
    pytest.fail("Unexpected AttributeError")
try:
    getattr(importlib.import_module("comport.data.models"), "{}{}".format(ois_lookup["class_prefix"], department.short_name))
except AttributeError:
    pytest.fail("Unexpected AttributeError")
try:
    getattr(importlib.import_module("comport.data.models"), "{}{}".format(assaults_lookup["class_prefix"], department.short_name))
except AttributeError:
    pytest.fail("Unexpected AttributeError")
def test_addcall_funcargs(self):
    def func(x):
        pass

    metafunc = self.Metafunc(func)

    class obj(object):
        pass

    metafunc.addcall(funcargs={"x": 2})
    metafunc.addcall(funcargs={"x": 3})
    # An unknown funcarg name makes addcall fail, so expect pytest.fail.Exception.
    pytest.raises(pytest.fail.Exception, metafunc.addcall, {"xyz": 0})
    assert len(metafunc._calls) == 2
    assert metafunc._calls[0].funcargs == {"x": 2}
    assert metafunc._calls[1].funcargs == {"x": 3}
    assert not hasattr(metafunc._calls[1], "param")
def test_load_model(self, setup_engine_test_with_simple_classifier):
    """
    Test whether the engine loads a saved model without any error.
    """
    engine = setup_engine_test_with_simple_classifier
    try:
        # train_epoch_end(0) is expected to write model_epoch_1.pt, which is then loaded back.
        engine.train_epoch_end(0)
        engine.load_model_from_file(
            os.path.join(engine.save_dir, "model_epoch_{0}.pt".format(1))
        )
    except Exception:
        pytest.fail("Engine failed to save and reload the model")
    'owner_email': vmware_linux_setup_data['owner_email'],
})
result = soap_client.service.VmProvisionRequest('1.1',
                                                template_fields, vm_fields, requester, '', '')
request_id = result.id
Assert.not_none(request_id)

# Poll for VM to be provisioned
start_time = time()
vm_guid = None
while (time() - start_time < 300):  # Give EVM 5 mins to change status
    result = soap_client.service.GetVmProvisionRequest(request_id)
    if result.approval_state == 'approved':
        if result.status == 'Error':
            pytest.fail(result.message)
        Assert.equal(result.status, 'Ok')
        if result.request_state == 'finished':
            while not vm_guid:
                sleep(10)
                result = soap_client.service.GetVmProvisionRequest(request_id)
                if result.vms[0]:
                    vm_guid = result.vms[0].guid
            break
    sleep(30)

Assert.not_none(vm_guid)
result = soap_client.service.FindVmByGuid(vm_guid)
Assert.equal(result.name, vmware_linux_setup_data['vm_name'])
Assert.equal(result.guid, vm_guid)
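Polling loops like the one above (a deadline, a fixed sleep, a value extracted once available) are often factored into a small wait helper. A sketch of that shape, independent of the SOAP client; the helper name and its defaults are illustrative:

import time

def wait_until(predicate, timeout=300, interval=30):
    # Call predicate() until it returns a truthy value or the timeout (seconds) elapses;
    # return the value, or None on timeout.
    deadline = time.time() + timeout
    while time.time() < deadline:
        value = predicate()
        if value:
            return value
        time.sleep(interval)
    return None

The provisioning poll would then become a predicate that returns the VM guid once the request reaches the finished state, with the test failing when wait_until returns None.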
def credentials(request):
    if request.param not in request.config.option.cli_api:
        pytest.skip("{0} API is skipped for test.".format(request.param.upper()))
    if request.param == "nns" and os.getenv("USER") != "root":
        pytest.fail("NNS unittests require root permissions.")
    ipaddr = request.config.option.ssh_ip
    username = request.config.option.ssh_user
    password = request.config.option.ssh_pass
    return ipaddr, username, password, request.param
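The fixture above reads request.param, so its values come from a params list on the @pytest.fixture decorator, which the excerpt omits. A minimal sketch of that wiring, with illustrative parameter values:

import pytest

@pytest.fixture(params=["cli", "api", "nns"])
def credentials_sketch(request):
    # Each value in params= is delivered to the fixture as request.param; tests that
    # request this fixture run once per value.
    return request.param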
sp_data = deepcopy(self.BASIC_PROFILE)
sp_data[SPKeys.CONNECTIONS] = [conn]
self.mock_ov_client.fc_networks.get_by.return_value = []
self.mock_ov_client.fcoe_networks.get_by.return_value = []
self.mock_ov_client.ethernet_networks.get_by.return_value = []
expected_error = ServerProfileReplaceNamesByUris.SERVER_PROFILE_NETWORK_NOT_FOUND + "FC Network"

try:
    ServerProfileReplaceNamesByUris().replace(self.mock_ov_client, sp_data)
except OneViewModuleResourceNotFound as e:
    assert e.msg == expected_error
else:
    pytest.fail("Expected Exception was not raised")
test_links = get_expected_links(sel_vars,
                                range(pcmci.N),
                                range(TAU_MIN, TAU_MAX + 1))
# Test the good parameter set
try:
    _ = pcmci._set_sel_links(test_links, TAU_MIN, TAU_MAX)
# Ensure no exception is raised
except Exception:
    pytest.fail("Selected links fail incorrectly!")
# Ensure an exception is raised for a bad parameter set
for bad_val, message in [(pcmci.N + 1, "Out of range")]:
    err_msg = message + " selected links do not fail!"
    with pytest.raises(ValueError):
        test_links[bad_val] = [(bad_val, TAU_MAX)]
        _ = pcmci._set_sel_links(test_links, TAU_MIN, TAU_MAX)
        pytest.fail(err_msg)
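In the loop above, pytest.fail(err_msg) acts as a sentinel inside the pytest.raises block: if _set_sel_links does not raise ValueError, the fail() call runs, and because its Failed exception is not a ValueError it escapes the context manager, so the test fails with the custom message instead of the generic DID NOT RAISE. A small self-contained sketch of that behaviour; the function names are illustrative:

import pytest

def does_not_raise():
    # Stands in for a call that was expected to raise ValueError but does not.
    return None

def test_sentinel_message_sketch():
    with pytest.raises(pytest.fail.Exception, match="custom failure message"):
        with pytest.raises(ValueError):
            does_not_raise()
            pytest.fail("custom failure message")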