Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_run_multiple_instances_in_same_command():
    """A single run_instances call with MinCount == MaxCount == 4 should
    create four instances whose AmiLaunchIndex values are 0..3."""
    requested = 4
    ec2 = boto3.client("ec2", region_name="us-east-1")
    ec2.run_instances(
        ImageId="ami-1234abcd", MinCount=requested, MaxCount=requested
    )
    reservations = ec2.describe_instances()["Reservations"]
    reservations[0]["Instances"].should.have.length_of(requested)
    # Launch indices are assigned in request order within the reservation.
    for index, instance in enumerate(reservations[0]["Instances"]):
        instance["AmiLaunchIndex"].should.be(index)
def cloudformation_outputs():
    """Yield the outputs of the test CloudFormation stack as a
    {OutputKey: OutputValue} mapping."""
    stacks = boto3.client("cloudformation").describe_stacks(StackName="aws-data-wrangler-test-arena")
    stack_outputs = stacks.get("Stacks")[0].get("Outputs")
    yield {entry.get("OutputKey"): entry.get("OutputValue") for entry in stack_outputs}
def _download_templates(app_id, template_file_path):
    """Download the CloudFormation template of a SAR application to a file.

    Parameters
    ----------
    app_id : str
        Application id/ARN passed to ``serverlessrepo.get_application``.
    template_file_path : str
        Destination path; the template is written in binary mode.

    Raises
    ------
    requests.HTTPError
        If the template URL returns an HTTP error status.
    """
    sar = boto3.client("serverlessrepo")
    response = sar.get_application(ApplicationId=app_id)
    template_url = response["Version"]["TemplateUrl"]
    # Use a context manager so the streamed connection is always released,
    # bound the request with a timeout, and fail fast on HTTP errors instead
    # of silently writing an error page to disk.
    with requests.get(template_url, stream=True, timeout=60) as r:
        r.raise_for_status()
        with open(template_file_path, "wb") as fp:
            for chunk in r.iter_content(chunk_size=128):
                fp.write(chunk)
def test_update_table_gsi_throughput():
# NOTE(review): this snippet is truncated — the create_table(...) call below is
# cut off mid-argument (the GSI dict, list, and call are never closed before the
# next definition); confirm the remainder against the original file.
dynamodb = boto3.resource("dynamodb", region_name="us-east-1")
# Create the DynamoDB table.
table = dynamodb.create_table(
TableName="users",
KeySchema=[
{"AttributeName": "forum_name", "KeyType": "HASH"},
{"AttributeName": "subject", "KeyType": "RANGE"},
],
GlobalSecondaryIndexes=[
{
"IndexName": "TestGSI",
"KeySchema": [
{"AttributeName": "username", "KeyType": "HASH"},
{"AttributeName": "created", "KeyType": "RANGE"},
],
"Projection": {"ProjectionType": "ALL"},
def test_unpack_archive(self):
    """unpack_archive should create the destination directory that the
    client was constructed with (it must not exist beforehand)."""
    s3 = boto3.resource('s3', region_name='us-east-1')
    s3.create_bucket(Bucket='test')
    archive_path = os.path.join('s3://test/', 'test.zip')
    src_dir = os.path.join(_get_temporary_directory(), 'in', 'dummy')
    dst_dir = os.path.join(_get_temporary_directory(), 'out', 'dummy')
    # Build and upload a dummy archive from the source directory.
    os.makedirs(src_dir, exist_ok=True)
    S3ZipClient(file_path=archive_path, temporary_directory=src_dir).make_archive()
    # Constructing the client must not create the destination directory.
    unpacker = S3ZipClient(file_path=archive_path, temporary_directory=dst_dir)
    self.assertFalse(os.path.exists(dst_dir))
    unpacker.unpack_archive()
def create_docker_services(command, tmpdir, hosts, image, additional_volumes, additional_env_vars,
customer_script, source_dir, entrypoint, use_gpu=False):
# Prepare configuration for dockerized 'train' / 'serve' runs: selects the
# opt/ml directories per command and, when a customer script is given,
# uploads it (tarred) to S3.
environment = []
session = boto3.Session()
optml_dirs = set()
if command == 'train':
optml_dirs = {'output', 'input'}
elif command == 'serve':
environment.extend(DEFAULT_HOSTING_ENV)
if customer_script:
timestamp = utils.sagemaker_timestamp()
# tar_and_upload_dir returns a tuple; only the first element (the S3
# location of the uploaded script) is kept here.
s3_script_path = fw_utils.tar_and_upload_dir(session=session,
bucket=default_bucket(session),
s3_key_prefix='test-{}'.format(timestamp),
script=customer_script,
directory=source_dir)[0]
# NOTE(review): the function appears truncated here (s3_script_path is never
# used in the visible lines); confirm the remainder against the original file.
def setup(self):
    """Prepare the CIS test fixture: environment variables, an identity
    vault client bound to us-west-2, and fixture profiles loaded from disk.

    Side effects: mutates os.environ (CIS_ENVIRONMENT, CIS_ASSUME_ROLE_ARN)
    and reads fixtures/durable.json plus fixtures/0.json..fixtures/9.json.
    """
    self.helper_configuration = helpers.Configuration()
    cis_environment = os.getenv("CIS_ENVIRONMENT", "development")
    os.environ["CIS_ENVIRONMENT"] = cis_environment
    os.environ["CIS_ASSUME_ROLE_ARN"] = "None"
    self.connection_object = connect.AWS()
    self.connection_object._boto_session = boto3.session.Session(region_name="us-west-2")
    self.idv = self.connection_object.identity_vault_client()
    # u = fake_profile.FakeUser()
    # u = helpers.ensure_appropriate_publishers_and_sign(fake_profile=u, condition="create")
    # u.verify_all_publishers(profile.User(user_structure_json=None))
    # Use context managers so fixture files are closed even if read() raises
    # (the original opened/closed handles manually, leaking on error).
    with open("fixtures/durable.json") as fh:
        self.durable_profile = fh.read()
    self.durable_profiles = []
    logger.info("Loading 10 fake users.")
    for x in range(10):
        with open("fixtures/{}.json".format(x)) as fh:
            self.durable_profiles.append(fh.read())
def test_storage_provider_aws(release_config_aws, tmpdir):
    """Check the configured S3 bucket exists, then run the shared
    storage-provider exercise against it."""
    session = boto3.session.Session()
    s3 = session.resource('s3')
    bucket_name = release_config_aws['bucket']
    failure_message = (
        "Bucket '{}' must exist with full write access to AWS testing account and created objects must be globally "
        "downloadable from: {}").format(bucket_name, release_config_aws['download_url'])
    assert s3.Bucket(bucket_name) in s3.buckets.all(), failure_message
    exercise_storage_provider(tmpdir, 'aws_s3', release_config_aws)
def get_article_liked_user(self, article_id, user_id):
    """Return the first 'liked' record matching the article/user key pair.

    Raises IndexError when no matching item exists.
    """
    table = self.dynamodb.Table(os.environ['ARTICLE_LIKED_USER_TABLE_NAME'])
    condition = Key('article_id').eq(article_id) & Key('user_id').eq(user_id)
    result = table.query(KeyConditionExpression=condition)
    return result['Items'][0]
# NOTE(review): tail of a test method whose `def` line lies outside this
# snippet — `params`, `self`, and the `*_before` counters are defined above it.
response = MeArticlesDraftsPublishWithHeader(params, {}, dynamodb=self.dynamodb,
elasticsearch=self.elasticsearch).main()
# Snapshot table contents after the publish call for the delta checks below.
article_info_after = self.article_info_table.scan()['Items']
article_history_after = self.article_history_table.scan()['Items']
article_content_edit_after = self.article_content_edit_table.scan()['Items']
self.assertEqual(response['statusCode'], 200)
article_info = self.article_info_table.get_item(Key={'article_id': params['pathParameters']['article_id']})['Item']
article_content = self.article_content_table.get_item(
Key={'article_id': params['pathParameters']['article_id']}
)['Item']
# Latest history entry for the article (last item of the query result).
article_history = self.article_history_table.query(
KeyConditionExpression=Key('article_id').eq(params['pathParameters']['article_id'])
)['Items'][-1]
self.assertEqual(article_info['status'], 'public')
self.assertEqual(article_info['sort_key'], 1520150552000000)
self.assertEqual(article_info['published_at'], 1525000000)
self.assertEqual(article_info['sync_elasticsearch'], 1)
self.assertEqual(article_info['topic'], 'crypto')
self.assertEqual(article_info['tags'], ['A', 'B', 'C', 'D', 'E' * 25])
self.assertEqual(article_info['eye_catch_url'], 'https://' + os.environ['DOMAIN'] + '/test.png')
self.assertEqual(article_info.get('price'), None)
self.assertEqual(article_content['title'], article_history['title'])
self.assertEqual(article_content.get('paid_body'), None)
# Publishing should add exactly one history row and leave the info and
# content-edit row counts unchanged.
self.assertEqual(len(article_info_after) - len(article_info_before), 0)
self.assertEqual(len(article_history_after) - len(article_history_before), 1)
self.assertEqual(len(article_content_edit_after) - len(article_content_edit_before), 0)