How to use the gbdxtools.Interface class in gbdxtools

To help you get started, we’ve selected a few gbdxtools examples based on popular ways it is used in public projects.
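At its simplest, Interface is instantiated directly; with no arguments it reads credentials from a local ~/.gbdx-config file. A minimal sketch (the explicit username/password keywords and the placeholder values are assumptions, not taken from the examples below):

from gbdxtools import Interface

# With no arguments, credentials come from ~/.gbdx-config
gbdx = Interface()

# Or pass credentials explicitly (placeholder values, an assumption)
gbdx = Interface(username="you@example.com", password="your-password")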


github DigitalGlobe/gbdxtools: tests/unit/test_task_registry.py
'''
Contact: dmitry.zviagintsev@digitalglobe.com

Unit test the task registry class
'''
import os

from gbdxtools import Interface
from gbdxtools.task_registry import TaskRegistry
import vcr
from auth_mock import get_mock_gbdx_session

mock_gbdx_session = get_mock_gbdx_session(token="dummytoken")
gbdx = Interface(gbdx_connection=mock_gbdx_session)


def test_init():
    tr = TaskRegistry()
    assert isinstance(tr, TaskRegistry)


@vcr.use_cassette('tests/unit/cassettes/test_list_tasks.yaml', filter_headers=['authorization'])
def test_list_tasks():
    tr = TaskRegistry()
    task_list = tr.list()
    assert task_list is not None
    assert 'HelloGBDX' in task_list


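The get_mock_gbdx_session helper comes from the repo's own test support module (auth_mock), not from gbdxtools' public API. A purely hypothetical sketch of what such a helper could look like, assuming Interface only needs a session-like object carrying a token:

import requests

def get_mock_gbdx_session(token="dummytoken"):
    # Hypothetical stand-in: a plain requests.Session dressed up to look
    # like the OAuth2 session gbdxtools normally builds; the .token shape
    # is an assumption.
    session = requests.Session()
    session.token = {"token_type": "Bearer", "access_token": token}
    session.headers.update({"Authorization": "Bearer {}".format(token)})
    return session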
github DigitalGlobe/gbdxtools: tests/unit/test_interface.py
import gbdxtools
from auth_mock import get_mock_gbdx_session

mock_gbdx_session = get_mock_gbdx_session(token="dummytoken")

def test_init():
    gi = gbdxtools.Interface(gbdx_connection=mock_gbdx_session)
    assert isinstance(gi, gbdxtools.Interface)
github DigitalGlobe/gbdxtools: tests/integration_tests.py
def test_get_s3tmp_cred(self):
    gi = Interface()
    s3creds = gi.get_s3_info()
    self.assertTrue("bucket" in s3creds)
    self.assertTrue("prefix" in s3creds)
    self.assertTrue("S3_secret_key" in s3creds)
    self.assertTrue("S3_access_key" in s3creds)
    self.assertTrue("S3_session_token" in s3creds)
github DigitalGlobe/gbdxtools: tests/unit/test_catalog.py
@classmethod
def setUpClass(cls):
    mock_gbdx_session = get_mock_gbdx_session(token="dummytoken")
    cls.gbdx = Interface(gbdx_connection=mock_gbdx_session)
github DigitalGlobe/gbdxtools: tests/integration_tests.py
def test_instantiate_interface(self):
    gi = Interface()
    self.assertIsNotNone(gi.gbdx_connection)
github DigitalGlobe/gbdxtools: examples/save_data_to_your_own_s3_bucket.py
# First we'll run atmospheric compensation on Landsat8 data
from gbdxtools import Interface
gbdx = Interface()

acomp = gbdx.Task('AComp', data='s3://landsat-pds/L8/033/032/LC80330322015035LGN00')

# Now we'll save the result to our own S3 bucket.  First we need to generate temporary AWS credentials
# (this assumes you have an AWS account and your IAM credentials are appropriately accessible via boto)
import boto3
client = boto3.client('sts')
response = client.get_session_token(DurationSeconds=86400)
access_key_id = response['Credentials']['AccessKeyId']
secret_key = response['Credentials']['SecretAccessKey']
session_token = response['Credentials']['SessionToken']

# Save the data to your s3 bucket using the SaveToS3 task:
savetask = gbdx.Task('SaveToS3')
savetask.inputs.data = acomp.outputs.data.value
savetask.inputs.destination = "s3://your-bucket/your-path/"
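The excerpt stops before the temporary credentials are attached and the workflow is run. A hedged completion (the SaveToS3 input names access_key_id, secret_key, and session_token are assumptions based on the variables prepared above):

# Hedged completion; the three input names below are assumptions.
savetask.inputs.access_key_id = access_key_id
savetask.inputs.secret_key = secret_key
savetask.inputs.session_token = session_token

# Chain the tasks into a workflow and run it
workflow = gbdx.Workflow([acomp, savetask])
workflow.execute()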
github DigitalGlobe/gbdxtools: examples/vectors_search.py
import json, gbdxtools
gbdx = gbdxtools.Interface()

# Let's find all the WorldView-3 vector footprints in Colorado
colorado_aoi = "POLYGON((-108.89 40.87,-102.19 40.87,-102.19 37.03,-108.89 37.03,-108.89 40.87))"
results = gbdx.vectors.query(colorado_aoi, query="item_type:WV03_VNIR")

geojson = {
    'type': 'FeatureCollection',
    'features': results
}

with open("vectors.geojson", "w") as f:
    f.write(json.dumps(geojson))
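Each entry in results is a GeoJSON feature, so the collection can be inspected before (or instead of) writing it out; for example:

# Quick sanity check on what came back
print("{} footprints found".format(len(results)))
print(results[0]['geometry']['type'])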
github DigitalGlobe/gbdxtools: examples/acomp_landsat8.py
# Run atmospheric compensation on Landsat8 data
from gbdxtools import Interface
gbdx = Interface()

acomp = gbdx.Task('AComp', data='s3://landsat-pds/L8/033/032/LC80330322015035LGN00')
workflow = gbdx.Workflow([acomp])
workflow.savedata(acomp.outputs.data, location='acomp_output_folder')
workflow.execute()
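execute() submits the workflow to GBDX; it runs asynchronously, so progress can be polled afterwards, e.g.:

# Poll the running workflow for progress
print(workflow.id)
print(workflow.status)  # e.g. {'state': 'running', 'event': 'started'}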
github DigitalGlobe/gbdxtools: gbdxtools/ipe/util.py
from shapely.wkt import loads
from shapely.geometry import shape

def preview(image, **kwargs):
    try:
        from IPython.display import Javascript, HTML, display
        from gbdxtools import Interface
        gbdx = Interface()
    except ImportError:
        print("IPython is required to produce maps.")
        return

    zoom = kwargs.get("zoom", 16)
    bands = kwargs.get("bands")
    if bands is None:
        bands = image._rgb_bands
    wgs84_bounds = kwargs.get("bounds", list(loads(image.ipe_metadata["image"]["imageBoundsWGS84"]).bounds))
    center = kwargs.get("center", list(shape(image).centroid.bounds[0:2]))
    graph_id = image.ipe_id
    node_id = image.ipe.graph()['nodes'][0]['id']

    stats = image.display_stats
    offsets = [stats['offset'][b] for b in bands]
    scales = [stats['scale'][b] for b in bands]
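A usage sketch, assuming an image object such as a gbdxtools CatalogImage (the catalog id is a placeholder and the band indices are an assumption):

from gbdxtools import CatalogImage

image = CatalogImage('<catalog-id>')  # placeholder id
preview(image, zoom=13, bands=[4, 2, 1])  # band indices are an assumption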
github DigitalGlobe/gbdxtools: examples/answer_factory_recipe_creation.py
from gbdxtools.simple_answerfactory import Recipe, RecipeParameter, Project, RecipeConfig
from gbdxtools import Interface
gbdx = Interface()


# The workflow that must be defined in order to specify a recipe
aop = gbdx.Task('AOP_Strip_Processor')
aop.inputs.ortho_interpolation_type = 'Bilinear'
aop.inputs.bands = 'PAN+MS'
aop.inputs.ortho_epsg = 'UTM'
aop.inputs.enable_acomp = 'true'
aop.inputs.enable_pansharpen = 'true'
aop.inputs.enable_dra = 'true'
aop.inputs.ortho_pixel_size = '0.5'

# AnswerFactory will automatically prepend an auto-ordering task and replace
# {raster_path} with the actual s3 path to the raster data
aop.inputs.data = '{raster_path}'
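The excerpt ends here; the Recipe, RecipeParameter, Project, and RecipeConfig classes imported at the top consume this task. A loose sketch of the general shape (Recipe.from_workflow and its signature are assumptions, not confirmed by this excerpt):

# Loose sketch; from_workflow and its signature are assumptions.
workflow = gbdx.Workflow([aop])
recipe = Recipe()
recipe.from_workflow(workflow)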