          {'name': 'Column6', 'type': 'TIMESTAMP'}
        ]
      }
    }
    mock_table_exists.return_value = True
    raw_data = self._get_raw_rows()

    def tabledata_list(*args, **kwargs):
      start_index = kwargs['start_index']
      max_results = kwargs['max_results']
      if max_results < 0:
        max_results = len(raw_data)
      return {'rows': raw_data[start_index:start_index + max_results]}

    mock_api_tabledata_list.side_effect = tabledata_list

    t = datalab.bigquery.Table('foo.bar')
    self._test_get_data(t, TestCases._get_expected_cols(), TestCases._get_expected_rows(), 6,
                        datalab.utils.commands._utils._get_data_from_table)
    self._test_get_data(t, TestCases._get_expected_cols(), TestCases._get_expected_rows(), 6,
                        datalab.utils.commands._utils.get_data)
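
    # A minimal sketch of what the fake tabledata_list above returns, assuming
    # raw_data holds six rows; a negative max_results is treated as "all
    # remaining rows from start_index":
    #
    #   tabledata_list(start_index=0, max_results=2)   # {'rows': raw_data[0:2]}
    #   tabledata_list(start_index=2, max_results=-1)  # {'rows': raw_data[2:]}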

  def test_decorators(self):
    tbl = datalab.bigquery.Table('testds.testTable0', context=TestCases._create_context())
    tbl2 = tbl.snapshot(dt.timedelta(hours=-1))
    self.assertEqual('test:testds.testTable0@-3600000', str(tbl2))

    with self.assertRaises(Exception) as error:
      tbl2 = tbl2.snapshot(dt.timedelta(hours=-2))
    self.assertEqual('Cannot use snapshot() on an already decorated table',
                     str(error.exception))

    with self.assertRaises(Exception) as error:
      tbl2.window(dt.timedelta(hours=-2), 0)
    self.assertEqual('Cannot use window() on an already decorated table',
                     str(error.exception))

    with self.assertRaises(Exception) as error:
      tbl.snapshot(dt.timedelta(days=-8))
    self.assertEqual(
  """Implements the BigQuery load magic used to load data from GCS to a table.

  The supported syntax is:

      %bigquery load -S|--source <source> -D|--destination
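
  For example (a hedged sketch; the bucket and table names are purely
  illustrative):

      %bigquery load -S gs://my-bucket/data.csv -D mydataset.mytable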

  Args:
    args: the arguments following '%bigquery load'.
    schema: a JSON schema for the destination table.
  Returns:
    A message about whether the load succeeded or failed.
  """
  name = args['destination']
  table = _get_table(name)
  if not table:
    table = datalab.bigquery.Table(name)

  if table.exists():
    if args['mode'] == 'create':
      raise Exception('%s already exists; use --append or --overwrite' % name)
  elif schema:
    table.create(json.loads(schema))
  elif not args['infer']:
    raise Exception(
        'Table does not exist, no schema specified in cell and no --infer flag; cannot load')

  # TODO(gram): we should probably try to do the schema inference ourselves, as BQ doesn't
  # really seem to be able to do it. Alternatively we could drop the --infer argument and force
  # the user to use a pre-existing table or supply a JSON schema.
  csv_options = datalab.bigquery.CSVOptions(delimiter=args['delimiter'],
                                            skip_leading_rows=args['skip'],
                                            allow_jagged_rows=not args['strict'],


def _get_table(name):
  """ Given a variable or table name, get a Table if it exists.

  Args:
    name: the name of the Table or a variable referencing the Table.
  Returns:
    The Table, if found.
  """
  # If name is a variable referencing a table, use that.
  item = datalab.utils.commands.get_notebook_item(name)
  if isinstance(item, datalab.bigquery.Table):
    return item

  # Else treat this as a BQ table name and return the (cached) table if it exists.
  try:
    return _table_cache[name]
  except KeyError:
    table = datalab.bigquery.Table(name)
    if table.exists():
      _table_cache[name] = table
      return table
  return None
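
# A hedged usage sketch of _get_table: it first resolves `name` as a notebook
# variable, then falls back to treating it as a BigQuery table name, caching
# successful lookups in _table_cache. The table name below is illustrative only.
#
#   table = _get_table('mydataset.mytable')
#   if table is None:
#     print('no such table: mydataset.mytable')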