# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_init(self):
    """sql.Literal wraps str, bytes, int and date values alike."""
    # assertIsInstance replaces the deprecated assert_()/isinstance()
    # combination and produces a clearer message on failure.
    self.assertIsInstance(sql.Literal('foo'), sql.Literal)
    self.assertIsInstance(sql.Literal(u'foo'), sql.Literal)
    self.assertIsInstance(sql.Literal(b'foo'), sql.Literal)
    self.assertIsInstance(sql.Literal(42), sql.Literal)
    self.assertIsInstance(
        sql.Literal(dt.date(2016, 12, 31)), sql.Literal)
def _collection_table_meta(self):
    """Return an OrderedDict of {column_name: data_type} for this
    collection's table, or None if the table does not exist.

    Column order follows the table's ordinal position.
    """
    if not self.exists():
        return None
    # Schema and table name are compared as string values here, so they
    # are bound as literals rather than identifiers.
    meta_query = SQL('SELECT column_name, data_type from information_schema.columns '
                     'WHERE table_schema={} and table_name={} '
                     'ORDER BY ordinal_position'
                     ).format(Literal(self.storage.schema), Literal(self.name))
    with self.storage.conn.cursor() as cur:
        cur.execute(meta_query)
        return collections.OrderedDict(cur.fetchall())
def table_exists(storage, table_name):
    """Return True if *table_name* exists in the storage's schema.

    Raises NotImplementedError for temporary storages: pg_tables does not
    expose session-local temporary tables in a checkable way.
    """
    if storage.temporary:
        raise NotImplementedError("don't know how to check existence of temporary table: {!r}".format(table_name))
    existence_query = SQL("SELECT EXISTS (SELECT 1 FROM pg_tables WHERE schemaname = {} AND tablename = {});"
                          ).format(Literal(storage.schema), Literal(table_name))
    with storage.conn.cursor() as cur:
        cur.execute(existence_query)
        return cur.fetchone()[0]
# Normalise the optional time bounds to UTC before building the query.
# NOTE(review): a naive datetime (tzinfo is None) is also astimezone()'d,
# which interprets it as local time -- confirm that is intended.
if start and start.tzinfo != pytz.UTC:
    start = start.astimezone(pytz.UTC)
if end and end.tzinfo != pytz.UTC:
    end = end.astimezone(pytz.UTC)
# Equal, non-None bounds degenerate to a point query on that timestamp.
if start and start == end:
    query.append(SQL(' AND ts = {}').format(Literal(start)))
else:
    # Half-open interval: start inclusive, end exclusive.
    if start:
        query.append(SQL(' AND ts >= {}').format(Literal(start)))
    if end:
        query.append(SQL(' AND ts < {}').format(Literal(end)))
# The ORDER BY direction is interpolated with str.format rather than a
# bound parameter; safe only because the value is one of two hard-coded
# keywords, never user input.
query.append(SQL('ORDER BY ts {}'.format(
    'DESC' if order == 'LAST_TO_FIRST' else 'ASC')))
if skip or count:
    # Missing or negative values become SQL NULL, i.e. no LIMIT/OFFSET.
    query.append(SQL('LIMIT {} OFFSET {}').format(
        Literal(None if not count or count < 0 else count),
        Literal(None if not skip or skip < 0 else skip)))
query = SQL('\n').join(query)
values = {}
# NOTE(review): the loop target rebinds topic_id._wrapped while the lookup
# reads topic_id.wrapped -- presumably a property backed by _wrapped;
# verify against the class that defines topic_id.
for topic_id._wrapped in topic_ids:
    name = id_name_map[topic_id.wrapped]
    with self.select(query, fetch_all=False) as cursor:
        # Each stored value is a JSON document; decode per row.
        values[name] = [(ts, jsonapi.loads(value))
                        for ts, value in cursor]
return values
def insert_data(ts, topic_id, data):
    """
    Append one (ts, topic_id, value) record to the enclosing ``records``
    list as a ready-to-compose SQL VALUES fragment.  All three fields are
    bound via ``Literal``, so they are safely escaped by psycopg2.

    :param ts: time stamp
    :type ts: str
    :param topic_id: topic ID
    :type topic_id: str
    :param data: data value
    :type data: any valid JSON serializable value
    :return: Returns True after insert
    :rtype: bool
    """
    value = jsonapi.dumps(data)
    # ``records`` comes from the enclosing scope; appending here defers
    # the actual INSERT to whoever flushes the list.
    records.append(SQL('({}, {}, {})').format(Literal(ts), Literal(topic_id), Literal(value)))
    return True
# Connection parameters for the superuser session used to provision the
# read-only role.  dbname/host/port/root_user/root_password come from the
# surrounding scope (presumably environment/config -- confirm).
postgres_credentials = {
    'dbname': dbname,
    'host': host,
    'port': port,
    'user': root_user,
    'password': root_password,
}
with open('/code/sql/query.sql', 'r') as fp:
    CREATE_READONLY_USER = fp.read()
# ``with connection`` wraps the statements in a transaction (commit on
# success, rollback on error); it does NOT close the connection itself.
with psycopg2.connect(**postgres_credentials) as conn:
    # Use the cursor as a context manager so it is closed deterministically
    # instead of being leaked (the original never closed it).
    with conn.cursor() as cursor:
        # Identifiers and literals are composed via psycopg2.sql, so the
        # role name and password are safely quoted, never string-pasted.
        query = sql.SQL(CREATE_READONLY_USER).format(
            database=sql.Identifier(dbname),
            role_id=sql.Identifier(ro_user),
            role_literal=sql.Literal(ro_user),
            password=sql.Literal(ro_password),
        )
        cursor.execute(query)
# Sanity-check every layer of ``text`` against the collection's stored
# structure; a mismatch means the document does not fit the schema this
# collection was created with.
# NOTE(review): ``assert`` is stripped under ``python -O``; if this is
# input validation rather than a debugging aid, raising would be safer.
for layer_name in text.layers:
    layer = text[layer_name]
    layer_struct = self._structure[layer_name]
    assert layer_struct['layer_type'] == 'attached'
    assert layer_struct['attributes'] == layer.attributes, '{} != {}'.format(
        layer_struct['attributes'], layer.attributes)
    assert layer_struct['ambiguous'] == layer.ambiguous
    assert layer_struct['parent'] == layer.parent
    assert layer_struct['enveloping'] == layer.enveloping
    assert layer_struct['serialisation_module'] == layer.serialisation_module
# Build one "(v1, v2, ...)" VALUES row for the buffered insert.
# DEFAULT lets PostgreSQL fill the column (presumably a serial primary
# key -- confirm against the table definition).
if key is None:
    key = DEFAULT
else:
    key = Literal(key)
row = [key, Literal(text_to_json(text))]
# Columns after (key, data) are metadata; absent keys fall back to the
# column's SQL DEFAULT.
for k in self.column_names[2:]:
    if k in meta_data:
        m = Literal(meta_data[k])
    else:
        m = DEFAULT
    row.append(m)
q = SQL('({})').format(SQL(', ').join(row))
self._buffered_insert_query_length += get_query_length(q)
self._insert_counter += 1
buffer.append(q)
# Flush when either the row count or the accumulated query length
# crosses its limit, keeping each INSERT statement bounded in size.
if len(buffer) >= buffer_size or self._buffered_insert_query_length >= query_length_limit:
    self._flush_insert_buffer(cursor=cursor, table_identifier=table_identifier,
                              column_identifiers=column_identifiers,
                              buffer=buffer)
def insert(self, layer, layer_type: str, meta: dict = None, loader: str = None):
    """Register a new layer row in the collection's structure table.

    All values are bound via ``Literal`` and the table name via a composed
    identifier, so nothing is string-pasted into the SQL.

    NOTE(review): ``list(meta or [])`` keeps only the *keys* of a dict
    ``meta`` -- confirm dropping the values is intended.  ``loader`` is
    unused in the visible part of this method.
    """
    self._modified = True
    meta = list(meta or [])
    with self.collection.storage.conn.cursor() as c:
        # NOTE(review): the closing paren of c.execute(...) lies beyond
        # this excerpt; the statement is truncated here.
        c.execute(SQL("INSERT INTO {} (layer_name, layer_type, attributes, ambiguous, parent, enveloping, _base, meta) "
                      "VALUES ({}, {}, {}, {}, {}, {}, {}, {});").format(
            pg.structure_table_identifier(self.collection.storage, self.collection.name),
            Literal(layer.name),
            Literal(layer_type),
            Literal(list(layer.attributes)),
            Literal(layer.ambiguous),
            Literal(layer.parent),
            Literal(layer.enveloping),
            Literal(layer._base),
            Literal(meta)
        )
If no primary key, return None
Ref: https://wiki.postgresql.org/wiki/Retrieve_primary_key_columns
'''
p_key = namedtuple('PrimaryKey', ['column', 'type'])
cur = conn.cursor()
cur.execute(sql.SQL('''
SELECT a.attname, format_type(a.atttypid, a.atttypmod) AS data_type
FROM pg_index i
JOIN pg_attribute a ON a.attrelid = i.indrelid
AND a.attnum = ANY(i.indkey)
WHERE i.indrelid = {}::regclass
AND i.indisprimary;
''').format(
sql.Literal(table)))
data = cur.fetchall()[0]
try:
return p_key(column=data[0], type=data[1])
except IndexError:
return None