import time

import psycopg2
import pytest


def check_connection(params):
    """Retry connecting to Postgres with exponential backoff, up to 20 attempts."""
    delay = 0.01
    for _ in range(20):
        try:
            with psycopg2.connect(**params) as conn:
                with conn.cursor() as cursor:
                    cursor.execute('SELECT version();')
            break
        except psycopg2.Error:
            time.sleep(delay)
            delay *= 2
    else:
        pytest.fail('Cannot start postgres server')
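
As a usage sketch (not from the original project), the helper above could gate a session-scoped pytest fixture so tests only run once the server accepts connections; the parameter values here are placeholders.

import pytest

@pytest.fixture(scope='session')
def db_params():
    # Hypothetical connection parameters; adjust to the environment under test.
    params = {'dbname': 'test', 'user': 'postgres', 'host': 'localhost', 'port': 5432}
    check_connection(params)  # fails the session early if Postgres never comes up
    return params
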
try:
    # `connection` is assumed to be an open psycopg2 connection established earlier.
    cursor = connection.cursor()
    print("Setting k12_flag on existing projects")
    update_k12_flag_for_existing_projects = """UPDATE projects_project
           SET k12_flag = 't'
         WHERE total_k12_students > 0 OR total_k12_hours > 0;"""
    cursor.execute(update_k12_flag_for_existing_projects)
    connection.commit()
except psycopg2.Error as error:
    print("Error while connecting to PostgreSQL", error)
finally:
    cursor.close()
    connection.close()
    print("PostgreSQL connection is closed")
url = asset.html_url
try:
    with self.env.cr.savepoint():
        # `fname` is assumed to be defined earlier in the surrounding method.
        self.env['ir.attachment'].sudo().create(dict(
            datas=base64.b64encode(asset.content.encode('utf8')),
            mimetype='text/css',
            type='binary',
            name=url,
            url=url,
            datas_fname=fname,
            res_model=False,
            res_id=False,
        ))
        if self.env.context.get('commit_assetsbundle') is True:
            self.env.cr.commit()
except psycopg2.Error:
    pass
return '\n'.join(asset.minify() for asset in self.stylesheets)
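
Outside the Odoo ORM, the same idea of attempting a write, swallowing psycopg2.Error, and keeping the outer transaction alive can be sketched with raw SAVEPOINTs; `demo_table` and `rows` below are hypothetical.

import psycopg2

def insert_best_effort(conn, rows):
    with conn.cursor() as cur:
        for name in rows:
            cur.execute("SAVEPOINT before_insert")
            try:
                cur.execute("INSERT INTO demo_table (name) VALUES (%s)", (name,))
            except psycopg2.Error:
                # Roll back only this statement; the surrounding transaction survives.
                cur.execute("ROLLBACK TO SAVEPOINT before_insert")
            else:
                cur.execute("RELEASE SAVEPOINT before_insert")
    conn.commit()
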
            GROUP BY product_id, company_id, location_id, lot_id, package_id, owner_id, in_date
              HAVING count(id) > 1
        ),
        _up AS (
            UPDATE stock_quant q
               SET quantity = d.quantity,
                   reserved_quantity = d.reserved_quantity
              FROM dupes d
             WHERE d.to_update_quant_id = q.id
        )
        DELETE FROM stock_quant WHERE id IN (SELECT unnest(to_delete_quant_ids) FROM dupes)
"""
try:
    with self.env.cr.savepoint():
        self.env.cr.execute(query)
except Error as e:  # `Error` is assumed to be psycopg2.Error imported at module level
    _logger.info('an error occurred while merging quants: %s', e.pgerror)
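
The SQL above begins mid-statement; as a self-contained sketch of the same writable-CTE pattern (fold duplicates into one surviving row, then delete the rest), here is a hedged example on a hypothetical measurements(sensor_id, value) table rather than stock_quant.

dedup_query = """
    WITH dupes AS (
        SELECT min(id) AS keep_id,
               array_agg(id) AS all_ids,
               sum(value) AS value
          FROM measurements
      GROUP BY sensor_id
        HAVING count(id) > 1
    ),
    _up AS (
        UPDATE measurements m
           SET value = d.value
          FROM dupes d
         WHERE m.id = d.keep_id
    )
    DELETE FROM measurements
     WHERE id IN (SELECT unnest(all_ids) FROM dupes)
       AND id NOT IN (SELECT keep_id FROM dupes)
"""
cr.execute(dedup_query)  # `cr` stands in for an open cursor
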
try:
    # The guarded statement is assumed to poll the async psycopg2
    # connection registered for this file descriptor.
    state = connection.poll()
except (psycopg2.Error, psycopg2.Warning) as error:
    if fd in self._futures and not self._futures[fd].done():
        self._futures[fd].set_exception(error)
else:
    if state == extensions.POLL_OK:
        if fd in self._futures and not self._futures[fd].done():
            self._futures[fd].set_result(True)
    elif state == extensions.POLL_WRITE:
        self._ioloop.update_handler(fd, ioloop.IOLoop.WRITE)
    elif state == extensions.POLL_READ:
        self._ioloop.update_handler(fd, ioloop.IOLoop.READ)
    elif state == extensions.POLL_ERROR:
        self._ioloop.remove_handler(fd)
        if fd in self._futures and not self._futures[fd].done():
            self._futures[fd].set_exception(
                psycopg2.Error('Poll Error'))
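
For reference, the POLL_* states come from psycopg2's asynchronous support; a synchronous busy-wait loop along the lines of the wait() example in the psycopg2 documentation shows the same state machine without Tornado.

import select

import psycopg2
from psycopg2 import extensions

def wait(conn):
    # Block until an async psycopg2 connection/query is ready, selecting on
    # the socket whenever poll() asks for a read or a write.
    while True:
        state = conn.poll()
        if state == extensions.POLL_OK:
            break
        elif state == extensions.POLL_WRITE:
            select.select([], [conn.fileno()], [])
        elif state == extensions.POLL_READ:
            select.select([conn.fileno()], [], [])
        else:
            raise psycopg2.OperationalError("poll() returned %s" % state)
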
self.dlg = EventDialog(self.iface.mainWindow(),
                       self.connection_wrapper_read,
                       self.connection_wrapper_write,
                       self.iface.mapCanvas(),
                       project_audit_table(),
                       replay_function=project_replay_function(),
                       table_map=table_map,
                       selected_layer_id=layer_id,
                       selected_feature_id=feature_id)

# Populate dialog & catch error if any.
try:
    self.dlg.populate()
except Error as e:
    QMessageBox.critical(None, "Configuration problem",
                         "Database configuration is invalid, please check the project configuration")
    r = self.onConfigure()
    # Retry if needed.
    if r == 1:
        self.connection_wrapper_read.closeConnection()
        self.connection_wrapper_write.closeConnection()
        self.onListEvents(layer_id, feature_id)
    return

self.dlg.show()
variants_to_activate.write({'active': True})
# create new product variants
for variant_ids in to_create_variants:
    new_variant = Product.create({
        'product_tmpl_id': tmpl_id.id,
        'attribute_value_ids': [(6, 0, variant_ids.ids)],
    })
# unlink or deactivate product variants
for variant in variants_to_unlink:
    try:
        with self._cr.savepoint(), tools.mute_logger('odoo.sql_db'):
            variant.unlink()
    # We catch all kinds of exceptions to be sure the operation doesn't fail.
    except (psycopg2.Error, except_orm):
        variant.write({'active': False})
return True
def getColumnMinMaxAverage(baseQuery, colName):
    """Return (min, max, avg) of `colName` over `baseQuery`, or (-1, -1, -1) on error."""
    try:
        baseQuery = baseQuery.replace(";", " ")
        # Note: colName and baseQuery are interpolated directly into the SQL,
        # so callers must ensure they are trusted (this is not injection-safe).
        sql = "SELECT min(%s), max(%s), avg(%s) FROM (%s) baseQuery" % (colName, colName, colName, baseQuery)
        cur.execute(sql)  # `cur` is a module-level cursor
        record = cur.fetchone()
        return (record[0], record[1], record[2])
    except psycopg2.Error:
        pass
    return (-1, -1, -1)
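
Because column and table names cannot be bound as query parameters, a safer variant of the helper above would compose identifiers with psycopg2.sql (available since psycopg2 2.7); this sketch assumes the source is a plain table name rather than an arbitrary subquery.

import psycopg2
from psycopg2 import sql

def get_column_stats(cur, table, column):
    query = sql.SQL("SELECT min({col}), max({col}), avg({col}) FROM {tbl}").format(
        col=sql.Identifier(column),
        tbl=sql.Identifier(table),
    )
    try:
        cur.execute(query)
        return cur.fetchone()
    except psycopg2.Error:
        return (-1, -1, -1)
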
def add(self, connection):
    """Add a new connection to the pool.

    :param connection: The connection to add to the pool
    :type connection: psycopg2.extensions.connection
    :raises: PoolFullError

    """
    if id(connection) in self.connections:
        raise ValueError('Connection already exists in pool')

    if len(self.connections) == self.max_size:
        LOGGER.warning('Race condition found when adding new connection')
        try:
            connection.close()
        except (psycopg2.Error, psycopg2.Warning) as error:
            LOGGER.error("Error closing the conn that can't be used: %s",
                         error)
        raise PoolFullError(self)

    with self._lock:
        self.connections[id(connection)] = Connection(connection)
    LOGGER.debug('Pool %s added connection %s', self.id, id(connection))
def close(self):
    self.validate_thread_sharing()
    if self.connection is None:
        return

    try:
        self.pool.put(self.connection)
        self.connection = None
        # self.connection.close()
        # self.connection = None
    except Database.Error:
        # In some cases (database restart, network connection lost, etc.)
        # the connection to the database is lost without Django being
        # notified. If we don't set self.connection to None, the error
        # will recur on every request.
        self.connection = None
        logger.warning('psycopg2 error while closing the connection.',
                       exc_info=sys.exc_info())
        raise