How to fix this dict and PostgreSQL - python-3.x

I just want to update the keys and values from my dict.
This is for updating PostgreSQL using a Python dict:
data = {
    'created_by': 'obama',
    'last_updated_by': 'nandu',
    'effective_from': '2019-12-30',
    'effective_to': '2017-12-30'
}
lst = list()
lst1 = list()
for key, value in data.items():
    lst.append(key)
    lst1.append(value)
keys = tuple(lst)
values = tuple(lst1)
update = "UPDATE table_name SET %s = %s where name = 'kumar'"
cur.execute(update, (keys, values))
I want to use the keys as field names and the values as values, but the field names (keys) are being passed as quoted strings, so I get a syntax error.

You cannot use tuples for SET: parameter substitution fills in values, not column names. The simplest approach is to loop over the dict and set each key one at a time, interpolating the column name into the statement:
data = {'created_by': 'obama',
        'last_updated_by': 'nandu',
        'effective_from': '2019-12-30',
        'effective_to': '2017-12-30',
        }
for key, value in data.items():
    # Column names cannot be passed as query parameters, so the key is
    # formatted into the SQL string; only the value goes through %s.
    update = "UPDATE table_name SET {} = %s where name = 'kumar'".format(key)
    cur.execute(update, (value,))
Or, following the suggestion, build a single statement with f-strings:
data = {'created_by': 'obama',
        'last_updated_by': 'nandu',
        'effective_from': '2019-12-30',
        'effective_to': '2017-12-30',
        }
arguments = ','.join([f"{key}='{val}'" for key, val in data.items()])
update = f"UPDATE table_name SET {arguments} where name = 'kumar'"
cur.execute(update)
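Both variants interpolate strings straight into the SQL. If the keys or values can come from untrusted input, a safer sketch (assuming psycopg2 and the same table_name and name = 'kumar' filter as above) uses psycopg2's sql module so identifiers and values are quoted by the driver:

from psycopg2 import sql

# Build "col1 = %s, col2 = %s, ..." with safely quoted column names.
assignments = sql.SQL(', ').join(
    sql.SQL('{} = {}').format(sql.Identifier(key), sql.Placeholder())
    for key in data
)
update = sql.SQL('UPDATE {} SET {} WHERE name = %s').format(
    sql.Identifier('table_name'), assignments
)
cur.execute(update, (*data.values(), 'kumar'))

This also issues a single UPDATE for all keys instead of one statement per key.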

Related

Get sqlalchemy table Model and Field objects from strings?

Very simple: I'm trying to run a query in Python 3 SQLAlchemy to delete some records, given string names of the table and field to query against.
1. How do you get the table object from a string?
2. Given 1, how do you run a query via the ORM with just a string of the field name?
I would assume all ORMs have an internal mapping or a get-like method keyed by name.
json_config = [
    {"table": "tableA",
     "field": "modified_on",
     "expires": 30},
    {"table": "tableB",
     "field": "event_on",
     "expires": 30}
]
for table_conf_item in self.json_config:
    table_name = table_conf_item["table"]
    field_name = table_conf_item["field"]
    expire_after = table_conf_item["expires"]
    table_obj = self.orm_session.TABLES[table_name]
    field_obj = self.orm_session.TABLES[table_name].FIELDS[field_name]
    result = self.orm_session.delete(table_obj).where(field_obj < expire_after)
    self.orm_session.commit()
    print(f"{table_name}: removed {result.row_count} objects")
Given the table's name, you can use reflection to get a Table object. Using SQLAlchemy's core layer, this is reasonably straightforward:
import sqlalchemy as sa

engine = sa.create_engine(...)
metadata = sa.MetaData()
tbl = sa.Table(name_of_table, metadata, autoload_with=engine)
If you want to work with multiple tables, it may be more efficient to store them in a MetaData instance for later access:
metadata = sa.MetaData()
metadata.reflect(engine, only=list_of_table_names)
tbl = metadata.tables[name_of_table]
Once you have a Table object you can reference columns by name like this: tbl.c[name_of_field].
Full example:
import sqlalchemy as sa

# Setup
engine = sa.create_engine('sqlite://', echo=True, future=True)
tbl = sa.Table(
    't',
    sa.MetaData(),
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('foo', sa.Integer),
)
tbl.create(engine)
with engine.begin() as conn:
    vals = [42, 43, 42, 43, 56, 87, 89]
    conn.execute(tbl.insert(), [{'foo': v} for v in vals])
del tbl

# Reflect the table.
metadata = sa.MetaData()
metadata.reflect(engine, only=['t'])
tbl = metadata.tables['t']

# Define some statements.
q1 = sa.select(tbl).where(tbl.c['foo'] == 42)
q2 = sa.select(tbl.c['id'], tbl.c['foo']).where(tbl.c['foo'] == 43)
q3 = sa.delete(tbl).where(tbl.c['foo'] != 42)

# Execute the statements.
with engine.connect() as conn:
    rows = conn.execute(q1)
    for row in rows:
        print(row)
    print()
    rows = conn.execute(q2)
    for row in rows:
        print(row)
    print()

with engine.begin() as conn:
    conn.execute(q3)

with engine.connect() as conn:
    rows = conn.execute(q1)
    for row in rows:
        print(row)
    print()
Doing the same through the ORM layer is more complicated, as table and column names must be mapped to ORM entity classes (models) and their attributes. This replicates the previous example for a simple mapping (it assumes the same initial data as above).
import sqlalchemy as sa
from sqlalchemy import orm

Base = orm.declarative_base()

class Thing(Base):
    __tablename__ = 't'
    id = sa.Column(sa.Integer, primary_key=True)
    thing_foo = sa.Column('foo', sa.Integer)

engine = sa.create_engine(...)
Base.metadata.create_all(engine)
Session = orm.sessionmaker(engine, future=True)

tablename = 't'
columnname = 'foo'

with Session.begin() as s:
    # Get the mappers for the Base class.
    mappers = Base.registry.mappers
    # Get the mapper for our table.
    mapper = next(m for m in mappers if m.entity.__tablename__ == tablename)
    # Get the entity class (Thing).
    entity = mapper.entity
    # Get the column from the Table.
    table_column = mapper.selectable.c[columnname]
    # Get the mapper property that corresponds to the column
    # (the entity attribute may have a different name to the
    # column in the database).
    mapper_property = mapper.get_property_by_column(table_column)
    # Get the queryable entity attribute (Thing.thing_foo).
    attr = mapper.all_orm_descriptors[mapper_property.key]
    q = sa.select(entity).where(attr != 42)
    entities = s.scalars(q)
    for entity in entities:
        s.delete(entity)

with Session() as s:
    for thing in s.scalars(sa.select(Thing)):
        print(thing.id, thing.thing_foo)
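To tie this back to the json_config loop in the question, a minimal core-layer sketch (hypothetical glue code; it assumes each expires value is directly comparable with the stored field) could look like:

import sqlalchemy as sa

engine = sa.create_engine(...)  # connection details elided
metadata = sa.MetaData()
metadata.reflect(engine, only=[item["table"] for item in json_config])

with engine.begin() as conn:
    for item in json_config:
        tbl = metadata.tables[item["table"]]
        field = tbl.c[item["field"]]
        result = conn.execute(sa.delete(tbl).where(field < item["expires"]))
        print(f'{item["table"]}: removed {result.rowcount} objects')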

How to insert/update a PostgreSQL table using a WHERE clause?

I'm using the psycopg2 module to write into my database (PostgreSQL). I'm trying to insert or update a column/row value based on an equivalence statement such as:
INSERT x INTO TABLE A WHERE variable_x MATCHES/EQUAL TO value y IN SAME TABLE;
Code:
def update_existing_record(dev_eui, device_serial_num):
    cur = con.cursor()
    con.autocommit = True
    sql_command = " IF EXISTS (dev_eui == %s) SET severn_db.device_serial_num VALUES (%s)"
    #sql_command = "INSERT INTO severn_db (device_serial_num) WHERE EXISTS (dev_eui == dev_eui) VALUES (%s) "
    sql_values = (dev_eui, device_serial_num)
    cur.execute(sql_command, sql_values)
    cur.close()
All I am trying to do is insert or update a table column that matches the condition x == y (where x is a parameter passed into the Python function as a local variable).
Any help?
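For reference, a minimal sketch of what this can look like (assuming, as in the snippet above, a table severn_db with columns dev_eui and device_serial_num): a plain UPDATE ... WHERE covers the update case, and PostgreSQL's INSERT ... ON CONFLICT covers insert-or-update when dev_eui has a unique constraint:

def update_existing_record(dev_eui, device_serial_num):
    cur = con.cursor()
    con.autocommit = True
    # Update the row(s) where dev_eui matches the passed-in value.
    cur.execute(
        "UPDATE severn_db SET device_serial_num = %s WHERE dev_eui = %s",
        (device_serial_num, dev_eui),
    )
    # Upsert variant (requires a unique constraint on dev_eui):
    # cur.execute(
    #     "INSERT INTO severn_db (dev_eui, device_serial_num) VALUES (%s, %s) "
    #     "ON CONFLICT (dev_eui) DO UPDATE SET device_serial_num = EXCLUDED.device_serial_num",
    #     (dev_eui, device_serial_num),
    # )
    cur.close()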

update statement using loop over tuple of query and data fails in psycopg2

I have created a mini functional pipeline which builds an update statement with a regex and then passes the statement and the data to psycopg2 to execute.
If I copy-paste the statement outside of the loop it works; if I try to loop over all statements I get an error.
from functools import partial

# Function to create the statement
def psycopg2_regex_replace_chars(table, col, regex_chars_old, char_new):
    query = "UPDATE {} SET {} = regexp_replace({}, %s, %s, 'g');".format(table, col, col)
    data = (regex_chars_old, char_new)
    return (query, data)

# Create functions with intelligible names
replace_separators_with_space = partial(psycopg2_regex_replace_chars, regex_chars_old='[.,/[-]]', char_new=' ')
replace_amper_with_and = partial(psycopg2_regex_replace_chars, regex_chars_old='&', char_new='and')

# Create the list of functions
funcs_edit = [replace_separators_with_space,
              replace_amper_with_and]
So far, so good.
This works
stmt = "UPDATE persons SET name = regexp_replace(name, %s , %s, 'g');"
data = ('[^a-zA-z0-9]', ' ')
cur.execute(stmt, data)
conn.commit()
This fails
tables = ["persons"]
cols = ["name", "dob"]
for table in tables:
    for col in cols:
        for func in funcs_edit:
            query, data = func(table=table, col=col)
            cur.execute(query, data)
            conn.commit()
error
<ipython-input-92-c8ba5d469f88> in <module>
6 for func in funcs_edit:
7 query, data = func(table=table, col=col)
----> 8 cur.execute(query, data)
9 conn.commit()
ProgrammingError: function regexp_replace(date, unknown, unknown, unknown) does not exist
LINE 1: UPDATE persons SET dob = regexp_replace(dob, '[.,/[-]]' , ' ...
^
HINT: No function matches the given name and argument types. You might need to add explicit type casts.
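The hint points at the actual problem: name is text, but dob is a date column, and regexp_replace is only defined for text arguments, so the generated statement fails on dob. One hedged fix (an assumption: it only works if the rewritten string still parses as a valid value for the column's type, since the result is assigned back) is to cast the column to text inside the generated statement:

def psycopg2_regex_replace_chars(table, col, regex_chars_old, char_new):
    # {}::text makes regexp_replace(text, text, text, text) resolve for any column type;
    # PostgreSQL then assignment-casts the text result back to the column's type.
    query = "UPDATE {} SET {} = regexp_replace({}::text, %s, %s, 'g');".format(table, col, col)
    data = (regex_chars_old, char_new)
    return (query, data)

In practice it is often cleaner to run these replacements only on text columns and skip columns like dob entirely.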

How to create update query with QSqlQuery

I'm trying to create an update query in Python 3/PyQt 5.10/SQLite. A select/insert query made the same way runs fine. The fields and the corresponding record exist.
def updateRecords():
    pTable = "tempbooks"
    pDict = {
        "Loc": "PyQt121",
        "BoekNr": "dfdf",
        "BoekTitel": "eeee",
        "BoekBedrag": 999
    }
    pFilter = " WHERE Loc = 'PyQt'"
    query = QSqlQuery()
    columns = ', '.join(pDict.keys())
    placeholders = ':' + ', :'.join(pDict.keys())
    sql = 'UPDATE %s SET (%s) VALUES (%s) %s' % (pTable, columns, placeholders, pFilter)
    query.prepare(sql)
    for key, value in pDict.items():
        query.bindValue(":" + key, value)
    print(sql)
    query.exec_()
    print(query.lastError().databaseText())
    return query.numRowsAffected()
The generated SQL is UPDATE tempbooks SET (Loc, BoekNr, BoekTitel, BoekBedrag) VALUES (:Loc, :BoekNr, :BoekTitel, :BoekBedrag) WHERE Loc = 'PyQt'.
query.lastError().databaseText() gives me "No Query", and the number of updated rows is -1.
The correct syntax for an update query:
UPDATE tablename
SET col1 = val1,
    col2 = val2,
    col3 = val3
WHERE condition
Probably query.prepare(sql) is returning False because of invalid syntax.
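Applied to the snippet above, a minimal sketch of the fix (assuming the same pDict, pTable, and pFilter) builds col = :col pairs instead of the SET (...) VALUES (...) form:

assignments = ', '.join(f'{key} = :{key}' for key in pDict)
sql = 'UPDATE %s SET %s%s' % (pTable, assignments, pFilter)
# e.g. UPDATE tempbooks SET Loc = :Loc, BoekNr = :BoekNr, ... WHERE Loc = 'PyQt'
query.prepare(sql)
for key, value in pDict.items():
    query.bindValue(':' + key, value)
query.exec_()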

How to read and insert bytea columns using psycopg2?

I am working on a Python script to replicate some PostgreSQL tables from one environment to another (it does a little more than pg_dump). It works except when I am copying a table that has a bytea data type.
I read the source table data into memory, then I dump it into the target database with concatenated inserts.
Here is my method that produces an insert statement:
def generateInsert(self, argCachedRow):
    colOrd = 0
    valClauseList = []
    hasBinary = False
    for colData in argCachedRow:
        colOrd += 1
        colName = self.colOrdLookup.get(colOrd)
        col = self.colLookup.get(colName)
        dataType = col.dataType
        insVal = None
        if colData is not None:
            strVal = str(colData)
            if dataType.useQuote:
                if "'" in strVal:
                    strVal = strVal.replace("'", "''")
                insVal = "'%s'" % strVal
            else:
                if dataType.binary:
                    hasBinary = True
                    #insVal = psycopg2.Binary(colData)
                #else:
                insVal = strVal
        else:
            insVal = "NULL"
        valClauseList.append(insVal)
    valClause = ", ".join(valClauseList)
    if hasBinary:
        valClause = psycopg2.Binary(valClause)
    result = "INSERT INTO %s VALUES (%s)" % (self.name, valClause)
    return result
which works with every table that doesn't have binary data.
I also tried (intuitively) to wrap just the binary column data in psycopg2.Binary (the commented-out line) rather than applying it to the whole row value list, but that didn't work either.
Here is my simple DataType wrapper, which is loaded by reading Postgres' information_schema tables:
class DataType(object):
    def __init__(self, argDispName, argSqlName, argUseQuote, argBin):
        self.dispName = argDispName
        self.sqlName = argSqlName
        self.useQuote = argUseQuote
        self.binary = argBin
How do I read and insert bytea columns using psycopg2?
If you have this database structure:
CREATE TABLE test (a bytea,
                   b int,
                   c text)
then inserting binary data into the request can be done like so, without any wrappers:
bin_data = b'bytes object'
db = psycopg2.connect(*args) # DB-API 2.0
c = db.cursor()
c.execute('''INSERT INTO test VALUES (%s, %s, %s)''', (bin_data, 1337, 'foo'))
c.execute('''UPDATE test SET a = %s''', (bin_data + b'1',))
Then, when you query it:
c.execute('''SELECT a FROM test''')
You'll receive a memoryview, which is easily converted back to bytes:
mview = c.fetchone()
new_bin_data = bytes(mview)
print(new_bin_data)
Output: b'bytes object1'
Also, I'd suggest you not to assemble queries by string formatting. psycopg2's built-in parameter substitution is much more convenient and you don't have to worry about validating data to protect from SQL injections.
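For the replication script specifically, one hedged sketch (assuming rows is a list of tuples already read from the source table, and the test table above) is to let psycopg2 do all the adaptation, bytea included, via execute_values from psycopg2.extras instead of concatenating INSERT strings:

from psycopg2.extras import execute_values

# rows: list of tuples read from the source, e.g. [(b'\x00\x01', 1, 'x'), ...]
# psycopg2 adapts bytes objects to bytea automatically; no manual escaping needed.
execute_values(c, "INSERT INTO test (a, b, c) VALUES %s", rows)
db.commit()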
