Python - Deleting Treeview record from table & SQLite

I'm currently trying to make a 'delete record' button that deletes a treeview record. This part works within the treeview table; however, I am unable to get the record in the SQLite database to be deleted as well.
def deleterec():
    try:
        curItem = tree1.focus()
        valueList = tree1.item(curItem, 'values')
        global selectedItem2
        selectedItem2 = valueList[0]
        prodID = tree1.selection()[0]  # get selected item from tree
        messageDelete = messagebox.askyesno("Confirmation", "Do you want to permanently delete this record?")
        if messageDelete > 0:
            # Remove one record
            tree1.delete(prodID)
            print(selectedItem2)  # this prints the accurate productID that's in my SQLite database
            global conn87
            conn87 = sqlite3.connect('Sqlite_Python.db')
            # Create cursor instance
            global c87
            c87 = conn87.cursor()
            print("Successfully Connected to SQLite")
            sqlite_delete_queryL = str(f"""DELETE from stockDatabase WHERE productID='%s';""") % selectedItem2
            c87.execute(str(sqlite_delete_queryL))
            c87.close()
    except sqlite3.Error as error:
        print("Error: ", error)
    finally:
        if conn87:
            conn87.close()
            print("sqlite connection is closed")
My code is not outputting any errors, and it confirms that the connection is successfully made and then closed; however, the delete never takes effect in the database.
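A likely culprit: sqlite3 only persists changes when the transaction is committed, and the code above closes the connection without ever calling conn87.commit(), so the DELETE is rolled back. A minimal sketch of the database part with the commit added and the query parameterized (the table name and productID value are taken from the question):

import sqlite3

def delete_product(selected_item):
    # A minimal sketch: delete one row and persist it with commit().
    conn = sqlite3.connect('Sqlite_Python.db')
    try:
        cur = conn.cursor()
        # A parameterized query avoids hand-built SQL and quoting problems.
        cur.execute("DELETE FROM stockDatabase WHERE productID = ?;", (selected_item,))
        conn.commit()  # without commit() the DELETE is rolled back on close
        cur.close()
    except sqlite3.Error as error:
        print("Error: ", error)
    finally:
        conn.close()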

Related

relation does not exist in PostgreSQL but already exists

I've read a lot of articles about my problem, but none of them solved it. You can see my code here:
DATABASE_URL = os.environ.get('url_of_my_db')
con = None
try:
    con = psycopg2.connect(DATABASE_URL)
    cur = con.cursor()
    print('PostgreSQL database version:')
    #cur.execute('SELECT version()')
    #cur.execute('SELECT * FROM qwerty')
    #cur.execute(sql.SQL('SELECT * FROM {}').format(sql.Identifier('qwerty')))
    #cur.execute(sql.SQL("INSERT INTO {} (chat_id, username, created_on) VALUES (8985972942, vovakirdan, 2022-01-05)").format(sql.Identifier('users')))
    cur.execute("""INSERT INTO users (chat_id, username, created_on)
                   VALUES (3131,
                           vovakirdan,
                           2022-01-05)""")
    # display the PostgreSQL database server version
    db_version = cur.fetchone()
    print(db_version)
    # close the communication with the HerokuPostgres
    cur.close()
except Exception as error:
    print('Cause: {}'.format(error))
finally:
    # close the communication with the database server by calling close()
    if con is not None:
        con.close()
        print('Database connection closed.')
and in my DB the table named "users" (created without quotes) does exist, but I still get this error:
error
...relation "users" does not exist
All the commented-out code fails with the same error, except SELECT version(), which works perfectly and proves that the connection works.
The problem was that PostgreSQL wants me to use SELECT column FROM schema.table instead of SELECT column FROM table. And that's all. Thanks everyone.
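For completeness, a sketch of the two usual fixes, assuming the table lives in a schema that is not on the search_path; myschema below is a placeholder for the actual schema name, not something from the question:

from psycopg2 import sql

# Option 1: schema-qualify the table, with safely quoted identifiers.
cur.execute(sql.SQL("SELECT * FROM {}.{}").format(
    sql.Identifier('myschema'), sql.Identifier('users')))

# Option 2: put the schema on the search_path for this session,
# so unqualified names like "users" resolve against it.
cur.execute("SET search_path TO myschema, public")
cur.execute("SELECT * FROM users")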

Python3 pika channel.basic_consume() causing MySQL too many connections

I am using pika to make a connection to RabbitMQ and consume messages. Once I start the script on the Ubuntu prod environment it works as expected, but it keeps opening MySQL connections and never closes them, ending up with Too many connections on the MySQL server.
I would appreciate any recommendations on the code below, as I cannot understand what is going wrong. Thank you in advance.
The flow is the following:
Start pika on Python 3
Subscribe to a channel and wait for messages
In the callback, do various validations and save or update data inside MySQL
The screenshot from Ubuntu htop at the end of the question shows the problem: new MySQL connections keep being added at the top of the list
Pika version = 0.13.0
For MySQL I use pymysql.
Pika Script
def main():
    credentials = pika.PlainCredentials(tunnel['queue']['name'], tunnel['queue']['password'])
    while True:
        try:
            cp = pika.ConnectionParameters(
                host=tunnel['queue']['host'],
                port=tunnel['queue']['port'],
                credentials=credentials,
                ssl=tunnel['queue']['ssl'],
                heartbeat=600,
                blocked_connection_timeout=300
            )
            connection = pika.BlockingConnection(cp)
            channel = connection.channel()

            def callback(ch, method, properties, body):
                if 'messageType' in properties.headers:
                    message_type = properties.headers['messageType']
                    if events.get(message_type):
                        result = Descriptors._reflection.ParseMessage(events[message_type]['decode'], body)
                        if result:
                            result = protobuf_to_dict(result)
                            model.write_response(external_response=result, message_type=message_type)
                    else:
                        app_log.warning('Message type not in allowed list = ' + str(message_type))
                        app_log.warning('continue listening...')

            channel.basic_consume(callback, queue=tunnel['queue']['name'], no_ack=True)
            try:
                channel.start_consuming()
            except KeyboardInterrupt:
                channel.stop_consuming()
                connection.close()
                break
        except pika.connection.exceptions.ConnectionClosed as e:
            app_log.error('ConnectionClosed :: %s' % str(e))
            continue
        except pika.connection.exceptions.AMQPChannelError as e:
            app_log.error('AMQPChannelError :: %s' % str(e))
            continue
        except Exception as e:
            app_log.error('Connection was closed, retrying... %s' % str(e))
            continue

if __name__ == '__main__':
    main()
Inside the script I have a model that does inserts or updates in the database; code below:
def write_response(self, external_response, message_type):
    table_name = events[message_type]['table_name']
    original_response = external_response[events[message_type]['response']]
    if isinstance(original_response, list):
        external_response = []
        for o in original_response:
            record = self.map_keys(o, message_type, events[message_type].get('values_fix', {}))
            external_response.append(self.validate_fields(record))
    else:
        external_response = self.map_keys(original_response, message_type, events[message_type].get('values_fix', {}))
        external_response = self.validate_fields(external_response)
    if not self.mysql.open:
        self.mysql.ping(reconnect=True)
    with self.mysql.cursor() as cursor:
        if isinstance(original_response, list):
            for e in external_response:
                id_name = events[message_type]['id_name']
                filters = {id_name: e[id_name]}
                self.event(
                    cursor=cursor,
                    table_name=table_name,
                    filters=filters,
                    external_response=e,
                    message_type=message_type,
                    event_id=e[id_name],
                    original_response=e  # not required here
                )
        else:
            id_name = events[message_type]['id_name']
            filters = {id_name: external_response[id_name]}
            self.event(
                cursor=cursor,
                table_name=table_name,
                filters=filters,
                external_response=external_response,
                message_type=message_type,
                event_id=external_response[id_name],
                original_response=original_response
            )
    cursor.close()
    self.mysql.close()
    return
On Ubuntu I use systemd to run the script and restart it in case something goes wrong; below is the systemd unit file:
[Unit]
Description=Pika Script
Requires=stunnel4.service
Requires=mysql.service
Requires=mongod.service
[Service]
User=user
Group=group
WorkingDirectory=/home/pika_script
ExecStart=/home/user/venv/bin/python pika_script.py
Restart=always
[Install]
WantedBy=multi-user.target
(Image from Ubuntu htop showing how MySQL connections keep being added to the list and are never closed.)
Error
tornado_mysql.err.OperationalError: (1040, 'Too many connections')
I have found the issue; posting in case it helps somebody else.
The problem was that mysqld went into an infinite loop trying to create indexes for a specific database; once I found which database it was, I could see it kept trying and failing, again and again.
The solution was to remove the database and recreate it; the mysqld process went back to normal, and the infinite index-creation loop disappeared as well.
I would say increasing the connection limit may solve your problem temporarily.
1st, find out why the application is not closing connections after completing its task (see the sketch after this list).
2nd, check for any slow queries/calls on the DB and fix them if any.
3rd, if there are no slow queries/calls on the DB and the application closes its connection/thread immediately after completing the task, consider playing with "wait_timeout" on the MySQL side.
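As an illustration of the first point, a minimal sketch of opening a short-lived pymysql connection per unit of work and guaranteeing it is closed even when an error occurs; the host, credentials, and table names here are placeholders, not taken from the question:

import pymysql

def save_record(record):
    # Open a connection for this unit of work only.
    conn = pymysql.connect(host='localhost', user='app',
                           password='secret', database='appdb')
    try:
        with conn.cursor() as cursor:
            cursor.execute(
                "INSERT INTO events (event_id, payload) VALUES (%s, %s)",
                (record['id'], record['payload'])
            )
        conn.commit()
    finally:
        # Runs on success and on failure, so connections never leak.
        conn.close()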
According to this answer, if you have MySQL 5.7 or 5.8:
It is worth knowing that if you run out of usable disc space on your server partition or drive, this will also cause MySQL to return this error. If you're sure it's not the actual number of users connected, then the next step is to check that you have free space on your MySQL server drive/partition.
From the same thread: you can inspect and increase the number of MySQL connections, as shown below.
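A sketch of what inspecting and raising the limit can look like from Python (the same statements work from the mysql client); the connection parameters are placeholders, SET GLOBAL requires the SUPER privilege (SYSTEM_VARIABLES_ADMIN on MySQL 8.0), and the new value does not survive a server restart:

import pymysql

conn = pymysql.connect(host='localhost', user='root', password='secret')
try:
    with conn.cursor() as cursor:
        # How many connections the server allows, and how many are in use.
        cursor.execute("SHOW VARIABLES LIKE 'max_connections'")
        print(cursor.fetchone())
        cursor.execute("SHOW STATUS LIKE 'Threads_connected'")
        print(cursor.fetchone())
        # Raise the cap until the leak is fixed (lost on restart; make it
        # permanent in my.cnf under [mysqld] with max_connections=500).
        cursor.execute("SET GLOBAL max_connections = 500")
finally:
    conn.close()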

psycopg2 named cursor withhold=True

I am using a named cursor to fetch 200K+ rows, with the attribute withhold=True so that I can iterate by fetching many (50K) at a time - but my cursor is not persisting...
Here is the error / stacktrace
Traceback (most recent call last):
  File "/home/me/code/etl/etl.py", line 179, in main
    _pg_data = _fetch(_some)
psycopg2.ProgrammingError: named cursor isn't valid anymore

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/me/code/etl/etl.py", line 330, in <module>
    main()
  File "/home/me/code/etl/etl.py", line 271, in main
    logging.error(Fore.LIGHTRED_EX + e + Fore.RESET, exc_info=True)
TypeError: must be str, not ProgrammingError
Here is my code
from colorama import Fore
from datetime import datetime
import argparse, logging, psycopg2, pyodbc, sys, time, yaml
import _classes.Utils as cpu
def main():
_cfg_path = "/home/me/code/etl/db.yml"
with open(_cfg_path, 'r') as _ymlfile:
_cfg = yaml.load(_ymlfile, Loader=yaml.CLoader)
# create a connection to the database
_conn = psycopg2.connect("host={0} dbname={1} user={2} password={3} port={4}".format(_cfg['local_postgres']['host'], _cfg['local_postgres']['db'],
_cfg['local_postgres']['user'], _cfg['local_postgres']['passwd'],
_cfg['local_postgres']['port']))
_curs_pgsql = _conn.cursor()
_curs_pgsql.callproc('usp_outbound', ['curs'])
_curs2_pgsql = _conn.cursor('curs', withhold=True)
_push_date = datetime.now().strftime("%Y-%m-%d")
_some = 50000
_fetch = _curs2_pgsql.fetchmany
while True:
_pg_data = _fetch(_some)
if not _pg_data:
break
for _row in _pg_data:
_params = ()
_sql = "INSERT INTO dbo.tbl VALUES (?, ?, ?)"
_params = (_row[0], _row[1], _row[2])
# ...insert into destination database
# ...now update source database and set the push and push date flags
_curs_pgsql.execute("UPDATE products SET pushed = TRUE, pushed_date = (%s) WHERE id = (%s)", (_push_date, _row[2],))
_conn.commit()
if _conn:
# close cursor / close the communication with the PostgreSQL database server
_curs2_pgsql.close()
_curs_pgsql.close()
_conn.close()
Clearly I am missing something with my named cursor and how it's supposed to be defined...
According to the documentation -
Set the value before calling execute() or use the connection.cursor() withhold parameter, otherwise the value will have no effect.
... ... ...
Trying to fetch from a named cursor after a commit() or to create a named cursor when the connection is in autocommit mode will result in an exception. It is possible to create a WITH HOLD cursor by specifying a True value for the withhold parameter to cursor() or by setting the withhold attribute to True before calling execute() on the cursor.
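In code, the two routes the documentation describes look roughly like this (a sketch with placeholder names, not code from the question):

# Either ask for WITH HOLD when creating the named cursor...
curs = conn.cursor('curs', withhold=True)

# ...or set the attribute before the first execute() on the cursor.
curs = conn.cursor('curs')
curs.withhold = True
curs.execute("SELECT * FROM products")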
What am I missing?
The code seems reasonable. It's hard to say what the stored procedure (usp_outbound) might be doing though. Your named cursor is being created after the procedure - is that creating it? Is something happening there that might close it? Perhaps the stored procedure needs a WITH HOLD?
Try reorganizing the code to something like this and see if it helps (or you get an error that provides a hint).
with psycopg2.connect("your_connection_string") as _conn:
    # first (unnamed) cursor
    _curs_pgsql = _conn.cursor()
    with _conn.cursor(name='curs', withhold=True) as _curs2_pgsql:
        _curs2_pgsql.itersize = 50000  # fetch rows from database in batches of this value
        _push_date = datetime.now().strftime("%Y-%m-%d")
        _curs_pgsql.callproc('usp_outbound', ['curs'])
        for _row in _curs2_pgsql:
            _sql = "INSERT INTO dbo.tbl VALUES (?, ?, ?)"
            _params = (_row[0], _row[1], _row[2])
            # ...insert into destination database
            # ...now update source database and set the push and push date flags
            _curs_pgsql.execute("UPDATE products SET pushed = TRUE, pushed_date = (%s) WHERE id = (%s)", (_push_date, _row[2],))
# if there are no errors, the connection will automatically
# commit() here at the end of the with block (but remain open)
_conn.close()

How do I fix this 'no such table' error in my database code?

I am trying to run a database for storing certain data. Unfortunately, when I run my code in the terminal, it throws an error stating "no such table: analytics_sessions". What must I do/implement within my code in order to fix this issue?
I have already connected this code to a database file. Nothing is inside the file, but it does exist.
This is my python3 code:
from ._db import db_connect
import contextlib

#
# Game sessions -- measuring how long players play each of our games
# for at a stretch.
#

def post_game_session_info(game_id, start_datetime, duration_sec, user_id):
    # TODO: Procure user_ids so we can link against the 'users' table.
    # TODO: Add client time!
    # TODO: Ensure datetime is stored in UTC.
    with db_connect() as model_db_connection:
        with contextlib.closing(model_db_connection.cursor()) as cursor:
            cursor.execute(
                "insert into analytics_sessions (game_id, start_server_date_time, duration_sec, user_id) "
                "values (?,?,?,?)",
                (game_id, str(start_datetime), duration_sec, user_id)
            )
This is the terminal error message that is thrown when I try running the code:
File "/Users/elliotfayman/Documents/GitHub/whale-beta/model/analytics.py", line 19, in post_game_session_info
(game_id, str(start_datetime), duration_sec, user_id)
sqlite3.OperationalError: no such table: analytics_sessions
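Since the question says the database file exists but is empty, the error most likely just means the schema was never created: connecting to SQLite creates the file, not the tables. A minimal sketch of creating the missing table before the first insert; the column types are guesses inferred from the insert statement, not something from the question:

import sqlite3

def ensure_schema(db_path):
    # Create the table the insert expects, if it does not exist yet.
    # Column types are assumptions inferred from the insert statement.
    conn = sqlite3.connect(db_path)
    try:
        conn.execute(
            "CREATE TABLE IF NOT EXISTS analytics_sessions ("
            "  id INTEGER PRIMARY KEY AUTOINCREMENT,"
            "  game_id INTEGER,"
            "  start_server_date_time TEXT,"
            "  duration_sec REAL,"
            "  user_id INTEGER"
            ")"
        )
        conn.commit()
    finally:
        conn.close()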

Error while fetching data from PostgreSQL column "user2" does not exist

My Django views.py passes an email address to a python function. The python function queries a PostgreSQL table using a where clause. The where clause is equal to the email address that was passed in (someuser). My goal is to retrieve the userid value for this email address. But I get an error message telling me that there is no column called "user2", which is correct (there is not). But why does the select see that as an existing column?
I print the email out only to verify that it got passed in:
someuser for select= user2#gmail.com
Error while fetching data from PostgreSQL column "user2" does not exist
LINE 1: SELECT userid from accounts_user WHERE email = user2#gmail.c...
I have run this locally on my Windows 10 box as well as on AWS Cloud9 with the same result. It tells me that no column exists that begins with the email address up to the '#' sign.
import psycopg2

def get_userid(someuser):
    try:
        connection = psycopg2.connect("bla, bla")
        cursor = connection.cursor()
        print("someuser for select= ", someuser)
        postgreSQL_select_Query = ("""SELECT userid from accounts WHERE email = %s;""" % (someuser))
        cursor.execute(postgreSQL_select_Query)
        mobile_records = cursor.fetchall()
        for row in mobile_records:
            print("userid = ", row[0], )
    except (Exception, psycopg2.Error) as error:
        print("Error while fetching data from PostgreSQL", error)
    finally:
        if (connection):
            cursor.close()

if __name__ == '__main__':
    get_userid()
table has this:
id | email | userid
----+------------------------+--------
18 | user2#gmail.com | u48923
I expect to get the userid of "u48923" after running the select.
Change it to the following:
postgreSQL_select_Query = ("""SELECT userid from accounts WHERE email = '%s';""" % (someuser))
But a better way is to use the built-in functionality of psycopg2 and write it as follows:
cursor.execute("""SELECT userid from accounts WHERE email = %s;""", (someuser,))
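Note the trailing comma in (someuser,): execute() expects a sequence (or mapping) of parameters, so the comma is what makes the argument a one-element tuple rather than just a parenthesized string.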
Psycopg2 Documentation
def getEmail(someuser):
    print("received someuser in 'run_select' from views.py= ", someuser)
    try:
        connection = psycopg2.connect(user="my user",
                                      password="xxxxxx",
                                      host="127.0.0.1",
                                      port="5432",
                                      database="my_db")
        cursor = connection.cursor()
        print("again someuser for select= ", someuser)
        someuser = str(someuser)  # convert to string or you get: Error while fetching data from PostgreSQL can't adapt type 'User'
        print("someuser= ", someuser)
        cursor.execute("""SELECT userid from accounts_user WHERE email = %s""", (someuser,))
        print("Selecting rows from c9_max_forms_kw using cursor.fetchall")
        mobile_records = cursor.fetchall()
        print("Print each row and it's columns values")
        for row in mobile_records:
            print("Id = ", row[0], )
            #print("email = ", row[5])
            #print("userid = ", row[9], "\n")  # should be "u48923"
            #save = ''.join(row[9])  # convert this tuple to a string
    except (Exception, psycopg2.Error) as error:
        print("Error while fetching data from PostgreSQL", error)
    finally:
        # closing database connection.
        if (connection):
            cursor.close()
            connection.close()
            print("PostgreSQL connection is closed")
    return someuser
Joshua is the man. The string conversion was the key. The error given without it doesn't seem to make sense to me. Best to all. Ken.
