iterator should return strings, not bytes - python-3.x

I found the following code on Stack Overflow. The code creates the table "student", but nothing is populated. I get the following error:
"ERROR: iterator should return strings, not bytes (did you open the file in text mode?)"
Here is the code:
import csv
import sqlite3
# Create the database
connection = sqlite3.connect('attendance5.db')
cursor = connection.cursor()
# Create the table
cursor.execute('DROP TABLE IF EXISTS student')
cursor.execute('CREATE TABLE student (StuID REAL, LastName TEXT, FirstName TEXT, Grade REAL, Address TEXT, Phone TEXT, Parent TEXT)')
connection.commit()
# Load the CSV file into CSV reader
csvfile = open('attendance.csv', 'rb')
creader = csv.reader(csvfile, delimiter=',', quotechar='|')
# Iterate through the CSV reader, inserting values into the database
for t in creader:
    cursor.execute('INSERT INTO student VALUES (?,?,?,?,?,?,?)', t)
# Close the csv file, commit changes, and close the connection
csvfile.close()
connection.commit()
connection.close()
Does it have anything to do with the t in creader? What am I doing wrong? Thank you.
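The error message itself points at the likely culprit: in Python 3, csv.reader expects a file opened in text mode, but the code opens attendance.csv with 'rb' (binary). A minimal sketch of the fix, assuming the same file and table as above (newline='' is what the csv module's documentation recommends for files passed to csv.reader):
import csv
import sqlite3

connection = sqlite3.connect('attendance5.db')
cursor = connection.cursor()
cursor.execute('DROP TABLE IF EXISTS student')
cursor.execute('CREATE TABLE student (StuID REAL, LastName TEXT, FirstName TEXT, Grade REAL, Address TEXT, Phone TEXT, Parent TEXT)')

# Text mode ('r'), not binary ('rb'), so the iterator yields strings
with open('attendance.csv', 'r', newline='') as csvfile:
    creader = csv.reader(csvfile, delimiter=',', quotechar='|')
    for t in creader:
        cursor.execute('INSERT INTO student VALUES (?,?,?,?,?,?,?)', t)

connection.commit()
connection.close()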

Related

Windows: The process cannot access the file because it is being used by another process: 'temp.csv'

I am trying to insert values from a dataframe into a Postgres database. For that I am creating a temporary csv file, loading the csv into a buffer, and using copy_from() to insert it into the database.
But here I am getting an error: [WinError 32] The process cannot access the file because it is being used by another process: 'temp.csv'
No other application is open except PyCharm. Can someone please help?
import os
import psycopg2

def copy_from_datafile(conn, df_s, table):
    """
    Save the given dataframe on disk as a csv file,
    load the csv file in a buffer, and use copy_from() to copy it to the pgsql database table.
    """
    if table == "symbols_option":
        command = '''CREATE TEMPORARY TABLE IF NOT EXISTS _symbol (
            instrument_token INTEGER NOT NULL, exchange_token INTEGER, tradingsymbol TEXT NOT NULL, name TEXT,
            expiry TIMESTAMP, strike DOUBLE PRECISION, lot_size INTEGER, instrument_type TEXT, segment TEXT NOT NULL,
            exchange TEXT, PRIMARY KEY (instrument_token, tradingsymbol));'''
        query_main = '''INSERT INTO {table}(instrument_token, exchange_token, tradingsymbol, name, expiry, strike, lot_size, instrument_type, segment, exchange)
            SELECT * FROM _symbol
            ON CONFLICT (instrument_token, tradingsymbol)
            DO NOTHING
            '''.format(table=table)
    if table == "symbols_future":
        command = '''CREATE TEMPORARY TABLE IF NOT EXISTS _symbol (
            instrument_token INTEGER NOT NULL, exchange_token INTEGER, tradingsymbol TEXT NOT NULL, name TEXT,
            expiry TIMESTAMP, lot_size INTEGER, instrument_type TEXT, segment TEXT NOT NULL,
            exchange TEXT, PRIMARY KEY (instrument_token, tradingsymbol));'''
        query_main = '''INSERT INTO {table}(instrument_token, exchange_token, tradingsymbol, name, expiry, lot_size, instrument_type, segment, exchange)
            SELECT * FROM _symbol
            ON CONFLICT (instrument_token, tradingsymbol)
            DO NOTHING
            '''.format(table=table)
    tmp_df = 'temp.csv'
    df_s.to_csv(tmp_df, header=False, index=False)
    f = open(tmp_df, 'r')
    cursor = conn.cursor()
    try:
        if table == "symbols_option":
            cursor.execute(command)
            cursor.copy_from(f, '_symbol', sep=",", columns=[
                "instrument_token", "exchange_token", "tradingsymbol", "name", "expiry", "strike",
                "lot_size", "instrument_type", "segment", "exchange"])
            cursor.execute(query_main)
            print("Data inserted using copy_from_datafile() successfully....")
        else:
            cursor.execute(command)
            cursor.copy_from(f, '_symbol', sep=",", columns=[
                "instrument_token", "exchange_token", "tradingsymbol", "name", "expiry", "lot_size",
                "instrument_type", "segment", "exchange"])
            cursor.execute(query_main)
            print("Data inserted using copy_from_datafile() successfully....")
    except (Exception, psycopg2.DatabaseError) as err:
        os.remove(tmp_df)
        print(err)
        cursor.close()
    else:
        os.remove(tmp_df)
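One likely cause of [WinError 32] here: f = open(tmp_df, 'r') is never closed before os.remove(tmp_df) runs, and on Windows removing a file that still has an open handle fails with exactly this error. A minimal sketch of the load-and-cleanup steps rewritten around a with block (copy_csv_and_cleanup and its parameters are hypothetical names, not part of the original code):
import os
import psycopg2

def copy_csv_and_cleanup(conn, df_s, command, query_main, columns):
    # sketch: make sure the temp csv handle is closed before removing the file
    tmp_df = 'temp.csv'
    df_s.to_csv(tmp_df, header=False, index=False)
    cursor = conn.cursor()
    try:
        # the with block closes the handle when it exits, so Windows
        # no longer considers temp.csv "in use" when os.remove() runs
        with open(tmp_df, 'r') as f:
            cursor.execute(command)
            cursor.copy_from(f, '_symbol', sep=",", columns=columns)
            cursor.execute(query_main)
    except (Exception, psycopg2.DatabaseError) as err:
        print(err)
    finally:
        cursor.close()
        os.remove(tmp_df)  # the handle is already closed at this point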

Trying to save a sqlite table inside another table using Python

The problem now is that I can only enter one record. No errors are recorded. It just takes the first record from one database and puts it in the other database. I am trying to create a machine-usable database from the user interface database. I will try to transfer around 100 records once it is working. I would appreciate any comments or suggestions. Thank you!
import sqlite3

sql = 'INSERT INTO heavenStream (scene, cascade, enclosure, sensor, streamer, dither) VALUES (?, ?, ?, ?, ?, ?)'

def dropTable(crs, conn):
    crs.execute("DROP TABLE IF EXISTS heavenStream")

def createTable(crs, conn):
    sql = '''CREATE TABLE heavenStream(
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        scene TEXT,
        cascade TEXT,
        enclosure TEXT,
        sensor TEXT,
        streamer TEXT,
        dither TEXT,
        timeStream TEXT,
        streamTime TEXT
        )'''
    crs.execute(sql)
    print("Table created successfully........")

def insert_one(conn, crs):
    crs.execute("SELECT * FROM animalStream")
    for row in crs:
        scene = row[1]
        cascade = row[2]
        enclosure = row[3]
        sensor = row[4]
        streamer = row[5]
        dither = row[6]
        print(f"{row[1]} {row[2]} {row[3]} {row[4]} {row[5]} {row[6]}")
        try:
            crs.execute(sql, (scene, cascade, enclosure,
                              sensor, streamer, dither))
        except sqlite3.IntegrityError as err:
            print('sqlite error: ', err.args[0])  # column name is not unique
    conn.commit()

def main():
    conn = sqlite3.connect("/home/harry/interface/wildlife.db")
    crs = conn.cursor()
    dropTable(crs, conn)
    createTable(crs, conn)
    insert_one(conn, crs)
    # conn.commit()
    conn.close()
    print('done')

main()
The user interface database has had records deleted. There is one record with an id of 64 and the rest are in the 90's.
The cursor (crs) changes here
crs.execute(sql, (scene, cascade, enclosure,sensor,streamer,dither))
after the first insert. Therefore, there are "no more rows to fetch" in the original crs.
One solution would be to instantiate another cursor for the insert. Another solution would be to fetchall() the rows into a variable and iterate over that variable as with:
rows = crs.execute("SELECT * FROM animalStream").fetchall()
for row in rows:
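A minimal sketch of the first suggestion as well, reusing the sql INSERT statement defined above and a separate cursor for the inserts so the SELECT iteration is left undisturbed (crs2 is a hypothetical name):
def insert_one(conn, crs):
    crs2 = conn.cursor()  # second cursor: inserts do not disturb crs's SELECT results
    crs.execute("SELECT * FROM animalStream")
    for row in crs:
        crs2.execute(sql, (row[1], row[2], row[3], row[4], row[5], row[6]))
    conn.commit()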

psycopg2 export DB to csv with column names

I'm using psycopg2 to connect to a Postgres DB and to export the data into a CSV file.
This is how I export the DB to csv:
def export_table_to_csv(self, table, csv_path):
    sql = "COPY (SELECT * FROM %s) TO STDOUT WITH CSV DELIMITER ','" % table
    self.cur.execute(sql)
    with open(csv_path, "w") as file:
        self.cur.copy_expert(sql, file)
But the data is just the rows - without the column names.
How can I export the data with the column names?
P.S. I am able to print the column names:
sql = '''SELECT * FROM test'''
self.cur.execute(sql)
column_names = [desc[0] for desc in self.cur.description]
for i in column_names:
    print(i)
I want the cleanest way to export the DB with column names (i.e., I prefer to do this in one method, and not rename columns in retrospect).
As I said in my comment, you can add HEADER to the WITH clause of your SQL:
sql = "COPY (SELECT * FROM export_test) TO STDOUT WITH CSV HEADER"
By default, comma delimiters are used with the CSV option, so you don't need to specify one.
For future questions, you should submit a minimal reproducible example, that is, code we can directly copy, paste, and run. I was curious whether this would work, so I made one and tried it:
import psycopg2
conn = psycopg2.connect('host=<host> dbname=<dbname> user=<user>')
cur = conn.cursor()
# create test table
cur.execute('DROP TABLE IF EXISTS export_test')
sql = '''CREATE TABLE export_test
(
id integer,
uname text,
fruit1 text,
fruit2 text,
fruit3 text
)'''
cur.execute(sql)
# insert data into table
sql = '''BEGIN;
insert into export_test
(id, uname, fruit1, fruit2, fruit3)
values(1, 'tom jones', 'apple', 'banana', 'pear');
insert into export_test
(id, uname, fruit1, fruit2, fruit3)
values(2, 'billy idol', 'orange', 'cherry', 'strawberry');
COMMIT;'''
cur.execute(sql)
# export to csv
fid = open('export_test.csv', 'w')
sql = "COPY (SELECT * FROM export_test) TO STDOUT WITH CSV HEADER"
cur.copy_expert(sql, fid)
fid.close()
And the resultant file is:
id,uname,fruit1,fruit2,fruit3
1,tom jones,apple,banana,pear
2,billy idol,orange,cherry,strawberry

Python, SQLite3: showing existing tables and data

I have been given a .db file that has already been populated with both tables and data. However, no description of the content of the database has been made available.
Is there a way for me to retrieve individual lists of the different tables and their respective sets of columns using SQLite3 and Python?
This code helps you list the tables with their columns; once you have the tables and their columns, you can get the data.
import sqlite3

def readDb():
    connection = sqlite3.connect('data.db')
    connection.row_factory = sqlite3.Row
    cursor = connection.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
    rows = cursor.fetchall()
    tabs = []
    for row in rows:
        for r in row:
            tabs.append(r)
    d = {}
    for tab in tabs:
        cursor.execute("SELECT * FROM " + tab + ";")
        rows = cursor.fetchone()
        t = []
        for row in rows.keys():
            t.append(row)
        d[tab] = t
    connection.commit()
    return d

print(readDb())
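Note that fetchone() returns None for a table with no rows, which would make rows.keys() raise an AttributeError. As an alternative sketch, SQLite's PRAGMA table_info lists a table's columns even when the table is empty (read_schema is a hypothetical name):
import sqlite3

def read_schema(db_path='data.db'):
    connection = sqlite3.connect(db_path)
    cursor = connection.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
    schema = {}
    for (table,) in cursor.fetchall():
        cursor.execute("PRAGMA table_info('%s')" % table)
        # each PRAGMA row is (cid, name, type, notnull, dflt_value, pk)
        schema[table] = [col[1] for col in cursor.fetchall()]
    connection.close()
    return schema

print(read_schema())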

How to convert sqlite3 to csv format using API for a chatbot?

When I run my chatbot it creates a db.sqlite3 file in the backend for storing all the conversations. I want to convert this db.sqlite3 file into a csv file using an API. How should I implement it in Python?
There are multiple tables in the db file associated with ChatterBot: conversation_association, conversation, response, statement, tag_association and tag. Of all these tables, only the response and statement tables have proper data (at least in my case). However, I tried to convert all the tables into csv, so you may find some empty csv files too.
import sqlite3, csv

db = sqlite3.connect("chatterbot-database")  # enter your db name here
cursor = db.cursor()
# fetch table names from the db
tables = [table[0] for table in cursor.execute("select name from sqlite_master where type = 'table'")]
for table in tables:
    with open('%s.csv' % table, 'w') as fd:
        csvwriter = csv.writer(fd)
        for data in cursor.execute("select * from '%s'" % table):  # get data from each table
            csvwriter.writerow(data)
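If a header row is wanted in each csv, sqlite3's cursor.description is populated after a SELECT, and the first element of each entry is the column name. A hedged variant of the loop (also passing newline='' to open(), which the csv docs recommend):
import sqlite3, csv

db = sqlite3.connect("chatterbot-database")
cursor = db.cursor()
tables = [t[0] for t in cursor.execute("select name from sqlite_master where type = 'table'")]
for table in tables:
    rows = cursor.execute("select * from '%s'" % table)
    with open('%s.csv' % table, 'w', newline='') as fd:
        csvwriter = csv.writer(fd)
        csvwriter.writerow(col[0] for col in cursor.description)  # header row
        csvwriter.writerows(rows)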
