I'm trying to insert data with a raw query using Sequelize and then return the inserted row in the response. Below is my code:
const c_product_post = async (req, res) => {
  try {
    const sql = `INSERT INTO products (p_name, p_price, p_stock, p_review, "createdAt", "updatedAt")
      VALUES ('${req.body.product_name}', ${req.body.product_price}, ${req.body.product_stock}, ${req.body.product_review}, now(), now());`
    const postData = await Product.sequelize.query(sql)
    // await postData.save()
    res.send({
      message: "success add new product",
      data: postData
    })
  }
  catch (err) {
    res.send({
      message: err
    })
  }
}
What I'm trying to achieve is that after the data is inserted, the inserted row is returned in the response (see the part highlighted in red in the image below):
Add a RETURNING clause to your query. Try this:
INSERT INTO products (p_name, p_price, p_stock, p_review, "createdAt", "updatedAt")
VALUES ('${req.body.product_name}', ${req.body.product_price}, ${req.body.product_stock}, ${req.body.product_review}, now(), now())
RETURNING *;
Please note that your approach is highly prone to SQL injection. Consider using prepared statements (bind parameters) instead of string interpolation.
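For reference, here is a minimal sketch of the same endpoint using Sequelize bind parameters instead of string interpolation (this assumes a Postgres dialect; the exact shape of the result returned by sequelize.query can vary by dialect and Sequelize version):
const c_product_post = async (req, res) => {
  try {
    // $1..$4 are bind parameters, so user input never becomes part of the SQL text
    const sql = `INSERT INTO products (p_name, p_price, p_stock, p_review, "createdAt", "updatedAt")
      VALUES ($1, $2, $3, $4, now(), now())
      RETURNING *;`
    const [rows] = await Product.sequelize.query(sql, {
      bind: [
        req.body.product_name,
        req.body.product_price,
        req.body.product_stock,
        req.body.product_review
      ]
    })
    res.send({
      message: "success add new product",
      data: rows
    })
  }
  catch (err) {
    res.send({
      message: err.message
    })
  }
}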
First of all, I'm sorry I couldn't come up with a better title for this post. Also, I'm a beginner in Node.js. I have a problem that I'm hoping to find an answer to. Please help me, and thank you so much.
I am trying to create a collection, insert data into it, and retrieve that same inserted data, all in the same piece of code. The call dbo.collection(nameCollection).insertMany(data) inserts the data if the collection already exists; if it doesn't exist, it creates the collection first and then inserts.
This is the code that I've written for this task:
var excel = require('excel4node');
const mongoose = require('mongoose');
const mongodb = require('mongodb')
const mdb = mongodb.MongoClient;

mdb.connect("mongodb://localhost:27017/", async function(err, db) {
  if (err) throw err
  const nameCollection = "tempCollection"
  const dbo = db.db("reports")
  const data = [
    {
      empID: "001ev",
      empName: "xyz",
      salary: 20000
    },
    {
      empID: "00234",
      empName: "abc",
      salary: 10000
    },
    {
      empID: "11345",
      empName: "pqr",
      salary: 15000
    }
  ];
  dbo.collection(nameCollection).insertMany(data, function(err, res) {
    if (err) throw err;
    console.log("Number of documents inserted: " + res.insertedCount);
    console.log("first")
  })
  const records = await dbo.collection(nameCollection).find({}, { projection: { _id: 0 } }).toArray()
  console.log(records)
  console.log("second")
})
Now, the issue I'm facing with this code: when the collection does not exist in the database, it creates one and inserts the data, and up to that point it works fine. But the find query executes first and the insert executes afterwards. If the collection already exists, everything works as expected: the data is inserted and then the find query runs.
So, to put it shortly: if there is no collection named nameCollection, the find query is executed first and returns []; only then is the collection created and the data inserted. Why is this happening, even though in my code I create the collection and insert the data first, and only then retrieve it?
This is because the code runs asynchronously: insertMany is given a callback and is not awaited, so there is no guarantee that the find call will execute last.
Try rewriting it like this:
try {
  const res = await dbo.collection(nameCollection).insertMany(data);
  console.log("Number of documents inserted: " + res.insertedCount);
  console.log("first");
  const records = await dbo.collection(nameCollection).find({}, { projection: { _id: 0 } }).toArray();
  console.log(records);
  console.log("second");
} catch (error) {
  throw error;
}
In this case you can also omit the try/catch block entirely, since rethrowing the error has no effect on error handling here.
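If you would rather stay with the callback API, a sketch of the same ordering (using the names from your code; the exact callback signatures depend on your driver version) is to run the find only inside the insertMany callback:
// Callback-style alternative: query only after the insert callback has fired
dbo.collection(nameCollection).insertMany(data, function (err, res) {
  if (err) throw err;
  console.log("Number of documents inserted: " + res.insertedCount);

  dbo.collection(nameCollection)
    .find({}, { projection: { _id: 0 } })
    .toArray(function (err, records) {
      if (err) throw err;
      console.log(records);
    });
});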
I am trying to make a POST request that will insert the same UUID value into two tables: 'employee' and 'skill'. I have tried this a few different ways, but have not been able to do so. Here is my query for posting the UUID (and a 'summary') into one table:
app.post("/employees/:id/skills", async (req, res) => {
  try {
    const { summary } = req.body;
    const addEmployeeSkill = await pool.query(
      "INSERT INTO skill(skill_uuid, summary) VALUES(uuid_generate_v4(), $1) RETURNING *",
      [summary],
    );
    res.json(addEmployeeSkill.rows[0]);
  } catch (err) {
    console.error(err.message);
  }
});
My question is: how do I get the same UUID that is being generated into the 'skill' table to also insert into the skill_uuid column of the 'employee' table?
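One way to do that (a sketch only; it assumes the employee table has a skill_uuid column and that the :id route parameter identifies the employee row, so adjust names to your schema) is to capture the UUID that RETURNING * sends back and reuse it in a second statement, wrapped in a transaction so both writes succeed or fail together:
app.post("/employees/:id/skills", async (req, res) => {
  const client = await pool.connect();
  try {
    const { summary } = req.body;
    await client.query("BEGIN");
    // First insert: generate the UUID and get it back via RETURNING
    const skill = await client.query(
      "INSERT INTO skill(skill_uuid, summary) VALUES(uuid_generate_v4(), $1) RETURNING *",
      [summary],
    );
    const skillUuid = skill.rows[0].skill_uuid;
    // Second statement: reuse the same UUID in the employee table
    // (shown here as an UPDATE; use an INSERT if that matches your schema)
    await client.query(
      "UPDATE employee SET skill_uuid = $1 WHERE employee_uuid = $2",
      [skillUuid, req.params.id],
    );
    await client.query("COMMIT");
    res.json(skill.rows[0]);
  } catch (err) {
    await client.query("ROLLBACK");
    console.error(err.message);
    res.status(500).json({ error: err.message });
  } finally {
    client.release();
  }
});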
I'm developing a backend that interacts with a PostgreSQL database and am looking for some help preventing SQL injection. I understand the concept of SQL injection and have found some examples online of preventing those attacks, but I'm not sure whether prevention techniques differ between SQL providers.
This is the function I use to query data:
var pg = require("pg");
var client = new pg.Client(connectionString);
client.connect();

module.exports = async function newQuery(query) {
  var result = await client.query({
    rowMode: 'array',
    text: query
  });
  return result.rows
}
And here are some standard queries using that function (query()):
SELECT
query("SELECT profilename, profiledescription, approved FROM profiledb
WHERE usercompany='"+ req.query.userCompany +"';").then(data => {
res.send(data)
})
UPDATE
query("UPDATE profiledb SET approved='Approved' WHERE id='"+ req.query.id +"';").then(data =>
res.send(data)
)
INSERT
query("INSERT INTO profiledb (profilename, profiledescription, approved) VALUES ('"+
req.query.profileTitle +"', '"+ req.query.profileBody +"', 'Pending');");
What code can I use to query the data without risking a SQL injection attack?
Thanks!!!
Use a parameterized query and pass your request arguments as values.
module.exports = async function newQuery(query, values) {
  var result = await client.query({
    rowMode: 'array',
    text: query,
    values
  });
  return result.rows
}
query("SELECT profilename, profiledescription, approved FROM profiledb WHERE usercompany=$1;", [req.query.userCompany]).then(data => {
res.send(data)
});
query("UPDATE profiledb SET approved='Approved' WHERE id=$1;", [req.query.id]).then(data => {
res.send(data)
})
query("INSERT INTO profiledb (profilename, profiledescription, approved) VALUES ($1, $2, 'Pending');", [req.query.profileTitle, req.query.profileBody]);
You should use parameterized queries or prepared statements; just don't ever concatenate strings into SQL yourself.
The docs for this specific library are good, so I suggest reading them in more detail: see the query examples (docs) and the client.query signature (example).
Your query could be written like this:
query("SELECT profilename, profiledescription, approved FROM profiledb
WHERE usercompany = $1", [req.query.userCompany]).then(...)
The same goes for updates, inserts, etc.
Or you can just pass an object with text and values properties, like this:
const queryOpts = {
text: "SELECT profilename, profiledescription, approved FROM profiledb WHERE usercompany = $1",
values: [req.query.userCompany]
}
query(queryOpts).then(...)
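If you want an actual prepared statement (parsed and planned once per connection, then reused), node-postgres also supports a name field on the same query config object; a minimal sketch, passed the same way as queryOpts above:
// Named prepared statement: prepared once per connection and reused
// on subsequent calls that use the same name
const preparedOpts = {
  name: "profiles-by-company", // any stable identifier you choose
  text: "SELECT profilename, profiledescription, approved FROM profiledb WHERE usercompany = $1",
  values: [req.query.userCompany]
}

query(preparedOpts).then(data => {
  res.send(data)
})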
I have an update query which doesn't seem to be working. The underlying database is Postgres. Could you please check why it isn't working? I have included my API and schema. Thanks in advance.
exports.patch_meetup = async (req, res) => {
  const findOneQuery = 'SELECT * FROM meetups WHERE id=$1';
  const updateOneQuery = `UPDATE meetups
    SET topic=$1, location=$2, body=$3, happeningOn=$4, Tags=$5, meetupImage=$6, createdOn=$7
    WHERE id=$8 returning *`;
  try {
    const {
      rows
    } = await db.query(findOneQuery, [req.params.id]);
    if (!rows[0]) {
      return res.status(404).json({
        'message': 'meetup not found'
      });
    }
    const values = [
      req.body.topic,
      req.body.location,
      req.body.body,
      req.body.happeningOn,
      req.body.Tags,
      req.file.path,
      moment(new Date()),
      req.params.id
    ];
    const response = await db.query(updateOneQuery, values);
    return res.status(200).json(response.rows[0]);
  } catch (err) {
    return res.status(400).send(err);
  }
};
Here is my model
const meetupTable = `CREATE TABLE IF NOT EXISTS
  meetups(
    id UUID PRIMARY KEY,
    topic VARCHAR(128) NOT NULL,
    location VARCHAR(128) NOT NULL,
    body TEXT NOT NULL,
    happeningOn TIMESTAMPTZ NOT NULL,
    Tags TEXT[] NOT NULL,
    meetupImage bytea,
    createdOn TIMESTAMPTZ DEFAULT Now()
  )`
I am not sure what the issue is, but one possibility is that you forgot to add the multer middleware to the API endpoint, and the other, as I mentioned in the comments, is that you shouldn't pass a moment object for createdOn; pass a plain Date object as it is.
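A sketch of both suggestions (the router, the upload multer instance, the route path, and the meetupImage field name are all hypothetical here; use whatever your project already defines):
// Attach the multer middleware so req.file is populated before the handler runs
router.patch('/meetups/:id', upload.single('meetupImage'), patch_meetup);

// ...and in the values array, pass a plain Date instead of a moment object:
const values = [
  req.body.topic,
  req.body.location,
  req.body.body,
  req.body.happeningOn,
  req.body.Tags,
  req.file.path, // only set when the multer middleware has run
  new Date(),    // plain Date object rather than moment(new Date())
  req.params.id
];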
I have unexpected behaviour when loading data into BigQuery just after creating the schema.
I'm using the Node API to insert data with the BigQuery streaming API.
In order to reset the data, I delete and recreate the table before loading any data.
My problem: the first time it works fine, but if I execute it again it fails.
The process always deletes and recreates the table schema, but it does not insert the data unless I wait a while before executing it again.
This is the code which reproduces the case:
async function loadDataIntoBigquery() {
  const {BigQuery} = require('@google-cloud/bigquery')
  const tableName = "users"
  const dataset = "data_analisis"
  const schemaUsers = "name:string,date:string,type:string"
  const userData = [
    {name: "John", date: "20/08/93", type: "reader"},
    {name: "Marie", date: "20/08/90", type: "owner"}
  ]
  try {
    const bigquery = new BigQuery()
    await bigquery.createDataset(dataset)
      .then(() => console.log("dataset created successfully"))
      .catch(err => {
        console.log("warn: maybe the dataset already exists")
      })
    await bigquery.dataset(dataset).table(tableName).delete()
      .then(() => console.log("table deleted successfully"))
      .catch(err => {
        console.log("Error: maybe the table does not exist")
      })
    await bigquery.dataset(dataset).createTable(tableName, {schema: schemaUsers})
      .then(() => console.log("table created successfully"))
      .catch(err => console.log("Error: maybe the table already exists"))
    await bigquery.dataset(dataset).table(tableName).insert(userData)
      .then((data) => console.log("Ok inserted ", data))
      .catch(err => console.log("Error: can't insert "))
  } catch (err) {
    console.log("err", err)
  }
}
To verify that the data was inserted, I'm using this query:
select * from `data_analisis.users`
I have the same issue. As a workaround, I insert data with a query instead:
const query = "INSERT INTO `" + dataset + "." + tableName + "` (name, date, type) VALUES ('" + name + "', '" + date + "', '" + type + "')";
await bigQuery.query({
  query: query,
  useLegacySql: false,
  location: 'EU'
}, (err) => {
  console.log("Insertion error : ", err);
})
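This concatenation has the same injection and quoting problems discussed in the earlier answers; if you go with this workaround, a safer sketch is to use BigQuery named query parameters (same dataset, tableName, name, date, and type variables as above):
// Named query parameters instead of string concatenation; the dataset and
// table names still have to be interpolated, since parameters cannot
// replace identifiers.
const query = "INSERT INTO `" + dataset + "." + tableName + "` (name, date, type) " +
              "VALUES (@name, @date, @type)";

await bigQuery.query({
  query: query,
  params: { name: name, date: date, type: type },
  useLegacySql: false,
  location: 'EU'
});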