How to join table that contains no data yet with sqlite - node.js

I am trying to join two tables: users and favourites. There is a possibility that a user has no favourites yet, and when I tried to INNER JOIN the two I didn't get back the user without any favourites. Is there any way to join even if the second table has no data for that user?
I created the Users table with the following code:
db.run(`CREATE TABLE Users(
    UserId INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    Name TEXT NOT NULL,
    Password TEXT NOT NULL,
    Phone VARCHAR,
    Email TEXT,
    RestaurantId INTEGER,
    FOREIGN KEY(RestaurantId) REFERENCES Restaurants(RestaurantId))`, (err) => {
  if (err) {
    console.error(err.message);
  } else {
    // insert some values
    var insert = 'INSERT INTO Users (Name, Password, Phone, Email, RestaurantId) VALUES (?, ?, ?, ?, ?)';
    db.run(insert, [
      'Liam',
      'blabla',
      '+32412345678',
      'email@email.com',
      1
    ]);
  }
});
And the favourites table with:
db.run(`CREATE TABLE Favourites(
    FavouriteId INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    UserId INTEGER NOT NULL,
    RestaurantId INTEGER NOT NULL,
    FOREIGN KEY(UserId) REFERENCES Users(UserId),
    FOREIGN KEY(RestaurantId) REFERENCES Restaurants(RestaurantId))`, (err) => {
  if (err) {
    console.error(err.message);
  } else {
    // insert some values
    var insert = 'INSERT INTO Favourites (UserId, RestaurantId) VALUES (?, ?)';
    db.run(insert, [
      1,
      1
    ]);
  }
});
There is no problem with the data that was inserted by these statements. The problem only occurs when a new user without favourites is added to the database.

You are looking for LEFT JOIN. Take a look at the documentation: https://www.w3resource.com/sqlite/sqlite-left-join.php.
LEFT JOIN returns all the records from the left side of the join, together with the matched records from the right side; where there is no match, the right-side columns come back as NULL.
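As a rough sketch (assuming the node-sqlite3 db handle and the Users/Favourites tables from the question; the query itself is not part of the original answer), the join could look like this:
const query = `SELECT u.UserId, u.Name, f.FavouriteId, f.RestaurantId AS FavouriteRestaurantId
               FROM Users u
               LEFT JOIN Favourites f ON f.UserId = u.UserId`;
db.all(query, [], (err, rows) => {
  if (err) {
    return console.error(err.message);
  }
  // A user with no favourites still appears once, with FavouriteId and FavouriteRestaurantId set to null
  console.log(rows);
});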

Related

NodeJs : bulk insert into SQL Server one-to-many

I want to use the Node.js mssql package to bulk insert data from the JSON below:
[
  {
    "name": "Tom",
    "registerDate": "2021-10-10 00:00:00",
    "gender": 0,
    "consumeRecord": [
      {
        "date": "2021-10-11 00:00:00",
        "price": 102.5
      },
      {
        "date": "2021-10-12 00:00:00",
        "price": 200
      }
    ]
  },
  {
    "name": "Mary",
    "registerDate": "2021-06-10 00:00:00",
    "gender": 1,
    "consumeRecord": [
      {
        "date": "2021-07-11 00:00:00",
        "price": 702.5
      },
      {
        "date": "2021-12-12 00:00:00",
        "price": 98.2
      }
    ]
  }
]
I am trying to use an mssql bulk insert for the member records, each of which has multiple consume records.
Is there any way to insert one-to-many data with a bulk insert like below?
It seems I would need to insert into the member table and get the id (primary key) first, and then use that id for the related rows in the consume table.
const sql = require('mssql')

// member table
const membertable = new sql.Table('Member')
membertable.columns.add('name', sql.Int, {nullable: false})
membertable.columns.add('registerDate', sql.VarChar(50), {nullable: false})
membertable.columns.add('gender', sql.VarChar(50), {nullable: false})

// consume record table
const consumeTable = new sql.Table('ConsumeRecord')
consumeTable.columns.add('MemberId', sql.Int, {nullable: false})
consumeTable.columns.add('Date', sql.VarChar(50), {nullable: false})
consumeTable.columns.add('price', sql.Money, {nullable: false})

// insert into member table
jsonList.forEach(data => {
  membertable.rows.add(data.name, data.registerDate, data.gender)
  data.consumeRecord.forEach(record => {
    consumeTable.rows.add(data.memberId, record.date, record.price) // <---- should insert member table id
  })
  const request = new sql.Request()
  request.bulk(consumeTable, (err, result) => {
  })
})

const request = new sql.Request()
request.bulk(membertable, (err, result) => {
})
Expected Record:

Member Table

id (auto increment) | name | registerDate        | gender
1                   | Tom  | 2021-10-10 00:00:00 | 0
2                   | Mary | 2021-06-10 00:00:00 | 1

Consume Record Table

id | MemberId | Date                | price
1  | 1        | 2021-10-10 00:00:00 | 102.5
2  | 1        | 2021-10-12 00:00:00 | 200
3  | 2        | 2021-07-11 00:00:00 | 702.5
4  | 2        | 2021-12-12 00:00:00 | 98.2
The best way to do this is to upload the whole thing in a batch to SQL Server, and ensure that it inserts the correct foreign key.
You have two options.
Option 1:
- Upload the main table as a Table Valued Parameter or JSON blob
- Insert with OUTPUT clause to select the inserted IDs back to the client
- Correlate those IDs back to the child table data
- Bulk Insert that as well
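A rough Node-side sketch of Option 1 (not from the original answer: the function name, the use of OPENJSON for the parent insert, and the assumption that member names are unique enough to correlate the ids are all mine):
const sql = require('mssql');

// Sketch only: insert the members with OUTPUT, read the generated ids back,
// then correlate them onto the child rows and bulk insert those.
async function insertOption1(pool, jsonList) {
  const result = await pool.request()
    .input('json', sql.NVarChar(sql.MAX), JSON.stringify(jsonList))
    .query(`
      INSERT INTO Member (name, registerDate, gender)
      OUTPUT inserted.Id, inserted.name
      SELECT name, registerDate, gender
      FROM OPENJSON(@json)
      WITH (name varchar(50), registerDate datetime, gender tinyint);`);

  // Correlate the generated ids back to the JSON by name (assumes names are unique)
  const idByName = new Map(result.recordset.map(r => [r.name, r.Id]));

  const consumeTable = new sql.Table('ConsumeRecord');
  consumeTable.columns.add('MemberId', sql.Int, { nullable: false });
  consumeTable.columns.add('Date', sql.DateTime, { nullable: false });
  consumeTable.columns.add('price', sql.Decimal(9, 2), { nullable: true });
  for (const member of jsonList) {
    for (const record of member.consumeRecord) {
      consumeTable.rows.add(idByName.get(member.name), new Date(record.date), record.price);
    }
  }
  return pool.request().bulk(consumeTable);
}
In practice you would want a more robust correlation key than the name, which is exactly the limitation the MERGE trick in Option 2 below works around.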
Option 2 is a bit easier: do the whole thing in SQL.
- Upload everything as one big JSON blob
- Insert the main table with an OUTPUT clause into a table variable
- Insert the child table, joining the IDs from the table variable
The SQL below sketches Option 2, starting with the target tables:
CREATE TABLE Member(
    Id int IDENTITY PRIMARY KEY,
    name varchar(50),
    registerDate datetime NOT NULL,
    gender tinyint NOT NULL
);
CREATE TABLE ConsumeRecord(
    MemberId Int NOT NULL REFERENCES Member (Id),
    Date datetime not null,
    price decimal(9,2)
);
Note the more sensible data types for the columns.
DECLARE @ids TABLE (jsonIndex nvarchar(5) COLLATE Latin1_General_BIN2 not null, memberId int not null);

WITH Source AS (
    SELECT
      j1.[key],
      j2.*
    FROM OPENJSON(@json) j1
    CROSS APPLY OPENJSON(j1.value)
      WITH (
        name varchar(50),
        registerDate datetime,
        gender tinyint
      ) j2
)
MERGE Member m
USING Source s
ON 1=0 -- never match
WHEN NOT MATCHED THEN
  INSERT (name, registerDate, gender)
  VALUES (s.name, s.registerDate, s.gender)
OUTPUT s.[key], inserted.Id
INTO @ids(jsonIndex, memberId);

INSERT ConsumeRecord (MemberId, Date, price)
SELECT
  i.memberId,
  j2.date,
  j2.price
FROM OPENJSON(@json) j1
CROSS APPLY OPENJSON(j1.value, '$.consumeRecord')
  WITH (
    date datetime,
    price decimal(9,2)
  ) j2
JOIN @ids i ON i.jsonIndex = j1.[key];
Unfortunately, INSERT only allows you to OUTPUT columns from the inserted table, not any non-inserted values (such as the JSON [key]), so we need to hack it with a somewhat weird MERGE.
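On the Node side you then only need to ship the JSON and run that batch. A minimal sketch with the mssql package (assuming an open connection pool, that jsonList is the array from the question, and that insertSql is a string holding the DECLARE/MERGE/INSERT batch above; the @json parameter name must match the one used in the batch):
const sql = require('mssql');

// Sketch: send the whole JSON array as a single NVARCHAR(MAX) parameter and let the
// batch split it with OPENJSON and fill both tables.
async function insertOption2(pool, jsonList, insertSql) {
  return pool.request()
    .input('json', sql.NVarChar(sql.MAX), JSON.stringify(jsonList))
    .query(insertSql);
}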

How to use uuid as item id?

My GET request seems to work. I used to have ids like 1, 2, 3, 4.
Now I want to make them UUIDs.
I've made a table for this:
CREATE TABLE todo_list(
    todo_id UUID NOT NULL PRIMARY KEY,
    description VARCHAR(255)
);
router.post("/todo", async (req, res) => {
try {
const { description } = req.body;
const newTodo = await pool.query("INSERT INTO todo_list (description) VALUES($1)"
, [description]);
res.json(newTodo.rows[0]);
} catch (err) {
console.log(err.message);
}
});
But my POST request won't work anymore after I changed the id to a UUID.
How can I fix this?
You're not providing a default value for todo_id, and you aren't passing one in the INSERT statement. You could either generate a UUID at the application layer, or change the DDL from:
CREATE TABLE todo_list(
    todo_id UUID NOT NULL PRIMARY KEY,
    description VARCHAR(255)
);
to:
CREATE TABLE todo_list(
    todo_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
    description VARCHAR(255)
);
in order to have PostgreSQL generate the UUID for you. Note that uuid_generate_v4() comes from the uuid-ossp extension; on newer PostgreSQL versions you can use the built-in gen_random_uuid() instead.
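If you prefer to generate the id at the application layer instead, a minimal sketch with the uuid npm package could look like this (the RETURNING * clause is my addition so that newTodo.rows[0] actually contains the inserted row):
const { v4: uuidv4 } = require('uuid');

router.post("/todo", async (req, res) => {
  try {
    const { description } = req.body;
    const todoId = uuidv4(); // generate the UUID in Node instead of in PostgreSQL
    const newTodo = await pool.query(
      "INSERT INTO todo_list (todo_id, description) VALUES ($1, $2) RETURNING *",
      [todoId, description]
    );
    res.json(newTodo.rows[0]);
  } catch (err) {
    console.log(err.message);
  }
});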
Related

Inserting multiple rows into SQL Server from Node.js

I am working on a project that will upload some records to SQL Server from a node.js program. Right now, this is my approach (inside an async function):
con = await sql.connect(`mssql://${SQL.user}:${SQL.password}@${SQL.server}/${SQL.database}?encrypt=true`);

for (r of RECORDS) {
  columns = `([column1], [column2], [column3])`;
  values = `(@col1, @col2, @col3)`;
  await con
    .request()
    .input("col1", sql.Int, r.col1)
    .input("col2", sql.VarChar, r.col2)
    .input("col3", sql.VarChar, r.col3)
    .query(`INSERT INTO [dbo].[table1] ${columns} VALUES ${values}`);
}
Where RECORDS is an array of objects of the form:
RECORDS = [
{ col1: 1, col2: "asd", col3: "A" },
{ col1: 2, col2: "qwerty", col3: "B" },
// ...
];
This code works; nevertheless, I have the feeling that it is not efficient at all. An upload of around 4k records takes roughly 10 minutes, which does not look good.
I believe that if I can build a single query with all the record values, instead of wrapping single inserts inside a for loop, it will be faster, and I know there is a syntax for that in SQL:
INSERT INTO table1 (column1, column2, column3) VALUES (1, 'asd', 'A'), (2, 'qwerty', 'B'), (...);
However, I cannot find any documentation in the mssql module for Node on how to prepare the parameterized inputs so that everything happens in a single transaction.
Can anyone point me in the right direction?
Thanks in advance.
Also, very similar to the bulk insert, you can use a table-valued parameter.
sql.connect("mssql://${SQL.user}:${SQL.password}#${SQL.server}/${SQL.database}?encrypt=true")
.then(() => {
const table = new sql.Table();
table.columns.add('col1', sql.Int);
table.columns.add('col2', sql.VarChar(20));
table.columns.add('col3', sql.VarChar(20));
// add data
table.rows.add(1, 'asd', 'A');
table.rows.add(2, 'qwerty', 'B');
const request = new sql.Request();
request.input('table1', table);
request.execute('procMyProcedure', function (err, recordsets, returnValue) {
console.dir(JSON.stringify(recordsets[0][0]));
res.end(JSON.stringify(recordsets[0][0]));
});
});
Then, on the SQL side, create a user-defined table type:
CREATE TYPE typeMyType AS TABLE
(
Col1 int,
Col2 varchar(20),
Col3 varchar(20)
)
And then use this in the stored procedure
CREATE PROCEDURE procMyProcedure
    @table1 typeMyType READONLY
AS
BEGIN
    INSERT INTO table1 (Col1, Col2, Col3)
    SELECT Col1, Col2, Col3
    FROM @table1
END
This gives you more control over the data and lets you do more with it in SQL before you actually insert.
As pointed out by @JoaquinAlvarez, bulk insert should be used, as replied here: Bulk inserting with Node mssql package
For my case, the code was like:
return await sql.connect(`mssql://${SQL.user}:${SQL.password}@${SQL.server}/${SQL.database}?encrypt=true`).then(() => {
  table = new sql.Table("table1");
  table.create = true;
  table.columns.add("column1", sql.Int, { nullable: false });
  table.columns.add("column2", sql.VarChar, { length: Infinity, nullable: true });
  table.columns.add("column3", sql.VarChar(250), { nullable: true });

  // add here rows to insert into the table
  for (r of RECORDS) {
    table.rows.add(r.col1, r.col2, r.col3);
  }

  return new sql.Request().bulk(table);
});
The SQL data types have to match (obviously) the column types of the existing table table1. Note the case of column2, which is defined in SQL as varchar(max).
Thanks Joaquin! The time went down significantly, from 10 minutes to a few seconds.

How to get inserted row immediately after query execution in cassandra?

I am trying to get the last inserted row in Cassandra, but I am getting undefined.
Here is how the code looks for the insert:
const cassandra = require('cassandra-driver');
const client = new cassandra.Client({ contactPoints: ['h1', 'h2'], keyspace: 'ks1' });

let query = "INSERT INTO users (id, name, email) values (uuid(), ?, ?)";
client.execute(query, ['badis', 'badis@badis.com'])
  .then(result => console.log('User with id %s', result.rows[0].id));
Remember you are dealing with NoSQL ("non-relational") storage.
If I were to run a SELECT * FROM users LIMIT 1 on this table, my result set would contain a single row. That row would be the one containing the lowest hashed value of the partition key, because that's how Cassandra stores data in the cluster. I would have no way of knowing whether it was the last one added or not, so this wouldn't be terribly useful to you.
The workings are very different from those that you might be used to in MySQL.
If obtaining the last inserted row's information is needed for your system, then you will have to know it before you insert it.
Generating UUID
const Uuid = require('cassandra-driver').types.Uuid;
const id = Uuid.random();
Then you can use the id as another bound value in the prepared insert query:
const cassandra = require('cassandra-driver');
const client = new cassandra.Client({ contactPoints: ['h1', 'h2'], keyspace: 'ks1' });

let query = "INSERT INTO users (id, name, email) values (?, ?, ?)";
client.execute(query, [id, 'badis', 'badis@badis.com'])
  .then(result => console.log('User with id %s', id));

Inserting timestamp into Cassandra

I have a table created as follows:
CREATE TABLE my_table (
    date text,
    id text,
    time timestamp,
    value text,
    PRIMARY KEY (id)
);
CREATE INDEX recordings_date_ci ON recordings (date);
I'm trying to simply add a new row to the table using the following Node code:
const cassandra = require('cassandra-driver');
const client = new cassandra.Client({ contactPoints: ['localhost'], keyspace: 'my_keyspace' });

const query = 'INSERT INTO my_table (date, id, time, value) VALUES (?, ?, ?, ?)';
client.execute(query, ['20160901', '0000000000', '2016-09-01 00:00:00+0000', 'random url'], function(err, result) {
  if (err) {
    console.log(err);
  }
  console.log('Insert row ended: ' + result);
});
However, I get the following error:
Error: Expected 8 or 0 byte long for date (24)
When I change the timestamp to epoch time:
client.execute(query, ['20160901', '0000000000', 1472688000, 'random url']
I get:
OverflowError: normalized days too large to fit in a C int
I'm able to insert new rows via cqlsh so I'm probably missing something with the node.js driver.
Any idea?
Thanks
Where you have the string 2016-09-01 00:00:00+0000, pass new Date('2016-09-01 00:00:00+0000') instead.
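Applied to the execute call from the question, that would look roughly like this (the prepare: true option is my addition; it lets the driver map the JavaScript Date to a CQL timestamp instead of guessing the type):
client.execute(query,
  ['20160901', '0000000000', new Date('2016-09-01 00:00:00+0000'), 'random url'],
  { prepare: true },
  function(err, result) {
    if (err) {
      return console.log(err);
    }
    console.log('Insert row ended: ' + result);
  });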
