I am querying all the data from a single table in a schema called data in Postgres, using the following Node code:
const getperson = (request, response) => {
    db.query('SELECT * FROM data.person', (error, results) => {
        if (error) {
            throw error
        }
        response.status(200).json(results.rows)
    })
}

app.get('/person', getperson)
This schema also contains other tables. I would also like to get the data from those tables, combined and displayed when someone lands on /getall.
I tried changing the query to SELECT * FROM data.person JOIN data.animal, but it returned nothing.
I also tried SELECT * FROM data.person, data.animal, but columns that share a name collapsed into one; for example, since data.person had an id of 1 and data.animal had an id of 1, only one of the two id values appeared in the result.
The best way to do this is probably to run a separate SELECT * FROM each table and combine the results in Node, as sketched below.
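A minimal sketch of that approach with node-postgres and Promise.all, assuming the same db pool used above; the route name, function name, and response shape are illustrative:

// Run one query per table in parallel and return the row sets under separate keys.
const getAllTables = (request, response) => {
    Promise.all([
        db.query('SELECT * FROM data.person'),
        db.query('SELECT * FROM data.animal'),
    ])
        .then(([people, animals]) => {
            response.status(200).json({ people: people.rows, animals: animals.rows })
        })
        .catch(error => {
            response.status(500).json({ error: error.message })
        })
}

app.get('/getall', getAllTables)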
However, if all the queries return the same number of columns (with compatible types), you can use UNION, like:
SELECT * FROM tbl_name_1
UNION
SELECT * FROM tbl_name_2
...
The easiest option is to concatenate the queries:
const getAll = (request, response) => {
    db.query('SELECT * FROM data.person; SELECT * FROM data.animal', (error, data) => {
        if (error) {
            throw error;
        }
        response.status(200).json({ people: data[0].rows, animals: data[1].rows });
    });
};

app.get('/all', getAll);
I'm using the mssql npm module (with the Tedious driver) to read/write to an Azure SQL database from my Node server: https://www.npmjs.com/package/mssql
All the examples I've found provide a hardcoded example of the query, whether reading or writing records, like this:
var insertRecordIntoTable = function (callback) {
    sql.connect(dbConfig).then(pool => {
        return pool.request()
            .input('ID', sql.Int, 210)
            .input('Name', sql.NVarChar, "John Doe")
            .input('EmailAddress', sql.NVarChar, "test@test.com")
            .query("INSERT INTO Accounts (ID, Name, EmailAddress) VALUES (@ID, @Name, @EmailAddress)")
    }).then(result => {
        console.dir(result)
        callback(result);
    }).catch(err => {
        // ... error checks
        console.log("Error occurred: " + err);
        callback(err);
    });
}
Obviously, I'd like to write one standard method to write records to any table in the database.
Now, I can fetch the structure of each table, use that to work out what datatype each field should be for each key in jsonRecord, and write something like this:
var insertRecordIntoTable = function (jsonRecord, tableName, callback) {
    let arrKeys = jsonRecord.allKeys();
    let columnNames = getCommaSeparatedColumnNames(arrKeys);
    let valuePlaceholders = getValuePlaceholdersForSql(arrKeys);
    sql.connect(dbConfig).then(pool => {
        return pool.request()
            // How do I write something like this, so that a dynamic number of fields
            // and values gets populated into the query inside this promise?
            // I'm open to methods without promises as well.
            for (let x = 0; x < arrKeys.length; x++) {
                let key = arrKeys[x];
                // like .input('ID', sql.Int, 210)
                .input(key, getTypeForKey(key, tableName), jsonRecord[key])
            }
            .query("INSERT INTO " + tableName + " (" + columnNames + ") VALUES (" + valuePlaceholders + ")")
    }).then(result => {
        console.dir(result)
        callback(result);
    }).catch(err => {
        // ... error checks
        console.log("Error occurred: " + err);
        callback(err);
    });
}
function getTypeForKey(key, tableName) { /* looks up the table schema and returns the mssql type for that column */ }
function getCommaSeparatedColumnNames(arrKeys) { return arrKeys.join(", "); }
function getValuePlaceholdersForSql(arrKeys) { return arrKeys.map(key => "@" + key).join(", "); }
I'm sure writing to SQL from Node.js is fairly common functionality, and there may be better ways to achieve what I'm trying to do here. Please feel free to go a different route.
P.S. I should say that I prefer the mssql package over Tedious directly. It just seems better in functionality after going through the documentation over the last several hours.
If you want to interact with your database without writing all the queries yourself, you can use a query builder like knex to manage the data as objects:
knex('Accounts').insert({ID: 210, Name: "John Doe", EmailAddress: "test@test.com"})
Would be similar to:
insert into `Accounts` (`EmailAddress`, `ID`, `Name`) values ('test@test.com', 210, 'John Doe')
Also I see you are checking types. If you need validation, maybe a complete ORM (I like Objection.js) would be a good choice.
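If you would rather stay on plain mssql, note that .input() returns the request itself, so the request can be built up in a loop before the final .query() call. A minimal sketch under the question's assumptions (the getTypeForKey helper is the hypothetical one from the question, and tableName must come from a trusted list because it is concatenated into the SQL):

var insertRecordIntoAnyTable = function (jsonRecord, tableName, callback) {
    let arrKeys = Object.keys(jsonRecord);
    let columnNames = arrKeys.join(", ");
    let valuePlaceholders = arrKeys.map(key => "@" + key).join(", ");
    sql.connect(dbConfig).then(pool => {
        let request = pool.request();
        // Bind one parameter per key; .input() is chainable, so the loop keeps extending the same request.
        arrKeys.forEach(key => {
            request = request.input(key, getTypeForKey(key, tableName), jsonRecord[key]);
        });
        return request.query("INSERT INTO " + tableName + " (" + columnNames + ") VALUES (" + valuePlaceholders + ")");
    }).then(result => {
        callback(null, result);
    }).catch(err => {
        callback(err);
    });
};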
I'm new to Node (Express) and pg-promise, and have not been able to figure out how to add the result of each nested query (run in a loop) into the main JSON array produced by the outer query.
I have two tables: posts and comments.
CREATE TABLE post(
id serial,
content text not null,
linkExterno text,
usuario VARCHAR(50) NOT NULL REFERENCES usuarios(alias) ON UPDATE cascade ON DELETE cascade,
multimedia text,
ubicacation VARCHAR(100),
likes integer default 0,
time VARCHAR default now(),
reported boolean default false,
PRIMARY KEY (id) );
CREATE TABLE comment(
id serial,
idPost integer NOT NULL REFERENCES post(id) ON UPDATE cascade ON DELETE cascade,
acount VARCHAR(50) NOT NULL REFERENCES users(alias) ON UPDATE cascade ON DELETE cascade,
content text NOT NULL,
date date default now(),
PRIMARY KEY (id));
So I want to add its comments to each post and return the posts.
I have this, but it doesn't work:
con.task(t => {
    return t.any('select *, avatar from post, users where user = $1 and user = alias ORDER BY time DESC LIMIT 10 OFFSET $2', [username, pos])
        .then(posts => {
            if (posts.length > 0) {
                for (var post of posts) {
                    post.coments = t.any('select * from comment where idPost = $1', post.id);
                }
            }
        });
}).then(posts => {
    res.send(posts);
}).catch(error => {
    console.log(error);
});
Any suggestions?
P.S. I think my question is kind of similar to this one:
get JOIN table as array of results with PostgreSQL/NodeJS
ANSWERS:
Option 1 (best choice):
Make a single query that builds the JSON on the PostgreSQL side (a JSON query); see the answer by @vitaly-t and the sketch at the end of this option.
OR
Get the nested data asynchronously using AJAX.
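A minimal sketch of what such a single JSON query could look like with pg-promise and json_agg, based on the post/comment schema above (an illustration, not the exact query from the linked answer; table and column names may need adjusting to your actual schema):

// One round trip: PostgreSQL aggregates each post's comments into a JSON array.
con.any(
    `SELECT p.*, u.avatar,
            COALESCE(json_agg(c) FILTER (WHERE c.id IS NOT NULL), '[]'::json) AS coments
       FROM post p
       JOIN users u ON p.usuario = u.alias
       LEFT JOIN comment c ON c.idPost = p.id
      WHERE p.usuario = $1
      GROUP BY p.id, u.avatar
      ORDER BY p.time DESC
      LIMIT 10 OFFSET $2`, [username, pos])
    .then(posts => {
        res.send(posts); // each post already carries its comments
    })
    .catch(error => {
        console.log(error);
    });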
Option 2:
function buildTree(t) {
    return t.map('select *, avatar from publicacion, usuarios where usuario = $1 and usuario = alias ORDER BY hora DESC LIMIT 10 OFFSET $2', [username, cantidad], post => {
        return t.any('select * from comentario where idPublicacion = $1', post.id)
            .then(coments => {
                post.coments = coments;
                console.log(post.coments);
                return post;
            });
    }).then(t.batch); // settles the array of generated promises
}

router.get('/publicaciones', function (req, res) {
    cantidad = req.query.cantidad || 0; // number of posts there are
    username = req.session.user.alias;
    con.task(buildTree)
        .then(data => {
            res.send(data);
        })
        .catch(error => {
            console.log(error);
        });
});
Option 3 (async/await, inside an async function):
try {
    var posts = await con.any('select *, avatar from post, users where user = $1 and user = alias ORDER BY time DESC LIMIT 10 OFFSET $2', [username, q]);
    for (var post of posts) {
        post.coments = await con.any('select * from comment where idPost = $1', post.id);
    }
} catch (e) {
    console.log(e);
}
I'm the author of pg-promise ;)
con.task(t => {
    const a = post => t.any('SELECT * FROM comment WHERE idPost = $1', post.id)
        .then(comments => {
            post.comments = comments;
            return post;
        });
    return t.map('SELECT *, avatar FROM post, users WHERE user = $1 AND user = alias ORDER BY time DESC LIMIT 10 OFFSET $2', [username, pos], a)
        .then(t.batch);
})
    .then(posts => {
        res.send(posts);
    })
    .catch(error => {
        console.log(error);
    });
Also see this question: get JOIN table as array of results with PostgreSQL/NodeJS.
UPDATE
In case you do not want to go all the way with the JSON-query approach, the following will scale much better than the original solution, because we concatenate all the child queries and then execute them as one query:
con.task(async t => {
    const posts = await t.any('SELECT *, avatar FROM post, users WHERE user = $1 AND user = alias ORDER BY time DESC LIMIT 10 OFFSET $2', [username, pos]);
    const a = post => ({query: 'SELECT * FROM comment WHERE idPost = ${id}', values: post});
    const queries = pgp.helpers.concat(posts.map(a));
    await t.multi(queries)
        .then(comments => {
            posts.forEach((p, index) => {
                p.comments = comments[index];
            });
        });
    return posts;
})
    .then(posts => {
        res.send(posts);
    })
    .catch(error => {
        console.log(error);
    });
See API:
helpers.concat
Database.multi
If you want structured (nested) data without having to
A) rewrite your SQL using JSON functions, or split it out into multiple task queries, or
B) refactor your code to use the API of a heavy ORM,
you could check out sql-toolkit. It's a Node library built for pg-promise which allows you to write regular native SQL and receive back properly structured (nested) pure business objects. It's strictly an enhancement toolkit on top of pg-promise, and does not seek to abstract pg-promise away (you still set up pg-promise and can use it directly).
For example:
class Article extends BaseDAO {
    getBySlug(slug) {
        const query = `
            SELECT
                ${Article.getSQLSelectClause()},
                ${Person.getSQLSelectClause()},
                ${ArticleTag.getSQLSelectClause()},
                ${Tag.getSQLSelectClause()}
            FROM article
            JOIN person
                ON article.author_id = person.id
            LEFT JOIN article_tags
                ON article.id = article_tags.article_id
            LEFT JOIN tag
                ON article_tags.tag_id = tag.id
            WHERE article.slug = $(slug);
        `;
        return this.one(query, { slug });
        // OUTPUT: Article {person: Person, tags: Tags[Tag, Tag, Tag]}
    }
}
The SELECT clause uses the business objects' getSQLSelectClause methods to save the tedium of typing out the columns, as well as to ensure there are no name collisions (nothing magical is going on; the columns could just be written out instead).
The this.one is a call into sql-toolkit's base DAO class. It is responsible for structuring the flat result records into a nice nested structure.
(Also notice that it is "one", which matches our mental model of the SQL. The DAO methods one, oneOrNone, many, and any check their counts against the number of generated top-level business objects, not the number of rows the SQL expression returns!)
Check out the repository for details on how to set it up on top of pg-promise. (Disclaimer: I am the author of sql-toolkit.)
You can also use await, but then the comment queries will run sequentially:
return t.any('select *, avatar from post, users where user = $1 and user = alias ORDER BY time DESC LIMIT 10 OFFSET $2', [username, pos])
    .then(async posts => {
        if (posts.length > 0) {
            for (var post of posts) {
                post.coments = await t.any('select * from comment where idPost = $1', post.id);
            }
        }
        return posts;
    });
Actually, I recommend using ORM tools like Bookshelf, Knex, or TypeORM.
I'm a newbie at Node JS, and I'm using NodeJS (v. 8.7.0), sqlite3 and Express.
I have two tables in a SQLite database:
releases (id, title, image)
links (id, url)
Each "release" has one or more "links" associated with it.
I can get all the releases using:
dbh.all("SELECT * FROM releases ORDER BY id DESC", (err, rows) => { ... })
And I can get all the links for a given release using:
dbh.all("SELECT * FROM links WHERE id = ?", [releaseId], (err, rows) => { ... })
But I can't figure out how to add a "links" property to each "release", containing its corresponding links, so that I can feed the resulting object to Mustache and generate an HTML page.
I know that storing hierarchical data inside of a relational database is not the best idea, and I could easily do this using PHP, but I really want to learn how to use NodeJS.
This is what I've come up with so far:
var sqlite3 = require("sqlite3")

function main() {
    db = new sqlite3.Database("releases.sqlite3")
    all = []
    db.each(
        "SELECT * FROM releases ORDER BY id DESC",
        (err, release) => {
            release.links = []
            db.all("SELECT url FROM links WHERE id = ?", [release.id], (err, links) => {
                links = links.map((e) => { return e.url })
                release.links = links
                // line above: tried
                // links.forEach((e) => { release.links.push(e.url) })
                // too, but that didn't work either.
            })
            all.push(release)
        },
        (complete) => { console.log(all) }
    )
}
main()
Though, when I run it, it inevitably shows:
links: []
Every time. How can I fix this?
Thank you in advance.
Edit 1:
This SQL snippet generates the database, and populates it with some data.
CREATE TABLE `links` ( `id` TEXT, `url` TEXT );
CREATE TABLE `releases` ( `id` TEXT, `title` TEXT, `image` TEXT );
INSERT INTO links VALUES
('rel-001', 'https://example.com/mirror1'),
('rel-001', 'https://example.com/mirror2');
INSERT INTO releases VALUES
('rel-001', 'Release 001', 'https://example.com/image.jpg');
The goal is to have something like this:
{
releases:[
{
id:'rel-001',
title:'Release 001',
image:'https://example.com/image.jpg',
links:[
'https://example.com/mirror1',
'https://example.com/mirror2'
]
}
]
}
Try to see whether both queries are being executed by adding console.log in the callbacks. Moreover, you should push the links only within the second callback, since the value does not exist before that callback fires, so you are currently pushing an empty value. You also don't need to initialize release.links = []. Because all is only filled after all the queries have executed, we need to call console.log(all); in the last child callback:
function main() {
    all = []
    var parentComplete = false;
    db.each("SELECT * FROM releases ORDER BY id DESC", (err, release) => {
            db.all("SELECT url FROM links WHERE id = ?", [release.id], (err, links) => {
                release.links = links.map(e => e.url);
                all.push(release);
                if (parentComplete) {
                    console.log(all);
                }
            })
        },
        (complete) => {
            parentComplete = true;
        }
    )
}
main();
P.S. In order to get the result you want, you will need to initialize all as an object: all = {releases: []}
function main() {
    all = {releases: []};
    var parentComplete = false;
    db.each("SELECT * FROM releases ORDER BY id DESC", (err, release) => {
            db.all("SELECT url FROM links WHERE id = ?", [release.id], (err, links) => {
                release.links = links.map(e => e.url);
                all.releases.push(release);
                if (parentComplete) {
                    console.log(all);
                }
            })
        },
        (complete) => {
            parentComplete = true;
        }
    )
}
main();
I want to find data by its key in Google Datastore.
In a normal RDB:
select * from items where key = '123124124234221'
In a GQL query:
SELECT * from items WHERE __key__ HAS ANCESTOR KEY(item, 123124124234221)
It works!
But in Node.js:
let query = datastoreClient.createQuery('items')
    .hasAncestor(datastoreClient.key(['item', '123124124234221']))

datastoreClient.runQuery(query, (err, entity) => {
    if (err) { reject(err) }
    console.log(entity)
})
The result is an empty [].
Please help me figure out how to get data by key in Node.
Rather than doing a query, you should do a direct get on the entity.
var key = datastoreClient.key(['item', datastoreClient.int('123124124234221')]);

datastoreClient.get(key, function(err, entity) {
    console.log(err || entity);
});
I answered it myself: I needed datastoreClient.int(_id).
let query = datastoreClient.createQuery(keyName)
    .hasAncestor(datastoreClient.key([keyName, datastoreClient.int(_id)]))
I stumbled upon the problem that my search results are mixed data located in different collections (posts/venues/etc.). Currently I'm doing separate requests to retrieve this data, but the results are obviously grouped by type (a posts array, a venues array).
How can I query multiple collections (posts/venues) and sort the combined results by date or any other parameter (via Mongoose)?
Or maybe there is a better solution?
Thanks
I believe it's not possible with Mongoose; in the meanwhile you can do something like this:
var async = require('async');

function getPosts(cb) {
    Post.find({"foo": "bar"}, function(err, posts) {
        cb(err, posts);
    })
}

function getVenues(cb) {
    Venue.find({"foo": "bar"}, function(err, venues) {
        cb(err, venues);
    })
}

async.parallel([getPosts, getVenues], function(err, results) {
    if (err) {
        return next(err);
    }
    // results is [posts, venues], so flatten before sorting by date
    res.send(results[0].concat(results[1]).sort(function(a, b) {
        // if the default sorting is not enough you can change it here
        return a.date < b.date ? -1 : a.date > b.date ? 1 : 0;
    }));
});
This code assumes you are inside an Express route and that both Posts and Venues have a common attribute: date. If you named these date attributes differently, you would have to adjust the sort comparator.
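If you prefer promises over the async library, the same pattern can be written with Promise.all; a minimal sketch under the same assumptions (Post/Venue models, a shared date field, inside an Express route):

// Run both queries in parallel, merge the results, then sort by date.
Promise.all([
    Post.find({ "foo": "bar" }).exec(),
    Venue.find({ "foo": "bar" }).exec()
])
    .then(([posts, venues]) => {
        const merged = posts.concat(venues)
            .sort((a, b) => new Date(a.date) - new Date(b.date));
        res.send(merged);
    })
    .catch(next);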