How can I get data by key? - node.js

I want to fetch data by key from Google Datastore.
In a normal RDB:
select * from items where key = '123124124234221'
In a GQL query:
SELECT * from items WHERE __key__ HAS ANCESTOR KEY(item, 123124124234221)
That works. But in Node.js:
let query = datastoreClient.createQuery('items')
  .hasAncestor(datastoreClient.key(['item', '123124124234221']))
datastoreClient.runQuery(query, (err, entity) => {
  if (err) { reject(err) }
  console.log(entity)
})
the result is an empty array [].
Please help: how can I get an entity by key in Node?

Rather than doing a query, you should do a direct get on the entity.
var key = datastoreClient.key(['item', datastoreClient.int('123124124234221')]);
datastoreClient.get(key, function(err, entity) {
  console.log(err || entity);
});

I answered it myself: the numeric ID needs to be wrapped with datastoreClient.int(_id):
let query = datastoreClient.createQuery(keyName)
  .hasAncestor(datastoreClient.key([keyName, datastoreClient.int(_id)]))
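For reference, the direct get suggested above also works with promises; a minimal sketch, assuming a reasonably recent @google-cloud/datastore client (where get() resolves to a one-element array):

const { Datastore } = require('@google-cloud/datastore');

const datastoreClient = new Datastore();

async function getItem(_id) {
  // Numeric IDs must be wrapped with datastoreClient.int(); otherwise the
  // value is treated as a key *name* (a string) and nothing matches.
  const key = datastoreClient.key(['item', datastoreClient.int(_id)]);
  const [entity] = await datastoreClient.get(key);
  return entity; // undefined if no entity exists for that key
}

getItem('123124124234221').then(console.log).catch(console.error);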

Related

Javascript promise to iterate/include dynamic number of Arguments

I'm using the mssql npm module (with the Tedious driver) to read/write to an Azure SQL database from my Node server. https://www.npmjs.com/package/mssql
All the examples I've found hardcode the query, whether reading or writing records, like this:
var insertRecordIntoTable = function (callback) {
  sql.connect(dbConfig).then(pool => {
    return pool.request()
      .input('ID', sql.Int, 210)
      .input('Name', sql.NVarChar, "John Doe")
      .input('EmailAddress', sql.NVarChar, "test@test.com")
      .query("INSERT INTO Accounts (ID, Name, EmailAddress) VALUES (@ID, @Name, @EmailAddress)")
  }).then(result => {
    console.dir(result)
    callback(result);
  }).catch(err => {
    // ... error checks
    console.log("Error occurred: " + err);
    callback(err);
  });
}
Obviously, I'd like to write one standard method for writing records to any table in the database.
I can fetch the structure of each table and use it to work out what datatype each field should be from the keys in jsonRecord, and then write something like this:
var insertRecordIntoTable = function (jsonRecord, tableName, callback) {
  let arrKeys = jsonRecord.allKeys();
  let columnNames = getCommaSeparatedColumnNames(arrKeys);
  let valuePlaceholders = getValuePlaceholdersForSql(arrKeys);
  sql.connect(dbConfig).then(pool => {
    return pool.request()
      // How do I write something like this, so that a dynamic number of fields
      // and values gets populated into the query inside this promise?
      // I'm open to methods without promises as well.
      for (let x = 0; x < arrKeys.length; x++) {
        let key = arrKeys[x];
        // .input('ID', sql.Int, 210)
        .input(key, getTypeForKey(key, tableName), jsonRecord[key])
      }
      .query("INSERT INTO " + tableName + " (" + columnNames + ") VALUES (" + valuePlaceholders + ")")
  }).then(result => {
    console.dir(result)
    callback(result);
  }).catch(err => {
    // ... error checks
    console.log("Error occurred: " + err);
    callback(err);
  });
}
function getTypeForKey(key, tableName) { /* looks up the table schema and returns the mssql type for that column */ }
function getCommaSeparatedColumnNames(arrKeys) { return arrKeys.join(", "); }
function getValuePlaceholdersForSql(arrKeys) { /* prefix every key with '@', then join with commas and return that string */ }
I'm sure writing to SQL from Node.js is fairly common functionality, and there may be better ways to achieve what I'm trying to do here. Please feel free to go a different route.
P.S. I should say that I prefer the mssql package over Tedious. It just seems better in functionality after going through the documentation over the last several hours.
If you want to interact with your database without writing all the queries yourself, you can use a query builder like knex to manage the data as objects:
knex('Accounts').insert({ID: 210, Name: "John Doe", EmailAddress: "test@test.com"})
Would be similar to:
insert into `Accounts` (`EmailAddress`, `ID`, `Name`) values ('test@test.com', 210, 'John Doe')
Also I see you are checking types. If you need validation, maybe a complete ORM (I like Objection.js) would be a good choice.
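If you'd rather stay with the mssql package itself, the request object returned by pool.request() can be built up in a loop before the query is issued, because .input() returns the request it was called on. A minimal sketch under that assumption, using Object.keys() in place of the hypothetical allKeys() helper and keeping getTypeForKey as the asker's stub; note that tableName and the column names are concatenated into the SQL text, so they must come from a trusted source (only the values go through parameters):

const sql = require('mssql');

var insertRecordIntoTable = function (jsonRecord, tableName, callback) {
  const arrKeys = Object.keys(jsonRecord);
  const columnNames = arrKeys.join(', ');
  const valuePlaceholders = arrKeys.map(key => '@' + key).join(', ');

  sql.connect(dbConfig).then(pool => {
    const request = pool.request();
    // Add one parameter per field; .input() returns the request, so this loop
    // is equivalent to the hardcoded .input(...) chain in the first example.
    for (const key of arrKeys) {
      request.input(key, getTypeForKey(key, tableName), jsonRecord[key]);
    }
    return request.query(
      'INSERT INTO ' + tableName + ' (' + columnNames + ') VALUES (' + valuePlaceholders + ')'
    );
  }).then(result => {
    callback(result);
  }).catch(err => {
    console.log('Error occurred: ' + err);
    callback(err);
  });
};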

Dynamo DB Query Filter Node.js

I'm running a Node.js serverless backend through AWS.
Main objective: to filter and list all LOCAL jobs (table items) that include the available services and zip codes provided to the filter.
I'm passing in multiple zip codes and multiple available services.
data.radius would be an array of zip codes, something like this: [ '93901', '93902', '93905', '93906', '93907', '93912', '93933', '93942', '93944', '93950', '95377', '95378', '95385', '95387', '95391' ]
data.availableServices would also be an array, something like this: ['Snow removal', 'Ice Removal', 'Salting', 'Same Day Response']
I am trying to make an API call that returns only items whose zipCode matches one of the zip codes in data.radius and whose packageSelected matches one of the entries in data.availableServices.
API CALL
import * as dynamoDbLib from "./libs/dynamodb-lib";
import { success, failure } from "./libs/response-lib";

export async function main(event, context) {
  const data = JSON.parse(event.body);
  const params = {
    TableName: "jobs",
    FilterExpression: "zipCode = :radius, packageSelected = :availableServices",
    ExpressionAttributeValues: {
      ":radius": data.radius,
      ":availableServices": data.availableServices
    }
  };
  try {
    const result = await dynamoDbLib.call("query", params);
    // Return the matching list of items in response body
    return success(result.Items);
  } catch (e) {
    return failure({ status: false });
  }
}
Do I need to map the array of zip codes and available services first for this to work?
Should I be using comparison operators?
https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/LegacyConditionalParameters.QueryFilter.html
Is a sort key or partition key value required to query and filter? (The table has a sort key and a partition key, but I would like to avoid using them in this call.)
I'm not 100% sure how to go about this, so if anyone could point me in the right direction that would be wonderful and greatly appreciated!
I'm not sure what your dynamodb-lib refers to but here's an example of how you can scan for attribute1 in a given set of values and attribute2 in a different set of values. This uses the standard AWS JavaScript SDK, and specifically the high-level document client.
Note that you cannot use an equality (==) test here, you have to use an inclusion (IN) test. And you cannot use query, but must use scan.
const AWS = require('aws-sdk');

let dc = new AWS.DynamoDB.DocumentClient({'region': 'us-east-1'});

const data = {
  radius: [ '93901', '93902', '93905', '93906', '93907', '93912', '93933', '93942', '93944', '93950', '95377', '95378', '95385', '95387', '95391' ],
  availableServices: ['Snow removal', 'Ice Removal', 'Salting', 'Same Day Response'],
};

// These hold ExpressionAttributeValues
const zipcodes = {};
const services = {};

data.radius.forEach((zipcode, i) => {
  zipcodes[`:zipcode${i}`] = zipcode;
})

data.availableServices.forEach((service, i) => {
  services[`:services${i}`] = service;
})

// These hold FilterExpression attribute aliases
const zipcodex = Object.keys(zipcodes).toString();
const servicex = Object.keys(services).toString();

const params = {
  TableName: "jobs",
  FilterExpression: `zipCode IN (${zipcodex}) AND packageSelected IN (${servicex})`,
  ExpressionAttributeValues: {...zipcodes, ...services},
};

dc.scan(params, (err, data) => {
  if (err) {
    console.log('Error', err);
  } else {
    for (const item of data.Items) {
      console.log('item:', item);
    }
  }
});
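One caveat worth adding: a single Scan call examines at most 1 MB of data, so on a larger jobs table the filtered results may span several pages. A minimal sketch of following LastEvaluatedKey, reusing the params object above and assuming the promise form of the SDK v2 document client:

async function scanAll(params) {
  const items = [];
  let lastKey;
  do {
    // Continue from where the previous page stopped, if there was one.
    const page = await dc.scan({ ...params, ExclusiveStartKey: lastKey }).promise();
    items.push(...page.Items);
    lastKey = page.LastEvaluatedKey;
  } while (lastKey);
  return items;
}

scanAll(params).then(items => console.log('matched items:', items.length));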

query all data from 2 postgres tables using node.js

I am querying all the data from a single table in a schema called data in Postgres, using the following Node code:
const getperson = (request, response) => {
  db.query('SELECT * FROM data.person', (error, results) => {
    if (error) {
      throw error
    }
    response.status(200).json(results.rows)
  })
}

app.get('/person', getperson)
This schema also contains other tables. I would like to get the data from those tables too, put it together, and display it when someone lands on /getall.
I tried changing the query to SELECT * FROM data.person JOIN data.animal, but it returned nothing.
I also tried SELECT * FROM data.person, data.animal, but that only returned the columns merged together; for example, if data.person had an id of 1 and data.animal had an id of 1, only one of the two ids showed up in the result.
The best way to do this is probably to run several SELECT * FROM ... queries and combine the results in Node (see the Promise.all sketch after the UNION example below).
However, if all the queries return the same number of columns, you can use UNION, like:
SELECT * FROM tbl_name_1
UNION
SELECT * FROM tbl_name_2
...
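A minimal sketch of that several-queries approach, assuming node-postgres (pg), whose query() returns a promise when called without a callback:

const getAll = (request, response) => {
  Promise.all([
    db.query('SELECT * FROM data.person'),
    db.query('SELECT * FROM data.animal'),
  ]).then(([people, animals]) => {
    // Each result carries its own rows; send them back under separate keys.
    response.status(200).json({ people: people.rows, animals: animals.rows });
  }).catch(error => {
    response.status(500).json({ error: error.message });
  });
};

app.get('/getall', getAll);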
The easiest is to concatenate the queries:
const getAll = (request, response) => {
  db.query('SELECT * FROM data.person;SELECT * FROM data.animal', (error, data) => {
    if (error) {
      throw error;
    }
    response.status(200).json({people: data[0].rows, animals: data[1].rows});
  })
}

app.get('/all', getAll);

How to pass data between nested callbacks?

I'm a newbie at Node.js, and I'm using Node.js (v8.7.0), sqlite3 and Express.
I have two tables in a SQLite database:
releases (id, title, image)
links (id, url)
Each "release" has one or more "links" associated with it.
I can get all the releases using:
dbh.all("SELECT * FROM releases ORDER BY id DESC", (err, rows) => { ... })
And I can get all the links for a given release using:
dbh.all("SELECT * FROM links WHERE id = ?", (err, rows) => { ... })
But I can't figure out how to add a "links" property to each "release" containing its corresponding links, so that I can feed the resulting object to Mustache and generate an HTML page.
I know that storing hierarchical data inside of a relational database is not the best idea, and I could easily do this using PHP, but I really want to learn how to use NodeJS.
This is what I've come up with so far:
var sqlite3 = require("sqlite3")

function main() {
  db = new sqlite3.Database("releases.sqlite3")
  all = []
  db.each(
    "SELECT * FROM releases ORDER BY id DESC",
    (err, release) => {
      release.links = []
      db.all("SELECT url FROM links WHERE id = ?", [release.id], (err, links) => {
        links = links.map((e) => { return e.url })
        release.links = links
        // line above: tried
        // links.forEach((e) => { release.links.push(e.url) })
        // too, but that didn't work either.
      })
      all.push(release)
    },
    (complete) => { console.log(all) }
  )
}

main()
Though, when I run it, it inevitably shows:
links: []
Every time. How can I fix this?
Thank you in advance.
Edit 1:
This SQL snippet generates the database, and populates it with some data.
CREATE TABLE `links` ( `id` TEXT, `url` TEXT );
CREATE TABLE `releases` ( `id` TEXT, `title` TEXT, `image` TEXT );
INSERT INTO links VALUES
('rel-001', 'https://example.com/mirror1'),
('rel-001', 'https://example.com/mirror2');
INSERT INTO releases VALUES
('rel-001', 'Release 001', 'https://example.com/image.jpg');
The goal is to have something like this:
{
  releases: [
    {
      id: 'rel-001',
      title: 'Release 001',
      image: 'https://example.com/image.jpg',
      links: [
        'https://example.com/mirror1',
        'https://example.com/mirror2'
      ]
    }
  ]
}
First, check that both queries are actually being executed by adding console.log calls in the callbacks. More importantly, you should push the links only inside the second callback: before that callback fires the value doesn't exist yet, so you end up pushing an empty value. You also don't need to initialize release.links = []. Since all is only fully populated after every query has finished, console.log(all) has to run in the last child callback:
function main() {
  all = []
  var parentComplete = false;
  db.each("SELECT * FROM releases ORDER BY id DESC", (err, release) => {
    db.all("SELECT url FROM links WHERE id = ?", [release.id], (err, links) => {
      release.links = links.map(e => e.url);
      all.push(release);
      if (parentComplete) {
        console.log(all);
      }
    })
  },
  (complete) => {
    parentComplete = true;
  })
}

main();
P.S. In order to get the result you want, you will need to initialize all as an object: all = {releases: []}
function main() {
  all = {releases: []};
  var parentComplete = false;
  db.each("SELECT * FROM releases ORDER BY id DESC", (err, release) => {
    db.all("SELECT url FROM links WHERE id = ?", [release.id], (err, links) => {
      release.links = links.map(e => e.url);
      all.releases.push(release);
      if (parentComplete) {
        console.log(all);
      }
    })
  },
  (complete) => {
    parentComplete = true;
  })
}

main();
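Since both tables share the id column, another option is a single LEFT JOIN query, grouping the flat rows into the desired shape in JavaScript; this avoids the nested callbacks entirely. A minimal sketch, assuming the schema from Edit 1:

var sqlite3 = require("sqlite3")

function main() {
  const db = new sqlite3.Database("releases.sqlite3")
  const sql = `
    SELECT r.id, r.title, r.image, l.url
    FROM releases r
    LEFT JOIN links l ON l.id = r.id
    ORDER BY r.id DESC`
  db.all(sql, (err, rows) => {
    if (err) throw err
    // Group the flat rows: one entry per release, collecting its urls.
    const byId = {}
    for (const row of rows) {
      if (!byId[row.id]) {
        byId[row.id] = { id: row.id, title: row.title, image: row.image, links: [] }
      }
      if (row.url) byId[row.id].links.push(row.url)
    }
    console.log({ releases: Object.values(byId) })
  })
}

main()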

how to improve the view with map/reduce in couchdb and nodejs

I'm using Node.js with the cradle module to interact with the CouchDB server. The question is about understanding the reduce step so I can improve the view query.
For example, I currently get a user's data from their ID with a view like this:
map: function (doc) { emit(null, doc); }
And in node.js (with cradle):
db.view('users/getUserByID', function (err, resp) {
  var found = false;
  resp.forEach(function (key, row, id) {
    if (id == userID) {
      found = true;
      userData = row;
    }
  });
  if (found) {
    // good, works
  }
});
As you can see, this is really bad for a large number of documents (users in the database), so I need to improve this view with a reduce, but I don't know how because I don't understand how reduce works. Thank you.
First of all, you're using views wrong. Views are indexes first and foremost, and you shouldn't use them for full-scan operations; that's ineffective and wrong. Use the power of the B-tree index with the key, startkey and endkey query parameters, and emit the field you want to search on as the key value.
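To illustrate the startkey/endkey point, here is a small sketch in the same cradle style as the snippets below; the users/byName view is hypothetical and assumed to emit doc.name as its key:

// Hypothetical view: function (doc) { emit(doc.name, doc); }
// Range-scan the index for names starting with "Jo" instead of filtering in JS.
db.view('users/byName', { startkey: 'Jo', endkey: 'Jo\ufff0' }, function (err, resp) {
  if (!err) {
    resp.forEach(function (row) {
      console.log(row); // the emitted value for each key in the requested range
    });
  }
});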
Second, your example can easily be transformed into:
db.get(userID, function(err, body) {
  if (!err) {
    // found!
  }
});
In your loop you're comparing each row's document ID against your userID value, so there is no need for the loop at all: you can request the document by its ID directly.
Third, if your userID value doesn't match the document's ID, your view should be:
function (doc) { emit(doc.userID, null); }
and your code will look like this:
db.view('users/getUserByID', {key: userID}, function (err, resp) {
  if (!err) {
    // found!
  }
});
Simple. Effective. Fast. If you need the matched doc, use the include_docs: true query parameter to fetch it.
