Cosmos DB readDocument API does not work in stored procedure - Azure

I wrote a simple stored procedure that uses the readDocument function in Cosmos DB/DocumentDB, but it does not work.
function testRead() {
    var collection = getContext().getCollection();
    var docId = collection.getSelfLink() + 'docs/myDocId';
    // Query documents and take 1st item.
    var isAccepted = collection.readDocument(docId, {}, function (err, doc, options) {
        if (err) throw err;
        response.setBody(JSON.stringify(doc));
    });
    if (!isAccepted) throw new Error('The query was not accepted by the server.');
}
It always fails with error code 400.
{"code":400,"body":"{\"code\":\"BadRequest\",\"message\":\"Message:
{\\"Errors\\":[\\"Encountered exception while executing Javascript.
Exception = Error: Error creating request message\\r\\nStack
trace: Error: Error creating request message\\n at readDocument
(testRead.js:512:17)\\n at testRead (testRead.js:8:5)\\n at
__docDbMain (testRead.js:18:5)\\n at Global code (testRead.js:1:2)\\"]}\r\nActivityId:
2fb0f7ef-c192-4b56-b8bb-9681c9f8fa6e, Request URI:
/apps/DocDbApp/services/DocDbServer22/partitions/a4cb4962-38c8-11e6-8106-8cdcd42c33be/replicas/1p/,
RequestStats: , SDK:
Microsoft.Azure.Documents.Common/1.22.0.0\"}","activityId":"2fb0f7ef-c192-4b56-b8bb-9681c9f8fa6e","substatus":400}
Can anyone help me?

Can you try this: var docId = collection.getAltLink() + '/docs/myDocId';
-- the self link is not for "name routing".
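For context, getSelfLink() returns a resource-id based link while getAltLink() returns a name based link, and only the latter can be combined with a document id for name routing. A rough illustration (the link values below are made-up examples, not real resource ids):

// Illustrative values only - actual links depend on your account, database and collection.
// collection.getSelfLink() -> "dbs/5qYzAA==/colls/5qYzAPXkXAA=/"   (resource-id routing)
// collection.getAltLink()  -> "dbs/MyDatabase/colls/MyCollection"  (name routing)
var docLink = collection.getAltLink() + '/docs/myDocId'; // name-based document link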

Following Michael's suggestion, my sample works now. Here is the code:
function testRead() {
    var collection = getContext().getCollection();
    var response = getContext().getResponse();
    var docId = collection.getAltLink() + '/docs/myDocId';
    // Read the document and return it in the response.
    var isAccepted = collection.readDocument(docId, {}, function (err, doc, options) {
        if (err) throw err;
        response.setBody(JSON.stringify(doc));
    });
    if (!isAccepted) throw new Error('The query was not accepted by the server.');
}

You could modify your code like this:
function testRead() {
    var collection = getContext().getCollection();
    var response = getContext().getResponse();
    var docId = collection.getAltLink() + '/docs/myDocId';
    console.log(collection.getSelfLink() + 'docs/myDocId');
    var isAccepted = collection.readDocument(docId, {}, function (err, doc, options) {
        if (err) throw err;
        response.setBody(JSON.stringify(doc));
    });
    if (!isAccepted) throw new Error('The query was not accepted by the server.');
}
Or you could use the following sample code to query the document; it also returns all of the fields of the document.
function testRead() {
    var collection = getContext().getCollection();
    var query = "select * from c where c.id = '1'";
    var isAccepted = collection.queryDocuments(collection.getSelfLink(), query, function (err, doc, options) {
        if (err) throw err;
        var response = getContext().getResponse();
        response.setBody(JSON.stringify(doc));
    });
    if (!isAccepted) throw new Error('The query was not accepted by the server.');
}
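If you would rather not hard-code the id, queryDocuments also accepts a parameterized query spec. A minimal sketch, assuming the id is passed in as a stored procedure parameter (testReadParam and @id are illustrative names, not part of the original code):

function testReadParam(id) {
    var collection = getContext().getCollection();
    var response = getContext().getResponse();
    // Parameterized query spec instead of string concatenation.
    var querySpec = {
        query: "select * from c where c.id = @id",
        parameters: [{ name: "@id", value: id }]
    };
    var isAccepted = collection.queryDocuments(collection.getSelfLink(), querySpec, function (err, docs, options) {
        if (err) throw err;
        response.setBody(JSON.stringify(docs));
    });
    if (!isAccepted) throw new Error('The query was not accepted by the server.');
}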

Related

How to perform bulk field rename in CosmosDB Javascript Stored Procedure

I've been following along with the JavaScript stored procedure examples shown here.
The code below is an attempt at writing a modified version of the update stored proc sample. Here's what I'm trying to do:
Instead of operating on a single document, I'd like to perform the
update on the set of documents returned by a provided query.
(Optional) Return a count of updated documents in the response body.
Here's the code:
function updateSproc(query, update) {
    var collection = getContext().getCollection();
    var collectionLink = collection.getSelfLink();
    var response = getContext().getResponse();
    var responseBody = {
        updated: 0,
        continuation: false
    };

    // Validate input.
    if (!query) throw new Error("The query is undefined or null.");
    if (!update) throw new Error("The update is undefined or null.");

    tryQueryAndUpdate();

    // Recursively queries for a document by id w/ support for continuation tokens.
    // Calls tryUpdate(document) as soon as the query returns a document.
    function tryQueryAndUpdate(continuation) {
        var requestOptions = {continuation: continuation};
        var isAccepted = collection.queryDocuments(collectionLink, query, requestOptions, function (err, documents, responseOptions) {
            if (err) throw err;
            if (documents.length > 0) {
                tryUpdate(documents);
            }
            else if (responseOptions.continuation) {
                // Else if the query came back empty, but with a continuation token; repeat the query w/ the token.
                tryQueryAndUpdate(responseOptions.continuation);
            }
            else {
                // Else if there are no more documents and no continuation token - we are finished updating documents.
                responseBody.continuation = false;
                response.setBody(responseBody);
            }
        });

        // If we hit execution bounds - return continuation:true
        if (!isAccepted) {
            response.setBody(responseBody);
        }
    }

    // Updates the supplied document according to the update object passed in to the sproc.
    function tryUpdate(documents) {
        if (documents.length > 0) {
            var requestOptions = {etag: documents[0]._etag};

            // Rename!
            rename(documents[0], update);

            // Update the document.
            var isAccepted = collection.replaceDocument(
                documents[0]._self,
                documents[0],
                requestOptions,
                function (err, updatedDocument, responseOptions) {
                    if (err) throw err;
                    responseBody.updated++;
                    documents.shift();
                    // Try updating the next document in the array.
                    tryUpdate(documents);
                }
            );

            if (!isAccepted) {
                response.setBody(responseBody);
            }
        }
        else {
            tryQueryAndUpdate();
        }
    }

    // The $rename operator renames a field.
    function rename(document, update) {
        var fields, i, existingFieldName, newFieldName;
        if (update.$rename) {
            fields = Object.keys(update.$rename);
            for (i = 0; i < fields.length; i++) {
                existingFieldName = fields[i];
                newFieldName = update.$rename[fields[i]];
                if (existingFieldName == newFieldName) {
                    throw new Error("Bad $rename parameter: The new field name must differ from the existing field name.")
                } else if (document[existingFieldName]) {
                    // If the field exists, set/overwrite the new field name and unset the existing field name.
                    document[newFieldName] = document[existingFieldName];
                    delete document[existingFieldName];
                } else {
                    // Otherwise this is a noop.
                }
            }
        }
    }
}
I'm running this sproc via the Azure web portal, and these are my input parameters:
SELECT * FROM root r
{$rename: {A: "B"}}
My documents look something like this:
{ id: someId, A: "ChangeThisField" }
After the field rename, I would like them to look like this:
{ id: someId, B: "ChangeThisField" }
I'm trying to debug two issues with this code:
The updated count is wildly inaccurate. I suspect I'm doing something really stupid with the continuation token - part of the problem is that I'm not really sure about what to do with it.
The rename itself is not occurring. console.log() debugging shows that I'm never getting into the if (update.$rename) block in the rename function.
I modified your stored procedure code as below and it works for me. I didn't use an object or an array as the $rename parameter; I used oldKey and newKey instead. If you are concerned about the shape of the parameters, you can change the rename method back, which does not affect the other logic. Please refer to my code:
function updateSproc(query, oldKey, newKey) {
    var collection = getContext().getCollection();
    var collectionLink = collection.getSelfLink();
    var response = getContext().getResponse();
    var responseBody = {
        updated: 0,
        continuation: ""
    };

    // Validate input.
    if (!query) throw new Error("The query is undefined or null.");
    if (!oldKey) throw new Error("The oldKey is undefined or null.");
    if (!newKey) throw new Error("The newKey is undefined or null.");

    tryQueryAndUpdate();

    function tryQueryAndUpdate(continuation) {
        var requestOptions = {
            continuation: continuation,
            pageSize: 1
        };
        var isAccepted = collection.queryDocuments(collectionLink, query, requestOptions, function (err, documents, responseOptions) {
            if (err) throw err;
            if (documents.length > 0) {
                tryUpdate(documents);
                if (responseOptions.continuation) {
                    tryQueryAndUpdate(responseOptions.continuation);
                } else {
                    response.setBody(responseBody);
                }
            }
        });
        if (!isAccepted) {
            response.setBody(responseBody);
        }
    }

    function tryUpdate(documents) {
        if (documents.length > 0) {
            var requestOptions = {etag: documents[0]._etag};

            // Rename!
            rename(documents[0]);

            // Update the document.
            var isAccepted = collection.replaceDocument(
                documents[0]._self,
                documents[0],
                requestOptions,
                function (err, updatedDocument, responseOptions) {
                    if (err) throw err;
                    responseBody.updated++;
                    documents.shift();
                    // Try updating the next document in the array.
                    tryUpdate(documents);
                }
            );

            if (!isAccepted) {
                response.setBody(responseBody);
            }
        }
    }

    // The $rename operator renames a field.
    function rename(document) {
        if (oldKey && newKey) {
            if (oldKey == newKey) {
                throw new Error("Bad $rename parameter: The new field name must differ from the existing field name.")
            } else if (document[oldKey]) {
                document[newKey] = document[oldKey];
                delete document[oldKey];
            }
        }
    }
}
I only have 3 test documents, so I set the pageSize to 1 to test the usage of the continuation token.
Test documents:
Output:
Hope it helps you. If you have any concerns, please let me know.
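For completeness, the modified sproc can also be invoked outside the portal. A minimal sketch, assuming the legacy documentdb Node SDK; the endpoint, key, and database/collection/sproc names are placeholders:

var DocumentClient = require('documentdb').DocumentClient;
var client = new DocumentClient('https://<your-account>.documents.azure.com:443/', { masterKey: '<your-key>' });

// The sproc link is name based: dbs/<db>/colls/<coll>/sprocs/<sproc name>.
var sprocLink = 'dbs/mydb/colls/mycoll/sprocs/updateSproc';

// Parameters are passed as an array in the order the sproc declares them: query, oldKey, newKey.
client.executeStoredProcedure(sprocLink, ["SELECT * FROM root r", "A", "B"], function (err, result) {
    if (err) throw err;
    console.log(result); // the sproc's responseBody object
});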

TypeError: Cannot read property 'collection' of null - Node.js insertData with MongoDB

This is my problem; the code is below.
var mongoClient = require('mongodb').MongoClient;
var db = 'mongodb://localhost:27017/lcl';

var insertData = function (db, callback) {
    var collection = db.collection('user');
    var data = [{"name": "lcl", "age": "23", "sex": "男"}, {"name": "王小猫", "age": "22", "sex": "女"}];
    collection.insert(data, function (err, result) {
        if (err) {
            console.log(err);
            return;
        }
        callback(result);
    });
};

mongoClient.connect(db, function (err, db) {
    console.log("connection successful");
    insertData(db, function (result) {
        console.log(result);
        db.close();
    });
});
Above is the code I have written. Please help me find the solution.
Try something like this. Check your DB and collection names.
var mongoClient = require('mongodb').MongoClient;
var db_config = 'mongodb://localhost:27017/conapp';

var insertData = function (db_config, callback) {
    var collection = db_config.collection('users');
    var data = [{'username': 'ddaaaa'}];
    collection.insert(data, function (err, result) {
        if (err) {
            console.log(err);
            return;
        }
        callback(result);
    });
};

mongoClient.connect(db_config, function (err, db_config) {
    insertData(db_config, function (result) {
        console.log(result);
        db_config.close();
    });
});
From the official documentation:
callback (function) – this will be called after executing this method. The first parameter will contain the Error object if an error occurred, or null otherwise, while the second parameter will contain the initialized db object, or null if an error occurred.
Logically, you have to check for an error first, and only then perform operations on the db.
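Applied to the code in the question (reusing its insertData function, with the connection string renamed to url for clarity), a minimal sketch of the connect call with the error check in place:

var mongoClient = require('mongodb').MongoClient;
var url = 'mongodb://localhost:27017/lcl';

mongoClient.connect(url, function (err, db) {
    if (err) {
        // The connection failed, so db is null here - bail out instead of calling db.collection().
        console.log(err);
        return;
    }
    insertData(db, function (result) {
        console.log(result);
        db.close();
    });
});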

Error: Missing query string MariaDB NodeJS

I get this error every time I try to run a query against the DB:
Error: Missing query string
I don't understand what could be wrong with my code; the query is correct, I have tested it:
function getChannelCTSize(octopusMac, channelID, adcTicks, callback) {
    var queryString = "SELECT Channels.CT_size FROM Channels INNER JOIN Octopus ON Octopus.Id=Channels.Octopus_Id WHERE Octopus.Mac = ? AND Channels.Channel_Id = ?";
    var filter = [octopusMac, channelID];
    var query = mariaDB.query({
        sql: queryString
    }, filter);

    query.on('error', function (err) {
        if (err) {
            console.log(err.code);
            return;
        }
    })
    .on('result', function (row) {
        callback(result[0].CT_size, channelID, octopusMac, adcTicks);
    })
    .on('end', function () {
        mariaDB.release();
    });
}
The query function of mariasql should be invoked with a string as the first argument (the query) instead of an object.
It should look like:
var queryString = "SELECT Channels.CT_size FROM Channels INNER JOIN Octopus ON Octopus.Id=Channels.Octopus_Id WHERE Octopus.Mac = :octopusMac AND Channels.Channel_Id = :channelID";
var query = mariaDB.query(queryString, { octopusMac, channelID }, function (err, rows) {
    if (err) {
        console.log(err.code);
        return;
    }
    callback(rows[0].CT_size, channelID, octopusMac, adcTicks);
});

nodejs get sqlite3 query result using promise or wait

This is my first personal project in Node.js. I'm trying to go live soon.
I have a Node.js server that uses sqlite3. The table is already populated and has only 3000 rows, each with a word, a transform, and a precalculated value in separate columns.
I need to just lookup the word in the DB to be sure it is valid.
var sqlite3 = require("sqlite3").verbose();
var db = new sqlite3.Database("validate.db");

db.get("SELECT * FROM tab WHERE w = ?", word, function (err, row) {
    if (err) { console.log("Lookup:", word, ", Error => ", err); return false; }
    return true;
});
The problem is that the caller of this code has a lot of context and needs the operation to wait, so I tried this:
function dbLookup(db, w) {
    return function (cb) {
        var rows = [];
        db.exec('SELECT w FROM tab WHERE w = "' + w + '"')
            .on('row', function (r) {
                rows.push(r);
            })
            .on('result', function () {
                cb(rows);
            });
    };
}

async.each([word], function (w) {
    dbLookup(this.db, w);
}, function (err) {
    if (err) { console.log("...ERROR..."); return false; }
    else { console.log("...SUCCESS..."); return true; }
});
This doesn't solve the wait issue, as the callback can fire at its own pace.
I read that promises, using something like Bluebird, can solve my problem,
but now I'm not able to get the value/result of the query out.
I've been pulling my hair out for so long. Please help me either get the async working or get the result back from the promise approach.
var async = require('async');
var sqlite3 = require("sqlite3").verbose();
var db = new sqlite3.Database("validate.db");

function check(word, callback) {
    db.get("SELECT count(1) cnt FROM tab WHERE w = ?", word, callback);
}

async.map(words, check, function (err, results) {
    if (err)
        return console.log('Query error');
    var all_checked = results.filter(function (r) {
        return r.cnt > 0;
    });
    ...
});
Or
var sqlite3 = require("sqlite3").verbose();
var db = new sqlite3.Database("validate.db");

db.all("SELECT distinct w FROM tab", function (err, rows) {
    // rows are objects like { w: '...' }, so compare against the column value.
    var known = rows.map(function (r) { return r.w; });
    var all_checked = words.filter(function (w) {
        return known.indexOf(w) != -1;
    });
    ...
});
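If you prefer the promise approach mentioned in the question, here is a minimal sketch that wraps db.get in a native Promise; isValidWord and the word "hello" are illustrative names, not part of the original code:

var sqlite3 = require("sqlite3").verbose();
var db = new sqlite3.Database("validate.db");

// Resolve to true/false depending on whether the word exists in the table.
function isValidWord(word) {
    return new Promise(function (resolve, reject) {
        db.get("SELECT 1 AS found FROM tab WHERE w = ?", word, function (err, row) {
            if (err) return reject(err);
            resolve(row !== undefined); // db.get returns undefined when no row matches
        });
    });
}

// The caller waits on the promise instead of expecting a return value from a callback.
isValidWord("hello").then(function (valid) {
    console.log(valid ? "...SUCCESS..." : "not found");
}).catch(function (err) {
    console.log("Lookup error:", err);
});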

How can I perform an UPSERT using Azure DocumentDB?

Azure DocumentDB does not support UPSERT. Is there a reasonable workaround to achieve the same functionality?
Is using a stored procedure that checks whether the document exists, in order to determine whether an insert or an update should be performed, an effective strategy?
What if I need to perform thousands of these in bulk?
Vote for the feature here:
http://feedback.azure.com/forums/263030-documentdb/suggestions/7075256-provide-for-upsert
Update - Here is my attempt at a bulk upsert stored procedure.
function bulkImport(docs) {
    var collection = getContext().getCollection();
    var collectionLink = collection.getSelfLink();
    var count = 0;

    if (!docs) throw new Error('Docs parameter is null');

    var docsLength = docs.length;
    if (docsLength == 0) {
        getContext().getResponse().setBody(0);
        return; // nothing to do
    }

    tryUpsert(docs[count], callback);

    function tryUpsert(doc, callback) {
        var query = { query: "select * from root r where r.id = @id", parameters: [{ name: "@id", value: doc.id }] };
        var isAccepted = collection.queryDocuments(collectionLink, query, function (err, resources, options) {
            if (err) throw err;
            if (resources.length > 0) {
                // Perform a replace.
                var isAccepted = collection.replaceDocument(resources[0]._self, doc, callback);
                if (!isAccepted) getContext().getResponse().setBody(count);
            }
            else {
                // Perform a create.
                var isAccepted = collection.createDocument(collectionLink, doc, callback);
                if (!isAccepted) getContext().getResponse().setBody(count);
            }
        });
        if (!isAccepted) getContext().getResponse().setBody(count);
    }

    function callback(err, doc, options) {
        if (err) throw err;
        // One more document has been upserted, increment the count.
        count++;
        if (count >= docsLength) {
            // If we have processed all documents, we are done. Just set the response.
            getContext().getResponse().setBody(count);
        } else {
            // Upsert the next document.
            tryUpsert(docs[count], callback);
        }
    }
}
Update (2015-10-06): Atomic upsert is now supported by Azure DocumentDB.
Yes, a stored procedure works great for upsert.
There are even code samples available on DocumentDB's GitHub:
Upsert (Optimized for Insert): https://github.com/aliuy/azure-node-samples/blob/master/documentdb-server-side-js/stored-procedures/upsert.js
Upsert (Optimized for Replace): https://github.com/aliuy/azure-node-samples/blob/master/documentdb-server-side-js/stored-procedures/upsertOptimizedForReplace.js
Bulk Import / Upsert:
https://github.com/Azure/azure-documentdb-hadoop/blob/master/src/BulkImportScript.js
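Since atomic upsert is now supported (see the update above), the query-then-replace logic can be collapsed into a single call per document. A minimal sketch, assuming the server-side upsertDocument API; bulkUpsert is an illustrative name, not part of the original post:

function bulkUpsert(docs) {
    var collection = getContext().getCollection();
    var collectionLink = collection.getSelfLink();
    var count = 0;

    if (!docs) throw new Error('Docs parameter is null');
    if (docs.length == 0) {
        getContext().getResponse().setBody(0);
        return;
    }

    upsertNext();

    function upsertNext() {
        // upsertDocument creates the document if it does not exist, otherwise replaces it.
        var isAccepted = collection.upsertDocument(collectionLink, docs[count], function (err, doc, options) {
            if (err) throw err;
            count++;
            if (count >= docs.length) {
                getContext().getResponse().setBody(count);
            } else {
                upsertNext();
            }
        });
        // If the request was not accepted (execution bounds), report how far we got.
        if (!isAccepted) getContext().getResponse().setBody(count);
    }
}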
