How to search via Elasticsearch in Node.js?

I have indexed data from Firebase into Elasticsearch, and it is working properly.
Now I am searching the data via the Firebase Cloud Function below:
const functions = require('firebase-functions');
const elastic = require('elasticsearch');

exports.searchIt = functions.database.ref('/search/{key}')
  .onUpdate(event => {
    let key = event.params.key;
    let elasticSearchConfig = functions.config().elasticsearch;

    // Create the Elasticsearch client on every invocation.
    const esClient = new elastic.Client({
      host: 'http://35.198.221.164',
      log: 'error'
    });
    console.log('client Created');

    // Match-all query, first 20 hits.
    let searchBody = {
      size: 20,
      from: 0,
      query: {
        match_all: {}
      }
    };

    // Return the promise so the function waits for the search to finish.
    return esClient.search({ index: 'offers', body: searchBody })
      .then(results => {
        console.log('Successfully Entered');
        results.hits.hits.forEach(
          (hit, index) => console.log(hit)
        );
      })
      .catch(console.error);
  });
But this gives the error below:
textPayload: "{ Error: Not Found
    at respond (/user_code/node_modules/elasticsearch/src/lib/transport.js:307:15)
    at checkRespForFailure (/user_code/node_modules/elasticsearch/src/lib/transport.js:266:7)
    at HttpConnector.<anonymous> (/user_code/node_modules/elasticsearch/src/lib/connectors/http.js:159:7)
    at IncomingMessage.bound (/user_code/node_modules/elasticsearch/node_modules/lodash/dist/lodash.js:729:21)
    at emitNone (events.js:91:20)
    at IncomingMessage.emit (events.js:185:7)
    at endReadableNT (_stream_readable.js:974:12)
    at _combinedTickCallback (internal/process/next_tick.js:80:11)
    at process._tickDomainCallback (internal/process/next_tick.js:128:9)
Also, when I change the host to any other value, the client is still created.
Why is this happening?
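Note that the legacy elasticsearch client does not open a connection when the Client is constructed, which would explain why the client gets created no matter what host you pass; nothing is sent to the cluster until search() runs. As a hedged check (assuming the cluster listens on the default REST port 9200 and that the index really is named offers), you can ping the cluster and verify the index exists before searching:

// Sketch: verify the cluster and the index are reachable before searching.
// The port 9200 and the index name 'offers' are assumptions to adjust.
const elastic = require('elasticsearch');

const esClient = new elastic.Client({
  host: 'http://35.198.221.164:9200',
  log: 'error'
});

esClient.ping({ requestTimeout: 3000 })
  .then(() => esClient.indices.exists({ index: 'offers' }))
  .then(exists => console.log('offers index exists:', exists))
  .catch(err => console.error('Cluster unreachable or index check failed:', err));

If exists comes back false, the "Not Found" from search() is likely just the cluster reporting a missing index rather than a problem in the function code.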

Related

Dropbox API integration on AWS Lambda gets FetchError (ETIMEDOUT)

I have a Node.js app which runs on AWS Lambda. The Lambda is attached to a VPC and reaches the internet through a static IP. I use dropbox-sdk-js v10.23.0. It always runs fine locally, but on the Lambda it sometimes works and sometimes gets a fetch error.
My code is like this:
async function main() {
  const Dropbox = require('dropbox').Dropbox;
  const dropbox = {
    dbx: new Dropbox({
      accessToken: process.env.ACCESS_TOKEN,
      pathRoot: JSON.stringify({ '.tag': 'namespace_id', 'namespace_id': process.env.NAMESPACE_ID })
    })
  };
  const payload = {
    path: '',
    recursive: true,
    include_media_info: false,
    include_deleted: false,
    include_has_explicit_shared_members: true,
    include_mounted_folders: true,
    include_non_downloadable_files: true
  };
  let hasMore = true;
  let entries = [];
  let response;
  let cursor;
  while (hasMore) {
    try {
      if (cursor) {
        response = await dropbox.dbx.filesListFolderContinue({ cursor: cursor });
      }
      else {
        response = await dropbox.dbx.filesListFolderGetLatestCursor(payload);
        response = await dropbox.dbx.filesListFolderContinue({ cursor: response.result.cursor });
      }
      console.info('Entries: ', JSON.stringify(response.result.entries));
      cursor = response.result.cursor;
      entries = entries.concat(response.result.entries);
      hasMore = response.result.has_more;
    }
    catch (error) {
      console.info(error);
      return error;
    }
  }
}
main();
Error log:
2022-01-20T08:22:18.579Z 67caa239-e75c-46ce-be4c-0fcf6c154694 INFO FetchError: request to https://api.dropboxapi.com/2/files/list_folder/continue failed, reason: connect ETIMEDOUT 162.125.4.19:443
at ClientRequest.<anonymous> (/var/task/node_modules/dropbox/node_modules/node-fetch/lib/index.js:1483:11)
at ClientRequest.emit (events.js:400:28)
at TLSSocket.socketErrorListener (_http_client.js:475:9)
at TLSSocket.emit (events.js:400:28)
at emitErrorNT (internal/streams/destroy.js:106:8)
at emitErrorCloseNT (internal/streams/destroy.js:74:3)
at processTicksAndRejections (internal/process/task_queues.js:82:21) {
type: 'system',
errno: 'ETIMEDOUT',
code: 'ETIMEDOUT'
}
I detached the VPC from the Lambda and it worked, so the timeouts appear to come from the VPC networking rather than AWS blocking fetches: a Lambda inside a VPC can only reach the internet if its subnet routes outbound traffic through a NAT gateway.
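If the Lambda has to stay inside the VPC and the ETIMEDOUT errors are only intermittent, one option is to retry the Dropbox call a couple of times. This is only a sketch, reusing the dropbox.dbx client and cursor from the code above; the helper name and retry counts are illustrative, not part of the SDK:

// Sketch: retry intermittent ETIMEDOUT errors with a small backoff.
// listFolderContinueWithRetry is a hypothetical helper, not an SDK method.
async function listFolderContinueWithRetry(dbx, cursor, maxAttempts = 3) {
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await dbx.filesListFolderContinue({ cursor: cursor });
    } catch (error) {
      const timedOut = error && (error.code === 'ETIMEDOUT' || error.errno === 'ETIMEDOUT');
      if (!timedOut || attempt === maxAttempts) throw error;
      // Wait a little longer before each retry.
      await new Promise(resolve => setTimeout(resolve, attempt * 1000));
    }
  }
}

That said, if the subnet has no NAT route at all, every attempt will time out and retrying will not help.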

How to get the IPP endpoint

I'm using the ipp npm module to send a print job from a Google Cloud Function. I believe I have set up the printer correctly, but I don't know how I'm supposed to find the exact URI for sending the print job.
The printer model is a Brother MFC-L3770CDW.
Here is how my settings look in the web view for the printer configuration.
And here is the function code:
var ipp = require('ipp');
var PDFDocument = require('pdfkit');

var doc = new PDFDocument;
doc.text("Hello World");

var buffers = [];
doc.on('data', buffers.push.bind(buffers));
doc.on('end', function () {
  var printer = ipp.Printer("https://10.0.0.55:443");
  var file = {
    "operation-attributes-tag": {
      "requesting-user-name": "User",
      "job-name": "Print Job",
      "document-format": "application/pdf"
    },
    data: Buffer.concat(buffers)
  };
  printer.execute("Print-Job", file, function (err, res) {
    if (err) {
      console.log(err);
    }
    else {
      console.log("Printed: " + res.statusCode);
    }
  });
  console.log('executing');
});
doc.end();
console.log('finished executing');
I have tried various URIs, such as:
https://10.0.0.55:631
https://10.0.0.55:443
https://10.0.0.55:631/ipp
https://10.0.0.55:631/ipp/printer
Sometimes I get an error like:
"Error: socket hang up
at TLSSocket.onHangUp (_tls_wrap.js:1148:19)
at Object.onceWrapper (events.js:313:30)
at emitNone (events.js:111:20)
at TLSSocket.emit (events.js:208:7)
at endReadableNT (_stream_readable.js:1064:12)
at _combinedTickCallback (internal/process/next_tick.js:139:11)
at process._tickCallback (internal/process/next_tick.js:181:9)
code: 'ECONNRESET',
path: null,
host: '10.0.0.55',
port: '631',
localAddress: undefined }"
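One way to narrow down the endpoint is to send a harmless Get-Printer-Attributes request to each candidate URI and see which one answers. This is only a sketch: IPP normally runs over plain HTTP on port 631, and the extra '/ipp/print' path is just another common guess, not something confirmed for this printer model:

// Sketch: probe candidate IPP endpoints with Get-Printer-Attributes
// before sending a real Print-Job. The paths are guesses; adjust as needed.
var ipp = require('ipp');

var candidates = [
  'http://10.0.0.55:631/',
  'http://10.0.0.55:631/ipp',
  'http://10.0.0.55:631/ipp/print',
  'http://10.0.0.55:631/ipp/printer'
];

candidates.forEach(function (uri) {
  var printer = ipp.Printer(uri);
  printer.execute('Get-Printer-Attributes', null, function (err, res) {
    if (err) {
      console.log(uri, '->', err.message || err);
    } else {
      console.log(uri, '->', res.statusCode);
    }
  });
});

A socket hang up against https on port 631 may simply mean the printer is speaking plain HTTP there, which is why the sketch uses http:// URIs.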

Error: socket hang up in HTTP request made in Node.js using request-promise causing for loop to restart

I am trying to make an HTTP request using request-promise inside a for loop. It seems that if a request takes too long, request-promise closes the connection.
That behavior is fine, but what I am not able to grasp is why the for loop starts from 0 again after the error is printed.
Below is the code:
const rp = require('request-promise');

async function stepIterator(processingSteps, documentId) {
  var finalResult = null;
  for (var step = 0, len = processingSteps.length; step < len; step++) {
    if (step === 0 || step === 1 || step == 2 || step == 3) {
      try {
        console.log('Calling step ', step);
        let url = 'http://internal-server:8080/process';
        let collection = getCollection(documentId);
        let splitText = getSPlit(documentId);
        let outputFormat = 'xmi';
        let documentObject = await callServer(url, collection, splitText, outputFormat);
        finalResult = documentObject;
      } catch (error) {
        console.log("Error");
      }
    }
  }
  return finalResult;
}

async function callServer(url, collection, splitText, outputFormat) {
  var options = {
    method: 'POST',
    uri: url,
    headers: {
      'Content-Type': 'multipart/form-data',
      'Cache-Control': 'no-cache',
      'Connection': 'keep-alive '
    },
    formData: {
      collection: collection,
      text: splitText,
      output: outputFormat
    }
  };
  return rp(options);
}
The complete error trace is as follows:
{ RequestError: Error: socket hang up
at new RequestError (D:\New_Projects\new-data-access-layer\node_modules\request-promise-core\lib\errors.js:14:15)
at Request.plumbing.callback (D:\New_Projects\new-data-access-layer\node_modules\request-promise-core\lib\plumbing.js:87:29)
at Request.RP$callback [as _callback] (D:\New_Projects\new-data-access-layer\node_modules\request-promise-core\lib\plumbing.js:46:31)
at self.callback (D:\New_Projects\new-data-access-layer\node_modules\request\request.js:185:22)
at Request.emit (events.js:182:13)
at Request.onRequestError (D:\New_Projects\new-data-access-layer\node_modules\request\request.js:881:8)
at ClientRequest.emit (events.js:182:13)
at Socket.socketOnEnd (_http_client.js:425:9)
at Socket.emit (events.js:187:15)
at endReadableNT (_stream_readable.js:1094:12)
at process._tickCallback (internal/process/next_tick.js:63:19) name: 'RequestError', message: 'Error: socket hang up', cause:
{ Error: socket hang up
at createHangUpError (_http_client.js:322:15)
at Socket.socketOnEnd (_http_client.js:425:23)
at Socket.emit (events.js:187:15)
at endReadableNT (_stream_readable.js:1094:12)
at process._tickCallback (internal/process/next_tick.js:63:19) code: 'ECONNRESET' }, error: { Error: socket hang up
at createHangUpError (_http_client.js:322:15)
at Socket.socketOnEnd (_http_client.js:425:23)
at Socket.emit (events.js:187:15)
at endReadableNT (_stream_readable.js:1094:12)
at process._tickCallback (internal/process/next_tick.js:63:19) code: 'ECONNRESET' }, options: { method: 'POST',
uri: 'http://internal-server:8080/process',
json: true,
headers: { Connection: 'keep-alive ' },
body:
{ docSplitId: [Array],
_id: 5c579d84812acb17ec74ac39,
contentType: 'application/pdf',
location:
'C:\\Users\\newuser\\AppData\\Local\\Temp\\2\\report.pdf',
docModelVersion: '1',
visualMetaDataId: null,
categoryId: '5c52a72f6df294140c0535bc',
deductedInfo: null,
status: 'New',
isDeleted: false,
metadata: [Object],
detailedStatus: [Array] },
callback: [Function: RP$callback],
transform: undefined,
simple: true,
resolveWithFullResponse: false,
transform2xxOnly: false }, response: undefined }
Obviously the socket is hanging. Rather than wrestling with the raw http module, which is a little complex, use unirest; it closes the data stream for you.
var unirest = require('unirest');

var req = unirest('POST', 'localhost:3200/store/artifact/metamodel')
  .attach('file', '/home/arsene/DB.ecore')
  .field('description', 'We are trying to save the metamodel')
  .field('project', '6256d72a81c4b80ccfc1768b')
  .end(function (res) {
    if (res.error) throw new Error(res.error);
    console.log(res.raw_body);
  });
Hope this helps!
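As a further note, if you would rather stay with request-promise, you can also give each request an explicit timeout and let request generate the multipart Content-Type itself (it adds the boundary automatically). This is only a sketch; the timeout value is illustrative:

// Sketch: same callServer, but with an explicit timeout (in milliseconds),
// no manual multipart Content-Type, and no trailing space in the Connection header.
const rp = require('request-promise');

async function callServer(url, collection, splitText, outputFormat) {
  const options = {
    method: 'POST',
    uri: url,
    timeout: 120000,
    headers: {
      'Cache-Control': 'no-cache',
      'Connection': 'keep-alive'
    },
    formData: {
      collection: collection,
      text: splitText,
      output: outputFormat
    }
  };
  return rp(options);
}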

Sending signTransaction with web3.js and getting errors

Blockchain, web3.js issue:
I am new to blockchain and am trying to make transactions using the Ropsten test network, but I am getting errors.
Thanks in advance if anyone can help me.
exports.sendTransactions = () => {
  return new Promise((resolve, reject) => {
    var admin = "0xEF9dE245F335e0f3ae8A9563FD54D001de1d3E2D";
    var contract_address = "0x39E2f0E87027093C6Ffe76A4d2f20AEE479E5f64";
    var tx = {
      from: admin,
      to: contract_address,
      gas: 184000,
      data: "",
      chainId: "1337"
    };
    web3.eth.accounts.signTransaction(tx, 'privateKey').then((hash) => {
      web3.eth.sendSignedTransaction(hash.rawTransaction).then((receipt) => {
        resolve();
      }, (error) => {
        console.log(error);
        reject(500);
      });
    }, (error) => {
      reject(500);
    });
  });
}
{ messageHash: '0x15e3f440015b35151c1343fb9a6be2497c082b47dbce607d32e80c10eff800f1',
v: '0xa95',
r: '0x4446233885e382fc9f297dc47fe9294be57623341b781eece94ce28b52bec6ed',
s: '0x51b096eb07f73d5a30757300caf990b4083d001d2f6aea0fca280aafbea5593d',
rawTransaction: '0xf8678085012a05f2008302cec09439e2f0e87027093c6ffe76a4d2f20aee479e5f648080820a95a04446233885e382fc9f297dc47fe9294be57623341b781eece94ce28b52bec6eda051b096eb07f73d5a30757300caf990b4083d001d2f6aea0fca280aafbea5593d' }
Error: Returned error: invalid sender
at Object.ErrorResponse (/home/ri-8/Desktop/tokenAI/node_modules/web3-core-helpers/src/errors.js:29:16)
at /home/ri-8/Desktop/tokenAI/node_modules/web3-core-requestmanager/src/index.js:137:36
at XMLHttpRequest.request.onreadystatechange (/home/ri-8/Desktop/tokenAI/node_modules/web3-providers-http/src/index.js:77:13)
at XMLHttpRequestEventTarget.dispatchEvent (/home/ri-8/Desktop/tokenAI/node_modules/xhr2/lib/xhr2.js:64:18)
at XMLHttpRequest._setReadyState (/home/ri-8/Desktop/tokenAI/node_modules/xhr2/lib/xhr2.js:354:12)
at XMLHttpRequest._onHttpResponseEnd (/home/ri-8/Desktop/tokenAI/node_modules/xhr2/lib/xhr2.js:509:12)
at IncomingMessage.<anonymous> (/home/ri-8/Desktop/tokenAI/node_modules/xhr2/lib/xhr2.js:469:24)
at emitNone (events.js:110:20)
at IncomingMessage.emit (events.js:207:7)
at endReadableNT (_stream_readable.js:1059:12)
at _combinedTickCallback (internal/process/next_tick.js:138:11)
at process._tickCallback (internal/process/next_tick.js:180:9)
The chain ID for Ropsten is 3 (see EIP 155), but you've supplied 1337.
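A hedged sketch of the corrected transaction, reusing the admin, contract_address, and web3 objects from the question (privateKey stands in for the real key):

// Sketch: the same transaction with the Ropsten chain id (3, as a number).
const tx = {
  from: admin,
  to: contract_address,
  gas: 184000,
  data: "",
  chainId: 3
};

web3.eth.accounts.signTransaction(tx, privateKey)
  .then(signed => web3.eth.sendSignedTransaction(signed.rawTransaction))
  .then(receipt => console.log('Mined in block', receipt.blockNumber))
  .catch(console.error);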

Node.js Error: Max redirects exceeded

How can I ignore pages with redirect cycles?
I use this code to fetch pages:
var libxml = require("libxmljs"),
    http = require('follow-redirects').http,
    url = require("url");

var request = http.request({ "host": host, "path": URL, "port": 80 }, function (response) {
  var str = '';
  response.on('data', function (chunk) {
    str += chunk;
  });
  response.on('end', function () {
    callback(str, response.statusCode);
  }).on('error', function (err) {
    console.log(err);
  });
}).end();
It never reaches the 'error' handler, and I get an exception:
events.js:85
throw er; // Unhandled 'error' event
^
Error: Max redirects exceeded.
at ClientRequest.cb (/var/parsing/node_modules/follow-redirects/create.js:55:19)
at ClientRequest.g (events.js:199:16)
at ClientRequest.emit (events.js:107:17)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:426:21)
at HTTPParser.parserOnHeadersComplete (_http_common.js:111:23)
at Socket.socketOnData (_http_client.js:317:20)
at Socket.emit (events.js:107:17)
at readableAddChunk (_stream_readable.js:163:16)
at Socket.Readable.push (_stream_readable.js:126:10)
at TCP.onread (net.js:538:20)
The error is being thrown by the request object, not the response object, so you need to add an (additional) error listener to the request:
var request = http.request(...).on('error', function(err) {
...
}).end();
Looking at the docs for the package you are using (https://www.npmjs.com/package/follow-redirects), it looks like it just has a maxRedirects option. Directly from the linked page:
require('follow-redirects').maxRedirects = 10; // Has global affect (be careful!)
https.request({
host: 'bitly.com',
path: '/UHfDGO',
maxRedirects: 3 // per request setting
}, function (res) {/* ... */});
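Putting both pieces together, here is a minimal sketch that sets a per-request maxRedirects and attaches the error listener to the request object, so redirect loops are logged and skipped instead of crashing the process; host, URL, and callback are the same variables as in the question:

// Sketch: limit redirects per request and catch the resulting error.
var http = require('follow-redirects').http;

var request = http.request({
  host: host,
  path: URL,
  port: 80,
  maxRedirects: 5            // illustrative per-request limit
}, function (response) {
  var str = '';
  response.on('data', function (chunk) { str += chunk; });
  response.on('end', function () { callback(str, response.statusCode); });
});

request.on('error', function (err) {
  // 'Max redirects exceeded.' lands here instead of being thrown as unhandled.
  console.log('Skipping page:', err.message);
});

request.end();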
