I am downloading a file from Google Drive using the Node.js module googleapis 7.1.0. Authentication and metadata retrieval both work fine.
When the download is finished and the application is supposed to end, I get two different outcomes, and both seem wrong.
On Windows, the program hangs indefinitely and produces no output and no exception. It just hangs.
On FreeBSD, I get the following stack trace:
buffer.js:377
throw new Error('toString failed');
^
Error: toString failed
at Buffer.toString (buffer.js:377:11)
at BufferList.toString (/usr/home/jvavruska/gdrive/node_modules/googleapis/node_modules/google-auth-library/node_modules/request/node_modules/bl/bl.js:166:33)
at Request.<anonymous> (/usr/home/jvavruska/gdrive/node_modules/googleapis/node_modules/google-auth-library/node_modules/request/request.js:1035:36)
at emitOne (events.js:82:20)
at Request.emit (events.js:169:7)
at IncomingMessage.<anonymous> (/usr/home/jvavruska/gdrive/node_modules/googleapis/node_modules/google-auth-library/node_modules/request/request.js:1003:12)
at emitNone (events.js:72:20)
at IncomingMessage.emit (events.js:166:7)
at endReadableNT (_stream_readable.js:913:12)
at nextTickCallbackWith2Args (node.js:442:9)
I use Node 4.4.5 on both machines (Windows 10 and FreeBSD 10) and the same version of googleapis (7.1.0).
The final function that does the download is below. name is read from the file metadata using the get API, auth is the auth object created from google.auth.OAuth2, googleDrive is a proxy for google.drive('v3'), and google comes from google = require('googleapis'):
function googleDownload ( name, fileId, auth, downloadDirectory ) {
  downloadDirectory = downloadDirectory || 'c:\\playground';
  var targetFileName = path.join( downloadDirectory, name );
  var dest = fs.createWriteStream(targetFileName);
  console.log(`Starting download of ${fileId} as ${name}`);
  googleDrive.files.get({
      fileId: fileId,
      auth: auth,
      alt: 'media'
    },
    (err, response) => {
      if (err) { console.log("Download error: ", err); }
      else { console.log("Download completed."); }
    })
    .on('end', () => { console.log('All data received.'); })
    .on('finish', () => { console.log('All data written.'); })
    .on('close', () => { console.log('Connection closed.'); })
    .on('error', (err) => { console.log('Error during download: ', err); })
    .pipe(dest);
}
Looking into the code, I was not able to find a direct link between the place where the error is thrown and what was actually supposed to happen. I only noticed that the googleapis bundle seems to duplicate a number of methods or functions available in the Node.js API, but I cannot say how that relates to the error.
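For reference, this is roughly how name is obtained beforehand: a plain files.get call without alt: 'media' returns the file resource, which includes its name. A minimal sketch, assuming the same auth and googleDrive objects as above:
googleDrive.files.get({ fileId: fileId, auth: auth }, (err, metadata) => {
  if (err) return console.log('Metadata error: ', err);
  // metadata is the Drive v3 file resource; metadata.name holds the file name
  googleDownload(metadata.name, fileId, auth, downloadDirectory);
});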
Related
I am using the request module in Node.js (v8.12) to call a third-party API. Since the API is not very reliable, and for lack of a better option, I time out the call after 2 seconds if there is no response. But in doing so I get a socket hang up error. Below are the code I used and the stack trace.
const request = require('request'); // request module import

const options = {
  url: resource_url,
  rejectUnauthorized: false,
  timeout: 2000,
  method: 'GET',
  headers: {
    'Content-Type': 'application/json',
  }
};

return new Promise(function (resolve, reject) {
  request(options, function (err, res, body) {
    if (!err) {
      resolve(JSON.parse(body.data));
    } else {
      if (err.code === 'ETIMEDOUT' || err.code === 'ESOCKETTIMEDOUT') {
        // fall back to stale data on timeout
        resolve(someOldData);
      } else {
        // fall back to stale data on any other error as well
        resolve(someOldData);
      }
    }
  });
});
Error: socket hang up
at createHangUpError (_http_client.js:331:15)
at TLSSocket.socketCloseListener (_http_client.js:363:23)
at scope.activate (<trace-log-base-path>/dd-trace/packages/dd-trace/src/scope/base.js:54:19)
at Scope._activate (<trace-log-base-path>/dd-trace/packages/dd-trace/src/scope/async_hooks.js:51:14)
at Scope.activate (<trace-log-base-path>/dd-trace/packages/dd-trace/src/scope/base.js:12:19)
at TLSSocket.bound (<trace-log-base-path>/dd-trace/packages/dd-trace/src/scope/base.js:53:20)
at emitOne (events.js:121:20)
at TLSSocket.emit (events.js:211:7)
at _handle.close (net.js:554:12)
at TCP.done [as _onclose] (_tls_wrap.js:356:7)
After doing a bit of reading and research I found this article pointing out a similar issue, so I switched to the http module, as mentioned in one of the solutions in the article. But switching to the http module did not resolve the issue either. Below are the implementation using http and its stack trace.
const http = require('http'); // http module import

let responseData = '';
const requestOptions = {
  hostname: resource_host,
  path: resource_path,
  method: 'GET',
  timeout: 2000,
};

return new Promise((resolve, reject) => {
  const requestObject = http.request(requestOptions, (responseObj) => {
    responseObj.setEncoding('utf8');
    responseObj.on('data', (body) => {
      responseData += body; // accumulate chunks instead of overwriting
    });
    responseObj.on('end', () => {
      resolve(responseData);
    });
  });
  requestObject.on('error', (err) => {
    responseData = someOldData;
    resolve(responseData);
  });
  requestObject.on('timeout', () => {
    responseData = someOldData;
    requestObject.abort();
  });
  requestObject.end();
});
Error: socket hang up
at connResetException (internal/errors.js:608:14)
at Socket.socketCloseListener (_http_client.js:400:25)
at <trace-log-base-path>/dd-trace/packages/dd-trace/src/scope/base.js:54:19
at Scope._activate (<trace-log-base-path>/dd-trace/packages/dd-trace/src/scope/async_hooks.js:51:14)
at Scope.activate (<trace-log-base-path>/dd-trace/packages/dd-trace/src/scope/base.js:12:19)
at Socket.bound (<trace-log-base-path>/dd-trace/packages/dd-trace/src/scope/base.js:53:20)
at Socket.emit (events.js:322:22)
at Socket.EventEmitter.emit (domain.js:482:12)
at TCP.<anonymous> (net.js:672:12)
I went through multiple SO posts and various other resources on the web, but I am unable to resolve this issue.
Could it be the third party's fault? I tried to reproduce the issue by creating a dummy server that sleeps for some time after the request is fired, and timing out that request, but I was unable to reproduce it.
I'll be very grateful for any help in this regard.
Removing requestObject.abort() from the timeout event handler when using the http module resolves this issue.
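A minimal sketch of the adjusted handler, keeping the stale-data fallback from the question but letting the underlying request finish or error out on its own (same requestObject, resolve, and someOldData as above):
requestObject.on('timeout', () => {
  responseData = someOldData;
  // resolve with stale data immediately; no abort(), so no synthetic 'socket hang up'
  resolve(responseData);
});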
I am building a NodeJS app that makes calls to an external API. The external API uses a self-signed certificate. I tried setting the environment variable process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0'. This works to ignore the certificate verification when using the app normally. However, a request to the same endpoint does NOT work when calling the NodeJS route with the Jest Supertest agent.
There is a certificate verification error when running the Jest Supertest case. Is there a way to accept self-signed certificates when sending requests using the Supertest agent?
npm test
Error: Error: SSL Error: DEPTH_ZERO_SELF_SIGNED_CERT
at Object.dispatchError (/home/node/app/node_modules/jsdom/lib/jsdom/living/xhr-utils.js:54:19)
at EventEmitter.<anonymous> (/home/node/app/node_modules/jsdom/lib/jsdom/living/xmlhttprequest.js:675:20)
at EventEmitter.emit (events.js:323:22)
at Request.<anonymous> (/home/node/app/node_modules/jsdom/lib/jsdom/living/xhr-utils.js:384:47)
at Request.emit (events.js:311:20)
at Request.onRequestResponse (/home/node/app/node_modules/request/request.js:948:10)
at ClientRequest.emit (events.js:311:20)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:603:27)
at HTTPParser.parserOnHeadersComplete (_http_common.js:119:17)
at TLSSocket.socketOnData (_http_client.js:476:22) undefined
NodeJS internal route
This works when accessing the route via the browser, but not when running Jest Supertest. The internal route /internal itself works, but when that code subsequently sends a request to the external API with the self-signed certificate, the certificate verification causes a 500 error.
router.get('/internal', (req, res, next) => {
  // Set request values that are specific to this route
  const requestOptionsData = { method: `GET`, endpoint: `/external` };
  try {
    httpCtrl.makeRequest(requestOptionsData).then(result => {
      if (result.error) {
        return res.status(result.status).json(result.error.message || result.error);
      }
      return res.status(result.status).json(result);
    }).catch((error) => {
      console.error(error);
      return res.status(500).send(error);
    });
  } catch (e) {
    console.error(e);
    return res.status(500).send(e);
  }
});
NodeJS controller
A wrapper function that makes axios requests to the external API:
httpCtrl.makeRequest = async (requestOptionsData) => {
  let result = {};
  // Set request options
  const requestOptions = httpCtrl.setApiRequestOptions(requestOptionsData);
  let response;
  try {
    response = await axios(requestOptions);
  } catch (e) {
    result.error = e.toJSON() || e;
    console.error(result.error);
    result.status = 500;
    return result;
  }
  result.status = response && response.status || 500;
  result.data = response && response.data || {};
  return result;
}
Jest Supertest
The test that causes the certificate error:
const app = require('../app.js');
const supertest = require('supertest');
const agent = supertest.agent(app); // create the Supertest agent for the Express app

describe('API routes', () => {
  it('GET internal NodeJS route', async done => {
    agent
      .get('/internal')
      .set('Accept', 'application/json')
      .send()
      .expect(200)
      .end((err, res) => {
        if (err) {
          return done(err);
        }
        expect(res.status).toBe(200);
        return done();
      });
  });
});
UPDATE:
I tried removing NODE_TLS_REJECT_UNAUTHORIZED and setting rejectUnauthorized to false in the axios agent config, but I'm still having the same problem. The connection works when using the app via the browser but does not work with Supertest.
const https = require('https');

const agent = new https.Agent({
  rejectUnauthorized: false
});

const options = {
  url: url,
  headers: {
    'Content-Type': 'application/json',
    'Accept': 'application/json',
    'Authorization': `Bearer ${requestOptionsData.jwt}`,
    'Host': process.env.ADMIN_API_BASE_URL
  },
  method: requestOptionsData.method || `GET`,
  httpsAgent: agent
}
Here is the error with this agent configuration:
Error: Error: self signed certificate
at Object.dispatchError (/home/node/app/node_modules/jsdom/lib/jsdom/living/xhr-utils.js:54:19)
at EventEmitter.<anonymous> (/home/node/app/node_modules/jsdom/lib/jsdom/living/xmlhttprequest.js:675:20)
at EventEmitter.emit (events.js:323:22)
at Request.<anonymous> (/home/node/app/node_modules/jsdom/lib/jsdom/living/xhr-utils.js:384:47)
at Request.emit (events.js:311:20)
at Request.onRequestError (/home/node/app/node_modules/request/request.js:877:8)
at ClientRequest.emit (events.js:311:20)
at TLSSocket.socketErrorListener (_http_client.js:426:9)
at TLSSocket.emit (events.js:311:20)
at emitErrorNT (internal/streams/destroy.js:92:8) undefined
console.error controllers/http.ctrl.js:50
I was able to solve this with the solution in this GitHub issue:
I solved it by adding testEnvironment: 'node' to the jest.config.js file.
https://github.com/axios/axios/issues/1180
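For reference, a minimal jest.config.js with that setting (assuming no other Jest options are needed):
// jest.config.js
module.exports = {
  testEnvironment: 'node',
};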
Observed Application Behavior
I'm getting an UnhandledPromiseRejectionWarning: Error: Upload failed when using @google-cloud/storage in Node.js.
These errors come up when processing thousands of requests. Only a small percentage fail, but because there is no way to handle the errors and the error message carries no context, it's very difficult to determine WHICH files are failing.
I know that, in general, promises must have a .catch or be surrounded by a try/catch block. But in this case I'm using a write stream, and I'm a little confused about where the promise that's being rejected is actually located and how I would intercept it. The stack trace is unhelpful, as it only contains library code:
UnhandledPromiseRejectionWarning: Error: Upload failed
at Request.requestStream.on.resp (.../node_modules/gcs-resumable-upload/build/src/index.js:163:34)
at emitTwo (events.js:131:20)
at Request.emit (events.js:214:7)
at Request.<anonymous> (.../node_modules/request/request.js:1161:10)
at emitOne (events.js:121:20)
at Request.emit (events.js:211:7)
at IncomingMessage.<anonymous> (.../node_modules/request/request.js:1083:12)
at Object.onceWrapper (events.js:313:30)
at emitNone (events.js:111:20)
at IncomingMessage.emit (events.js:208:7)
My Code
The code that's creating the writeStream looks like this:
const {join} = require('path')
const {Storage} = require('@google-cloud/storage')

module.exports = (config) => {
  const storage = new Storage({
    projectId: config.gcloud.project,
    keyFilename: config.gcloud.auth_file
  })
  return {
    getBucketWS(path, contentType) {
      const {bucket, path_prefix} = config.gcloud
      // add path_prefix if we have one
      if (path_prefix) {
        path = join(path_prefix, path)
      }
      let setup = storage.bucket(bucket).file(path)
      let opts = {}
      if (contentType) {
        opts = {
          contentType,
          metadata: {contentType}
        }
      }
      const stream = setup.createWriteStream(opts)
      stream._bucket = bucket
      stream._path = path
      return stream
    }
  }
}
And the consuming code looks like this:
const gcs = require('./gcs-helper.js')

module.exports = ({writePath, contentType, item}, done) => {
  let ws = gcs.getBucketWS(writePath, contentType)
  ws.on('error', (err) => {
    err.message = `Could not open gs://${ws._bucket}/${ws._path}: ${err.message}`
    done(err)
  })
  ws.on('finish', () => {
    done(null, {
      path: writePath,
      item
    })
  })
  ws.write(item)
  ws.end()
}
Given that I'm already listening for the error event on the stream, I don't see what else I can do here. There isn't a promise happening at the level of @google-cloud/storage that I'm consuming.
Digging into the @google-cloud/storage Library
The first line of the stack trace brings us to a code block in the gcs-resumable-upload node module that looks like this:
requestStream.on('complete', resp => {
  if (resp.statusCode < 200 || resp.statusCode > 299) {
    this.destroy(new Error('Upload failed'));
    return;
  }
  this.emit('metadata', resp.body);
  this.deleteConfig();
  this.uncork();
});
This is passing the error to the destroy method on the stream. The stream is being created by the @google-cloud/common project's utility module, and this is using the duplexify node module to create the stream. The destroy method is defined on the duplexify stream and can be found in the README documentation.
Reading the duplexify code, I see that it first checks this._ondrain before emitting an error. Maybe I can provide a callback to avoid this error being unhandled?
I tried ws.write(item, null, cb) and still got the same UnhandledPromiseRejectionWarning. I tried ws.end(item, null, cb) and even wrapped the .end call in a try/catch, and ended up getting this error, which crashed the process entirely:
events.js:183
throw er; // Unhandled 'error' event
^
Error: The uploaded data did not match the data from the server. As a precaution, the file has been deleted. To be sure the content is the same, you should try uploading the file again.
at delete (.../node_modules/@google-cloud/storage/build/src/file.js:1295:35)
at Util.handleResp (.../node_modules/@google-cloud/common/build/src/util.js:123:9)
at retryRequest (.../node_modules/@google-cloud/common/build/src/util.js:404:22)
at onResponse (.../node_modules/retry-request/index.js:200:7)
at .../node_modules/teeny-request/build/src/index.js:208:17
at <anonymous>
at process._tickCallback (internal/process/next_tick.js:189:7)
My final code looks something like this:
let ws = gcs.getBucketWS(writePath, contentType)
const handleErr = (err) => {
  if (err) err.message = `Could not open gs://${ws._bucket}/${ws._path}: ${err.message}`
  done(err)
}

ws.on('error', handleErr)

// trying to do everything we can to handle these errors
// for some reason we still get UnhandledPromiseRejectionWarning
try {
  ws.write(item, null, err => {
    handleErr(err)
  })
  ws.end()
} catch (e) {
  handleErr(e)
}
Conclusion
It's still a mystery to me how a user of the @google-cloud/storage library, or duplexify for that matter, is supposed to perform proper error handling. Comments from library maintainers of either project would be appreciated. Thanks!
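As a stopgap, not a fix, a process-wide handler can at least log these rejections instead of letting them surface only as warnings. A minimal sketch; note it still won't identify which file failed, since the rejection reason carries no file context:
process.on('unhandledRejection', (reason, promise) => {
  // Surface rejections that no .catch() picked up, e.g. during bulk uploads
  console.error('Unhandled rejection:', reason);
});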
UPDATED QUESTION
The problem is ApiError: Error during request.
Code:
import * as functions from 'firebase-functions';
const cors = require('cors')({ origin: true });
import * as admin from 'firebase-admin';

const gcs = admin.storage();

export const deleteImage = functions.https.onRequest((req, res) => {
  return cors(req, res, async () => {
    res.set('Content-Type', 'application/json');
    const id = req.body.id;
    const name = req.body.name;
    const imageRef = gcs.bucket(`images/${name}`);

    if (!name || !id) {
      return res.status(400).send({message: 'Missing parameters :/'});
    }

    try {
      await imageRef.delete();
      console.log('Image deleted from Storage');
      return res.status(200).send({status: 200, message: `Thank you for id ${id}`});
    }
    catch (error) {
      console.log('error: ', error);
      return res.status(500).send({message: `Image deletion failed: ${error}`});
    }
  });
});
And the problem is at await imageRef.delete(); I get the following error:
ApiError: Error during request.
I do, indeed, have admin.initializeApp(); in one of my other functions, so that can't be the issue, unless GCF has a bug.
More In-Depth Error:
{ ApiError: Error during request.
at Object.parseHttpRespBody (/user_code/node_modules/firebase-admin/node_modules/#google-cloud/common/src/util.js:187:32)
at Object.handleResp (/user_code/node_modules/firebase-admin/node_modules/#google-cloud/common/src/util.js:131:18)
at /user_code/node_modules/firebase-admin/node_modules/#google-cloud/common/src/util.js:496:12
at Request.onResponse [as _callback] (/user_code/node_modules/firebase-admin/node_modules/#google-cloud/common/node_modules/retry-request/index.js:198:7)
at Request.self.callback (/user_code/node_modules/firebase-admin/node_modules/request/request.js:185:22)
at emitTwo (events.js:106:13)
at Request.emit (events.js:191:7)
at Request.<anonymous> (/user_code/node_modules/firebase-admin/node_modules/request/request.js:1161:10)
at emitOne (events.js:96:13)
at Request.emit (events.js:188:7)
code: undefined,
errors: undefined,
response: undefined,
message: 'Error during request.' }
(old question removed)
"Error: Can't set headers after they are sent" means that you tried to send two responses to the client. This isn't valid - you can send only one response.
Your code is clearly sending two 200 type responses to the client in the event that imageRef.delete() fails and the catch callback on it is triggered.
Also, you're mixing up await with then/catch. They're not meant to be used together. You choose one or the other. Typically, if you're using await for async programming, you don't also use then/catch with the same promise. This is more idiomatic use of await with error handling:
try {
  await imageRef.delete()
  res.status(200).send({status: 200, message: `Thank you for id ${id}`});
} catch (error) {
  res.status(500).send({message: `Image deletion failed: ${error}`});
}
Note also that you typically send a 500 response to the client on failure, not 200, which indicates success.
I am trying to PUT mongoose data using Postman, but it shows "TypeError: bear.save is not a function" in the server console and crashes the Node app.
I am running two apps on different ports and databases with the same code. The normal http app can make all requests (POST, PUT, DEL, GET) successfully, but the https app can't make a PUT request; it can only make POST and GET successfully.
I can't understand why the same code on the http app shows no error if it were a code problem. Please help me.
REST API reference from - Here
.get(function(req, res) {
  Bear.find( {ID: req.params.bear_id} , function(err, bear) {
    if (err)
      res.send(err);
    res.jsonp(bear);
  });
})
.put(function(req, res) {
  Bear.find( {ID: req.params.bear_id}, function(err, bear) {
    if (err)
      res.send(err);
    bear.Name = req.body.Name;
    //res.json(bear) can send data up to this line
    // save the bear (crash after following line)
    bear.save(function(err) {
      if (err)
        res.send(err);
      res.json({ message: 'Bear updated!' });
    });
  })
});
Error Log on console -
TypeError: bear.save is not a function
at Promise.<anonymous> (/var/www/vhosts/mydomain.com/nodeapp.js:130:18)
at Promise.<anonymous> (/var/www/vhosts/mydomain.com/node_modules/mpromise/lib/promise.js:162:8)
at emitOne (events.js:96:13)
at Promise.emit (events.js:188:7)
at Promise.emit (/var/www/vhosts/mydomain.com/node_modules/mpromise/lib/promise.js:79:38)
at Promise.fulfill (/var/www/vhosts/mydomain.com/node_modules/mpromise/lib/promise.js:92:20)
at /var/www/vhosts/mydomain.com/node_modules/mongoose/lib/query.js:1736:26
at model.Document.init (/var/www/vhosts/mydomain.com/node_modules/mongoose/lib/document.js:251:11)
at completeMany (/var/www/vhosts/mydomain.com/node_modules/mongoose/lib/query.js:1734:12)
at cb (/var/www/vhosts/mydomain.com/node_modules/mongoose/lib/query.js:1697:11)
You probably want to use findOne instead of find. Bear.find passes an array of documents to its callback, so bear is an array here, and arrays have no save method; findOne passes a single mongoose document, which does have .save().
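A minimal sketch of the .put handler adjusted accordingly, keeping everything else as in the question:
.put(function(req, res) {
  // findOne yields a single document (or null), not an array
  Bear.findOne( {ID: req.params.bear_id}, function(err, bear) {
    if (err)
      return res.send(err);
    bear.Name = req.body.Name;
    bear.save(function(err) {
      if (err)
        return res.send(err);
      res.json({ message: 'Bear updated!' });
    });
  });
})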