Invalid signature error when unzipping a folder in Node.js

I use the unzip node module to extract binary data that comes from the request module.
It fails in some cases when the request module's response does not contain zip binary data (sometimes it contains other binary data instead).
How do I handle this exception?
const request = require("request");
const unzip = require('unzip');
const stream = require('stream');

var options = {
    method: 'GET',
    url: /*URL*/,
    encoding: null
};

request(options, function (error, response, body) {
    zipExtract(error, body);
});
zipExtract:
function zipExtract(error, zipData) {
    if (error) {
        console.error(error);
    }
    else {
        try {
            // create stream object
            var artifactStream = new stream.PassThrough();
            // write the buffer into the stream
            artifactStream.end(zipData);
            // pipe the stream to unzip
            artifactStream.pipe(unzip.Extract({path: 'app/output'}));
        }
        catch (exception) {
            console.error(exception);
        }
    }
}
It prints this error on the console:
events.js:160
throw er; // Unhandled 'error' event
^
Error: invalid signature: 0x6d74683c
at C:\app-hub\module-application-size\node_modules\unzip\lib\parse.js:63:13
at runCallback (timers.js:637:20)
at tryOnImmediate (timers.js:610:5)
at processImmediate [as _immediateCallback] (timers.js:582:5)
npm ERR! Test failed. See above for more details.

Use the adm-zip module to handle the exception; since it works on the buffer synchronously, its errors can be caught with try/catch:
const admzip = require('adm-zip');

try {
    var zip = new admzip(zipData);
    zip.extractAllTo(/*path*/);
}
catch (exception) {
    console.error(exception);
}
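
If you would rather keep the unzip module, a minimal sketch of an alternative (assuming the standard stream 'error' event): attach an error listener to the extract stream, since stream errors are emitted asynchronously and never reach a surrounding try/catch.

// Errors from the extract stream are emitted, not thrown, so a
// try/catch around pipe() never sees them; an 'error' listener does.
artifactStream
    .pipe(unzip.Extract({path: 'app/output'}))
    .on('error', function (err) {
        console.error('extract failed:', err);
    });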

Related

How to get/log/capture the errors from a graphql apollo client query

I am interfacing with a GraphQL backend using @apollo/client.
The request I am making returns a 400 Bad Request, and in the network tab I can see the JSON of the errors.
This is what I would like to log in my code, but I am unable to.
try {
    const response = await GraphQLClient.query({
        query: GET_PERSON,
        variables: {
            personId: id,
        },
        errorPolicy: "all",
    });
    console.log("response", response);
} catch (err) {
    console.log("err", err);
}
When I execute the above, it goes into the catch block and I do not have access to the errors object.
err Error: Response not successful: Received status code 400
at new ApolloError (index.ts:54)
at QueryManager.ts:1073
at both (asyncMap.ts:30)
at asyncMap.ts:19
at new Promise ()
at Object.then (asyncMap.ts:19)
at Object.error (asyncMap.ts:31)
at notifySubscription (module.js:137)
at onNotify (module.js:176)
at SubscriptionObserver.error (module.js:229)
at iteration.ts:13
at Array.forEach ()
at iterateObserversSafely (iteration.ts:13)
at Object.error (Concast.ts:185)
at notifySubscription (module.js:137)
at onNotify (module.js:176)
at SubscriptionObserver.error (module.js:229)
at createHttpLink.ts:203
graphqlservice
import { ApolloClient, InMemoryCache } from "@apollo/client";
import { Config } from "./../config";

const FRONTEND_API = `${Config.frontend_api}/graphql` || "";

export const GraphQLClient = new ApolloClient({
    uri: FRONTEND_API,
    cache: new InMemoryCache(),
});
To get the errors as a JSON response in the catch block:
console.log(err.networkError.result.errors);
I'm still not sure why the result type has both an error and an errors property, or when each one is accessible; maybe someone else can shed some light on that.
export declare type ApolloQueryResult<T> = {
    data: T;
    errors?: ReadonlyArray<GraphQLError>;
    error?: ApolloError;
    loading: boolean;
    networkStatus: NetworkStatus;
    partial?: boolean;
};
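
For completeness, a minimal sketch of inspecting both error channels on the caught error, assuming @apollo/client's documented ApolloError shape (graphQLErrors and networkError); the result property on networkError follows the answer above:

try {
    const response = await GraphQLClient.query({
        query: GET_PERSON,
        variables: { personId: id },
    });
    console.log("data", response.data);
} catch (err) {
    // HTTP-level failure (e.g. the 400): the parsed response body,
    // when present, is exposed on networkError.result
    if (err.networkError && err.networkError.result) {
        console.log(err.networkError.result.errors);
    }
    // GraphQL execution errors returned by the server
    if (err.graphQLErrors && err.graphQLErrors.length > 0) {
        console.log(err.graphQLErrors);
    }
}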

How to catch UnhandledPromiseRejectionWarning for GCS WriteStream

Observed Application Behavior
I'm getting an UnhandledPromiseRejectionWarning: Error: Upload failed when using @google-cloud/storage in Node.js.
These errors come up while processing thousands of requests. Only a small percentage of them fail, but because there is no way to handle the errors, and the error message carries no context, it's very difficult to determine WHICH files are failing.
I know in general promises must have a .catch or be surrounded by a try/catch block. But in this case I'm using a write stream. I'm a little bit confused as to where the promise that's being rejected is actually located and how I would intercept it. The stack trace is unhelpful, as it only contains library code:
UnhandledPromiseRejectionWarning: Error: Upload failed
at Request.requestStream.on.resp (.../node_modules/gcs-resumable-upload/build/src/index.js:163:34)
at emitTwo (events.js:131:20)
at Request.emit (events.js:214:7)
at Request.<anonymous> (.../node_modules/request/request.js:1161:10)
at emitOne (events.js:121:20)
at Request.emit (events.js:211:7)
at IncomingMessage.<anonymous> (.../node_modules/request/request.js:1083:12)
at Object.onceWrapper (events.js:313:30)
at emitNone (events.js:111:20)
at IncomingMessage.emit (events.js:208:7)
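As a stopgap for locating the failing files, Node's process-level unhandledRejection hook can at least log every rejection that nothing else caught (a diagnostic sketch; it does not replace proper per-stream error handling):

// Fires for any promise rejection that no .catch() handled, turning
// the silent warning into a log entry you can correlate with uploads.
process.on('unhandledRejection', (reason, promise) => {
    console.error('Unhandled rejection:', reason);
});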
My Code
The code that's creating the writeStream looks like this:
const {join} = require('path')
const {Storage} = require('@google-cloud/storage')

module.exports = (config) => {
  const storage = new Storage({
    projectId: config.gcloud.project,
    keyFilename: config.gcloud.auth_file
  })
  return {
    getBucketWS(path, contentType) {
      const {bucket, path_prefix} = config.gcloud
      // add path_prefix if we have one
      if (path_prefix) {
        path = join(path_prefix, path)
      }
      let setup = storage.bucket(bucket).file(path)
      let opts = {}
      if (contentType) {
        opts = {
          contentType,
          metadata: {contentType}
        }
      }
      const stream = setup.createWriteStream(opts)
      stream._bucket = bucket
      stream._path = path
      return stream
    }
  }
}
And the consuming code looks like this:
const gcs = require('./gcs-helper.js')

module.exports = ({writePath, contentType, item}, done) => {
  let ws = gcs.getBucketWS(writePath, contentType)
  ws.on('error', (err) => {
    err.message = `Could not open gs://${ws._bucket}/${ws._path}: ${err.message}`
    done(err)
  })
  ws.on('finish', () => {
    done(null, {
      path: writePath,
      item
    })
  })
  ws.write(item)
  ws.end()
}
Given that I'm already listening for the error event on the stream, I don't see what else I can do here. There isn't a promise happening at the level of @google-cloud/storage that I'm consuming.
Digging into the @google-cloud/storage Library
The first line of the stack trace brings us to a code block in the gcs-resumable-upload node module that looks like this:
requestStream.on('complete', resp => {
  if (resp.statusCode < 200 || resp.statusCode > 299) {
    this.destroy(new Error('Upload failed'));
    return;
  }
  this.emit('metadata', resp.body);
  this.deleteConfig();
  this.uncork();
});
This is passing the error to the destroy method on the stream. The stream is being created by the @google-cloud/common project's utility module, and this is using the duplexify node module to create the stream. The destroy method is defined on the duplexify stream and can be found in the README documentation.
Reading the duplexify code, I see that it first checks this._ondrain before emitting an error. Maybe I can provide a callback to avoid this error being unhandled?
I tried ws.write(item, null, cb) and still got the same UnhandledPromiseRejectionWarning. I tried ws.end(item, null, cb) and even wrapped the .end call in a try catch, and ended up getting this error which crashed the process entirely:
events.js:183
throw er; // Unhandled 'error' event
^
Error: The uploaded data did not match the data from the server. As a precaution, the file has been deleted. To be sure the content is the same, you should try uploading the file again.
at delete (.../node_modules/@google-cloud/storage/build/src/file.js:1295:35)
at Util.handleResp (.../node_modules/@google-cloud/common/build/src/util.js:123:9)
at retryRequest (.../node_modules/@google-cloud/common/build/src/util.js:404:22)
at onResponse (.../node_modules/retry-request/index.js:200:7)
at .../node_modules/teeny-request/build/src/index.js:208:17
at <anonymous>
at process._tickCallback (internal/process/next_tick.js:189:7)
My final code looks something like this:
let ws = gcs.getBucketWS(writePath, contentType)
const handleErr = (err) => {
  if (err) err.message = `Could not open gs://${ws._bucket}/${ws._path}: ${err.message}`
  done(err)
}
ws.on('error', handleErr)
// trying to do everything we can to handle these errors
// for some reason we still get UnhandledPromiseRejectionWarning
try {
  ws.write(item, null, err => {
    handleErr(err)
  })
  ws.end()
} catch (e) {
  handleErr(e)
}
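For reference, another thing worth trying (a sketch assuming Node 10+, where stream.finished is available; not verified against this library) is to funnel both terminal stream outcomes through a single callback:

const { finished } = require('stream')

let ws = gcs.getBucketWS(writePath, contentType)
ws.write(item)
ws.end()
// finished() fires exactly once, for either 'finish' or 'error'
finished(ws, (err) => {
  if (err) {
    err.message = `Could not open gs://${ws._bucket}/${ws._path}: ${err.message}`
    return done(err)
  }
  done(null, {path: writePath, item})
})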
Conclusion
It's still a mystery to me how a user of the @google-cloud/storage library, or duplexify for that matter, is supposed to perform proper error handling. Comments from library maintainers of either project would be appreciated. Thanks!

upload failed: { Error: unable to verify the first certificate

I wrote a small piece of code in AWS Lambda (Node.js) to send a file to an API. I am able to run the code, but I am getting an upload error.
Error: Function Logs:
START RequestId: 08ad7fab-3658-11e8-8483-a7fbad976cb7 Version: $LATEST
2018-04-02T09:27:17.787Z 08ad7fab-3658-11e8-8483-a7fbad976cb7 upload failed: { Error: unable to verify the first certificate
at Error (native)
at TLSSocket.<anonymous> (_tls_wrap.js:1092:38)
at emitNone (events.js:86:13)
at TLSSocket.emit (events.js:185:7)
at TLSSocket._finishInit (_tls_wrap.js:610:8)
at TLSWrap.ssl.onhandshakedone (_tls_wrap.js:440:38) code: 'UNABLE_TO_VERIFY_LEAF_SIGNATURE' }
END RequestId: 08ad7fab-3658-11e8-8483-a7fbad976cb7.
The code which I have written is:
console.log('Loading Function');

const fs = require('fs');
const request = require('request');
const url = require('url');
const https = require('https');

// default CA bundle for all https requests
// (whether using https directly, request, or another module);
// note rootCas must be assigned before it is attached to the agent
var rootCas = require('ssl-root-cas').create();
https.globalAgent.options.ca = rootCas;

exports.handler = (event, context, callback) => {
    var formData = {
        // Pass a simple key-value pair
        my_field: 'my_value',
        // Pass data via Buffers
        my_buffer: new Buffer([1, 2, 3]),
        // Pass data via Streams
        my_file: fs.createReadStream(__dirname + '/myfile.csv'),
        // Pass multiple values /w an Array
        // attachments: [
        //     fs.createReadStream(__dirname + '/myfile.txt'),
        //     fs.createReadStream(__dirname + '/myfile.txt')
        // ],
    };
    var req = request.post({url: 'https://abc.xyz.com:443/file/', formData: formData}, function optionalCallback(err, httpResponse, body) {
        if (err) {
            return console.error('upload failed:', err);
        }
        console.log('Upload successful! Server responded with:', body);
    });
};
When I run the same code locally, I do not get any error; the ssl-root-cas module is able to download all the certificates. When I run it in AWS Lambda, it cannot download them. I even tried adding "rejectUnauthorized": false to the test configuration JSON, but still could not get past the error.
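
One common cause of UNABLE_TO_VERIFY_LEAF_SIGNATURE is a server that does not send its intermediate certificate. A minimal sketch of supplying that certificate directly to request via agentOptions, assuming the intermediate has been bundled with the deployment package as intermediate.pem (a hypothetical file name):

const fs = require('fs');
const request = require('request');

// Supplying the missing intermediate CA lets TLS verification succeed
// without resorting to rejectUnauthorized: false.
request.post({
    url: 'https://abc.xyz.com:443/file/',
    formData: {my_field: 'my_value'},
    agentOptions: {
        ca: fs.readFileSync(__dirname + '/intermediate.pem')
    }
}, function (err, httpResponse, body) {
    if (err) {
        return console.error('upload failed:', err);
    }
    console.log('Upload successful! Server responded with:', body);
});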

Node.js - The "listener" argument must be of type Function

I am trying to create a proxy update script with Node.js, and I'm getting this error:
events.js:180
throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'listener', 'Function');
^
TypeError [ERR_INVALID_ARG_TYPE]: The "listener" argument must be of type Function
at _addListener (events.js:180:11)
at WriteStream.addListener (events.js:240:10)
at WriteStream.close (fs.js:2298:10)
at WriteStream.<anonymous> (/Users/camden/Desktop/proxyupdate/u.js:9:15)
at WriteStream.emit (events.js:164:20)
at finishMaybe (_stream_writable.js:616:14)
at afterWrite (_stream_writable.js:467:3)
at onwrite (_stream_writable.js:457:7)
at fs.write (fs.js:2242:5)
at FSReqWrap.wrapper [as oncomplete] (fs.js:703:5)
here is my code:
var UpdateProxyList = function(sourceURL, destinationPath) {
    var HTTP = require("http");
    var FS = require("fs");
    var File = FS.createWriteStream(destinationPath);
    HTTP.get(sourceURL, function(response) {
        response.pipe(File);
        File.on('finish', function() {
            File.close();
        });
        File.on('error', function(error) {
            FS.unlink(destinationPath);
        })
    });
}

UpdateProxyList("http://www.example.com/proxy.txt", "myproxylist.txt");
I'm on macOS Sierra with Node.js v9.3.0.
Apparently, when I use Node.js v8.9.3, it works fine.
Between v8.9.3 and v9.3.0, the implementation of WriteStream.prototype.close has changed.
In v8.9.3, it was a reference to ReadStream.prototype.close, for which a callback argument was optional.
In v9.3.0, it is now a separate method that, amongst other things, emits a close event:
WriteStream.prototype.close = function(cb) {
    if (this._writableState.ending) {
        this.on('close', cb);
        return;
    }
    ...
};
The error that you get is caused by this.on('close', cb), which requires a Function second argument that isn't being passed in your code.
I'm not sure if you actually need to use a finish handler at all in your situation, as writable handling will be done internally by the .pipe() code.
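
A minimal sketch of the fix implied above: pass a callback to close() so the v9.x implementation has a listener to register (the no-op callbacks are illustrative), and give fs.unlink a callback as well:

var UpdateProxyList = function(sourceURL, destinationPath) {
    var HTTP = require("http");
    var FS = require("fs");
    var File = FS.createWriteStream(destinationPath);
    HTTP.get(sourceURL, function(response) {
        response.pipe(File);
        File.on('finish', function() {
            // v9.x close() registers a 'close' listener, so the
            // callback argument is now required
            File.close(function() {});
        });
        File.on('error', function(error) {
            FS.unlink(destinationPath, function() {});
        });
    });
};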

How to test a function that throws an error asynchronously, using tape?

I am attempting to test this module (receiver.js) for an error thrown:
var request = require('request')

module.exports = function(url){
    request({
        url: url,
        method: 'POST'
    }, function(error) {
        if(error){
            throw error
        }
    })
}
using this test (test.js):
var test = require('tape')

test('Receiver test', function(t){
    var receiver = require('./receiver')
    t.throws(function(){
        receiver('http://localhost:9999') // dummy url
    }, Error, 'Should throw error with invalid URL')
    t.end()
})
but tape runs the assertion before the error is thrown, resulting in the following error message:
TAP version 13
# Receiver test
not ok 1 Should throw error with invalid URL
---
operator: throws
expected: |-
[Function: Error]
actual: |-
undefined
at: Test.<anonymous> (/path/to/tape-async-error-test/test.js:5:4)
...
/path/to/receiver.js:9
throw error
^
Error: connect ECONNREFUSED 127.0.0.1:9999
at Object.exports._errnoException (util.js:856:11)
at exports._exceptionWithHostPort (util.js:879:20)
at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1062:14)
Is there a way around this?
Generally, using tape, you have to ensure you call t.end() after the async call has completed. Using promises (this would require request-promise, with receiver returning the promise):
test('Receiver test', function(t){
    // Tells tape to expect a single assertion
    t.plan(1);
    receiver('http://localhost:9999')
        .then(() => {
            t.fail('request should not succeed')
        })
        .catch(err => {
            t.ok(err, 'Got expected error');
        })
        .finally(() => {
            t.end();
        });
});
Using async/await:
test('Receiver test', async function(t) {
    try {
        await receiver('http://localhost:9999');
        t.fail('Should not get here');
    } catch (err) {
        t.ok(err, 'Got expected error');
    }
    t.end();
});
The above example is mostly correct, but here is a complete working example that compares the asynchronous and synchronous cases side by side, and also shows how to check the error message in the manner of the examples in tape's README.md.
test('ensure async function can be tested to throw', async function(t) {
    // t.throws works synchronously
    function normalThrower() {
        throw new Error('an artificial synchronous error');
    }
    t.throws(function () { normalThrower() }, /artificial/, 'should be able to test that a normal function throws an artificial error');

    // you have to do this for async functions, you can't just insert async into t.throws
    async function asyncThrower() {
        throw new Error('an artificial asynchronous error');
    }
    try {
        await asyncThrower();
        t.fail('async thrower did not throw');
    } catch (e) {
        t.match(e.message, /asynchronous/, 'asynchronous error was thrown');
    }
});
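
Both answers above assume receiver returns a promise. A minimal sketch of receiver.js rewritten that way, wrapping request in a plain Promise instead of pulling in request-promise (names match the question's module):

var request = require('request')

module.exports = function (url) {
    return new Promise(function (resolve, reject) {
        request({
            url: url,
            method: 'POST'
        }, function (error, response, body) {
            // reject rather than throw, so the test's .catch / try-catch sees it
            if (error) return reject(error)
            resolve(body)
        })
    })
}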
