Axios, request: cannot ignore socket hang up (ECONNRESET) error - node.js

I'm working on web scraping software using axios and other libraries on node.js. Here is part of the code.
The problem is that I cannot get past the Error: socket hang up error specifically. When other errors happen, for example Error: getaddrinfo ENOTFOUND, Error: Request failed with status code 404, Error: Request failed with status code 403, or Error: unable to verify the first certificate, the script passes through them and proceeds to the next URLs with no issue. However, some sites seemingly cause an Error: socket hang up, and that error makes the script exit with an exit code of 0 (success).
I have no clue what is happening around this error, why only this one causes the script to end, or why there is no detailed error info or non-zero exit code. How can I prevent the script from exiting because of this error? Thanks.
async function getData(url, rank = "-1") {
    var obj;
    let source = CancelToken.source();
    setTimeout(() => {
        source.cancel();
    }, 20000);
    await axios
        .get(url, { cancelToken: source.token, timeout: 20000 })
        .then((response) => {
            if (response.status === 200) {
                const html = response.data;
                const $ = cheerio.load(html);
                obj = {
                    rank: rank,
                    url: url,
                    title: $('title').text(),
                    keywords: $("meta[name='Keywords']").attr('content'),
                    description: $("meta[name='Description']").attr('content'),
                    h1: Array.from($('h1')).map(a => $(a).text()),
                    h2: Array.from($('h2')).map(a => $(a).text()),
                    status: 0
                }
            }
        })
        .catch((error) => {
            if (error.code === "ECONNRESET") {
                console.log("Timeout occurs");
                // return;
            }
        });
    if (!obj) {
        obj = {
            rank: rank,
            url: url,
            status: 1
        }
    }
    return obj;
}
// Calling the above function in batches, something like this.
// I doubt the following code HAS the problem, but just in case:
... // some code here
promises = [];
websitelists_obj["url"].forEach((item, i) => {
    promises.push(
        function () {
            return new Promise((resolve, reject) => {
                console.log("Started:", item)
                data = getData(websitelists_obj["url"][i], websitelists_obj["rank"][i])
                resolve(data)
            })
            .then((data) => { console.log("Completed:", item); return data; })
        }
    )
});
... // some code here
while (promises.length) {
    array = promises.splice(0, 20).map(f => f());
    result = await Promise.all(array);
    obj = obj.concat(result);
}
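One way to see which error is actually escaping, as a minimal diagnostic sketch using Node's standard process-level hooks (not part of the original script):

// Diagnostic sketch: surface whatever error or rejection escapes the
// promise chain instead of letting the process die silently.
process.on('unhandledRejection', (reason) => {
    console.error('Unhandled rejection:', reason);
});
process.on('uncaughtException', (err) => {
    console.error('Uncaught exception:', err);
    process.exit(1); // exit with a visible, non-zero code
});

With these hooks in place, the log should at least show which URL and which code path produced the socket hang up.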

Related

FETCH for uploading folder with multiple files DOES NOT ABORT when I close the Tab

I'm trying to implement a multiple-file loader in my web app, i.e. I want to upload a folder with multiple files in it to a given path on my local machine.
I'm using Svelte for the frontend part and Express for the backend part.
So far I've done the following. Client side:
async function load_report() {
    let first_input = document.getElementById("folder");
    let files_list = Array.from(first_input.files);
    let local_files_paths = files_list.map((f) => f.webkitRelativePath);
    const payload = new FormData();
    for (let i = 0; i < files_list.length; i++) {
        payload.append("files", files_list[i]);
        payload.append("local_files_paths", local_files_paths[i]);
    }
    try {
        isUploading = true; // to disable the buttons for other uploading operations
        const response = await fetch("/upload_report", {
            //signal: signal,
            method: "POST", // or "PUT"
            body: payload,
            // No content-type! With a FormData object, the Fetch API sets this automatically.
            // Doing so manually can lead to an error.
        });
        isUploading = false;
        let fetch_msg = await response.json();
        if (!response.ok) {
            upload_report_error = `${fetch_msg["title"]}: ${fetch_msg["message"]}`;
            alert.addAlertToStore(
                "danger",
                fetch_msg["title"],
                fetch_msg["message"]
            );
            console.error(fetch_msg["message"]);
        } else {
            alert.addAlertToStore(
                "success",
                fetch_msg["title"],
                fetch_msg["message"]
            );
        }
    } catch (e) {
        console.error(e);
    } finally {
        //$alert = `Todo '${name}' has been added`;
        first_input.value = null;
    }
}
and also in my onMount() function:
onMount(() => {
    window.onbeforeunload = function () {
        if (isUploading) {
            return "You are uploading! CHILL OUT!";
        }
    };
});
Server side, the endpoint is handled in this way:
app.post("/upload_report", async(req, res) => {
if (!req.files) {
return res.status(400).send({ title: 'upload_folder_error', message: 'No files were uploaded.'});
}
let date;
try{
date = req.body.local_files_paths[0].match(/([0-9_-]+)\//)[1];
}catch(e){
return res.status(500).send({ title: 'upload_folder_error', message: e.message });
}
if(fs.existsSync(`./app/${date}/folder`)){
return res.status(500).send({ title: 'upload_folder_error', message: 'this folder already exists' });
}
for(let i= 0; i< req.body.local_files_paths.length; i++){
let _file = req.files.files[i];
//move photo to uploads directory
_file.mv(`*some_path*`);
}
console.log('DONE');
return res.status(200).send({ title: 'upload_folder_success', message: 'Folder correctly uploaded' });
});
My problem is that when I try to close the tab, a pop-up appears asking if I really want to close it, and I can either confirm or cancel. But when I confirm, I would expect the fetch to be aborted as a consequence (correct me if I'm wrong); instead it goes through on the server side (i.e. I can see my server logging "DONE") and the files are correctly uploaded. What am I missing here?
PS. I also tried to use an AbortController() and to call abort() inside the onbeforeunload event, but that seemed wrong to me.
Could you please help me to figure it out? Thanks!
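For reference, a minimal sketch (not from the original post) of wiring an AbortController into this upload; load_report, isUploading, and the endpoint come from the question, and the rest is an assumption:

// Sketch: keep a reference to the controller so an unload handler can abort.
let controller = null;

async function load_report(payload) { // payload: the FormData built as before
    controller = new AbortController();
    try {
        isUploading = true;
        const response = await fetch("/upload_report", {
            signal: controller.signal, // fetch rejects with an AbortError when aborted
            method: "POST",
            body: payload,
        });
        // ... handle the response as in the original code ...
    } catch (e) {
        if (e.name === "AbortError") console.log("upload aborted");
        else console.error(e);
    } finally {
        isUploading = false;
        controller = null;
    }
}

window.addEventListener("unload", () => {
    // onbeforeunload only asks for confirmation; the actual teardown belongs here.
    if (isUploading && controller) controller.abort();
});

Note that aborting only stops the client from sending or waiting; whatever the server has already received by that point may still be processed to completion.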

Error when trying to upload an image to ImgBB

I'm trying to upload images to ImgBB using NodeJS and GraphQL.
I have an uploadImage mutation that takes an image in the form of a data URL string, and it goes as follows:
import parseDataUrl from "data-uri-to-buffer";
// ...
{
    Mutation: {
        async uploadImage(_, { dataUrl }) {
            const buffer = parseDataUrl(dataUrl); // https://www.npmjs.com/package/data-uri-to-buffer
            if (buffer.byteLength > 10 * 10 ** 6)
                throw new Error("The image exceeds the maximum size of 10 MB");
            const body = new FormData();
            body.append("image", buffer);
            const result = await fetch(
                `https://api.imgbb.com/1/upload?key=${process.env.IMGBB_KEY}`,
                {
                    method: "post",
                    headers: { ...body.getHeaders() },
                    body
                }
            ).then<any>(result => result.json());
            if (!result.success || !result.url) {
                const msg = result.error?.message;
                throw new Error(
                    `There was an error during upload${msg ? `: ${msg}` : ""}`
                );
            }
            return result.url;
        }
    }
}
body.getHeaders() contains:
{
    'content-type': 'multipart/form-data; boundary=--------------------------656587403243047934588601'
}
(I'm using node-fetch)
But no matter the combinations of query params, headers and body I use, I always end up getting this error:
{
    status_code: 400,
    error: { message: 'Undefined array key "scheme"', code: 0 },
    status_txt: 'Bad Request'
}
I can't find anything about it; do you have an idea?
There are multiple things you can do to resolve your issue.
An important thing with FormData: you need to explicitly provide a filename for the image buffer if it's not already included; uploading without a name makes the API throw the same error you mentioned.
body.append("image", buffer, "addSomeImageName.png");
The API does not require any headers explicitly, so you can remove them.
{
    method: "post",
    headers: { ...body.getHeaders() }, // This can be removed.
    body
}
The logic you are using to check the result is faulty and would always throw an error even when the upload succeeds.
if (!result.success || !result.url) { // Check for the success flag only.
    const msg = result.error?.message;
    throw new Error(
        `There was an error during upload${msg ? `: ${msg}` : ""}`
    );
}
This is the block I tested, and it is working fine:
import parseDataUrl from "data-uri-to-buffer";
import fetch from 'node-fetch';
import FormData from "form-data";

async function uploadImage({ dataUrl }) {
    const buffer = parseDataUrl(dataUrl);
    if (buffer.byteLength > 10 * 10 ** 6)
        throw new Error("The image exceeds the maximum size of 10 MB");
    const body = new FormData();
    body.append("image", buffer, "someImageName.png");
    const result = await fetch(
        `https://api.imgbb.com/1/upload?key=<your-api-key>`, {
            method: "post",
            // headers: { ...body.getHeaders() },
            body
        }
    ).then(result => result.json())
    // .then(data => {
    //     console.log(data); // Working fine here too.
    // });
    console.log("-------result--------\n", result); // Result is fine.
    // Logic testing.
    console.log(result.success);
    console.log(result.url);
    console.log(!result.success);
    console.log(!result.url);
    console.log(!result.success || !result.url);
    if (!result.success || !result.url) {
        const msg = result.error?.message;
        console.log(`There was an error during upload${msg ? `: ${msg}` : ""}`)
        // throw new Error(
        //     `There was an error during upload${msg ? `: ${msg}` : ""}`
        // );
    }
    return result.url;
}

console.log("--------------------- console result ------------------------")
console.log(uploadImage({
    dataUrl: "data:image/gif;base64,R0lGODlhEAAQAMQAAORHHOVSKudfOulrSOp3WOyDZu6QdvCchPGolfO0o/XBs/fNwfjZ0frl3/zy7////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACH5BAkAABAALAAAAAAQABAAAAVVICSOZGlCQAosJ6mu7fiyZeKqNKToQGDsM8hBADgUXoGAiqhSvp5QAnQKGIgUhwFUYLCVDFCrKUE1lBavAViFIDlTImbKC5Gm2hB0SlBCBMQiB0UjIQA7"
}));
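One assumption worth verifying against the result you log: the ImgBB response appears to nest the uploaded image's URL inside a data object rather than at the top level, which would explain why result.url is undefined even when success is true. A sketch of the check under that assumption:

// Sketch, assuming the URL is nested under result.data:
if (!result.success) {
    const msg = result.error?.message;
    throw new Error(`There was an error during upload${msg ? `: ${msg}` : ""}`);
}
return result.data.url; // rather than result.url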
One thing that seems odd to me:
const buffer = parseDataUrl(dataUrl); // https://www.npmjs.com/package/data-uri-to-buffer
The package in the comment only offers the function dataUriToBuffer, which you are not using. Are you using parseDataUrl from jsdom? And are you using node-fetch's own implementation of FormData, as the documentation suggests?
Please update your question with all relevant import statements, and please also share the contents of body.getHeaders().

Matcher error: received value must be a mock function

I am trying to check the number of times a function has been called, and I am receiving an error on this expectation:
expect(waitForCallStatus(callService, currentCall, CallStatus.Held, delay, retries, nextAttempt)).toHaveBeenCalledTimes(2);
The error:
expect(received).toHaveBeenCalledTimes(expected)
Matcher error: received value must be a mock or spy function
Here is my test file. In the test I am trying to check the recursion behaviour: if the status isn't matched on the first attempt, the function should try again.
describe('lib.call-status-service.spec.ts', () => {
    const callService = getMockCallService();
    let holdCall: Call;
    let currentCall: Call;
    const delay = 300;
    const retries = 5;
    beforeEach(() => {
        jest.clearAllMocks();
        holdCall = getMockTenfoldCall({
            _id: '1234',
            pbxCallId: '1234',
            status: CallStatus.Held,
        });
        currentCall = getMockTenfoldCall({
            _id: '4321',
            pbxCallId: '4321',
            status: CallStatus.Connected,
        });
        (waitForCallStatus as jest.Mock).mockResolvedValue({});
    });
    describe('waitCallForStatus', () => {
        it('should check if the fetched call status matches the expected status in case of holdCall', () => {
            const status = 'Held';
            (callService.findCall as jest.Mock).mockResolvedValue([holdCall]);
            expect(holdCall.status).toEqual(status);
            expect(waitForCallStatus(callService, currentCall, CallStatus.Held)).resolves.toHaveReturnedWith(holdCall);
        });
        it('should throw an error if the current retry is equal to the total number of retries', () => {
            const status = 'Held';
            (callService.findCall as jest.Mock).mockResolvedValue([currentCall]);
            expect(currentCall.status).not.toEqual(status);
            const nextAttempt = 5;
            expect(waitForCallStatus(callService, currentCall, CallStatus.Held, delay, retries, nextAttempt))
                .rejects.toThrowError('Max Retries Reached');
        });
        it('should check again if the fetched call status does not match the expected status on the first attempt', () => {
            const status = 'Held';
            (callService.findCall as jest.Mock).mockResolvedValue([currentCall]);
            expect(currentCall.status).not.toEqual(status);
            const nextAttempt = 1;
            expect(waitForCallStatus(callService, currentCall, CallStatus.Held, delay, retries, nextAttempt)).toHaveBeenCalledTimes(2);
        });
    });
});
Here is my function:
async function waitForCallStatus(
    callService: CallService,
    call: Call,
    status: CallStatus,
    delay = 300,
    retries = 5,
    currentTry = 0,
): Promise<any> {
    if (currentTry === retries) {
        throw TfApiError.badRequest('Max retries reached');
    }
    await Bluebird.delay(delay);
    const query = {
        _id: call._id,
        pbxCallId: call.pbxCallId,
    };
    const updatedCall = await callService.findCall(query);
    if (updatedCall.status === status) {
        return call;
    }
    const nextAttempt = currentTry + 1;
    return waitForCallStatus(callService, updatedCall, status, delay, retries, nextAttempt);
}
What am I doing wrong?
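Not from the original thread, but a sketch of why the matcher complains, assuming waitForCallStatus is exported from a module such as lib.call-status-service: toHaveBeenCalledTimes only accepts a jest.fn() or a jest.spyOn() spy, and the recursive call inside waitForCallStatus goes through the function's local binding, so even a spy on the module export would not see the retries. Counting calls on the already-mocked callService.findCall is one observable proxy for the number of attempts:

// Sketch (plain JavaScript; module path and attempt counts are assumptions).
const callStatusService = require('./lib.call-status-service');

it('retries when the first attempt does not match', async () => {
    callService.findCall.mockResolvedValue([currentCall]);
    // waitForCallStatus itself is a plain function, not a jest.fn() or spy,
    // which is exactly why the matcher rejects it in the question.
    await expect(
        callStatusService.waitForCallStatus(callService, currentCall, CallStatus.Held, delay, retries, 3)
    ).rejects.toThrowError('Max retries reached');
    // Each attempt performs exactly one findCall, so two calls = two attempts.
    expect(callService.findCall).toHaveBeenCalledTimes(2);
});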

What is the best practice or approach to pass an error from a nested function to a centralized error handler?

I am using the express.js framework to build my application. I need concrete, best-practice opinions on how to handle errors centrally.
I am developing a multi-lingual application. Initially I thought I would manage localization from the backend as well, but later I realized it was better to manage only a single language in the backend.
Here I can pass an error key and get back the full error message through the res.__('errKey') function.
AuthController.js
async signIn(req, res) {
    const { deviceMetadata, locationMetadata } = req;
    const { app: requestApp } = req.headers;
    const { email = null, password = null } = req.body;
    if (Object.keys(req.body).length === 0 || email.length === 0 || password.length === 0) {
        return _400('errEmptyEmailOrPassword');
    }
    if (email.length === 0) {
        return _400('errEmptyEmail');
    }
    if (password.length === 0) {
        return _400('errEmptyPassword');
    }
    const isValid = schemaValidator(signIn, req.body);
    if (!isValid.valid) {
        logger.error(isValid);
        return _400('errInvalidEmailOrPassword');
    }
    const userIden = await authService.verifyEmailAndHash({
        email,
        password,
        deviceMetadata,
        locationMetadata,
    });
    if (userIden === 'errInvalidEmailAndHash') {
        logger.error(userIden);
        return _401('errInvalidEmailAndHash');
    }
    const objGrantedToken = await tokenService.grantToken({
        userIden,
    });
    if (objGrantedToken === 'errHttpBadRequest') {
        return _400('httpBadRequest');
    }
    return commonHelper.successResponse(res, 'validEmailAndHash', objGrantedToken);
}
}
authService.js
static async verifyEmailAndHash(params) {
    const { email, password: inputHash } = params;
    const resEmail = await authDbService.verifyEmail(email);
    if (resEmail && resEmail.name && resEmail.name === 'SequelizeDatabaseError') {
        return 'errSequelizeDatabaseError';
    }
    if (resEmail.length === 0) {
        return 'errInvalidEmailAndHash';
    }
    const { id: userId = null } = resEmail[0];
    const resHash = await authDbService.getHash(userId);
    if (resHash.length === 0) {
        return 'errInvalidEmailAndHash';
    }
    const { hash: dbHash } = resHash[0];
    const isHashCompared = await AuthService.assertHash(inputHash, dbHash);
    const isValid = isHashCompared === 'validEmailAndHash' ? userId : 'errInvalidEmailAndHash';
    if (isValid === 'errInvalidEmailAndHash') {
        await authDbService.insertLoginAttempts({
            userId,
        });
    }
    return isValid;
}
custom-error.js
class MyError extends Error {
    /**
     * @description constructor to modify the error object
     * @param {String} statusCode
     * @param {...any} args [name, message, errorKey]
     */
    constructor(statusCode = 500, ...args) {
        const [statusName, errorKey] = args;
        super(errorKey);
        if (Error.captureStackTrace) {
            Error.captureStackTrace(this, this.constructor);
        }
        this.statusCode = statusCode;
        this.success = false;
        this.name = statusName;
        this.errorKey = errorKey;
    }
}
exports.MyError = MyError;
customer-error-utils.js
const { MyError } = require('./MyError');

const objStatusName = {
    400: 'HttpBadRequest',
    401: 'HttpUnauthorized',
    403: 'HttpForbidden',
    404: 'HttpNotFound',
    409: 'HttpConflict',
    412: 'HttpPreconditionFailed',
    413: 'HttpEntityTooLarge',
    500: 'InternalServerError',
    502: 'BadGateway',
    503: 'ServiceUnavailble',
};
const objMessage = {
    400: 'Server will not process the request',
    401: 'User does not have the needed credentials',
    403: 'Service is declining to react',
    404: 'Service not found',
    409: 'Request could not have processed due to conflict',
    412: 'Request did not state the length of content,',
    413: 'Payload is too large',
    500: 'Something unexpected happens',
    502: 'Bad gateway',
    503: 'No connection could be made because the target machine actively refused it',
};
const objErrorKey = {
    400: 'errHttpBadRequest',
    401: 'errHttpUnauthorized',
    403: 'errHttpForbidden',
    404: 'errHttpNotFound',
    409: 'errHttpConflict',
    412: 'errHttpPreconditionFailed',
    413: 'errHttpEntityTooLarge',
    500: 'errInternalServerError',
    502: 'errBadGateway',
    503: 'errServiceUnavailble',
};
const _400 = (errorKey = objErrorKey[400]) => {
    throw new MyError(400, objStatusName[400], errorKey);
};
const _401 = (errorKey = objErrorKey[401]) => {
    throw new MyError(401, objStatusName[401], errorKey);
};
const _403 = (errorKey = objErrorKey[403]) => {
    throw new MyError(403, objStatusName[403], errorKey);
};
const _404 = (errorKey = objErrorKey[404]) => {
    throw new MyError(404, objStatusName[404], errorKey);
};
const _409 = (errorKey = objErrorKey[409]) => {
    throw new MyError(409, objStatusName[409], errorKey);
};
const _412 = (errorKey = objErrorKey[412]) => {
    throw new MyError(412, objStatusName[412], errorKey);
};
const _413 = (errorKey = objErrorKey[413]) => {
    throw new MyError(413, objStatusName[413], errorKey);
};
const _500 = (errorKey = objErrorKey[500]) => {
    throw new MyError(500, objStatusName[500], errorKey);
};
const _502 = (errorKey = objErrorKey[502]) => {
    throw new MyError(502, objStatusName[502], errorKey);
};
const _503 = (errorKey = objErrorKey[503]) => {
    throw new MyError(503, objStatusName[503], errorKey);
};
module.exports = {
    _400,
    _401,
    _403,
    _404,
    _409,
    _412,
    _413,
    _500,
    _502,
    _503,
};
error-middleware.js
// catch 404 and forward to error handler
app.use((req, res, next) => {
    return res
        .status(404)
        .json({
            success: false,
            errorCode: 'errHttpNotFound',
            message: res.__('errHttpNotFound'),
        })
        .end();
});

// catch all remaining programming errors
app.use((err, req, res, next) => {
    const { success, stack, name: errorName, errorKey = 'errInternalServerError' } = err;
    let { statusCode = 500 } = err;
    const objErr = {
        success: success || false,
        errorCode: errorKey,
        message: res.__(`${errorKey}`),
    };
    if (debugMode === 'on') {
        Object.assign(objErr, {
            stack,
        });
    }
    logger.error(`
        url: ${req.url}\n
        statusCode: ${statusCode}\n
        success: ${success}\n
        errorName: ${errorName}\n
        errorKey: ${errorKey},
        stack: ${stack}
    `);
    if (
        err instanceof Error &&
        ((err.errno === 'ECONNREFUSED' && err.code === 'ECONNREFUSED') ||
            (err.errno === 'ENOTFOUND' && err.code === 'ENOTFOUND'))
    ) {
        statusCode = 503;
        const address = err.message.split(' ')[err.message.split(' ').length - 1];
        Object.assign(objErr, {
            errorCode: 'errUnreachable',
            message: `${address} ${res.__('errUnreachable')}`,
        });
        return res.status(statusCode).json(objErr).end();
    }
    return res.status(statusCode).json(objErr).end();
});
I need more opinions and industry-standard guidelines on how to implement error handling robustly.
As you can see in my code, many errors occur at the service or DB-service level and are passed up to the controller, and in the end the controller calls my error function. This becomes tedious and creates chaining from the bottom-level functions to the top-level ones.
In the scenario and pattern above, any database error is passed to the services and then on to the controller.
I feel I have missed something, but I don't know what, and I can't judge how to make this more robust.
So I need an answer or guidelines for the following questions (a sketch of the kind of pattern I am asking about follows this list).
How would I manage errors centrally so that I do not repeat code?
How can an error be caught where it occurs, rather than being passed from DB methods > service methods > controller, so that it lands directly in the centralized error handler and reduces my overhead?
Is the approach I have followed so far even legitimate?
My problem is that I am not able to leverage the centralized error handler; every time, I have to catch each and every condition.
I expect good, robust guidelines so I can learn and improve my codebase.
I will provide more information and scenarios if anyone needs them.
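As an aside, a minimal sketch of the pattern this question seems to be reaching for, assuming Express 4 and reusing the MyError/_400 helpers from the question (the /signin route and the wrapper name are hypothetical): let the lower layers throw, funnel rejections into next() with a wrapper, and let the existing error middleware do all the responding.

// Sketch: an async wrapper so thrown/rejected errors go straight to the
// centralized error middleware instead of being returned layer by layer.
const asyncHandler = (fn) => (req, res, next) =>
    Promise.resolve(fn(req, res, next)).catch(next);

app.post('/signin', asyncHandler(async (req, res) => {
    const { email, password } = req.body;
    if (!email || !password) _400('errEmptyEmailOrPassword'); // throws MyError
    const userIden = await authService.verifyEmailAndHash({ email, password }); // may throw
    const objGrantedToken = await tokenService.grantToken({ userIden }); // may throw
    return commonHelper.successResponse(res, 'validEmailAndHash', objGrantedToken);
}));
// Any throw above lands in the app.use((err, req, res, next) => ...) middleware.

Under this scheme, verifyEmailAndHash would throw (for example via _401('errInvalidEmailAndHash')) instead of returning sentinel strings up the chain.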
I had a simple strategy for handling my errors.
I created some global functions responsible for the return statement.
First I created some conventional rules:
Any database error returns a 422 status code, and the error message is the response.
Any validation error returns a 400 status code, and the message body has two properties: message: "bad request" and reason: "the validation error reason".
Unauthorized requests return a 401 status code with the message "unauthorized" or "invalid token".
A 404 status code is returned when something is not found or not available at the moment, with a proper message.
A 403 status code is returned when the action is forbidden, with the message "forbidden" and reason: "a proper reason".
You can handle all of this with just one function taking the proper parameters (a sketch follows this answer). With this simple pattern I can achieve:
Avoid duplicate codes
Clean code
Flexible error handling for a return statement
Handling error localization
Of course this is not your answer but it might help.
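A minimal sketch of what that single function might look like; the answer itself does not include code, so every name here is hypothetical:

// Sketch: one reply helper implementing the conventions listed above.
function sendError(res, statusCode, message, reason) {
    const body = { message };
    if (reason) body.reason = reason; // e.g. the validation failure details
    return res.status(statusCode).json(body);
}

// Usage following the listed rules:
// sendError(res, 422, dbError.message);                        // database error
// sendError(res, 400, 'bad request', validationError.message); // validation error
// sendError(res, 401, 'unauthorized or invalid token');        // unauthorized request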

Node.js long process ran twice

I have a Node.js RESTful API built with the express.js framework. It is usually hosted by pm2.
One of the services has a very long process. When the front end called the service, the process started up. Because of an error in the database, the process could not finish properly, and the error would be caught. However, before the process reached the error, another process with exactly the same parameters started. So for a while two processes were running, one ahead of the other. After a long time, the first process reached the error point and returned the error. Then the second one returned exactly the same thing.
I checked the front end's Network tab and noticed there was actually only one request sent. Where did the second request come from?
Edit 1:
The whole process is: first process sends query to db -> long time wait -> second process starts up -> second process sends query to db -> long time wait -> first process receives db response -> long time wait -> second process receives db response
Edit 2:
The code of the service is as follows:
import { Express, Request, Response } from "express";
import * as multer from "multer";
import * as fs from "fs";
import { Readable, Duplex } from "stream";
import * as uid from "uid";
import { Client } from "pg";
import * as gdal from "gdal";
import * as csv from "csv";
import { SuccessPayload, ErrorPayload } from "../helpers/response";
import { postgresQuery } from "../helpers/database";
import Config from "../config";
export default class ShapefileRoute {
    constructor(app: Express) {
        // Upload a shapefile
        /**
         * @swagger
         * /shapefile:
         *   post:
         *     description: Returns the homepage
         *     responses:
         *       200:
         */
        app.post("/shapefile", (req: Request, res: Response, next: Function): void => {
            // Create instance of multer
            const multerInstance = multer().array("files");
            multerInstance(req, res, (err: Error) => {
                if (err) {
                    let payload: ErrorPayload = {
                        code: 4004,
                        errorMessage: "Multer upload file error.",
                        errorDetail: err.message,
                        hints: "Check error detail"
                    };
                    req.reservePayload = payload;
                    next();
                    return;
                }
                // Extract files
                let files: any = req.files;
                // Extract body
                let body: any = JSON.parse(req.body.filesInfo);
                // Other params
                let writeFilePromises: Promise<any>[] = [];
                let copyFilePromises: Promise<any>[] = [];
                let rootDirectory: string = Config.uploadRoot;
                let outputId: string = uid(4);
                // Reset index of those files
                let namesIndex: string[] = [];
                files.forEach((item: Express.Multer.File, index: number) => {
                    if (item.originalname.split(".")[1] === "csv" || item.originalname.split(".")[1] === "txt" || item.originalname.split(".")[1] === "shp") {
                        namesIndex.push(item.originalname);
                    }
                })
                // Process and write all files to disk
                files.forEach((item: Express.Multer.File, outterIndex: number) => {
                    if (item.originalname.split(".")[1] === "csv" || item.originalname.split(".")[1] === "txt") {
                        namesIndex.forEach((indexItem, index) => {
                            if (indexItem === item.originalname) {
                                ShapefileRoute.csv(item, index, writeFilePromises, body, rootDirectory, outputId);
                            }
                        })
                    } else if (item.originalname.split(".")[1] === "shp") {
                        namesIndex.forEach((indexItem, index) => {
                            if (indexItem === item.originalname) {
                                ShapefileRoute.shp(item, index, writeFilePromises, body, rootDirectory, outputId);
                            }
                        })
                    } else {
                        ShapefileRoute.shp(item, outterIndex, writeFilePromises, body, rootDirectory, outputId);
                    }
                })
                // Copy files from disk to database
                ShapefileRoute.copyFiles(req, res, next, writeFilePromises, copyFilePromises, req.reserveSuperPg, () => {
                    ShapefileRoute.loadFiles(req, res, next, copyFilePromises, body, outputId)
                });
            })
        });
    }
    // Process csv file
    static csv(file: Express.Multer.File, index: number, writeFilePromises: Promise<any>[], body: any, rootDirectory: string, outputId: string) {
        // Streaming file to pivotCsv
        writeFilePromises.push(new Promise((resolve, reject) => {
            // Get specification from body
            let delimiter: string;
            let spec: any;
            let lrsColumns: string[] = [null, null, null, null, null, null];
            body.layers.forEach((jsonItem, i) => {
                if (jsonItem.name === file.originalname.split(".")[0]) {
                    delimiter = jsonItem.file_spec.delimiter;
                    spec = jsonItem;
                    jsonItem.lrs_cols.forEach((lrsCol) => {
                        switch (lrsCol.lrs_type) {
                            case "rec_id":
                                lrsColumns[0] = lrsCol.name;
                                break;
                            case "route_id":
                                lrsColumns[1] = lrsCol.name;
                                break;
                            case "f_meas":
                                lrsColumns[2] = lrsCol.name;
                                break;
                            case "t_meas":
                                lrsColumns[3] = lrsCol.name;
                                break;
                            case "b_date":
                                lrsColumns[4] = lrsCol.name;
                                break;
                            case "e_date":
                                lrsColumns[5] = lrsCol.name;
                                break;
                        }
                    })
                }
            });
            // Pivot csv file
            ShapefileRoute.pivotCsv(file.buffer, `${rootDirectory}/${outputId}_${index}`, index, delimiter, outputId, lrsColumns, (path) => {
                console.log("got pivotCsv result");
                spec.order = index;
                resolve({
                    path: path,
                    spec: spec
                });
            }, reject);
        }));
    }
    // Process shapefile
    static shp(file: Express.Multer.File, index: number, writeFilePromises: Promise<any>[], body: any, rootDirectory: string, outputId: string) {
        // Write file to disk and then call shp2csv to generate csv
        writeFilePromises.push(new Promise((resolve, reject) => {
            // Write shapefile to disk
            fs.writeFile(`${rootDirectory}/shps/${file.originalname}`, file.buffer, (err) => {
                // If it is a .shp file, resolve its path and spec
                if (file.originalname.split(".")[1] === "shp") {
                    // Find spec of the shapefile from body
                    body.layers.forEach((jsonItem, i) => {
                        if (jsonItem.name === file.originalname.split(".")[0]) {
                            let recordColumn: string = null;
                            let routeIdColumn: string = null;
                            jsonItem.lrs_cols.forEach((lrsLayer) => {
                                if (lrsLayer.lrs_type === "rec_id") {
                                    recordColumn = lrsLayer.name;
                                }
                                if (lrsLayer.lrs_type === "route_id") {
                                    routeIdColumn = lrsLayer.name;
                                }
                            })
                            // Transfer shp to csv
                            ShapefileRoute.shp2csv(`${rootDirectory}/shps/${file.originalname}`, `${rootDirectory}/${outputId}_${index}`, index, outputId, recordColumn, routeIdColumn, (path, srs) => {
                                // Add coordinate system, geom column and index of this file to spec
                                jsonItem.file_spec.proj4 = srs;
                                jsonItem.file_spec.geom_col = "geom";
                                jsonItem.order = index;
                                // Return path and spec
                                resolve({
                                    path: path,
                                    spec: jsonItem
                                })
                            }, (err) => {
                                reject(err);
                            })
                        }
                    });
                } else {
                    resolve(null);
                }
            })
        }));
    }
    // Copy files to database
    static copyFiles(req: Request, res: Response, next: Function, writeFilePromises: Promise<any>[], copyFilePromises: Promise<any>[], client: Client, callback: () => void) {
        // Take all files generated by the writeFile processes
        Promise.all(writeFilePromises)
            .then((results) => {
                // Remove null results. They are from .dbf, .shx etc. of the shapefile.
                const files: any = results.filter(arr => arr);
                // Create promise array. This will be triggered after all files are written to the database.
                files.forEach((file) => {
                    copyFilePromises.push(new Promise((copyResolve, copyReject) => {
                        let query: string = `copy lbo.lbo_temp from '${file.path}' WITH NULL AS 'null';`;
                        // Create super user call
                        postgresQuery(client, query, (data) => {
                            copyResolve(file.spec);
                        }, copyReject);
                    }));
                });
                // Trigger upload query
                callback()
            })
            .catch((err) => {
                // Respond with an error if any file generation went wrong
                let payload: ErrorPayload = {
                    code: 4004,
                    errorMessage: "Something wrong when processing csv and/or shapefile.",
                    errorDetail: err.message,
                    hints: "Check error detail"
                };
                req.reservePayload = payload;
                next();
            })
    }
    // Load layers in database
    static loadFiles(req: Request, res: Response, next: Function, copyFilePromises: Promise<any>[], body: any, outputId: string) {
        Promise.all(copyFilePromises)
            .then((results) => {
                // Re-sort all results by the order assigned when creating files
                results.sort((a, b) => {
                    return a.order - b.order;
                });
                results.forEach((result) => {
                    delete result.order;
                });
                // Create JSON for the load-layers database request
                let taskJson = body;
                taskJson.layers = results;
                let query: string = `select lbo.load_layers2(p_session_id := '${outputId}', p_layers := '${JSON.stringify(taskJson)}'::json)`;
                postgresQuery(req.reservePg, query, (data) => {
                    // Get result
                    let result = data.rows[0].load_layers2.result;
                    // Return 4003 error if no result
                    if (!result) {
                        let payload: ErrorPayload = {
                            code: 4003,
                            errorMessage: "Load layers error.",
                            errorDetail: data.rows[0].load_layers2.error ? data.rows[0].load_layers2.error.message : "Load layers returns no result.",
                            hints: "Check error detail"
                        };
                        req.reservePayload = payload;
                        next();
                        return;
                    }
                    let payload: SuccessPayload = {
                        type: "string",
                        content: "Upload files done."
                    };
                    req.reservePayload = payload;
                    next();
                }, (err) => {
                    req.reservePayload = err;
                    next();
                });
            })
            .catch((err) => {
                // Respond with an error if copying any file to the database went wrong
                let payload: ErrorPayload = {
                    code: 4004,
                    errorMessage: "Something wrong when copy files to database.",
                    errorDetail: err,
                    hints: "Check error detail"
                };
                req.reservePayload = payload;
                next();
            })
    }
    // Pivot csv process. Write the output csv to disk and return the path of the file.
    static pivotCsv(buffer: Buffer, outputPath: string, inputIndex: number, delimiter: string, outputId: string, lrsColumns: string[], callback: (path: string) => void, errCallback: (err: Error) => void) {
        let inputStream: Duplex = new Duplex();
        // Define output stream
        let output = fs.createWriteStream(outputPath, { flags: "a" });
        // Callback when output stream is done
        output.on("finish", () => {
            console.log("output stream finish");
            callback(outputPath);
        });
        // Define parser stream
        let parser = csv.parse({
            delimiter: delimiter
        });
        // Close output stream when parser stream ends
        parser.on("end", () => {
            console.log("parser stream end");
            output.end();
        });
        // Write data when a chunk is parsed
        let header = [null, null, null, null, null, null];
        let attributesHeader = [];
        let i = 0;
        let datumIndex: boolean = true;
        parser.on("data", (chunk) => {
            console.log("parser received on chunk: ", i);
            if (datumIndex) {
                chunk.forEach((datum, index) => {
                    if (lrsColumns.includes(datum)) {
                        header[lrsColumns.indexOf(datum)] = index;
                    } else {
                        attributesHeader.push({
                            name: datum,
                            index: index
                        })
                    }
                });
                datumIndex = false;
            } else {
                i++;
                // let layer_id = ;
                let rec_id = header[0] ? chunk[header[0]] : i;
                let route_id = header[1] ? chunk[header[1]] : null;
                let f_meas = header[2] ? chunk[header[2]] : null;
                let t_meas = header[3] ? chunk[header[3]] : null;
                let b_date = header[4] ? chunk[header[4]] : null;
                let e_date = header[5] ? chunk[header[5]] : null;
                let attributes = {};
                attributesHeader.forEach((attribute) => {
                    attributes[attribute.name] = chunk[attribute.index];
                });
                let attributesOrdered = {};
                Object.keys(attributes).sort().forEach((key) => {
                    attributesOrdered[key] = attributes[key];
                });
                let outputData = `${outputId}\t${inputIndex}\t${rec_id}\t${route_id}\tnull\t${f_meas}\t${t_meas}\t${b_date}\t${e_date}\tnull\t${JSON.stringify(attributesOrdered)}\n`;
                output.write(outputData);
            }
        });
        inputStream.push(buffer);
        inputStream.push(null);
        inputStream.pipe(parser);
    }
    // Write shp and transfer to database format. Return file path and projection.
    static shp2csv(inputPath: string, outputPath: string, i: number, outputId: string, recordColumn: string, routeIdColumn: string, callback: (path: string, prj: string) => void, errCallback: (err: Error) => void) {
        let dataset = gdal.open(inputPath);
        let layercount = dataset.layers.count();
        let layer = dataset.layers.get(0);
        let output = fs.createWriteStream(outputPath, { flags: "a" });
        output.on("finish", () => {
            callback(outputPath, layer.srs.toProj4());
        });
        layer.features.forEach((feature, featureId) => {
            let geom;
            let recordId: number = null;
            let routeId: string = null;
            try {
                let geomWKB = feature.getGeometry().toWKB();
                let geomWKBString = geomWKB.toString("hex");
                geom = geomWKBString;
                if (recordColumn) {
                    recordId = feature.fields.get(recordColumn);
                }
                if (routeIdColumn) {
                    routeId = feature.fields.get(routeIdColumn);
                }
            }
            catch (err) {
                console.log(err);
            }
            let attributes = {};
            let attributesOrdered = {};
            feature.fields.forEach((value, field) => {
                if (field != recordColumn && field != routeIdColumn) {
                    attributes[field] = value;
                }
            });
            Object.keys(attributes).sort().forEach((key) => {
                attributesOrdered[key] = attributes[key];
            });
            output.write(`${outputId}\t${i.toString()}\t${recordId ? recordId : (featureId + 1).toString()}\t${routeId}\tnull\tnull\tnull\tnull\tnull\t${geom}\t${JSON.stringify(attributesOrdered)}\n`);
        });
        output.end();
    }
}
The browser retries some requests if the server doesn't send a response and the browser hits its timeout value. Each browser may be configured with its own timeout, but 2 minutes sounds like it's probably the browser timeout.
You can't control the browser's timeout from your server. Two minutes is just too long to ask it to wait. You need a different design that responds sooner and then communicates the eventual result back later, when it's ready: either client polling or server push with webSocket/socket.io.
For client polling, you could have the server respond immediately to the first request with a token (some unique string). Then the client can ask the server for the response to that token every minute until the server eventually has it. If the server doesn't yet have the response, it immediately returns a code that means "no response yet"; in that case, the client sets a timer and tries again in a minute, sending the token each time so the server knows which request it is asking about.
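A minimal sketch of that polling design in Express; the routes, variables, and the runLongProcess job are all hypothetical:

// Sketch: respond immediately with a token, run the job in the background,
// and let the client poll for the result.
const crypto = require("crypto");
const pending = new Map(); // token -> { done, result }

app.post("/longtask", (req, res) => {
    const token = crypto.randomBytes(16).toString("hex");
    pending.set(token, { done: false, result: null });
    runLongProcess(req.body) // the long job, detached from this request
        .then(result => pending.set(token, { done: true, result }))
        .catch(err => pending.set(token, { done: true, result: { error: err.message } }));
    res.json({ token }); // returns well before any browser timeout
});

app.get("/longtask/:token", (req, res) => {
    const entry = pending.get(req.params.token);
    if (!entry) return res.status(404).end();
    if (!entry.done) return res.json({ ready: false }); // client retries later
    pending.delete(req.params.token);
    res.json({ ready: true, result: entry.result });
});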
For server push, the client creates a persistent webSocket or socket.io connection to the server. When the client makes its long-running request, the server immediately returns the same kind of token described above. Then, when the server is done with the request, it sends the token and the final data over the socket.io connection. The client listens for incoming messages on that connection and receives the final response there.
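And a matching sketch of the server-push variant, assuming an attached socket.io server named io and the same hypothetical runLongProcess job:

// Sketch: acknowledge immediately, push the final data when the job is done.
io.on("connection", (socket) => {
    socket.on("start-long-task", async (params) => {
        socket.emit("task-accepted"); // immediate acknowledgement, no HTTP timeout in play
        try {
            const result = await runLongProcess(params);
            socket.emit("task-done", result); // push the final data when ready
        } catch (err) {
            socket.emit("task-failed", { error: err.message });
        }
    });
});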
