Enabling Http2Stream in koa2 app - node.js

I'm trying to create a simple HTTP/2 server and want to make use of Http2Stream in the http2 module to push large assets. How can I incorporate it in my Koa2 app? Currently my server is just a middleware that receives the ctx and next objects, checks if the file exists, and tries to send it out.
async server(ctx, next, ...arg) {
  // check if the file exists
  // if it exists, set the headers and mimetype and send the file
}
Does the ctx object contain the functions needed to work with Http2Stream, or how can I extend it?

You can use the stream in ctx.res (which is the original Node response) via ctx.res.stream.
Working example: Koa2 with HTTP/2 - this one gets a file from the public folder (the filename is hardcoded here) and sends it through the stream (which should then be the Http2Stream). Just type https://localhost:8080/file in your browser. You need to place a file thefile.html into ./public:
'use strict';

const fs = require('fs');
const http2 = require('http2');
const Koa = require('koa');

const app = new Koa();

const options = {
  key: fs.readFileSync('./key.pem'),
  cert: fs.readFileSync('./cert.pem'),
  passphrase: 'test'
};

function getFile(path) {
  const filePath = `${__dirname}/public/${path}`;
  try {
    // open a file descriptor; respondWithFD sends it without buffering in JS
    const content = fs.openSync(filePath, 'r');
    const contentType = 'text/html';
    return {
      content,
      headers: {
        'content-type': contentType
      }
    };
  } catch (e) {
    return null;
  }
}

// response
app.use(ctx => {
  if (ctx.request.url === '/file') {
    const file = getFile('thefile.html');
    if (!file) {
      ctx.status = 404;
      return;
    }
    ctx.res.stream.respondWithFD(file.content, file.headers);
  } else {
    ctx.body = 'OK';
  }
});

const server = http2.createSecureServer(options, app.callback());
server.listen(8080, () => console.log('Listening on port 8080'));
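One caveat: when you write to the raw Node response yourself, it may be safer to also set ctx.respond = false, which Koa documents as the way to bypass its built-in response handling, so Koa doesn't try to finalize the request behind your back:

ctx.respond = false; // bypass Koa's built-in response handling
ctx.res.stream.respondWithFD(file.content, file.headers);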
Hope that helps

Related

Delivering image from S3 to React client via Context API and Express server

I'm trying to download a photo from an AWS S3 bucket via an Express server to serve to a React app, but I'm not having much luck. Here are my (unsuccessful) attempts so far.
The workflow is as follows:
1. Client requests the photo after retrieving its key from the database via the Context API
2. Request is sent to an Express server route (important so as to hide the true location from the client)
3. Express server route requests the blob file from the AWS S3 bucket
4. Express server parses the image to base64 and serves it to the client
5. Client updates state with the new image
React Client
const [profilePic, setProfilePic] = useState('');

useEffect(() => {
  // note: the effect callback itself cannot be async, so the call is not awaited here
  actions.getMediaSource(tempPhoto.key)
    .then(resp => {
      console.log('server resp: ', resp.data.data.newTest) // returns ����\u0000�\u0000\b\u0006\
      const url = window.URL || window.webkitURL;
      const blobUrl = url.createObjectURL(resp.data.data.newTest);
      console.log("blob ", blobUrl);
      setProfilePic({ ...profilePic, image: resp.data.data.newTest });
    })
    .catch(err => errors.push(err));
}, []);
Context API - just axios wrapped into its own library
getMediaContents = async (key) => {
  return await this.API.call(`http://localhost:5000/${MEDIA}/mediaitem/${key}`, "GET", null, true, this.state.accessToken, null);
}
Express server route
router.get("/mediaitem/:key", async (req, res, next) => {
try{
const { key } = req.params;
// Attempt 1 was to try with s3.getObject(downloadParams).createReadStream();
const readStream = getFileStream(key);
readStream.pipe(res);
// Attempt 2 - attempt to convert response to base 64 encoding
var data = await getFileStream(key);
var test = data.Body.toString("utf-8");
var container = '';
if ( data.Body ) {
container = data.Body.toString("utf-8");
} else {
container = undefined;
}
var buffer = (new Buffer.from(container));
var test = buffer.toString("base64");
require('fs').writeFileSync('../uploads', test); // it never wrote to this directory
console.log('conversion: ', test); // prints: 77+977+977+977+9AO+/vQAIBgYH - this doesn't look like base64 to me.
delete buffer;
res.status(201).json({ newTest: test });
} catch (err){
next(ApiError.internal(`Unexpected error > mediaData/:id GET -> Error: ${err.message}`));
return;
}
});
AWS S3 library - I made my own library for using the S3 bucket, as I'll need more of its functionality later.
const getFileStream = async (fileKey) => {
  const downloadParams = {
    Key: fileKey,
    Bucket: bucketName
  }
  // This was attempt 1's return, without async in the parameter
  return s3.getObject(downloadParams).createReadStream();
  // Attempt 2's intention was just to wait for the promise to be fulfilled.
  return await s3.getObject(downloadParams).promise();
}
exports.getFileStream = getFileStream;
If you've gotten this far you may have realised that I've tried a couple of things from different sources and documentation but I'm not getting any further. I would really appreciate some pointers and advice on what I'm doing wrong and what I could improve on.
If any further information is needed then just let me know.
Thanks in advance for your time!
Maybe it will be useful for you; this is how I get an image from S3 and process it on the server.
Create a temporary directory:
// assumes mkdtemp from 'node:fs/promises', plus Node's path and os modules
createTmpDir(): Promise<string> {
  return mkdtemp(path.join(os.tmpdir(), 'tmp-'));
}
Get the file:
readStream(path: string) {
  return this.s3
    .getObject({
      Bucket: this.awsConfig.bucketName,
      Key: path,
    })
    .createReadStream();
}
How I process the file:
// assumes pipeline from 'node:stream/promises' and the sharp image library
async MainMethod(fileName) {
  const dir = await this.createTmpDir();
  const serverPath = path.join(dir, fileName);
  // stream the S3 object to a temporary file (fileName is used as the S3 key here;
  // the original snippet referenced an undefined attachent.key)
  await pipeline(
    this.readStream(fileName),
    fs.createWriteStream(serverPath + '.jpg')
  );
  const createFile = await sharp(serverPath + '.jpg')
    .jpeg()
    .resize({
      width: 640,
      fit: sharp.fit.inside,
    })
    .toFile(serverPath + '.jpeg');
  const imageBuffer = fs.readFileSync(serverPath + '.jpeg');
  // my manipulations
  fs.rmSync(dir, { recursive: true, force: true }); // delete the temporary folder
}
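To tie this back to the original question, a hedged sketch of what could follow the "my manipulations" step above: send the buffer back with an image content type, or as a base64 data URL, instead of calling toString("utf-8") on binary data, which is what garbles the output in the question's attempt 2. The field names below are illustrative, not from the original code:

// option A: respond with the raw image bytes
res.set('Content-Type', 'image/jpeg');
return res.send(imageBuffer);

// option B: send a data URL that an <img> tag can use directly
const dataUrl = `data:image/jpeg;base64,${imageBuffer.toString('base64')}`;
return res.status(200).json({ image: dataUrl });

On the React side, option B's string can go straight into state (setProfilePic({ ...profilePic, image: dataUrl })) and render as <img src={profilePic.image} />.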

NodeJS Amazon AWS SDK S3 client stops working intermittently

I have a NodeJS Express web server that serves files from AWS S3. Most of the time this exact code works correctly and serves files for a wide variety of applications with large numbers of requests in production. The NodeJS web server runs across multiple nodes on a Docker Swarm server.
After about 2-3 weeks this stops working. There is no response from the S3Client GetObjectCommand, and no error is returned or anything. It starts working again only after restarting the NodeJS Docker container.
I read the S3 SDK docs, which indicate that the SDK will retry automatically:
Each AWS SDK implements automatic retry logic.
Questions:
How can we make this code more resilient so it doesn't need a restart?
Is the error handling correct? I'm wondering why there is seemingly no response or error returned at all in this situation.
Is it necessary to configure the retry settings?
NodeJS version: node:lts-alpine
Module: @aws-sdk/client-s3
Controllers
AWS Controller
const consoleLogger = require('../logger/logger.js').console;
const { S3Client, GetObjectCommand } = require('@aws-sdk/client-s3');

const config = {
  "credentials": {
    "accessKeyId": "example",
    "secretAccessKey": "example"
  },
  "endpoint": "example",
  "sslEnabled": true,
  "forcePathStyle": true
}

const s3client = new S3Client(config);
const awsCtrl = {};

awsCtrl.getObject = async (key) => {
  // Get object from Amazon S3 bucket
  let data;
  try {
    // Data is returned as a ReadableStream
    data = await s3client.send(new GetObjectCommand({ Bucket: "example", Key: key }));
    console.log("Success", data);
  } catch (e) {
    consoleLogger.error("AWS S3 error: ", e);
    const awsS3Error = {
      name: e.name || null,
      status: e.$metadata.httpStatusCode || 500
    };
    throw awsS3Error;
  }
  return data;
}

module.exports = awsCtrl;
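On the retry question: retries only help when a request actually fails; they do nothing for a connection that hangs without ever erroring, which matches the symptom described here. A hedged sketch of making the client fail fast instead, using the SDK's maxAttempts option together with socket timeouts on the request handler (the timeout values are illustrative; in older v3 releases the handler lives in @aws-sdk/node-http-handler, and newer releases call socketTimeout requestTimeout):

const { NodeHttpHandler } = require('@smithy/node-http-handler');

const s3client = new S3Client({
  ...config,
  maxAttempts: 3, // explicit retry budget
  requestHandler: new NodeHttpHandler({
    connectionTimeout: 5000, // fail if the TCP connection can't be established in 5s
    socketTimeout: 60000     // fail if the socket sits idle for 60s instead of hanging forever
  })
});

With timeouts in place, a stuck request surfaces as an error that the existing catch block can log, rather than a silent hang.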
Files Controller
const queryString = require('query-string');
const consoleLogger = require('../logger/logger.js').console;
const httpCtrl = require('./http.ctrl');
const jwtCtrl = require('./jwt.ctrl');
const awsCtrl = require('./aws.ctrl');
// filePath, result and lookupResponse come from controller code elided in the question
filesCtrl.deliverFile = async (req, res) => {
  /* Get object from AWS S3 */
  let fileObjectStream;
  try {
    fileObjectStream = await awsCtrl.getObject(filePath);
  } catch (e) {
    consoleLogger.error(`Unable to get object from AWS S3`, e);
    if (e.status && e.status === 404) {
      result.error = `Not found`;
      result.status = 404;
      return res.status(result.status).json(result);
    }
    return res.status(e.status || 500).json(result);
  }

  const filename = lookupResponse.data.filename;

  // Set response header: Content-Disposition
  res.attachment(filename);

  // API response object stream download to client
  return fileObjectStream.Body.pipe(res);
}
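One resilience gap worth flagging, as a hypothesis rather than a confirmed diagnosis: if a client disconnects mid-download, the piped S3 Body stream may never be fully consumed, and the SDK's pooled HTTP connection can stay checked out; once the pool is exhausted, every new GetObjectCommand waits silently for a free socket, which would look exactly like the hang described above. A minimal sketch of cleaning up, replacing the bare pipe at the end of deliverFile:

// release the pooled connection if the client goes away
res.on('close', () => {
  if (fileObjectStream && fileObjectStream.Body) {
    fileObjectStream.Body.destroy();
  }
});
fileObjectStream.Body.on('error', (err) => {
  consoleLogger.error('S3 stream error', err);
  res.end();
});
return fileObjectStream.Body.pipe(res);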
API
const express = require('express');
const router = express.Router();
const filesCtrl = require('../../controllers/files.ctrl');
const filesValidation = require('../validation/files');
router.get('/:fileId', [filesValidation.getFile], (req, res, next) => {
  return filesCtrl.deliverFile(req, res);
});

Filter large debug logs on client side

I am working on a web app that can execute code, retrieve the debug log, and display it based on the selected filter. This debug log can be large. The web app has an Express app on the server side and Handlebars on the client side. Currently I am retrieving the debug log in server-side code, using the code below.
const fs = require("fs");
const readline = require("readline");
const {Stream} = require("stream");

// creating the file using the code below
fs.writeFile("DebugLog.txt", debugLog, async function(err) {
  if (err) {
    throw err;
  }
  const result = await searchStream("DebugLog", "USER_DEBUG|METHOD_ENTRY|METHOD_EXIT");
  res.send(JSON.stringify({"DebugLog": result}));
});

// executing the search using the code below
const searchStream = (filename, text) => {
  return new Promise((resolve) => {
    const inStream = fs.createReadStream("./" + filename + ".txt");
    const outStream = new Stream();
    const rl = readline.createInterface(inStream, outStream);
    const result = [];
    const regEx = new RegExp(text, "i");
    rl.on("line", function(line) {
      if (line && line.search(regEx) >= 0) {
        result.push(line);
      }
    });
    rl.on("close", function() {
      console.log("finished search", filename);
      resolve(result);
    });
  });
};
Doing it this way ends up creating a static file that is shared across the server, but I need the file to be available only for the user's session. I am looking for solutions that either create a file available only to an individual session, or do client-side filtering so I don't have to worry about creating a file on the server side.
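Since the debug log is already in memory as a string (it's the debugLog variable written to DebugLog.txt above), one hedged option is to skip the file entirely and run the same readline filter over an in-memory stream; that keeps everything scoped to the request, so nothing is shared across sessions. A minimal sketch:

const readline = require("readline");
const { Readable } = require("stream");

// filter an in-memory log string line by line, without touching disk
const searchString = async (debugLog, text) => {
  const rl = readline.createInterface({ input: Readable.from(debugLog) });
  const regEx = new RegExp(text, "i");
  const result = [];
  for await (const line of rl) {
    if (regEx.test(line)) {
      result.push(line);
    }
  }
  return result;
};

// usage, replacing the writeFile/searchStream pair:
// const result = await searchString(debugLog, "USER_DEBUG|METHOD_ENTRY|METHOD_EXIT");
// res.send(JSON.stringify({ "DebugLog": result }));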

How can I get and modify response.body in nodejs?

I am using Express and wrote this code in a middleware. I need to proxy a request to another newUrl and have to get the body of the response from newUrl. But I don't know how to get it this way. Can anybody tell me how to get the res.body?
var stream = req.pipe(request(newUrl)).pipe(res);
stream.on('finish', function() {
  // how can I get res.body from the newUrl?
  next();
});
Here is an easy way to make a proxy request using streams.
'use strict';

const
  stream = require('stream'),
  util = require('util'),
  http = require('http');

let Transform = stream.Transform;

// make a Transform stream
function MyProxyStream(options) {
  if (!(this instanceof MyProxyStream)) {
    return new MyProxyStream(options);
  }
  // set the proxy url
  this.proxyUrl = 'http://go-to-proxy';
  Transform.call(this, options);
}
util.inherits(MyProxyStream, Transform);

// Transform stuff here
MyProxyStream.prototype._transform = function (chunk, enc, cb) {
  // send proxy request somewhere -> get data
  // chunk is a string; cast it to an Object with JSON.parse(chunk)
  // request.post(this.proxyUrl, chunk)
  let data = "my proxy data";
  this.push(data);
  return cb();
};

const server = http.createServer((req, res) => {
  let transformer = new MyProxyStream({objectMode: true});
  req.setEncoding('utf8');
  req.pipe(transformer).pipe(res);
});

server.listen(1337);
Hope this helps.
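If you specifically need the body that newUrl returned, another hedged option is to tee the upstream response through a PassThrough stream, buffering a copy while still piping to the client; here request is the same library as in the question, and proxyAndCapture is an illustrative name, not an existing API:

const { PassThrough } = require('stream');

function proxyAndCapture(req, res, next, newUrl) {
  const upstream = req.pipe(request(newUrl));
  const tap = new PassThrough();
  const chunks = [];
  tap.on('data', function (chunk) { chunks.push(chunk); });
  upstream.pipe(tap);   // copy for inspection
  upstream.pipe(res);   // normal proxy path to the client
  res.on('finish', function () {
    // the full response body from newUrl, assembled from the buffered chunks
    const body = Buffer.concat(chunks).toString('utf8');
    req.proxiedBody = body; // stash it wherever the next middleware expects
    next();
  });
}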

Send PDF file from AngularJS to NodeJS

I need to send a PDF file from an AngularJS client to a NodeJS service.
I wrote the AngularJS service, and when I receive the file it's a string like this:
%PDF-1.3
3 0 obj
<</Type /Page
/Parent 1 0 R
/Reso
How can I convert this string back to a PDF in NodeJS?
This is the client code:
var sendByEmail = function () {
  $scope.generatingPdf = true;
  $('#budget').show();
  var pdf = new JsPDF('p', 'pt', 'letter');
  var source = $('#budget')[0];
  pdf.addHTML(source, 0, 0, function () {
    var resultPdf = pdf.output();
    BillService.sendByEmail("rbrlnx@gmail.com", resultPdf).then(function () {
    });
    $('#budget').hide();
  });
};
var sendByEmail = function (email, file) {
  var deferred = $q.defer();
  var data = {
    email: email,
    file: file
  };
  BillService.sendByEmail(data, function (result) {
    deferred.resolve(result);
  }, function () {
    deferred.reject();
  });
  return deferred.promise;
};
The server-side controller is empty:
var sendByEmail = function (req, res, next) {
  var file = req.body.file;
};
I experimented with this a while ago, and I came up with this. It's not production-ready by a long shot, but maybe you'll find it useful. It's free of front-end libraries (except Angular, of course), but assumes you're using Express 4.x and body-parser.
The result: (screenshots omitted: the file input in the browser, and the saved PDF on the server). What you're seeing:
A tiny Node server serving a static index.html and the Angular files, plus a POST route that receives a PDF in base64 as delivered by the HTML FileReader API and saves it to disk.
Instead of saving to disk, you can send it as an email attachment. See for instance here or here for some info on that.
The example below assumes a user uploading a PDF through a file input, but the idea is the same for all other ways of sending a document to your back end. The most important thing is to send the PDF data as base64, because this is the format most file writers and email packages use (as opposed to straight-up binary, for instance). This also goes for images, documents, etc.
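For the raw string in the question specifically, a minimal sketch of the conversion, assuming file holds the binary string from req.body.file (jsPDF's output() returns the PDF as a raw binary string, one character per byte). Note this only works if the string survived transport byte-for-byte, which is exactly why sending base64 in the first place is the safer route:

// re-encode the raw binary string and write it back out as a PDF
var buffer = Buffer.from(file, 'binary');
fs.writeFile('restored.pdf', buffer.toString('base64'), 'base64', function (err) {
  if (err) console.log(err);
});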
How did I do that:
In your HTML:
<div pdfs>Your browser doesn't support File API.</div>
A directive called pdfs:
myApp.directive('pdfs', ['upload', function(upload) {
  return {
    replace: true,
    scope: {},
    template: '<input id="files" type="file">',
    link: function(scope, element) {
      scope.files = null;
      element.bind('change', function(evt) {
        scope.$apply(function() {
          scope.files = evt.target.files;
        });
      });
    },
    controller: function($scope, $attrs) {
      $scope.$watch('files', function(files) {
        //upload.put(files)
        if (typeof files !== 'undefined' && files.length > 0) {
          for (var i = 0; i < files.length; i++) {
            readFile(files[i]);
          }
        }
      }, true);

      function readFile(file) {
        var reader = new FileReader();
        reader.addEventListener("loadend", function(evt) {
          upload.post({name: file.name, data: reader.result});
        });
        // compare the file's MIME type (the original had an assignment
        // to reader.type here, which is always truthy)
        if (file.type === 'application/pdf') {
          reader.readAsDataURL(file);
        }
      }
    }
  };
}]);
A tiny service:
myApp.service('upload', function($http) {
  this.post = function(file) {
    $http.post('/pdf', file);
  }
});
And a node server:
var express = require('express');
var bodyParser = require('body-parser');
var fs = require("fs");
var app = express();

app.use(express.static('.'));
app.use(bodyParser.json({limit: '1mb'}));

app.post('/pdf', function(req, res) {
  var name = req.body.name;
  var pdf = req.body.data;
  // strip the data URL prefix added by FileReader.readAsDataURL
  pdf = pdf.replace('data:application/pdf;base64,', '');
  fs.writeFile(name, pdf, 'base64', function(err) {
    if (err) {
      console.log(err);
      return res.status(500).send('write failed');
    }
    res.send('received');
  });
});

var server = app.listen(3000, function() {
  console.log('Listening on port %d', server.address().port);
});
