How to modify HTML response using Vite proxy configuration - node.js

I have the following Vite configuration:
import { defineConfig } from "vite";
const zlib = require("zlib");

export default defineConfig(() => {
  return {
    server: {
      proxy: {
        "/start": {
          target: "https://someremoteurl.com",
          secure: false,
          changeOrigin: true,
          configure: (proxy) => {
            proxy.on("proxyRes", (proxyRes, req, res) => {
              const chunks = [];
              proxyRes.on("data", (chunk) => chunks.push(chunk));
              proxyRes.on("end", () => {
                const buffer = Buffer.concat(chunks);
                const encoding = proxyRes.headers["content-encoding"];
                if (encoding === "gzip" || encoding === "deflate") {
                  zlib.unzip(buffer, (err, buffer) => {
                    if (!err) {
                      let remoteBody = buffer.toString();
                      const modifiedBody = remoteBody.replace(); // do some string manipulation on remoteBody
                      res.write(modifiedBody);
                      res.end();
                    } else {
                      console.error(err);
                    }
                  });
                }
              });
            });
          },
        },
      },
    },
  };
});
Everything works as expected: modifiedBody is of the needed shape.
However, the server doesn't return the modified response; it returns the initial HTML that the "https://someremoteurl.com" URL served.
With the following code the response is "correctly" changed:
proxyRes.on("end", () => {
res.end('<h1>Some Test HTML</h1>')
});
But this wouldn't work for me, as I need to read the response first, unzip it, modify it, and only then send it back.
To me it looks like the proxied response is streamed, but the dev server doesn't wait for the response to finish streaming, run the transformations, and only then serve the desired document.
Any idea how I can achieve the desired result?

As Vite uses node-http-proxy (http-proxy) under the hood, I had to look for the answer in its documentation. I found that the selfHandleResponse option needs to be set to true in order to serve your modified response.
Setting that option solved my problem.
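For reference, here is a minimal sketch of what the configuration looks like with selfHandleResponse set. The replace() call and the header handling are placeholders/assumptions for whatever manipulation you need; with selfHandleResponse enabled, http-proxy no longer forwards the upstream response itself, so the proxyRes listener has to end res on its own:

import { defineConfig } from "vite";
import zlib from "zlib";

export default defineConfig(() => ({
  server: {
    proxy: {
      "/start": {
        target: "https://someremoteurl.com",
        secure: false,
        changeOrigin: true,
        // let the proxyRes handler below send the response instead of http-proxy
        selfHandleResponse: true,
        configure: (proxy) => {
          proxy.on("proxyRes", (proxyRes, req, res) => {
            const chunks = [];
            proxyRes.on("data", (chunk) => chunks.push(chunk));
            proxyRes.on("end", () => {
              const buffer = Buffer.concat(chunks);
              const encoding = proxyRes.headers["content-encoding"];
              const send = (body) => {
                // the body is sent uncompressed, so content-encoding is not copied over
                res.statusCode = proxyRes.statusCode || 200;
                res.setHeader("content-type", proxyRes.headers["content-type"] || "text/html");
                res.end(body.replace("needle", "replacement")); // placeholder manipulation
              };
              if (encoding === "gzip" || encoding === "deflate") {
                zlib.unzip(buffer, (err, decoded) => {
                  if (err) return console.error(err);
                  send(decoded.toString());
                });
              } else {
                send(buffer.toString());
              }
            });
          });
        },
      },
    },
  },
}));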

Related

unexpected behavior using zip-stream NPM on Google k8s

I am working on creating a zip of multiple files on the server and streaming it to the client while it is being created. Initially, I was using ArchiverJS. It was working fine if I was appending buffers to it, but it fails when I need to add streams to it. Then, after some discussion on GitHub, I switched to Node zip-stream, which started working fine, thanks to jntesteves. But as I deploy the code on GKE k8s, I started getting Network Failed errors for huge files.
Here is my sample code :
const ZipStream = require("zip-stream");
/**
* @summary Adds the readable stream provided by the https module into zipStreamer using the entry method
*/
const handleEntryCB = ({ readableStream, zipStreamer, fileName, resolve }) => {
readableStream.on("error", error => {
console.error("Error while listening readableStream : ", error);
resolve("done");
});
zipStreamer.entry(readableStream, { name: fileName }, error => {
if (!error) {
resolve("done");
} else {
console.error("Error while listening zipStream readableStream : ", error);
resolve("done");
}
});
};
/**
* @summary Handles downloading of files using the native https, http and request modules
*/
const handleUrl = ({ elem, zipStreamer }) => {
return new Promise((resolve, reject) => {
let fileName = elem.fileName;
const url = elem.url;
//Used in most of the cases
if (url.startsWith("https")) {
https.get(url, readableStream => {
handleEntryCB({ readableStream, zipStreamer, url, fileName, resolve, reject });
});
} else if (url.startsWith("http")) {
http.get(url, readableStream => {
handleEntryCB({ readableStream, zipStreamer, url, fileName, resolve, reject });
});
} else {
const readableStream = request(url);
handleEntryCB({ readableStream, zipStreamer, url, fileName, resolve, reject });
}
});
};
const downloadZipFile = async (data, resp) => {
let { urls = [] } = data || {};
if (!urls.length) {
throw new Error("URLs are mandatory.");
}
//Output zip name
const outputFileName = `Test items.zip`;
console.log("Downloading using streams.");
//Initialize zip-stream instance
const zipStreamer = new ZipStream();
//Set headers to response
resp.writeHead(200, {
"Content-Type": "application/zip",
"Content-Disposition": `attachment; filename="${outputFileName}"`,
"Access-Control-Allow-Origin": "*",
"Access-Control-Allow-Methods": "GET, POST, OPTIONS"
});
//piping zipStreamer to the resp so that client starts getting response
//as soon as first chunk is added to the zipStreamer
zipStreamer.pipe(resp);
for (const elem of urls) {
await handleUrl({ elem, zipStreamer });
}
zipStreamer.finish();
};
app.post(restPrefix + "/downloadFIle", async (req, resp) => {
try {
const { data } = req.body || {};
// await so that errors from the async downloadZipFile are caught below
await downloadZipFile(data, resp);
} catch (error) {
console.error("[FileBundler] unknown error : ", error);
if (resp.headersSent) {
resp.end("Unknown error while archiving.");
} else {
resp.status(500).end("Unknown error while archiving.");
}
}
});
I tested with 7-8 files of ~4.5 GB each locally and it works fine, but when I tried the same on Google k8s, I got a network failed error.
After some more research, I increased the server timeout on k8s to 3000 seconds, and then it started working fine, but I guess increasing the timeout is not a good solution.
Is there anything I am missing at the code level, or can you suggest a good GKE deployment configuration for a server that can download large files with many concurrent users?
I have been stuck on this for the past 1.5+ months. Please help!
Edit 1: I edited the timeout in the ingress, i.e. Network services -> Load Balancing -> edit the timeout in the service.
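One code-level thing that may be worth ruling out (a sketch, not a confirmed fix): Node's own HTTP server timeouts can also abort long streaming responses independently of the load balancer. Assuming app.listen() returns the underlying http.Server and PORT is a placeholder:

const server = app.listen(PORT);

// Disable the per-response inactivity timeout so long downloads aren't cut off
server.setTimeout(0);

// Keep idle connections alive slightly longer than the load balancer's timeout
server.keepAliveTimeout = 620 * 1000; // e.g. for a 600s LB timeout
server.headersTimeout = 650 * 1000;   // must be greater than keepAliveTimeout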

Angular and Nodejs basic file download

Could someone show me an example of a basic user file download using Node and Angular, please? I understand it like this, but this is not working:
Nodejs:
// Does it matter that it is a post and not a get?
app.post('/some-api', someData, (request, response) => {
response.download('file/path/mytext.txt');
});
Angular 2+:
this.httpclient.post<any>('.../some-api', {...data...}).subscribe(response => {
console.log(response);
// This throws an error, but even if it doesn't,
// how do I download the Nodejs `response.download(...) ?`
});
Here are possible answers, but they are so complex; could someone just give me a super basic example (basically what I have here, but a working version)? The easiest solution, please.
How do I download a file with Angular2
Angular download node.js response.sendFile
There you go:
Node.js Server:
const express = require("express");
const router = express.Router();
// the handler must be defined before it is passed to router.post
const downloadReport = function(req, res) {
res.sendFile(req.body.filename);
};
router.post("/experiment/resultML/downloadReport", downloadReport);
Component Angular:
import { saveAs } from "file-saver"
download() {
let filename = "/Path/to/your/report.pdf";
this.api.downloadReport(filename).subscribe(
data => {
saveAs(data, filename);
},
err => {
alert("Problem while downloading the file.");
console.error(err);
}
);
}
Service Angular:
public downloadReport(file): Observable<any> {
// Create url
let url = `${baseUrl}${"/experiment/resultML/downloadReport"}`;
var body = { filename: file };
return this.http.post(url, body, {
responseType: "blob",
headers: new HttpHeaders().append("Content-Type", "application/json")
});
}

Stream audio to Dialogflow with chunks from browser

We're doing some experimenting with Dialogflow and we've run into a complete stop for the time being. We're trying to set up a browser client that streams audio in chunks to Dialogflow via the node v2beta1 version of the dialogflow npm package. We followed the example to get it running and it works fine when we use the node server to pick up the sound via extra software (sox), but we want to stream from the browser. So we've set up a small code snippet that picks up the MediaStream from the mic.
When the data event is triggered we get a chunk (an ArrayBuffer) that we pass, in chunks, to our node server.
On the server we've followed this example: https://cloud.google.com/dialogflow-enterprise/docs/detect-intent-stream#detect-intent-text-nodejs. The only thing we do differently is that, instead of using pump to chain streams, we just write our chunks to the sessionsClient.
streamingDetectIntent().write({ inputAudio: [chunk] })
During experimentation we received several errors that we solved. But at this point we pass our chunks and receive empty responses, both during and at the end.
Is this a valid way of passing audio to Dialogflow, or do we really need to set up a stream? We do not want to use the node server as an entry point; it needs to be the browser. We will have full control.
Client
import getUserMedia from 'get-user-media-promise';
import MicrophoneStream from 'microphone-stream';
export const startVoiceStream = () => {
const microphoneStream = new MicrophoneStream();
getUserMedia({ video: false, audio: true })
.then(function(micStream) {
microphoneStream.setStream(micStream);
socket.emit('startMicStream');
state.streamingMic = true;
setTimeout(() => {
// Just closing the stream on a timer for now
socket.emit('endMicStream');
}, 5000);
})
.catch(function(error) {
console.log(error);
});
microphoneStream.on('data', function(chunk) {
if (state.streamingMic) {
socket.emit('micStreamData', chunk);
}
});
};
The server code is much longer, so I think I'll spare the details, but these are the main parts.
const initialStreamRequest = {
session: sessions.sessionPath,
queryParams: {
session: sessions.sessionPath, //TODO: try to delete
},
queryInput: {
audioConfig: {
audioEncoding: 'AUDIO_ENCODING_LINEAR_16',
sampleRateHertz: '16000',
languageCode: 'en-US',
},
singleUtterance: false
},
};
const startRecognitionStream = socketClient => {
streamIntent = sessions.sessionClient
.streamingDetectIntent()
.on('error', error => {
console.error({ error });
socketClient.emit('streamError', error);
})
.on('data', data => {
socketClient.emit('debug', { message: 'STREAM "ON DATA"', data });
if (data.recognitionResult) {
socketClient.emit(
'playerTranscript',
data.recognitionResult.transcript,
);
console.log(
`#Intermediate transcript : ${data.recognitionResult.transcript}`,
);
} else {
socketClient.emit('streamAudioResponse', data);
}
});
streamIntent.write(initialStreamRequest);
};
socket.on('micStreamData', data => {
if (streamIntent !== null) {
stop = true;
streamIntent.write({ inputAudio: data });
}
});
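For comparison with the linked Google sample, here is a sketch of what chaining the chunks through an actual stream might look like on the server, reusing sessions, initialStreamRequest and socket from the snippets above. The pump package and the object-mode Transform are assumptions mirroring that sample, not tested code:

const { PassThrough, Transform } = require("stream");
const pump = require("pump"); // as used in the linked example

const audioInput = new PassThrough();

const startRecognitionStreamWithPump = socketClient => {
  const detectStream = sessions.sessionClient
    .streamingDetectIntent()
    .on("error", error => socketClient.emit("streamError", error))
    .on("data", data => socketClient.emit("streamAudioResponse", data));

  // The first message carries the config; every following message carries audio only
  detectStream.write(initialStreamRequest);

  pump(
    audioInput,
    new Transform({
      objectMode: true,
      transform(chunk, _enc, next) {
        next(null, { inputAudio: chunk });
      },
    }),
    detectStream,
  );
};

socket.on("micStreamData", chunk => audioInput.write(chunk));
socket.on("endMicStream", () => audioInput.end());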

I'm using a .pfx certificate in my node.js https server and I can't use 'passphrase' in options

I'm currently making a web application with node.js and https.
So I try to use my .pfx file (I got it from http://www.cert-depot.com/) for the certificate required for https, like the following code:
var https = require('https');
var fs = require('fs');
var options = {
pfx: fs.readFileSync('./8ab20f7b-51b9-4c09-a2e0-1918bb9fb37f.pfx')
passphrase: 'password'
};
var server = https.createServer(options, function (request, response) {
fs.readFile('index.html', function (error, data) {
response.writeHead(200, {'Content-Type': 'text/html'});
response.end(data);
});
}).listen(12345, function(){
console.log('server running');
});
But when I start this code with node.js, I'm getting an error message in my windows console:
passphrase: 'password'
Unexpected identifier
My code is very similar to the official guide page of Node.js (http://nodejs.org/api/https.html#https_https_createserver_options_requestlistener), but I can't start my https server.
What's wrong with my passphrase?
(I'm running node.js in Windows 8 64bit.)
I guess the missing comma between your pfx and passphrase properties is what causes the error. Here it is with the comma added:
var options = {
pfx: fs.readFileSync('./8ab20f7b-51b9-4c09-a2e0-1918bb9fb37f.pfx'),
passphrase: 'password'
};
I stick a promise wrapper on my implementation of it and keep it async (ES2015).
lib/pfx.js
import { readFile } from 'fs'
import { resolve as resolvePath } from 'path'
import { assert } from 'chai' // assumed: the typeOf/isAbove calls below come from chai's assert interface
export const CERTIFICATE_ROOT = resolvePath(__dirname, '..', 'etc', 'certificates')
export const getCertificatePath = filename => resolvePath(CERTIFICATE_ROOT, filename)
export function readCertificate(filename) {
let certificatePath = getCertificatePath(filename)
return new Promise((resolve, reject) => {
readFile(certificatePath, (err, certificate) => {
if (err)
return reject(err)
resolve(certificate)
})
})
}
export function readPfx(filename, passphrase) {
assert.typeOf(passphrase, 'string', 'passphrase must be a string')
assert.isAbove(passphrase.length, 0, 'passphrase must not be empty')
return readCertificate(filename).then(pfx => ({ pfx, passphrase }))
}
and usage
lib/app.js
import { readPfx } from './pfx'
readPfx('8ab20f7b-51b9-4c09-a2e0-1918bb9fb37f.pfx', process.env.PASSPHRASE)
.then(opts => /* start server here */)
.catch(err => /* handle errors */)

Node.js - How to get my external IP address in node.js app?

I'm using node.js and I need to get my external IP address, provided by my ISP.
Is there a way to achieve this without using a service like http://myexternalip.com/raw ?
Thanks.
You can do the same as what they do in Python to get the external IP: connect to some website and get your details from the socket connection:
const net = require('net');
const client = net.connect({port: 80, host:"google.com"}, () => {
console.log('MyIP='+client.localAddress);
console.log('MyPORT='+client.localPort);
});
*Unfortunately I cannot find the original Python example anymore for reference.
Update 2019:
Using built-in http library and public API from https://whatismyipaddress.com/api
const http = require('http');
var options = {
host: 'ipv4bot.whatismyipaddress.com',
port: 80,
path: '/'
};
http.get(options, function(res) {
console.log("status: " + res.statusCode);
res.on("data", function(chunk) {
console.log("BODY: " + chunk);
});
}).on('error', function(e) {
console.log("error: " + e.message);
});
Tested with Node.js v0.10.48 on Amazon AWS server
--
Update 2021
ipv4bot is closed, here is another public API:
var http = require('http');
http.get({'host': 'api.ipify.org', 'port': 80, 'path': '/'}, function(resp) {
resp.on('data', function(ip) {
console.log("My public IP address is: " + ip);
});
});
--
Update 2022
ChatGPT wrote a longer example using ipify with JSON. *Yes, I've tested it.
https://gist.github.com/unitycoder/745a58d562180994a3025afcb84c1753
More info https://www.ipify.org/
npm install --save public-ip from here.
Then:
const publicIp = require("public-ip"); // assumes public-ip v4.x, which exposes publicIp.v4()
publicIp.v4().then(ip => {
console.log("your public ip address", ip);
});
And if you want the local machine ip you can use this.
var ip = require("ip");
var a = ip.address();
console.log("private ip address", a);
Use my externalip package on GitHub
externalip(function (err, ip) {
console.log(ip); // => 8.8.8.8
});
Edit: This was written back in 2013... The site is gone. I'm leaving the example request code for now unless anyone complains but go for the accepted answer.
http://fugal.net/ip.cgi was similar to that one.
or you can
require('http').request({
hostname: 'fugal.net',
path: '/ip.cgi',
agent: false
}, function(res) {
if(res.statusCode != 200) {
throw new Error('non-OK status: ' + res.statusCode);
}
res.setEncoding('utf-8');
var ipAddress = '';
res.on('data', function(chunk) { ipAddress += chunk; });
res.on('end', function() {
// ipAddress contains the external IP address
});
}).on('error', function(err) {
throw err;
}).end();
Ref: http://www.nodejs.org/api/http.html#http_http_request_options_callback
This should work well without any external dependencies (with the exception of ipify.org):
var https = require('https');
var callback = function(err, ip){
if(err){
return console.log(err);
}
console.log('Our public IP is', ip);
// do something here with the IP address
};
https.get({
host: 'api.ipify.org',
}, function(response) {
var ip = '';
response.on('data', function(d) {
ip += d;
});
response.on('end', function() {
if(ip){
callback(null, ip);
} else {
callback('could not get public ip address :(');
}
});
});
You could also use https://httpbin.org
GET https://httpbin.org/ip
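For example, a quick sketch with Node's built-in https module (httpbin returns JSON and puts the address in the origin field):

const https = require("https");

https.get("https://httpbin.org/ip", (res) => {
  let body = "";
  res.on("data", (chunk) => (body += chunk));
  res.on("end", () => {
    const { origin } = JSON.parse(body);
    console.log("My public IP address is: " + origin);
  });
}).on("error", console.error);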
Simply use superagent
var superagent = require('superagent');
var getip = function () {
superagent
.get('http://ip.cn/')
.set('User-Agent', 'curl/7.37.1')
.end(function (err, res) {
if (err) {
console.log(err);
}
var ip = res.text.match(/\d+\.\d+\.\d+\.\d+/)[0];
console.log(ip)
// Here is the result
});
};
Another little Node module is ext-ip. The difference is that you can use different response options, matching your coding style. It's ready to use out of the box...
Promise
let extIP = require('ext-ip')();
extIP.get().then(ip => {
console.log(ip);
})
.catch(err => {
console.error(err);
});
Events
let extIP = require('ext-ip')();
extIP.on("ip", ip => {
console.log(ip);
});
extIP.on("err", err => {
console.error(err);
});
extIP();
Callback
let extIP = require('ext-ip')();
extIP((err, ip) => {
if( err ){
throw err;
}
console.log(ip);
});
The simplest answer, based on experience, is that you can't get your external IP in most cases without using an external service, since you'll typically be behind a NAT or shielded by a firewall. I say in most cases, since there may be situations where you can get it from your router, but it is too case-specific to provide a general answer.
What you want is simply to choose your favourite http client in NodeJS and find a maintained server that simply responds with the IP address in the body. You can also use a package, but you should see if it is still using a maintained remote server.
While there are plenty of examples already, here is one that first tries IPv6 and then falls back to IPv4. It leverages axios, since that is what I am comfortable with. Also, unless the optional parameter debug is set to true, the result is either a value or undefined.
const axios = require('axios');
// replace these URLs with whatever is good for you
const remoteIPv4Url = 'http://ipv4bot.whatismyipaddress.com/';
const remoteIPv6Url = 'http://ipv6bot.whatismyipaddress.com/';
// Try getting an external IPv4 address.
async function getExternalIPv4(debug = false) {
try {
const response = await axios.get(remoteIPv4Url);
if (response && response.data) {
return response.data;
}
} catch (error) {
if (debug) {
console.log(error);
}
}
return undefined;
}
// Try getting an external IPv6 address.
async function getExternalIPv6(debug = false) {
try {
const response = await axios.get(remoteIPv6Url);
if (response && response.data) {
return response.data;
}
} catch (error) {
if (debug) {
console.log(error);
}
}
return undefined;
}
async function getExternalIP(debug = false) {
let address;
// Try IPv6 and then IPv4
address = await getExternalIPv6(debug);
if (!address) {
address = await getExternalIPv4(debug);
}
return address;
}
module.exports = { getExternalIP, getExternalIPv4, getExternalIPv6 };
Feel free to suggest improvements.
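A usage sketch, assuming the snippet above is saved as external-ip.js (a hypothetical file name) next to the caller:

const { getExternalIP } = require("./external-ip");

getExternalIP(true).then(ip => {
  // ip is undefined if both the IPv6 and IPv4 lookups failed
  console.log("external IP:", ip);
});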
You may use the request-ip package:
const requestIp = require('request-ip');
// inside middleware handler
const ipMiddleware = function(req, res, next) {
const clientIp = requestIp.getClientIp(req);
next();
};
My shameless plug: canihazip (Disclosure: I'm the author of the module, but not of the main page.)
It can be required as a module, exposing a single function that can optionally be passed a callback function, and it will return a promise.
It can also be installed globally and used as a CLI.
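Going by that description alone (this is a guess at the call shape, not the module's documented API), usage would look roughly like:

const canihazip = require("canihazip");

// promise style
canihazip().then(ip => console.log(ip));

// or callback style
canihazip((err, ip) => {
  if (err) throw err;
  console.log(ip);
});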
You could very easily use an API solution for retrieving the external IP!
I made an IP tracker site for this kind of thing a few days ago!
Here is a snippet of code you could use to get the IP!
async function getIp(cb) {
let output = null;
let promise = new Promise(resolve => {
let http = new XMLHttpRequest();
http.onreadystatechange = function() {
if (this.readyState == 4 && this.status == 200) {
output = this.responseText;
resolve("done");
}
}
http.open("GET", "https://iptrackerz.herokuapp.com/ip", true);
http.send();
});
await promise;
if (cb != undefined) {
cb(JSON.parse(output)["ip"]);
} else {
return JSON.parse(output)["ip"];
}
}
OK, now you have the function getIp()!
The way I coded it allows two different ways of invoking it.
Here they are.
Asynchronous
async function printIP() {
let ip = await getIp();
document.write("Your IP is " + ip);
};
printIP();
Callback
getIp(ip => {
document.write("Your IP is " + ip);
});
I was looking for a solution that doesn't rely on others' libraries/resources, and found this to be an acceptable alternative:
just a GET request to an external server (under my control), where I read req.headers['x-forwarded-for'] and serve it back to my client.
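A minimal sketch of such an endpoint, assuming Express, a hypothetical /whoami route, and a proxy in front of the server that sets the header:

const express = require("express");
const app = express();

app.get("/whoami", (req, res) => {
  // x-forwarded-for can be a comma-separated list; the first entry is the original client
  const forwarded = req.headers["x-forwarded-for"];
  const ip = forwarded ? forwarded.split(",")[0].trim() : req.socket.remoteAddress;
  res.send(ip);
});

app.listen(3000);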
Node.js has a lot of great built-in modules you can use without including any external dependencies. You can make this file:
WhatsMyIpAddress.js
const http = require('http');
function WhatsMyIpAddress(callback) {
const options = {
host: 'ipv4bot.whatismyipaddress.com',
port: 80,
path: '/'
};
http.get(options, res => {
res.setEncoding('utf8');
res.on("data", chunk => callback(chunk, null));
}).on('error', err => callback(null, err.message));
}
module.exports = WhatsMyIpAddress;
Then call it in your main.js like this.
main.js
const WhatsMyIpAddress = require('./src/WhatsMyIpAddress');
WhatsMyIpAddress((data,err) => {
console.log('results:', data, err);
});
You can use the ippublic command from the nurl library to get this. (Disclosure: I made nurl.)
> npm install nurl-cli -g
> ippublic;
// 50.240.33.6
