How to get specific chunks from a stream in Node.js

I'm trying to build a server that is used as a "CDN proxy".
We have:
S1: the main server that holds all the media
S2: the CDN proxy
the client
The aim is to obtain a stream from the main server (S1).
(I'm using the following link as a placeholder; the real link could be a temporary one.)
axios.get(link, { responseType: "stream", adapter: httpAdapter })
.then((axiosResponse: any) => { ... })
Since I have a stream, I don't need to proxy the entire media file to the client, only the chunk the client asks for.
What I don't know is how to retrieve a specific chunk without downloading all the chunks that come before it.
This is a rough draft:
import express, { Request, Response } from 'express';
import expressAsyncHandler from 'express-async-handler';
import * as http from 'http';
const axios = require("axios");
const httpAdapter = require("axios/lib/adapters/http");
const app = express();
const HTTP_PORT = 3000;
var server = http.createServer(app);
const link = 'https://images.all-free-download.com/footage_preview/mp4/city_panorama_6891675.mp4';
app.get('/video.mp4', expressAsyncHandler(async (req: Request, res: Response) => {
axios.get(link, { responseType: "stream", adapter: httpAdapter })
.then((axiosResponse: any) => {
let stream = axiosResponse?.data;
const fileSize = axiosResponse["headers"]["content-length"];
const range = req.headers.range
if (range) {
const parts = range.replace(/bytes=/, "").split("-")
const start = parseInt(parts[0], 10)
const end = parts[1]
? parseInt(parts[1], 10)
: fileSize - 1
const chunksize = (end - start) + 1
/*******************************/
const streamChunk = /* GET CHUNK FROM STREAM WITHOUT OVERHEAD */ null;
/*******************************/
const head = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'Accept-Ranges': 'bytes',
'Content-Length': chunksize,
'Content-Type': 'video/mp4',
}
res.writeHead(206, head);
streamChunk.pipe(res);
} else {
const head = {
'Content-Length': fileSize,
'Content-Type': 'video/mp4',
}
res.writeHead(200, head)
stream.pipe(res)
}
})
}));
server.listen(HTTP_PORT, () => {
console.log("Running on port: " + HTTP_PORT);
});
I hope someone can help me.
Thanks in advance :)
UPDATE
The following code works with VLC:
import express, { Request, Response } from 'express';
import expressAsyncHandler from 'express-async-handler';
import * as http from 'http';
import * as https from 'https';
const axios = require("axios");
const httpAdapter = require("axios/lib/adapters/http");
const app = express();
const HTTP_PORT = 3000;
var server = http.createServer(app);
/************************************************/
// PREVENT EXCEPTION CLOSURE
process.on('uncaughtException', function (err) {
console.error(err);
console.log("Node NOT Exiting...");
});
/************************************************/
const link = 'https://samplelib.com/lib/preview/mp4/sample-30s.mp4';
app.get('/video.mp4', expressAsyncHandler(async (req: Request, res: Response) => {
if (req.socket.destroyed) {
return;
}
delete req.headers.referer;
let head;
let status;
const range = req.headers.range
const axiosResponseHead = await axios.head(link)
const fileSize = axiosResponseHead["headers"]["content-length"];
const agent = new https.Agent({
rejectUnauthorized: false
});
console.log(range)
if (range) {
req.headers.host = new URL(link).hostname;
const parts = range?.replace(/bytes=/, "")?.split("-")
const start = parseInt(parts[0], 10)
const end = parts[1]
? parseInt(parts[1], 10)
: fileSize - 1
const chunksize = (end - start) + 1
head = {
'Content-Range': `bytes ${start}-${end}/${fileSize}`,
'range': `bytes=${start}-${end}`,
'Accept-Ranges': 'bytes',
'Content-Length': chunksize,
'Content-Type': 'video/mp4',
}
status = 206;
req.headers.range = head.range
} else {
head = {
'Content-Length': fileSize,
'Content-Type': 'video/mp4',
}
status = 200;
}
console.log(req.headers)
console.log(head)
console.log("==================================")
let axiosResponse: any
let stream: any;
res.on('close', function () {
stream?.destroy();
});
let instance = axios.create();
axiosResponse = await instance.get(link, {
responseType: "stream", adapter: httpAdapter, headers: req.headers, httpsAgent: agent
})
stream = axiosResponse?.data;
res.writeHead(status, head)
stream.pipe(res, { end: true });
}));
server.listen(HTTP_PORT, () => {
console.log("Running on port: " + HTTP_PORT);
});
function sleep(ms: number) {
return new Promise(resolve => setTimeout(resolve, ms));
}
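The key difference from the first attempt is that the client's Range header is forwarded to the origin, so the origin itself serves only the requested bytes (a 206 response) and nothing before the requested chunk is downloaded. A slightly simpler variant is sketched below; it assumes the origin supports range requests and reuses the app, link, axios, httpAdapter and expressAsyncHandler defined above, and the route name is arbitrary:
app.get('/video2.mp4', expressAsyncHandler(async (req, res) => {
  // Hypothetical second route, just for illustration.
  // Forward the client's Range header (if any); the origin then answers 206 with only those bytes.
  const headers = req.headers.range ? { range: req.headers.range } : {};
  const upstream = await axios.get(link, { responseType: "stream", adapter: httpAdapter, headers });
  // Mirror the origin's status (200 or 206) and the headers that matter for seeking.
  const responseHeaders = {
    'Content-Type': upstream.headers['content-type'],
    'Content-Length': upstream.headers['content-length'],
    'Accept-Ranges': 'bytes'
  };
  if (upstream.headers['content-range']) {
    responseHeaders['Content-Range'] = upstream.headers['content-range'];
  }
  res.writeHead(upstream.status, responseHeaders);
  // Stop pulling from the origin if the client disconnects early.
  res.on('close', () => upstream.data.destroy());
  upstream.data.pipe(res);
}));
This also avoids the extra HEAD request, since the origin's own Content-Range/Content-Length headers are passed straight through.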

Related

SyntaxError: Unexpected end of JSON input at JSON.parse (<anonymous>) at Server.requestListener ()

I have a problem with a Node.js backend: if I use a test client (not React) it works, but when I make the same call from a React app I get the error in the subject line.
Server is running on http://localhost:8000
undefined:1
SyntaxError: Unexpected end of JSON input
at JSON.parse ()
at Server.requestListener (/home/boris/bookni_ru_srv/backend.js:27:38)
at processTicksAndRejections (node:internal/process/task_queues:96:5)
Here is server part:
let { Logincheck } = require('./ConnectDB');
const http = require("http");
var fs = require('fs');
const host = 'localhost';
const port = 8000;
const rbkp_srv ='192.168.56.101';
const test = fs.readFileSync('test.json', 'utf-8');
const requestListener = async function (req, res) {
const buffers = [];
for await (const chunk of req) {
buffers.push(chunk);
}
const data = Buffer.concat(buffers);
res.setHeader("Content-Type", "application/json");
switch (req.url) {
case "/login":
res.writeHead(200);
const rbkp_passwd = JSON.parse(data).passwd;
const rbkp_user = JSON.parse(data).user;
let login = Logincheck(rbkp_srv, rbkp_user, rbkp_passwd);
login.then(function (result) {
if (result === true) {
console.log('Login succeeded');
res.end('Login');
} else {
console.log(result);
res.end(result.toString());
}
});
break
}
};
const server = http.createServer(requestListener);
server.listen(port, host, () => {
console.log(`Server is running on http://${host}:${port}`);
});
Here is FrontEnd part:
export default function Auth(){
const requestOptions = {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({"user" : "boris",
"passwd" : "test"})
};
fetch('http://localhost:8000/login', requestOptions)
.then(response => response.json())
.then(data => this.setState({ postId: data.id }));
}
I have no clue how to fix it yet; any help is appreciated.
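One likely cause (an assumption based on the stack trace, not something stated in the question): because the React app runs on a different origin than http://localhost:8000, the browser first sends a CORS preflight OPTIONS request with no body, and JSON.parse of an empty body throws exactly "Unexpected end of JSON input". A minimal sketch of requestListener that answers the preflight, sets the CORS headers, and guards the parse:
const requestListener = async function (req, res) {
  // Allow the cross-origin call from the React dev server and answer the preflight early.
  res.setHeader("Access-Control-Allow-Origin", "*");
  res.setHeader("Access-Control-Allow-Headers", "Content-Type");
  if (req.method === "OPTIONS") {
    res.writeHead(204);
    res.end();
    return;
  }
  const buffers = [];
  for await (const chunk of req) {
    buffers.push(chunk);
  }
  const data = Buffer.concat(buffers).toString();
  res.setHeader("Content-Type", "application/json");
  if (req.url === "/login" && req.method === "POST") {
    if (!data) {
      // An empty body is what makes JSON.parse throw "Unexpected end of JSON input".
      res.writeHead(400);
      res.end(JSON.stringify({ error: "empty body" }));
      return;
    }
    const { user, passwd } = JSON.parse(data);
    const result = await Logincheck(rbkp_srv, user, passwd);
    res.writeHead(200);
    res.end(result === true ? 'Login' : result.toString());
  }
};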

File upload from React Native (Expo) to Node (multer)

How can I upload a file (PDF, DOCX, etc.) from React Native using Expo to a server using Node? I've seen many examples for images using the expo-image-picker API, but I've come across none that uses the document-picker or filesystem APIs from Expo. The Expo FileSystem documentation was a little hard to interpret for a beginner like me.
Thanks for the help. I was able to come up with a solution and I'll post it below so it can be of some use to whoever comes here in the future.
React Native
import React, { useState } from 'react';
import { Button, View } from 'react-native';
import * as DocumentPicker from 'expo-document-picker';
import * as FileSystem from 'expo-file-system';
const DocPicker = () => {
const [ doc, setDoc ] = useState();
const pickDocument = async () => {
let result = await DocumentPicker.getDocumentAsync({ type: "*/*", copyToCacheDirectory: true }).then(response => {
if (response.type == 'success') {
let { name, size, uri } = response;
let nameParts = name.split('.');
let fileType = nameParts[nameParts.length - 1];
var fileToUpload = {
name: name,
size: size,
uri: uri,
type: "application/" + fileType
};
console.log(fileToUpload, '...............file')
setDoc(fileToUpload);
}
});
// console.log(result);
console.log("Doc: " + doc.uri);
}
const postDocument = () => {
const url = "http://192.168.10.107:8000/upload";
const fileUri = doc.uri;
const formData = new FormData();
formData.append('document', doc);
const options = {
method: 'POST',
body: formData,
headers: {
Accept: 'application/json',
'Content-Type': 'multipart/form-data',
},
};
console.log(formData);
fetch(url, options).catch((error) => console.log(error));
}
return (
<View>
<Button title="Select Document" onPress={pickDocument} />
<Button title="Upload" onPress={postDocument} />
</View>
)
};
export default DocPicker;
Node.js
const express = require('express')
const bodyParser = require('body-parser')
var multer = require('multer')
var upload = multer({ dest: 'uploads/' })
const app = express()
const fs = require('fs')
const http = require('http')
const port = 8000
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.get('/', (req,res) => {
res.json({
success: true
})
})
app.post('/', (req, res) => {
console.log(req.body)
res.status(200)
})
app.post('/upload', upload.single('document'), (req, res) => {
console.log(req.file, req.body)
// send a response so the client request does not hang
res.sendStatus(200)
});
app.listen(port, () => {
console.log(`Example app listening at http://localhost:${port}`)
})
Cheers!!!
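As a small extension on the server side (a sketch, not part of the original answer): with dest: 'uploads/', multer stores each upload under a random temporary name, and req.file carries everything needed to give the file back its original name.
const fs = require('fs/promises')
const path = require('path')

app.post('/upload', upload.single('document'), async (req, res) => {
  // req.file.path is the random name multer chose; originalname is the name sent by the client.
  const target = path.join('uploads', req.file.originalname)
  await fs.rename(req.file.path, target)
  res.json({ success: true, file: target, size: req.file.size, mimetype: req.file.mimetype })
})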
If the solution given by @Anandhu doesn't work, then try the React Native picker code like this:
import React, { useState } from 'react';
import { Button, View, Platform } from 'react-native';
import * as DocumentPicker from 'expo-document-picker';
import * as FileSystem from 'expo-file-system';
const DocPicker = () => {
const [ doc, setDoc ] = useState();
const pickDocument = async () => {
let result = await DocumentPicker.getDocumentAsync({
type: "*/*",
copyToCacheDirectory: true })
.then(response => {
if (response.type == 'success') {
let { name, size, uri } = response;
/* ------------------------ */
if (Platform.OS === "android" && uri[0] === "/") {
uri = `file://${uri}`;
uri = uri.replace(/%/g, "%25");
}
/* ------------------------ */
let nameParts = name.split('.');
let fileType = nameParts[nameParts.length - 1];
var fileToUpload = {
name: name,
size: size,
uri: uri,
type: "application/" + fileType
};
console.log(fileToUpload, '...............file')
setDoc(fileToUpload);
}
});
// console.log(result);
console.log("Doc: " + doc.uri);
}
const postDocument = () => {
const url = "http://192.168.10.107:8000/upload";
const fileUri = doc.uri;
const formData = new FormData();
formData.append('document', doc);
const options = {
method: 'POST',
body: formData,
headers: {
Accept: 'application/json',
'Content-Type': 'multipart/form-data',
},
};
console.log(formData);
fetch(url, options).catch((error) => console.log(error));
}
return (
<View>
<Button title="Select Document" onPress={pickDocument} />
<Button title="Upload" onPress={postDocument} />
</View>
)
};
export default DocPicker;
There is a bug in the way the path was encoded, and the file:// scheme is missing.
This bug may be fixed in the next release.
Try this: Uploading pictures, documents and videos from your phone in your app with React Native, Expo.
Here is an example, which also uses multer and express on the backend: https://github.com/expo/examples/tree/master/with-formdata-image-upload
That said, I'd recommend using FileSystem.uploadAsync instead of fetch and the background sessionType in order to support uploads while the app is backgrounded on iOS.
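A sketch of that approach, based on my reading of the expo-file-system API (the URL and the 'document' field name are the ones used in the answers above):
import * as FileSystem from 'expo-file-system';

const uploadDocument = async (doc) => {
  // Uploads the picked file as multipart/form-data, matching upload.single('document') on the server.
  const response = await FileSystem.uploadAsync('http://192.168.10.107:8000/upload', doc.uri, {
    httpMethod: 'POST',
    uploadType: FileSystem.FileSystemUploadType.MULTIPART,
    fieldName: 'document',
    mimeType: doc.type,
    // Background session so iOS can finish the upload while the app is backgrounded.
    sessionType: FileSystem.FileSystemSessionType.BACKGROUND,
  });
  console.log(response.status, response.body);
};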

Reuse TCP connection with node-fetch in node.js

I am using this function to call an external API
const fetch = require('node-fetch');
fetchdata= async function (result = {}) {
var start_time = new Date().getTime();
let response = await fetch('{API endpoint}', {
method: 'post',
body: JSON.stringify(result),
headers: { 'Content-Type': 'application/json' },
keepalive: true
});
console.log(response)
var time = { 'Response time': + (new Date().getTime() - start_time) + 'ms' };
console.log(time)
return [response.json(), time];
}
The problem is that I am not sure Node.js is reusing the TCP connection to the API every time I use this function, even though I defined the keepalive property.
Reusing the TCP connection can significantly improve response time.
Any suggestions are welcome.
As documented in https://github.com/node-fetch/node-fetch#custom-agent
const fetch = require('node-fetch');
const http = require('http');
const https = require('https');
const httpAgent = new http.Agent({ keepAlive: true });
const httpsAgent = new https.Agent({ keepAlive: true });
const agent = (_parsedURL) => _parsedURL.protocol == 'http:' ? httpAgent : httpsAgent;
const fetchdata = async function (result = {}) {
var start_time = new Date().getTime();
let response = await fetch('{API endpoint}', {
method: 'post',
body: JSON.stringify(result),
headers: { 'Content-Type': 'application/json' },
agent
});
console.log(response)
var time = { 'Response time': + (new Date().getTime() - start_time) + 'ms' };
console.log(time)
return [response.json(), time];
}
Here's a wrapper around node-fetch based on their documentation:
import nodeFetch, { RequestInfo, RequestInit, Response } from "node-fetch";
import http from "http";
import https from "https";
const httpAgent = new http.Agent({
keepAlive: true
});
const httpsAgent = new https.Agent({
keepAlive: true
});
export const fetch = (url: RequestInfo, options: RequestInit = {}): Promise<Response> => {
return nodeFetch(url, {
agent: (parsedURL) => {
if (parsedURL.protocol === "http:") {
return httpAgent;
} else {
return httpsAgent;
}
},
...options
});
};
Keep-alive is not enabled for the default agent and is not currently implemented in node-fetch directly, but you can easily specify a custom agent where you enable the keep-alive option:
const keepAliveAgent = new http.Agent({
keepAlive: true
});
fetch('{API endpoint}', {
...
agent: keepAliveAgent
});
Here is a wrapper for node-fetch that adds a keepAlive option (defaulting to true), based on Ilan Frumer's answer:
// fetch: add option keepAlive with default true
const fetch = (function getFetchWithKeepAlive() {
const node_fetch = require('node-fetch');
const http = require('http');
const https = require('https');
const httpAgent = new http.Agent({ keepAlive: true });
const httpsAgent = new https.Agent({ keepAlive: true });
return async function (url, userOptions) {
const options = { keepAlive: true };
Object.assign(options, userOptions);
if (options.keepAlive == true)
options.agent = (parsedUrl => parsedUrl.protocol == 'http:' ? httpAgent : httpsAgent);
delete options.keepAlive;
return await node_fetch(url, options);
}
})();
const response = await fetch('https://github.com/');
const response = await fetch('https://github.com/', { keepAlive: false });
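To check that the connection really is being reused (a sketch, not from the original answers; it assumes the module-level httpsAgent from one of the wrappers above is in scope): Node's http.Agent exposes its socket pools, so after two sequential requests to the same host there should be a single kept-alive socket rather than one per request.
const r1 = await fetch('https://github.com/');
await r1.text(); // consume the body so the socket is released back to the agent's pool
const r2 = await fetch('https://github.com/');
await r2.text();
// With keep-alive working, both requests travelled over the same socket,
// which now sits idle in the free-socket pool instead of being closed.
console.log(Object.keys(httpsAgent.freeSockets)); // one entry for github.com
console.log(Object.keys(httpsAgent.sockets));     // empty once nothing is in flight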

Server-Sent Events (SSE) problem with SSL/HTTPS

Hello, I am developing a web application in React that receives SSE data from an Express server behind Nginx.
SERVER.JS
const express = require('express');
const bodyParser = require('body-parser');
const cors = require('cors');
const crypto = require('crypto');
const app = express();
var lastClientRes = null;
function eventsHandler(req, res, next) {
const headers = {
'Content-Type': 'text/event-stream',
'Connection': 'keep-alive',
'Cache-Control': 'no-cache'
};
res.writeHead(200, headers);
const clientId = Date.now();
const newClient = {
id: clientId,
nonce: null,
cart: null,
res
};
requests.push(newClient);
const data = `data: ${JSON.stringify({client: clientId})}\n\n`;
res.write(data);
req.on('close', () => {
console.log(`${clientId} Connection closed`);
clients = clients.filter(c => c.id !== clientId);
});
}
function sendEventsToAll(newNest) {
clients.forEach(c => c.res.write(`data: ${JSON.stringify(newNest)}\n\n`))
}
async function addCart(req, res) {
const newCart = req.body;
requests.forEach(r => {
if(newCart.client == r.id){
var nonce = crypto.randomBytes(16).toString('base64');
r.nonce = nonce;
r.cart = newCart.cart;
r.res.write(`data: ${JSON.stringify({nonce: nonce})}\n\n`);
}
})
}
async function confirmCart(req, res){
var nonce = req.body.nonce;
var found = -1;
requests.forEach((item, i) => {
if(item.nonce == nonce){
found = i;
return;
}
});
if (found !== -1)
{
console.log("OK");
requests[found].res.write(`data: ${JSON.stringify({confirm: true})}\n\n`);
}
}
app.use(cors());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: false}));
app.post('/addCart', addCart);
app.post('/confirmCart', confirmCart);
app.get('/events', eventsHandler);
app.get('/status', (req, res) => res.json({clients: clients.length}));
const PORT = 3001;
let requests= [];
let clients = [];
let nests = [];
app.listen(PORT, () => console.log(`SSE service listening on port ${PORT}`));
INDEX.JS
import React from 'react';
import ReactDOM from 'react-dom';
import axios from 'axios';
class App extends React.Component {
constructor(props) {
super(props);
this.state = {
jsonCart: "",
cartNonce: "",
clientId: "",
cartConfirmed: false,
cart: Array(),
timerId: null,
listening: false,
cartConfermato: ""
};
}
buy(){
if (!this.state.listening) {
const events = new EventSource('https://api.myDomain.com/events');
events.onmessage = (event) => {
const parsedData = JSON.parse(event.data);
console.log(event.data);
if(parsedData.client != null)
{
this.setState({
clientId: parsedData.client,
});
this.sendCart();
}
if(parsedData.nonce != null)
this.setState({
cartNonce: parsedData.nonce,
});
if(parsedData.confirm == true)
this.setState({
cartNonce: "",
cartConfermato: "Il carrello รจ stato confermato!"
});
};
this.setState({
listening: true
});
}
}
sendCart(){
var cart = JSON.stringify(this.state.cart.slice());
this.setState({
jsonCart: cart
});
axios.post(`https://api.myDomain.com/addCart`, {client: this.state.clientId, cart: cart});
}
*** ... ***
const events = new EventSource('https://api.myDomain.com/events');
axios.post(`https://api.myDomain.com/addCart`, {client: this.state.clientId, cart: cart});
Over HTTP everything works perfectly, but if I switch to HTTPS, generating the certificates with certbot, I no longer receive "events" from the Express server.
The only errors that appear in the Chrome console are the ones below (I replaced sub.domain with my domain).
These errors appear a few minutes after the first request:
GET https://sub.domain.com/events net::ERR_INCOMPLETE_CHUNKED_ENCODING 200 (OK)
2sub.domain.com/addCart:1 POST https://sub.domain.com/addCart 504 (Gateway Time-out)
(index):1 Access to XMLHttpRequest at 'https://sub.domain.com/addCart' from origin 'https://example.com' has been blocked by CORS policy: No 'Access-Control-Allow-Origin' header is present on the requested resource.
createError.js:16 Uncaught (in promise) Error: Network Error
at e.exports (createError.js:16)
at XMLHttpRequest.p.onerror (xhr.js:83)
e.exports # createError.js:16
p.onerror # xhr.js:83
error (async)
(anonymous) # xhr.js:80
e.exports # xhr.js:12
e.exports # dispatchRequest.js:50
Promise.then (async)
u.request # Axios.js:61
r.forEach.u.<computed> # Axios.js:86
(anonymous) # bind.js:9
value # index.js:156
state.listening.EventSource.onmessage # index.js:121
index.js:114 {"client":1579885346578}
index.js:150 send
sub.domain.com/events:1 GET https://sub.domain.com/events net::ERR_INCOMPLETE_CHUNKED_ENCODING 200 (OK)
2sub.domain.com/addCart:1 POST https://sub.domain.com/addCart net::ERR_ABORTED 504 (Gateway Time-out)
As Darren Cook described here, you need to disable buffering on the server side.
Adding the "X-Accel-Buffering" header and setting it to "no" in the response fixed the issue for me:
const headers = {
'Content-Type': 'text/event-stream',
'Connection': 'keep-alive',
'Cache-Control': 'no-cache',
'X-Accel-Buffering': 'no'
};
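The 504 Gateway Time-out entries that show up a few minutes later are typically the proxy closing a connection it considers idle (Nginx's proxy_read_timeout defaults to 60 seconds), not an application error; sending a periodic SSE comment keeps the connection busy. A small sketch on top of the eventsHandler above (the 30-second interval is an arbitrary choice):
// Inside eventsHandler, after res.writeHead(200, headers):
const keepAlive = setInterval(() => {
  // SSE comment lines (starting with ":") are ignored by EventSource on the client,
  // but they keep the proxied connection from looking idle.
  res.write(':keep-alive\n\n');
}, 30000);
req.on('close', () => {
  clearInterval(keepAlive);
});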

node-fetch timeout issue / Express timeout / Lambda timeout, or something else?

The code below works fine when running locally, but getAccessToken() does not work as expected when running inside an AWS Lambda function and a POST is made to the /webhook endpoint. I am using Node 8.10, "aws-serverless-express": "^3.3.6", "express": "^4.16.4" and "node-fetch": "^2.5.0". Basically it prints the correct JWT token, but node-fetch does not return anything. Sample log:
START RequestId: c8efba59-1869-4eaa-b9d8-aa15a7507d52 Version: $LATEST
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 start getExecution
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 exectution_url:
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 https://cloudmanager.adobe.io/somevalidurl
2019-05-27T19:55:32.328Z c8efba59-1869-4eaa-b9d8-aa15a7507d52 start getAccessToken
END RequestId: c8efba59-1869-4eaa-b9d8-aa15a7507d52
REPORT RequestId: c8efba59-1869-4eaa-b9d8-aa15a7507d52 Duration: 6657.00 ms Billed Duration: 6700 ms Memory Size: 128 MB Max Memory Used: 37 MB
I made sure the Lambda timeout is 30 seconds, tried to disable the node-fetch timeout by setting it to 0, and used a timeout middleware for all the routes (app.use(timeout("30000"))) as well as for that specific webhook request. I receive the 200 pong response immediately, but the getExecution async function does not work properly.
const express = require('express')
const bodyParser = require('body-parser')
const crypto = require('crypto')
const jsrsasign = require('jsrsasign')
const fetch = require('node-fetch')
const timeout = require('connect-timeout')
const URL = require('url').URL
const URLSearchParams = require('url').URLSearchParams
//require('dotenv').config()
const app = express()
async function getAccessToken () {
console.log("start getAccessToken")
const EXPIRATION = 60 * 60 // 1 hour
const header = {
'alg': 'RS256',
'typ': 'JWT'
}
const payload = {
'exp': Math.round(new Date().getTime() / 1000) + EXPIRATION,
'iss': process.env.ORGANIZATION_ID,
'sub': process.env.TECHNICAL_ACCOUNT_ID,
'aud': `https://ims-na1.adobelogin.com/c/${process.env.API_KEY}`,
'https://ims-na1.adobelogin.com/s/ent_cloudmgr_sdk': true
}
const jwtToken = jsrsasign.jws.JWS.sign('RS256', JSON.stringify(header), JSON.stringify(payload), process.env.PRIVATE_KEY)
//console.log("jwt token:")
//console.log(jwtToken)
const body = new URLSearchParams({
client_id: process.env.API_KEY,
client_secret: process.env.CLIENT_SECRET,
jwt_token: jwtToken
})
const response = await fetch('https://ims-na1.adobelogin.com/ims/exchange/jwt', {
method: 'POST',
options: { timeout: 0},
timeout: 0,
size: 0,
body: body
})//.catch(error => {
// console.log("an error happend in fetchg")
// console.log(error)
//})
const json = await response.json()
if ((response.status !== 200) && (response.status !== 201)) {
console.error(`Invalid response status ${ response.status }.`);
throw json;
}
console.log("access_token:")
console.log(json['access_token'])
return json['access_token']
}
async function makeApiCall (accessToken, url, method) {
console.log("start make api call")
const response = await fetch(url, {
'method': method,
'headers': {
'x-gw-ims-org-id': process.env.ORGANIZATION_ID,
'x-api-key': process.env.API_KEY,
'Authorization': `Bearer ${accessToken}`
}
})
console.log("finish make api call")
const json = await response.json()
return json
}
function getLink (obj, linkType) {
return obj['_links'][linkType].href
}
async function getExecution (executionUrl) {
console.log("start getExecution")
console.log("exectution_url:")
console.log(executionUrl)
const accessToken = await getAccessToken()
console.log("access-token:")
console.log(accessToken)
const execution = await makeApiCall(accessToken, executionUrl, 'GET')
console.log(execution)
console.log("aaaa")
const program = await makeApiCall(accessToken, new URL(getLink(execution, 'http://ns.adobe.com/adobecloud/rel/program'), executionUrl))
console.log(execution)
console.log("here")
execution.program = program
return execution
}
//app.use(bodyParser.json())
app.use(bodyParser.json({
verify: (req, res, buf, encoding) => {
const signature = req.header('x-adobe-signature')
if (signature) {
const hmac = crypto.createHmac('sha256', process.env.CLIENT_SECRET)
hmac.update(buf)
const digest = hmac.digest('base64')
if (signature !== digest) {
throw new Error('x-adobe-signature HMAC check failed')
}
} else if (!process.env.DEBUG && req.method === 'POST') {
throw new Error('x-adobe-signature required')
}
}
}))
app.use(timeout("30000"))
app.post('/webhook', (req, res) => {
req.setTimeout(120000, function(){
console.log('Request has timed out.');
res.send(408);
});
res.writeHead(200, { 'Content-Type': 'application/text' })
res.end('pong')
getExecution("https://cloudmanager.adobe.io/<somevalidurl>").then(execution => {
console.log(`Execution for ${execution.program.name} started`)
})
})
module.exports = app;
//const port = process.env.PORT || 3000
//app.listen(port, () =>
// console.log(`App is listening on port ${port}.`)
//)
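A likely explanation (an assumption; the question does not confirm it): with aws-serverless-express, the Lambda invocation is considered finished as soon as the response is sent, so after res.end('pong') the execution environment is frozen and the still-pending fetch inside getExecution never completes, which matches the log stopping right after "start getAccessToken". A minimal sketch that keeps the invocation alive until the work is done, at the cost of answering the webhook later:
app.post('/webhook', async (req, res) => {
  try {
    // Finish the work before responding, so Lambda does not freeze the pending fetch.
    const execution = await getExecution("https://cloudmanager.adobe.io/<somevalidurl>")
    console.log(`Execution for ${execution.program.name} started`)
    res.status(200).send('pong')
  } catch (err) {
    console.error(err)
    res.status(500).send('error')
  }
})
If the webhook sender expects an immediate acknowledgement, the long-running part has to be handed off to something that outlives the invocation (for example a queue or a second Lambda) rather than left running in the background after the response.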
