I want to implement a system in which, if the user visits domain/crypt/text (the page where the text gets encrypted), the text is available to read on the main page (domain). I've found that the problem is that the main page's data gets reset when I change the URL to check it, since the page is loaded again, but I can't find any solution to this.
const Encryption = require('node_triple_des');
const sha1 = require('sha1');
const express = require('express');
const app = express();
const key = "Chiave_scelta";
var coded_key = 'Coded_Key';
var text_crypted = 'NaN';
var text_decrypted = 'NaN';
async function encryptKey() {
coded_key = sha1(key); // hash the chosen key with SHA-1
}
async function encryptAll(msg) {
await encryptKey();
var result = await Encryption.encrypt(coded_key,msg);
return result;
}
async function decryptAll(msg) {
await encryptKey();
var result = await Encryption.decrypt(coded_key, msg);
return result;
}
app.get('/', function (req, res) {
res.send('Generated key: ' + coded_key + '<br>' +
'Last crypted text: ' + text_crypted + '<br>' +
'Last decrypted text: ' + text_decrypted + '<br>'
)
})
app.get('/crypt/:code', async function (req, res) {
text_crypted = await encryptAll(req.params.code); // encrypt the message into the variable shown on '/'
res.send('Crypted text: ' + text_crypted)
})
app.get('/decrypt/:code', async function (req, res) {
text_decrypted = await decryptAll(req.params.code); // decrypt the message into the variable shown on '/'
res.send('Decrypted text: ' + text_decrypted)
})
app.listen(3000)
I fixed it by importing the 'local-storage' module (which I had previously installed).
const ls = require('local-storage');
To set values I used the following syntax:
ls.set(name,value);
To get values I used the following syntax:
ls.get(name);
When I paste the endpoint URL with the query directly inside axios.get(), it responds correctly and I can see the returned JSON object (i.e. axios.get(`http://localhost:3000/api/products/product_search?secretKey=${secret}&id=${blabla}`)). However, if I call the URL with the summonerByNameUrl method, it crashes when I make a request. What is the problem in my code?
Crash report:
...
data: '<!DOCTYPE html>\n' +
'<html lang="en">\n' +
'<head>\n' +
'<meta charset="utf-8">\n' +
'<title>Error</title>\n' +
'</head>\n' +
'<body>\n' +
'<pre>Cannot GET /[object%20Object]</pre>\n' +
'</body>\n' +
'</html>\n'
},
isAxiosError: true,
toJSON: [Function: toJSON]
Code:
config.js
const summonerByNameUrl = (summonerName) => `${URL(hidden)}${summonerName}`;
module.exports = {
summonerByNameUrl
}
summoner.js
const config = require('../config');
const axios = require('axios');
const getSummonerByName = async (summonerName) => {
const res = await axios.get(config.summonerByNameUrl(summonerName));
return res.data;
}
const summonerParser = async (req, res) => {
if(!req.query.secretKey)
return res.status(403).json({error: 'missing secret key.'})
let data = await getSummonerByName(req.query)
return res.status(200).json(data);
}
module.exports = {
getSummonerByName,
summonerParser
}
products.js
var express = require('express');
var axios = require('axios')
var router = express.Router();
const summoner = require('../services/summoner');
router.get('/product_search', summoner.summonerParser)
module.exports = router;
app.js
...
app.use('/api/products', productsRouter);
...
You're calling your function with getSummonerByName(req.query), where it is clear from the lines just before that req.query is an object and not a string. When objects are used in a string context (like your URL), they become "[object Object]", hence the error.
Taking some guesses here, but it seems you want to forward some of the req.query information to the Axios call as query params. Try this instead:
const PRODUCT_SEARCH_URL = "http://localhost:3000/api/products/product_search"
const getSummonerByName = async ({ secretKey, id }) => {
const { data } = await axios.get(PRODUCT_SEARCH_URL, {
params: { secretKey, id }
})
return data
}
If you've got a helper function that returns the base URL (i.e. http://localhost:3000/api/products/product_search), then by all means use that instead of a string literal in the Axios call.
req.query is an object, not a string.
You can map the req.query object into a query string, something like this:
Object.keys(req.query).map(key => {
return key + '=' + req.query[key]
}).join('&')
This code returns a string like 'id=1&name=test', which you can pass to the endpoint.
I have an export button on my front-end that, when clicked, sends a POST to our Express server to log the button click. This uses the route app.post('/usagereport'). What I want to be able to do is capture the route the user was at when they clicked export. However, since the code that sends the POST request is its own route, it only ever returns that route's name when I try something like req.route.
I'm using API Gateway + Lambda + AWS-Serverless-Express.
I was thinking I could store something like req.session.previousRoute in req.session to capture the last route the user loaded, and then read it in the access-log code. However, I was unsure whether this approach would work on Lambda, or whether there is simply a better way to handle it.
Here is my server.js (trimmed down)
// create the server and setup routes
const app = express();
const mysql = require("mysql");
// AWS-Serverless-Express https://github.com/awslabs/aws-serverless-express
const awsServerlessExpressMiddleware = require("aws-serverless-express/middleware");
app.use(awsServerlessExpressMiddleware.eventContext());
//Setup paths to database connection pools
const nawfprojectsDB = require("../lib/naWfProjectsDb.js");
const queries = require("./queries.js");
const accessLog = require("../lib/accessLog.js");
//Setup a timestamp for logging
const timestamp = new Date().toString();
// S3 Data Mitigation is needed when a data set exceeds 5 MB in size.
// This is a restriction of Lambda itself (they say 6 MB, but we want to make sure we don't ever hit the limit)
const s3DataMitigation = require("../lib/s3DataMitigation.js");
let environment = process.env.NODE_ENV;
app.get("/wg_data", (req, res, callback) => {
const dataSet = "wg_data";
nawfprojectsDB.query(queries.wg_data, (err, result) => {
if (err) {
console.log(err);
}
s3DataMitigation(dataSet, res, callback, result);
console.log(
timestamp,
"Returned " + result.length + " rows from " + dataSet
);
});
accessLog({ dataSet, req });
});
// Usage report every time the export button is clicked
app.post("/usagereport", (req) => {
// BUG: dataSet is referenced before it is declared; what I actually want
// here is the route the user was on when they clicked export.
const currentPath = dataSet;
const dataSet = "Data Exported: " + currentPath;
console.log(timestamp, "Data exported");
accessLog({ dataSet, req });
});
module.exports = app;
accessLog.js
let nawfprojectsDB = require("./naWfProjectsDb.js");
let queries = require("../routes/queries.js");
let environment = process.env.NODE_ENV;
//Insert data into access_logs table when usageLog is called
const accessLog = ({ dataSet, req }) => {
// We only want to log access when in beta, gamma, or prod. Not in development.
if (environment === "development") {
console.log("No access log as we are in dev");
} else {
// req.apiGateway comes from AWS-Serverless-Express - https://github.com/awslabs/aws-serverless-express
const user = req.apiGateway.event.requestContext.authorizer.principalId;
let sqlData = [dataSet, user, environment];
// Run the log_access query using the sqlData above
nawfprojectsDB.query(queries.log_access, sqlData, (err) => {
if (err) {
console.error("MySQL query error: " + err);
}
console.log("Access log added for: ", user, " at data set: ", dataSet);
});
}
};
module.exports = accessLog;
Solved my own question.
The way I solved this was by using express-session. I set req.session.previousRoute within each of my routes; I can then access it in my /usagereport route.
const { v4: uuidv4 } = require("uuid");
const express = require("express");
const cookieParser = require("cookie-parser");
const session = require("express-session");
const randomString = uuidv4();
let sessionOptions = {
  secret: randomString, // express-session expects secret at the top level, not inside cookie
  cookie: {
    maxAge: 269999999999,
  },
  saveUninitialized: true,
  resave: true,
};
// create the server and setup routes
const app = express();
// Add express-session Middleware - https://www.npmjs.com/package/express-session
app.use(cookieParser(randomString)); // Need cookieParser to properly parse our random string into the type of value expected by session
app.use(session(sessionOptions));
// AWS-Serverless-Express - https://github.com/awslabs/aws-serverless-express
const awsServerlessExpressMiddleware = require("aws-serverless-express/middleware");
app.use(awsServerlessExpressMiddleware.eventContext());
app.get("/wg_data", (req, res, callback) => {
const dataSet = "wg_data";
const action = "Accessed";
req.session.previousRoute = dataSet; // This is where we set the previousRoute in session
nawfprojectsDB.query(queries.wg_data, (err, result) => {
if (err) {
console.log(err);
}
s3DataMitigation(dataSet, res, callback, result);
console.log(
timestamp,
"Returned " + result.length + " rows from " + dataSet
);
});
accessLog({ dataSet, action, req });
});
// Usage report every time the export button is clicked
app.post("/usagereport", (req, res) => {
// Here we grab the previousRoute set in session to see the true place the data was exported from
const action = "Exported";
const previousRoute = req.session.previousRoute; // Now when usagereport is triggered, it knows the previous route from the session and uses that here.
const dataSet = previousRoute;
console.log(timestamp, "Data exported from ", previousRoute);
accessLog({ dataSet, action, req });
res.sendStatus(204); // acknowledge the POST so the client isn't left hanging
});
So I am making a kind of API middleware for my company that grabs information from the NOAA API and stores it in my database (it does more than that, but that's a separate part). I have set it up so that it works: it gets the information and stores it in my SQL database perfectly. The issue is that the information I get is keyed by zip code, and one request returns the information for one zip code. I need to be able to "loop" through a list of zip codes one at a time and store the information in the database. I am not sure how to get this to work properly; I have tested a couple of approaches without success, so if someone can point me in the right direction it would be appreciated.
Sorry in advance that my code is not cleaned up.
Everything below apiRequest.end() has little bearing on the question; I keep it for context.
let mysql = require('mysql');
let config = require('./config.js');
var https = require("https");
var express = require("express");
var app = express();
const port = 3000;
var fs= require('fs');
var csv = require('fast-csv');
//last test
//array will replace this zip variable
let zip = '90012';
api(zip);
function api(zips){
//All of the parts for building the get requests url
app.get("/", function(req, response) {
var apiKey = "gPaEVizejLlbRVbXexyWtXYkfkWkoBhd";
let webapi = 'https://www.ncdc.noaa.gov/cdo-web/api/v2/data?';
let datasetid="datasetid=GHCND";
let datatypeid="&datatypeid=TMAX";
let location="&locationid=ZIP:";
const zipcode = zips;
let startdate="&startdate=2019-01-01";
let enddate="&enddate=2020-01-01";
let units = "&units=standard";
let limit="&limit=1000";
let url = webapi + datasetid + datatypeid + location + zipcode + startdate + enddate + units + limit;
var options = {
port: 443,
method: "GET",
headers: {
"token": apiKey
}
};
let data = "";
//request to grab from NOAA api
let apiRequest = https.request(url, options, function(res) {
console.log("Connected");
//grabing all data
res.on("data", chunk => {
data += chunk;
});
res.on("end", () => {
console.log("data collected");
//Format JSON data
response.send(JSON.parse(data));
var getData = JSON.parse(data);
if(isEmpty(getData)){
emptyCorrect();
}
dataFormat(getData);
});
});
apiRequest.end();
});
//fix the date format; more formatting can be added here if needed
function dataFormat(formData){
for(x in formData.results){
let date = formData.results[x].date;
formData.results[x].date = date.slice(0,10);
}
jsonToSQL(formData.results);
}
//test function is going to be used for inserting the zip
function test(){
var content = "";
console.log("your test worked see ***************");
return "92507";
}
//function to add grabbed JSON data into the SQL database
function jsonToSQL(datafin){
var zipcode = zips;
let connection = mysql.createConnection(config);
// insert statement
let stmt = `INSERT INTO test1(ZIPCODE,DATE, TEMP) VALUES ? `;
let values = [];
for(let x in datafin){
values.push([zipcode,datafin[x].date,datafin[x].value]);
}
// execute the insert statement
connection.query(stmt, [values], (err, results, fields) => {
if (err) {
return console.error("error");
}
// get inserted rows
console.log('Row inserted:' + results.affectedRows);
});
// close the database connection
connection.end();
}
function emptyCorrect(){
console.log("Eror correction");
var zipcode = zips;
let connection = mysql.createConnection(config);
// insert statement
let stmt = `INSERT INTO test1(ZIPCODE,DATE, TEMP) VALUES ? `;
let valueE = [];
valueE.push([zipcode,"0","No Data"]);
// execute the insert statement
connection.query(stmt, [valueE], (err, results, fields) => {
if (err) {
return console.error("error");
}
// get inserted rows
console.log('Row inserted:' + results.affectedRows);
});
// close the database connection
connection.end();
}
function isEmpty(obj) {
for(var key in obj) {
if(obj.hasOwnProperty(key))
return false;
}
return true;
}
app.listen(port, () => console.log(`Example app listening on port ${port}!`))
}
As I understand it, your problem can roughly be summarized as "how to loop through asynchronous evaluations in Node.js".
There are some options here. I would recommend wrapping each call to the NOAA API in a promise and then chaining those promises. This can be done as follows:
app.get('/', async function(req, response) {
var apiKey = 'some value';
let webapi = 'https://www.ncdc.noaa.gov/cdo-web/api/v2/data?';
let datasetid = 'datasetid=GHCND';
let datatypeid = '&datatypeid=TMAX';
let location = '&locationid=ZIP:';
let startdate = '&startdate=2019-01-01';
let enddate = '&enddate=2020-01-01';
let units = '&units=standard';
let limit = '&limit=1000';
var options = {
port: 443,
method: 'GET',
headers: {
token: apiKey
}
};
const zipCodes = ['90012', '90013']; // Place a call to your function for fetching zip codes here
let datas = [];
let prom = Promise.resolve();
zipCodes.forEach(zipcode => {
prom = prom.then(() =>
new Promise((resolve, reject) => {
let url =
webapi +
datasetid +
datatypeid +
location +
zipcode +
startdate +
enddate +
units +
limit;
let apiRequest = https.request(url, options, function(res) {
console.log('Connected');
let data = '';
res.on('data', chunk => {
data += chunk;
});
res.on('end', () => {
console.log('data collected for zip ' + zipcode);
datas.push(data);
resolve();
});
});
apiRequest.end();
})
);
});
prom.then(() => {
// All requests have now been handled sequentially
response.send(/* You'll need to figure out what to do here */);
});
});
An alternative is to use something like the async library (https://github.com/caolan/async) for running callbacks sequentially. It describes itself as:
Async is a utility module which provides straight-forward, powerful functions for working with asynchronous JavaScript.
See e.g. Node.js: How do you handle callbacks in a loop? for a similar problem (not about calling an API, but about dealing with asynchronous functions in a loop).
The first API request successfully sends a response. However, when I make another GET request, I get the error "write after end".
When I turn off .pipe(addThing), consecutive calls do work.
Is the through2-map function somehow ending the connection or response?
const fs = require('fs');
const express = require("express");
const route = express.Router();
const map = require("through2-map")
const csv2json = require("csv2json");
const firstThreeDigits = new RegExp(/[\s]*\d{3}/);
route.get("/", function(_, res){
fs.createReadStream('./data/data.csv')
.pipe(addThing)
.pipe(csv2json({
separator: ';'
}))
.pipe(res)
});
const addThing = map(function (chunk) {
const chunkArr = chunk.toString().split('\n');
const chunkWithNumber = chunkArr.map((line, index) => {
if (index === 0) {
return 'Number;' + line
}
return firstThreeDigits.exec(line) + ';' + line
})
return chunkWithNumber.toString().split(',').join("\n")
});
module.exports = route;
Not sure if it's relevant, but the csv:
./data/data.csv
Thing;Latitude;Longitude
FOO-123 Banana;52.09789;3.113278
BAR-456 Monocle;52.034599;5.11235
After reading "Error: write after end" with csv-write-stream, I noticed that the problem might be that the addThing variable is not created anew on each call: the same stream instance stays allocated in memory and is reused after it has already ended.
So the solution:
fs.createReadStream('./data/cameras-defb.csv')
.pipe(map(addThing))
.pipe(csv2json({
separator: ';'
}))
.pipe(res);
function addThing(chunk) {
const chunkArr = chunk.toString().split('\n');
const chunkWithNumber = chunkArr.map((line, index) => {
if (index === 0) {
return 'Number;' + line
}
return firstThreeDigits.exec(line) + ';' + line
})
return chunkWithNumber.toString().split(',').join("\n")
}
I have a Firebase function that takes a request from the frontend with a file name for a video stored in Firebase Storage; I then apply ffmpeg and extract the video into many frames. In the end, I upload all the frames to Firebase Storage.
Everything works well and I am able to get all the frames. However, there is a problem with uploading them: sometimes I can upload all the frames successfully but the function keeps running until it times out, and sometimes I can only upload the first frame. I am new to Node.js; I guess there is a problem with a return or a promise (I don't quite understand what to return and how to handle the promises).
Also, I would like to write the data of each frame to the database. Where should I put that part of the code?
exports.extractFrame = functions.https.onRequest(function (req, res) {
const name = req.query.fileName;
const username = name.substr(0, name.length - 4);
const sessionId = 'video-org';
const framePath = 'frame-org';
const sourceBucketName = 'this is my bucket name';
const sourceBucket = gcs.bucket(sourceBucketName);
const temDir = os.tmpdir();
return sourceBucket.file(sessionId + '/' + name).download({
destination: temDir + '/' + name
}
).then(() => {
console.log('extract frames');
return spawn(ffmpegPath, ['-i', temDir + '/' + name, temDir + '/' +
username + '%d.png']);
}).then(() => {
const frames = fs.readdirSync(temDir);
console.log(frames);
for (let index in frames) {
if (index != 0) {
console.log('uploading');
sourceBucket.upload(temDir + '/' + frames[index], {destination:
framePath + '/' + frames[index]});
}
}
}).then(() => {
res.send('I am done');
});
});
Thanks so much for the help!!
Collect all the promises from all of the calls to sourceBucket.upload() into an array, then use Promise.all() to wait for the entire set to resolve before sending the response:
const promises = [];
for (let index in frames) {
if (index != 0) {
console.log('uploading');
const p = sourceBucket.upload(temDir + '/' + frames[index], {destination:
framePath + '/' + frames[index]});
promises.push(p);
}
}
return Promise.all(promises);
Also, you don't return a promise from an HTTP type function. Just sending the response with res.send() will end the function. This is mentioned in the documentation.
I wrote a gist on this a while back:
// set it up
firebase.storage().ref().constructor.prototype.putFiles = function(files) {
var ref = this;
return Promise.all(files.map(function(file) {
return ref.child(file.name).put(file);
}));
}
// use it!
firebase.storage().ref().putFiles(files).then(function(metadatas) {
// Get an array of file metadata
}).catch(function(error) {
// If any task fails, handle this
});