Very new to Node.js. I have a file that contains a single column of job IDs. I iterate over the job IDs one by one, sending a request to a service that returns the status of each job in JSON format. I read the JSON response, fetch a few values from it, and write them to the database. I want to write once a counter reaches the 100th job ID (I expect more than 100 job IDs in the file, and the count is dynamic).
For example, if I have 234 records, it should write 3 times: the first two writes with 100 each and the third with 34. The jobStatusMetrics array should be cleared after every write.
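In outline, what's needed is a buffered flush: accumulate results, write every time the buffer reaches 100, and flush whatever is left after the loop. A minimal sketch of the pattern, where jobIds, fetchStatus, and writeToDb are hypothetical placeholders for the input list, the service call, and the database write:
async function writeInBatches(jobIds) {
  const batch = [];
  for (const jobId of jobIds) {
    batch.push(await fetchStatus(jobId)); // fetchStatus: hypothetical service call
    if (batch.length === 100) {
      await writeToDb(batch); // writeToDb: hypothetical database write
      batch.length = 0;       // clear the array after every write
    }
  }
  if (batch.length) await writeToDb(batch); // remainder, e.g. the last 34 of 234
}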
const fileStatusprocess = require('../controller/readResultFile');
const config = require('../config/config');
const https = require('https');
const uuid = require('uuid-random');
async function jobProcesser() {
const iterator = await fileStatusprocess.processResultFile("C:\\Support\\result.csv"); // backslashes in Windows paths must be escaped
console.log('Total jobs are', iterator[0].length);
let counter = 0;
for (let i = 0; i < iterator[0].length; i++) {
counter++;
const formJobStatusURL = "https://localhost:8091/api/job/" + iterator[0][i] + "/status";
const option = {
method: 'GET' ,
headers: {
'X-Message-Created-Ts': `${new Date().toISOString()}`,
'X-Transaction-Created-Ts': `${new Date().toISOString()}`,
'X-User-Id': 'PerformanceExecuter',
'X-Client-Id': `${uuid()}`,
'X-Message-Id': `${uuid()}`,
'X-Transaction-Id': `${uuid()}`,
'Content-Type': 'application/json'
}
}
let content = '';
let reqGet = https.request(formJobStatusURL, option, function (response) {
response.on('data', function (data) {
content += data;
});
response.on('end', function () {
const jsonPayload = JSON.parse(content);
const jobStatusMetrics = {};
// Copy the fields we need from the JSON payload.
jobStatusMetrics.job_id = jsonPayload.id;
jobStatusMetrics.status = jsonPayload.status;
jobStatusMetrics.initiatedBy = jsonPayload.initiatedBy;
jobStatusMetrics.product = jsonPayload.product;
jobStatusMetrics.operation = jsonPayload.operation;
jobStatusMetrics.startTimestamp = jsonPayload.startTimestamp;
jobStatusMetrics.endTimestamp = jsonPayload.endTimestamp;
jobStatusMetrics.totalRecords = jsonPayload.file.totalRecords;
jobStatusMetrics.failedRecords = jsonPayload.file.totalFailedRecords;
jobStatusMetrics.sucessRecords = jsonPayload.file.totalSuccessRecords;
jobStatusMetrics.inprogressRecords = jsonPayload.file.totalInProgressRecords;
jobStatusMetrics.sucessStatus = jsonPayload.results.successFileAvailable;
jobStatusMetrics.failureStatus = jsonPayload.results.failureFileAvailable;
jobStatusMetrics.uploadJob = jsonPayload.actionsAvailable.dataUploadAllowed;
jobStatusMetrics.abortJob = jsonPayload.actionsAvailable.abortJob;
if (counter % 100 == 0) {
console.log('Writing to the database');
// logic for the influxdb writer
}
//console.log(jobStatusMetrics);
})
});
reqGet.end();
}
}
jobProcesser()
Rather than using the https module for HTTP requests, use another library like phin-retry. Please check the code below; I have added the batching logic. Hope this works.
const rp = require('phin-retry');
const fileStatusprocess = require('../controller/readResultFile');
const config = require('../config/config');
const uuid = require('uuid-random');
const influxDBWrite = require('../controller/influxdbWritter')
async function checkFileStatus() {
const iterator = await fileStatusprocess.processResultFile("C:\\Support\\result.csv");
const jobIds = iterator[0];
console.log(jobIds)
let jobDetailsList = [];
for (let i = 0; i < jobIds.length; i++) {
console.log('JobId', i)
const jobId = jobIds[i];
const formJobStatusURL = "https://localhost:8091/api/job/" + jobId + "/status";
const jobDetails = await rp.get({
url: formJobStatusURL,
headers: {
'X-Message-Created-Ts': `${new Date().toISOString()}`,
'X-Transaction-Created-Ts': `${new Date().toISOString()}`,
'X-User-Id': 'PerformanceExecuter',
'X-Client-Id': `${uuid()}`,
'X-Message-Id': `${uuid()}`,
'X-Transaction-Id': `${uuid()}`,
'Content-Type': 'application/json'
}
});
jobDetailsList.push(jobDetails);
if ((i + 1) % 5 === 0) { // batch of 5 for demonstration; use 100 for your case
await saveResultsToInfux(jobDetailsList);
jobDetailsList = [];
}
}
if (jobDetailsList.length) await saveResultsToInfux(jobDetailsList); // flush the remainder
}
async function saveResultsToInfux(jobDetailsList) {
const metrics = [];
for (let i = 0; i < jobDetailsList.length; i++) {
console.log('Metrics', i)
const jobDetail = jobDetailsList[i];
const tags = {
Id: jobDetail.id,
Status: jobDetail.status.overall,
Product: jobDetail.product,
Entity: jobDetail.entity,
Operation: jobDetail.operation
}
const fields = {
startTimestamp:jobDetail.startTimestamp,
endTimestamp:jobDetail.endTimestamp,
totalRecords:jobDetail.file.totalRecords,
totalFailedRecords:jobDetail.file.totalFailedRecords,
totalSuccessRecords:jobDetail.file.totalSuccessRecords,
totalInProgressRecords:jobDetail.file.totalInProgressRecords,
successFileAvailable:jobDetail.results.successFileAvailable,
failureFileAvailable:jobDetail.results.failureFileAvailable,
dataUploadAllowed:jobDetail.actionsAvailable.dataUploadAllowed,
abortJob:jobDetail.actionsAvailable.abortJob,
}
metrics.push({
measurement: 'BulkData',
tags,
fields
});
}
await influxDBWrite.dbWritter(metrics);
}
checkFileStatus()
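The ../controller/influxdbWritter module is not shown. Assuming it wraps the node influx package, a minimal sketch that accepts the { measurement, tags, fields } points built above could look like this (host and database names are placeholders):
const Influx = require('influx');

const influx = new Influx.InfluxDB({
  host: 'localhost', // placeholder
  database: 'jobs'   // placeholder
});

// metrics is an array of { measurement, tags, fields } points
module.exports.dbWritter = (metrics) => influx.writePoints(metrics);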
I am trying to connect to the Hubstaff API. I have set up my authentication using Auth0 with Express as my backend. To get info about the logged-in user, I need to send the token object via the API.
Through some research I have gotten to this point:
const {
Issuer,
TokenSet
} = require('openid-client');
const fs = require('fs');
const jose = require('jose');
// constants
const ISSUER_EXPIRE_DURATION = 7 * 24 * 60 * 60; // 1 week
const ACCESS_TOKEN_EXPIRATION_FUZZ = 30; // 30 seconds
const ISSUER_DISCOVERY_URL = 'https://account.hubstaff.com';
// API URl with trailing slash
const API_BASE_URL = 'https://api.hubstaff.com/';
let state = {
api_base_url: API_BASE_URL,
issuer_url: ISSUER_DISCOVERY_URL,
issuer: {}, // The issuer discovered configuration
issuer_expires_at: 0,
token: {},
};
let client;
function loadState() {
return fs.readFileSync('./configState.json', 'utf8');
}
function saveState() {
fs.writeFileSync('./configState.json', JSON.stringify(state, null, 2), 'utf8');
console.log('State saved');
}
function unixTimeNow() {
return Date.now() / 1000;
}
async function checkToken() {
//console.log('state.token.access_token', state.token.access_token);
if (!state.token.access_token || state.token.expires_at < (unixTimeNow() + ACCESS_TOKEN_EXPIRATION_FUZZ)) {
// console.log('Refresh token');
state.token = await client?.refresh(state.token);
// console.log('Token refreshed');
saveState();
}
}
async function initialize() {
console.log('API Hubstaff API');
let data = loadState();
data = JSON.parse(data);
if (data.issuer) {
state.issuer = new Issuer(data.issuer);
state.issuer_expires_at = data.issuer_expires_at;
}
if (data.token) {
state.token = new TokenSet(data.token);
}
if (data.issuer_url) {
state.issuer_url = data.issuer_url;
}
if (data.api_base_url) {
state.api_base_url = data.api_base_url;
}
if (!state.issuer_expires_at || state.issuer_expires_at < unixTimeNow()) {
console.log('Discovering');
state.issuer = await Issuer.discover(state.issuer_url);
state.issuer_expires_at = unixTimeNow() + ISSUER_EXPIRE_DURATION;
console.log(state.issuer);
}
client = new state.issuer.Client({
// For personal access token we can use PAT/PAT.
// This is only needed because the library requires a client_id, whereas the API endpoint does not require it
client_id: 'Z',
client_secret: 'J',
});
saveState();
console.log('API Hubstaff initialized');
}
async function request(url, options) {
await checkToken();
let fullUrl = state.api_base_url + url;
return client?.requestResource(fullUrl, state.token, options);
}
function tokenDetails() {
let ret = {};
if (state.token.access_token) {
ret.access_token = jose.JWT.decode(state.token.access_token);
}
if (state.token.refresh_token) {
ret.refresh_token = jose.JWT.decode(state.token.refresh_token);
}
return ret;
}
module.exports = {
initialize,
checkToken,
request,
tokenDetails
};
// Controller
const { response } = require('express')
const api = require('../util/hubstaffConnect.util');
const testConnected = require('../util/testhubstaff.util');
const usersGet = async (req, res = response) => {
await api.initialize();
const response = await api.request('v2/organizations',{
method: 'GET',
json: true,
});
console.log('response', response);
if(response != null){
const body = JSON.parse(response);
res.json({
organizations: body.organizations || []
});
}
};
However, when I go to the address localhost:8080/oauth/api/organizations I run into an error.
I do realise this is regarding missing tokens, which won't let me get the user's information.
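For reference, the refresh flow above only works if configState.json already holds a token. A minimal first-run sketch under that assumption (the require path follows the controller above; the seeded refresh token is a placeholder, not a real value):
const api = require('../util/hubstaffConnect.util'); // path assumed from the controller

(async () => {
  // configState.json must already contain { token: { refresh_token: '...' } }
  await api.initialize();          // discovers the issuer and builds the client
  await api.checkToken();          // refreshes into a valid access_token and saves state
  console.log(api.tokenDetails()); // decoded token payloads
})();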
I have a Node.js project in which I collect information from a JSON endpoint over HTTP using the node-fetch module.
This is the way I have found to use node-fetch with async; if it is possible to improve this function, suggestions are welcome, as I am new to this module.
This is my code where I read the information:
const fetch = require('node-fetch');
(async () => {
try {
const res = await fetch('https://jsonplaceholder.typicode.com/users');
const headerDate = res.headers && res.headers.get('date') ? res.headers.get('date') : 'no response date';
const users = await res.json();
for (const user of users) {
console.log(`Got user with id: ${user.id}, name: ${user.name}`);
}
} catch (err) {
console.log(err.message); //can be console.error
}
})();
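One small improvement worth considering: node-fetch does not reject on HTTP error statuses, so a res.ok check surfaces 4xx/5xx responses instead of letting res.json() fail later. A sketch of the same function with that guard:
const fetch = require('node-fetch');

(async () => {
  try {
    const res = await fetch('https://jsonplaceholder.typicode.com/users');
    if (!res.ok) throw new Error(`HTTP ${res.status} ${res.statusText}`); // surface HTTP errors early
    const users = await res.json();
    for (const user of users) {
      console.log(`Got user with id: ${user.id}, name: ${user.name}`);
    }
  } catch (err) {
    console.error(err.message);
  }
})();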
My problem: how can I extract all the information to CSV files with a line limit? That is, if the CSV has a limit of 10 lines (the limit can vary) and the JSON information occupies 30 lines, 3 CSVs would be created to store all the information. I have added the json-2-csv module, but I don't know how to use it, or whether this module is necessary or something else is better.
const { Parser } = require("json2csv");
const fetch = require("node-fetch");
const fs = require("fs");
const csvLimit = 3;
const getJson = async () => {
const response = await fetch("https://jsonplaceholder.typicode.com/users");
const responseJson = await response.json();
return responseJson;
};
const jsonToCsv = async () => {
const json = await getJson();
const json2csvParser = new Parser();
let fileIndex = 0;
// Take csvLimit records at a time and write each chunk to its own file.
for (let j = 0; j < json.length; j += csvLimit) {
const chunk = json.slice(j, j + csvLimit);
const csv = json2csvParser.parse(chunk);
fs.writeFileSync(`file${fileIndex}.csv`, csv);
fileIndex++;
}
};
jsonToCsv();
If you want only specific fields in the CSV file, you can pass the fields as a parameter in this way:
const json2csvParser = new Parser({fields})
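For example, with the sample users API above, the fields array could be (field names taken from that API's records):
const fields = ['id', 'name', 'email']; // keep only these columns
const json2csvParser = new Parser({ fields });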
I used the flat package to extract the field names from the keys of the first record of the JSON and then used the json-2-csv package to convert from JSON to CSV.
const converter = require("json-2-csv");
const fetch = require("node-fetch");
const fs = require("fs");
const flatten = require('flat');
const maxRecords = 3;
const getJson = async () => {
const response = await fetch("https://jsonplaceholder.typicode.com/users");
const responseJson = await response.json();
return responseJson;
};
const convertToCSV = async () => {
const json = await getJson();
let keys = Object.keys(flatten(json[0]));
let options = {
keys: keys
};
converter.json2csv(json, json2csvCallback, options);
};
let json2csvCallback = function (err, csv) {
if (err) throw err;
const lines = csv.split('\n');
const headers = lines.slice(0, 1);
// Write maxRecords data rows per file, repeating the header row in each file.
for (let i = 1; i < lines.length; i += maxRecords) {
let dataOut = headers.concat(lines.slice(i, i + maxRecords)).join('\n');
let id = Math.floor(i / maxRecords) + 1;
fs.writeFileSync('data' + id + '.csv', dataOut);
}
};
convertToCSV();
Here's one of the files opened in Excel.
I am creating an API for listing trip data with image and PDF base URLs.
Everything is working fine, but I cannot access the final result array data_to_send outside the for loop.
app.js
app.get("/getChallanList/:userId/:role", (req, res) => {
const userData = req.params;
let site_source = "";
let site_destination = "";
var site_from_name = "";
const data_to_send = [];
if (userData.role == "D") {
db.select("trip", "*", `driver_id = '${req.params.userId}'`, (data) => {
for (let i = 0; i < data.data.length; i++) {
site_source = data.data[i].site_from;
site_destination = data.data[i].site_to;
db.select(
"site",
"*",
`id in ('${site_source}','${site_destination}')`,
(data_site) => {
data.data[i].site_from = data_site.data[0].name;
data.data[i].site_to = data_site.data[1].name;
if (data.data[i].truck_challan_pdf != "") {
data.data[i].truck_challan_pdf =
base_url + "truckchallan/" + data.data[i].truck_challan_pdf;
}
if (data.data[i].driver_challan_pdf != "") {
data.data[i].driver_challan_pdf =
base_url + "driverchallan/" + data.data[i].driver_challan_pdf;
}
if (data.data[i].preparer_img != "") {
data.data[i].preparer_img = base_url + data.data[i].preparer_img;
}
if (data.data[i].driver_img != "") {
data.data[i].driver_img = base_url + data.data[i].driver_img;
}
data_to_send.push(data.data);
// console.log(data_to_send); // working
}
);
}
console.log(data_to_send); // empty
});
}
});
db.select
let select = (table, column, condition, callback) => {
try {
let sql = "SELECT " + column + " FROM " + table + " WHERE " + condition;
conn.query(sql, (err, results) => {
if (err) {
let data = {
status: 0,
data: sql,
message: "Something went wrong!",
};
callback(data);
} else {
let data = {
status: 1,
data: results,
message: "Success",
};
callback(data);
}
});
} catch (err) {
let data = {
status: 0,
data: err,
message: "In catch",
};
callback(data);
}
};
async/await
app.get("/getChallanList/:userId/:role", async (req, res) => {
const userData = req.params;
let site_source = "";
let site_destination = "";
var site_from_name = "";
const data_to_send = [];
if (userData.role == "D") {
await db.select(
"trip",
"*",
`driver_id = '${req.params.userId}'`,
async (data) => {
// const data_to_send_ = [];
for (let i = 0; i < data.data.length; i++) {
site_source = data.data[i].site_from;
site_destination = data.data[i].site_to;
await db.select(
"site",
"*",
`id in ('${site_source}','${site_destination}')`,
(data_site) => {
data.data[i].site_from = data_site.data[0].name;
data.data[i].site_to = data_site.data[1].name;
if (data.data[i].truck_challan_pdf != "") {
data.data[i].truck_challan_pdf =
base_url + "truckchallan/" + data.data[i].truck_challan_pdf;
}
if (data.data[i].driver_challan_pdf != "") {
data.data[i].driver_challan_pdf =
base_url + "driverchallan/" + data.data[i].driver_challan_pdf;
}
if (data.data[i].preparer_img != "") {
data.data[i].preparer_img =
base_url + data.data[i].preparer_img;
}
if (data.data[i].driver_img != "") {
data.data[i].driver_img = base_url + data.data[i].driver_img;
}
data_to_send.push(data.data);
// console.log(data_to_send); // working
}
);
// data_to_send_.push(data_to_send);
}
console.log(data_to_send); // empty
}
);
}
});
This is because of the asynchronous behavior of Node.js, so you have to plan things accordingly, i.e.:
console.log(1)
db.select(
"trip",
"*",
`driver_id = '${req.params.userId}'`,
async (data) => {
console.log(2)
})
console.log(3)
The output of the above code would be 1, then 3, and then 2. This is how Node.js works: it does not wait for I/O calls, i.e. the DB query in your case.
Please check how promises work in Node.js for more details.
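For example, the callback-style db.select from the question can be wrapped in a Promise so it becomes awaitable; a minimal sketch (db.select always invokes the callback in both its success and error branches, so resolve is enough here):
const selectAsync = (table, column, condition) =>
  new Promise((resolve) => db.select(table, column, condition, resolve));

// usage: const data = await selectAsync("trip", "*", `driver_id = '${userId}'`);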
Here is how you can accomplish your task:
const challanList = (userData) => {
return new Promise((resolve, reject) => {
const data_to_send = [];
db.select("trip", "*", `driver_id = '${req.params.userId}'`, data => {
for (let i = 0; i < data.data.length; i++) {
const site_source = data.data[i].site_from;
const site_destination = data.data[i].site_to;
db.select("site", "*", `id in ('${site_source}','${site_destination}')`, data_site => {
data.data[i].site_from = data_site.data[0].name;
data.data[i].site_to = data_site.data[1].name;
if (data.data[i].truck_challan_pdf != "") {
data.data[i].truck_challan_pdf = base_url + "truckchallan/" + data.data[i].truck_challan_pdf;
}
if (data.data[i].driver_challan_pdf != "") {
data.data[i].driver_challan_pdf = base_url + "driverchallan/" + data.data[i].driver_challan_pdf;
}
if (data.data[i].preparer_img != "") {
data.data[i].preparer_img = base_url + data.data[i].preparer_img;
}
if (data.data[i].driver_img != "") {
data.data[i].driver_img = base_url + data.data[i].driver_img;
}
data_to_send.push(data.data);
// console.log(data_to_send); // working
// Resolve only once every inner select has called back.
if (data_to_send.length === data.data.length) {
resolve(data_to_send);
}
});
}
if (data.data.length === 0) resolve(data_to_send);
});
});
};
app.get("/getChallanList/:userId/:role", async (req, res) => {
const userData = req.params;
const challanListResult = await challanList(userData);
console.log(challanListResult);
res.json(challanListResult);
});
Without knowing what database or ORM you are using it is difficult to answer, but my suspicion is that db.select is an asynchronous method, i.e. it is returning a Promise. If so, the second console log is still seeing the "old" data_to_send.
Try adding an await in front of the first db.select call. (Don't forget the async in front of the callback in the second argument of app.get.)
Your database is asynchronous, so console.log(data_to_send) gets called before the query finishes executing. Try adding async before (req, res) in line 1, then await before db.select.
This works for me
app.get("/getChallanList/:userId/:role", async (req, res) => {
const userData = req.params;
let site_source = "";
let site_destination = "";
var site_from_name = "";
const data_to_send = [];
if (userData.role == "D") {
const data = await db.query(
`SELECT * FROM trip WHERE driver_id = '${req.params.userId}'`
);
// console.log(data.length);
// const data_to_send_ = [];
for (let i = 0; i < data.length; i++) {
site_source = data[i].site_from;
site_destination = data[i].site_to;
// cons
const site_data = await db.query(
`SELECT * FROM site WHERE id in ('${site_source}','${site_destination}')`
);
// console.log(site_data);
// site_data is already awaited above, so no callback-style select is needed here.
data[i].site_from = site_data[0].name;
data[i].site_to = site_data[1].name;
if (data[i].truck_challan_pdf != "") {
data[i].truck_challan_pdf =
base_url + "truckchallan/" + data[i].truck_challan_pdf;
}
if (data[i].driver_challan_pdf != "") {
data[i].driver_challan_pdf =
base_url + "driverchallan/" + data[i].driver_challan_pdf;
}
if (data[i].preparer_img != "") {
data[i].preparer_img = base_url + data[i].preparer_img;
}
if (data[i].driver_img != "") {
data[i].driver_img = base_url + data[i].driver_img;
}
data_to_send.push(data[i]);
// data_to_send_.push(data_to_send);
}
// console.log(data_to_send);
// console.log(data_to_send);
res.send({ success: 1, data: data, message: "" });
}
});
I am trying to implement Twitter login in my MERN application. Following Twitter's tutorials, I understand that all requests should be signed with an OAuth header. If I use Postman, I enter my credentials (consumer key, consumer secret) in the Authorization tab and the call works. The thing is that Postman transforms the consumer secret into an oauth_signature before sending the call. Now I want to do this workflow in Node.js. All tutorials online use complicated Passport strategies and the request module, which is deprecated. I understand that to generate the oauth_signature one would have to generate an oauth_nonce and then:
Percent-encode every key and value that will be signed.
Sort the list of parameters alphabetically by encoded key.
For each key/value pair:
Append the encoded key to the output string.
Append the ‘=’ character to the output string.
Append the encoded value to the output string.
If there are more key/value pairs remaining, append a ‘&’ character to the output string.
I am sure doing all this would be reinventing the wheel, and I am pretty sure there is a module that does this step specifically, without all the Passport authentication (which is already done in my app); I simply need to sign my Twitter requests like Postman does, nothing more.
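One module that handles exactly this signing step is oauth-1.0a; a minimal sketch under the assumption that it fits this flow (the request-token endpoint is the one from the code below; the rest follows the library's documented usage):
const OAuth = require('oauth-1.0a');
const crypto = require('crypto');

const oauth = OAuth({
  consumer: { key: process.env.TWITTER_API_KEY, secret: process.env.TWITTER_API_SECRET },
  signature_method: 'HMAC-SHA1',
  hash_function(baseString, key) {
    // HMAC-SHA1 over the signature base string, base64-encoded
    return crypto.createHmac('sha1', key).update(baseString).digest('base64');
  }
});

const requestData = { url: 'https://api.twitter.com/oauth/request_token', method: 'POST' };
// Produces an { Authorization: 'OAuth ...' } header with oauth_signature computed for you.
const headers = oauth.toHeader(oauth.authorize(requestData));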
I tried the following, but it seems I am still doing something wrong:
var axios = require('axios');
const jsSHA = require('jssha/sha1');
//Am i using the right library??
const callBackUL = 'https%3A%2F%2F127.0.0.1%3A3000%2Flogin';
var oauth_timestamp = Math.round(new Date().getTime() / 1000.0);
const nonceObj = new jsSHA('SHA-1', 'TEXT', { encoding: 'UTF8' });
nonceObj.update(Math.round(new Date().getTime() / 1000.0));
const oauth_nonce = nonceObj.getHash('HEX');
const endpoint = 'https://api.twitter.com/oauth/request_token';
const oauth_consumer_key = process.env.TWITTER_API_KEY;
const oauth_consumer_secret = process.env.TWITTER_API_SECRET;
var requiredParameters = {
oauth_consumer_key,
oauth_nonce,
oauth_signature_method: 'HMAC-SHA1',
oauth_timestamp,
oauth_version: '1.0'
};
const sortString = requiredParameters => {
var base_signature_string = 'POST&' + encodeURIComponent(endpoint) + '&';
var requiredParameterKeys = Object.keys(requiredParameters);
for (var i = 0; i < requiredParameterKeys.length; i++) {
if (i == requiredParameterKeys.length - 1) {
base_signature_string += encodeURIComponent(
requiredParameterKeys[i] +
'=' +
requiredParameters[requiredParameterKeys[i]]
);
} else {
base_signature_string += encodeURIComponent(
requiredParameterKeys[i] +
'=' +
requiredParameters[requiredParameterKeys[i]] +
'&'
);
}
}
return base_signature_string;
};
const sorted_string = sortString(requiredParameters);
console.log('Sorted string:', sorted_string);
const signing = (signature_string, consumer_secret) => {
let hmac;
if (
typeof signature_string !== 'undefined' &&
signature_string.length > 0
) {
//console.log('String OK');
if (
typeof consumer_secret !== 'undefined' &&
consumer_secret.length > 0
) {
// console.log('Secret Ok');
const secret = consumer_secret + '&';
var shaObj = new jsSHA('SHA-1', 'TEXT', {
hmacKey: { value: secret, format: 'TEXT' }
});
shaObj.update(signature_string);
hmac = encodeURIComponent(shaObj.getHash('B64'));
//var hmac_sha1 = encodeURIComponent(hmac);
}
}
return hmac;
};
const signed = signing(sorted_string, oauth_consumer_secret);
console.log(signed);
var data = {};
var config = {
method: 'post',
url: endpoint,
headers: {
Authorization: `OAuth oauth_consumer_key=${process.env.TWITTER_API_KEY},oauth_signature_method="HMAC-SHA1",oauth_timestamp=${oauth_timestamp},oauth_nonce=${oauth_nonce},oauth_version="1.0",oauth_callback=${callBackUL},oauth_consumer_secret=${signed}`,
'Content-Type': 'application/json'
},
data: data
};
try {
const response = await axios(config);
console.log(JSON.stringify(response.data));
} catch (err) {
console.log(err.response.data);
}
next();
});
SOLVED. The fixes compared to the attempt above: oauth_callback is included in the signed parameters, the consumer secret is percent-encoded before signing, and the Authorization header sends oauth_signature instead of the consumer secret.
var axios = require('axios');
const jsSHA = require('jssha/sha1');
const callBackUL = 'https%3A%2F%2F127.0.0.1%3A3000%2Flogin';
var oauth_timestamp = Math.round(new Date().getTime() / 1000.0);
const nonceObj = new jsSHA('SHA-1', 'TEXT', { encoding: 'UTF8' });
nonceObj.update(Math.round(new Date().getTime() / 1000.0));
const oauth_nonce = nonceObj.getHash('HEX');
const endpoint = 'https://api.twitter.com/oauth/request_token';
const oauth_consumer_key = process.env.TWITTER_API_KEY;
const oauth_consumer_secret = process.env.TWITTER_API_SECRET;
var requiredParameters = {
oauth_callback: callBackUL,
oauth_consumer_key,
oauth_nonce,
oauth_signature_method: 'HMAC-SHA1',
oauth_timestamp,
oauth_version: '1.0'
};
const sortString = requiredParameters => {
var base_signature_string = 'POST&' + encodeURIComponent(endpoint) + '&';
var requiredParameterKeys = Object.keys(requiredParameters);
for (var i = 0; i < requiredParameterKeys.length; i++) {
if (i == requiredParameterKeys.length - 1) {
base_signature_string += encodeURIComponent(
requiredParameterKeys[i] +
'=' +
requiredParameters[requiredParameterKeys[i]]
);
} else {
base_signature_string += encodeURIComponent(
requiredParameterKeys[i] +
'=' +
requiredParameters[requiredParameterKeys[i]] +
'&'
);
}
}
return base_signature_string;
};
const sorted_string = sortString(requiredParameters);
console.log('Sorted string:', sorted_string);
const signing = (signature_string, consumer_secret) => {
let hmac;
if (
typeof signature_string !== 'undefined' &&
signature_string.length > 0
) {
//console.log('String OK');
if (
typeof consumer_secret !== 'undefined' &&
consumer_secret.length > 0
) {
// console.log('Secret Ok');
const secret = encodeURIComponent(consumer_secret) + '&';
var shaObj = new jsSHA('SHA-1', 'TEXT', {
hmacKey: { value: secret, format: 'TEXT' }
});
shaObj.update(signature_string);
hmac = encodeURIComponent(shaObj.getHash('B64'));
}
}
return hmac;
};
const signed = signing(sorted_string, oauth_consumer_secret);
console.log(signed);
var data = {};
var config = {
method: 'post',
url: endpoint,
headers: {
Authorization: `OAuth oauth_consumer_key=${process.env.TWITTER_API_KEY},oauth_nonce=${oauth_nonce},oauth_signature=${signed},oauth_signature_method="HMAC-SHA1",oauth_timestamp=${oauth_timestamp},oauth_version="1.0",oauth_callback=${callBackUL}`,
'Content-Type': 'application/json'
},
data: data
};
try {
const response = await axios(config);
console.log(JSON.stringify(response.data));
} catch (err) {
console.log(err.response.data);
}
next();
I have to write a JSON response into a column of a TSV file. How can I do that?
I am using the following code. Please help me find a solution; I have checked it, but it's not working.
// npm init -y generates the package.json file
var request = require('request'); // required for the request() call below
var unProcessedItems = [];
var data = loadData('./Alabama_Pre_Final.tsv'); // loadData is defined further down
async function X(i) {
if (data[i] && data[i][7]) {
console.log(data[i][7]);
function address(address_details) {
request({
url: 'https://us-extract.api.smartystreets.com/?auth-id=xxx&auth-token=xxx',
method: 'POST',
headers: {
'content-type': 'application/json'
},
body: address_details,
},
(error, response, body) => {
if (!error && response.statusCode == 200) {
var res = JSON.parse(body);
let objectArray = [];
if (res.addresses[0].verified) {
objectArray.push(res.addresses[0].api_output[0].delivery_line_1, res.addresses[0].api_output[0].last_line, res.addresses[0].api_output[0].delivery_point_barcode)
}
var address_data = objectArray.join([separator = ','])
console.log(address_data)
}
});
}
address("1000 Greenhill Blvd NW, Fort Payne, 35967, AL, USA");
const data = 'Column 19\n1\t2\t3\n4\t5\t6';
require('fs').writeFileSync('./Alabama_Pre_Final.tsv', data);
// i++;
// X(i);
}
}
}
I've looked at your answer above; I think it is easy enough to modify so that it works as you expect, which I've done below. The other functions should be left unchanged. The function X processes each row and returns a record, which we append to the records array. After processing is complete, we write the records array to the CSV file.
async function loadData(filePath) {
if (fs.existsSync(filePath)) {
var tsvData = fs.readFileSync(filePath, 'utf-8');
var rowCount = 0;
var scenarios = [];
parse_tsv(tsvData, (row) => {
rowCount++;
if (rowCount > 1) {
scenarios.push(row);
}
});
return scenarios;
} else {
console.log("loadData: Returning empty..")
return [];
}
}
// Process a row of TSV data
function X(row) {
return new Promise((resolve, reject) => {
if (row && row[7]) {
console.log(row[7]);
request({
url: 'https://us-extract.api.smartystreets.com/?auth-id=xxx&auth-token=xxx',
method: 'POST',
headers: { 'content-type': 'application/json' },
body: row[7],
},
(error, response, body) => {
if (!error && response.statusCode == 200) {
var res = JSON.parse(body);
let objectArray = [];
if (res.addresses[0].verified) {
objectArray.push(res.addresses[0].api_output[0].delivery_line_1, res.addresses[0].api_output[0].components.city_name,
res.addresses[0].api_output[0].components.zipcode, res.addresses[0].api_output[0].components.state_abbreviation)
}
var address_data = objectArray.join(',');
resolve({ name: address_data.replace(/['"]+/g, '') });
} else if (error) {
reject(error);
} else {
reject( { statusCode: response.statusCode });
}
});
} else {
resolve(null); // settle the promise even when the row has no address column
}
});
}
async function processData() {
let MAX_RECORDS = 5; // Change as appropriate
var data = await loadData('./Alabama_Pre_Final.tsv');
if (data.length > 0) {
unProcessedItems = [];
let records = [];
for(let row of data) {
// Process a row of data.
let record = await X(row);
records.push(record);
if (records.length === MAX_RECORDS) break;
}
console.log(records);
csvWriter.writeRecords(records)
.then(() => console.log('The CSV file was written successfully'));
} else {
console.log("No Data");
}
}
processData();
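Note that csvWriter is not defined in the snippet above; a possible setup with the csv-writer package, matching the { name } records produced by X (the output path and header title are assumptions):
const createCsvWriter = require('csv-writer').createObjectCsvWriter;

const csvWriter = createCsvWriter({
  path: 'out.csv',                        // assumed output path
  header: [{ id: 'name', title: 'NAME' }] // matches the { name: ... } records from X
});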
const fs = require('fs');
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
var rp = require('request-promise');
var dataToWrite;
var http = require("http");
var request = require('request');
function loadData(filePath) {
if (fs.existsSync(filePath)) {
var tsvData = fs.readFileSync(filePath, 'utf-8');
var rowCount = 0;
var scenarios = [];
parse_tsv(tsvData, (row) => {
rowCount++;
if (rowCount > 1) {
scenarios.push(row);
}
});
return scenarios;
} else {
return [];
}
}
function parse_tsv(s, f) {
var ix_end = 0;
for (var ix = 0; ix < s.length; ix = ix_end + 1) {
ix_end = s.indexOf('\n', ix);
if (ix_end == -1) {
ix_end = s.length;
}
//var row = s.substring(ix, ix_end - 1).split('\t');
var row = s.substring(ix, ix_end).split('\t');
f(row);
}
}
var unProcessedItems = [];
var data = loadData('./Alabama_Pre_Final.tsv');
var records = [];
async function X(i) {
if (data[i] && data[i][7]) {
console.log(data[i][7]);
var options = {
method: 'POST',
url: 'https://us-extract.api.smartystreets.com/?auth-id=xxx&auth-token=xxx',
headers: {
'content-type': 'application/json'
},
body: data[i][7],
json: true // Automatically stringifies the body to JSON
};
rp(options)
.then(function (parsedBody) {
// POST succeeded...
var res = parsedBody;
let objectArray = [];
if (res.addresses.length) {
if (res.addresses[0].verified) {
objectArray.push(res.addresses[0].api_output[0].delivery_line_1, res.addresses[0].api_output[0].components.city_name,
res.addresses[0].api_output[0].components.zipcode, res.addresses[0].api_output[0].components.state_abbreviation)
}
var address_data = objectArray.join(',');
console.log(address_data);
records.push(address_data.replace(/['"]+/g, ''));
// records holds plain strings, so join them directly when writing.
let output = records.join("\n");
i++;
if (i <= 7) {
X(i);
}
else {
fs.writeFileSync('out.tsv', output);
console.log('The TSV file was written successfully');
// return callback(records);
}
}
});
}
// console.log(records);
}
if (data.length > 0) {
unProcessedItems = [];
X(0);
} else {
console.log("No Data");
}
I've updated this answer based on your last revisions; I hope this helps you:
function loadData(filePath) {
if (fs.existsSync(filePath)) {
var tsvData = fs.readFileSync(filePath, 'utf-8');
var rowCount = 0;
var scenarios = [];
parse_tsv(tsvData, (row) => {
rowCount++;
if (rowCount > 1) {
scenarios.push(row);
}
});
return scenarios;
} else {
return [];
}
}
function parse_tsv(s, f) {
var ix_end = 0;
for (var ix = 0; ix < s.length; ix = ix_end + 1) {
ix_end = s.indexOf('\n', ix);
if (ix_end == -1) {
ix_end = s.length;
}
var row = s.substring(ix, ix_end).split('\t');
f(row);
}
}
var unProcessedItems = [];
var data = loadData('./Alabama_Pre_Final.tsv');
var records = [];
async function X(i) {
if (data[i] && data[i][7]) {
console.log(data[i][7]);
var options = {
method: 'POST',
url: 'https://us-extract.api.smartystreets.com/?auth-id=xxx&auth-token=xxx',
headers: {
'content-type': 'application/json'
},
body: data[i][7],
json: true // Automatically stringifies the body to JSON
};
rp(options)
.then(async function (parsedBody) {
// POST succeeded...
var res = parsedBody;
let objectArray = [];
if (res.addresses.length) {
if (res.addresses[0].verified) {
objectArray.push(res.addresses[0].api_output[0].delivery_line_1, res.addresses[0].api_output[0].components.city_name,
res.addresses[0].api_output[0].components.zipcode, res.addresses[0].api_output[0].components.state_abbreviation)
}
var address_data = objectArray.join(',');
console.log("rp.then -> address_data:",address_data);
data[i][7] = address_data.replace(/['"]+/g, '');
records.push(data[i].join("\t"));
i++;
if (i <= 7) {
console.log("Looking up address #" + i);
X(i);
} else {
const output = records.join("\n");
// Remove the _test when you are happy with the result.
fs.writeFileSync('./Alabama_Pre_Final_test.tsv', output);
console.log('The TSV file was written successfully');
}
}
});
}
}
if (data.length > 0) {
unProcessedItems = [];
X(0);
} else {
console.log("No Data");
}