How can I read a file from https://storage.googleapis.com without downloading it?
// Time-limited signed URL for a Shopify bulk-operation JSONL export on
// Google Cloud Storage (note the Expires and Signature query params —
// the link stops working once it expires).
let url = "https://storage.googleapis.com/shopify-tiers-assets-prod-us-east1/c84niyyf31t4bxhj7ulb2prf2r9p?GoogleAccessId=assets-us-prod%40shopify-tiers.iam.gserviceaccount.com&Expires=1611140532&Signature=bEiC5Ftxr8rCtiIfm3hyKKP%2B4EUf4TzPUAM3NPuO1jh7DkG1lr7DVQTMYd1rwC4DLMJAZfcQwca7X2Ab%2FWKNwbpfw%2FEjAIh%2B5qhsy77%2FP3BZFrAJjcpSHU6Mj2d3elv1cqTnVErPbig0TvB3caZ1P1apAVMXQP5WRHYGPcnXhV8g9%2B%2FRvQaO4myDS7qfhA89IicVI5e2pPwEMtUgeE6XQavY8ZdpzwLsO0XeAxsLBcH%2FezjIHPOShlRWN09OoqGwBWYcHOvULzA4Rt1fgtiejNI2vZo3FE806YWGW%2BbH%2BXzvFuNq7xMEDgvPIgkM9RPUNmcWSEzTo%2BHXAJ2Ph7%2FADw%3D%3D&response-content-disposition=attachment%3B+filename%3D%22bulk-102030409886.jsonl%22%3B+filename%2A%3DUTF-8%27%27bulk-102030409886.jsonl&response-content-type=application%2Fjsonl"
/**
 * Reads the resource at `url` line by line, logging each line, then
 * logs "File processed." when the stream is exhausted. Errors are
 * caught and logged rather than thrown.
 *
 * NOTE(review): fs.createReadStream expects a filesystem path — an
 * https:// URL will not work here; confirm the intended input.
 */
async function processLineByLine(url) {
  try {
    const lineReader = createInterface({
      input: createReadStream(url),
      crlfDelay: Infinity,
    });
    // Async iteration replaces the 'line' listener + once(rl, "close")
    // pair; the loop ends when the underlying stream closes.
    for await (const line of lineReader) {
      console.log(line);
    }
    console.log("File processed.");
  } catch (err) {
    console.error(err);
  }
}
You could stream the file using got and then use ndjson as the file is json in each line.
// Stream the remote JSONL file and parse it record by record: got
// streams the HTTP body, ndjson.parse splits/parses each line, and
// etl.map lets us process every parsed object.
const ndjson = require('ndjson')
const etl = require("etl");
const got = require("got");

const url = "https://storage.googleapis.com/shopify-tiers-assets-prod-us-east1/c84niyyf31t4bxhj7ulb2prf2r9p?GoogleAccessId=assets-us-prod%40shopify-tiers.iam.gserviceaccount.com&Expires=1611140532&Signature=bEiC5Ftxr8rCtiIfm3hyKKP%2B4EUf4TzPUAM3NPuO1jh7DkG1lr7DVQTMYd1rwC4DLMJAZfcQwca7X2Ab%2FWKNwbpfw%2FEjAIh%2B5qhsy77%2FP3BZFrAJjcpSHU6Mj2d3elv1cqTnVErPbig0TvB3caZ1P1apAVMXQP5WRHYGPcnXhV8g9%2B%2FRvQaO4myDS7qfhA89IicVI5e2pPwEMtUgeE6XQavY8ZdpzwLsO0XeAxsLBcH%2FezjIHPOShlRWN09OoqGwBWYcHOvULzA4Rt1fgtiejNI2vZo3FE806YWGW%2BbH%2BXzvFuNq7xMEDgvPIgkM9RPUNmcWSEzTo%2BHXAJ2Ph7%2FADw%3D%3D&response-content-disposition=attachment%3B+filename%3D%22bulk-102030409886.jsonl%22%3B+filename%2A%3DUTF-8%27%27bulk-102030409886.jsonl&response-content-type=application%2Fjsonl"

// Returns a readable stream of the HTTP response body.
function getStream() {
  return got.stream(url);
}

getStream()
  .pipe(ndjson.parse())
  .pipe(
    etl.map((record) => {
      // do something with record
      console.log(record);
      return record;
    })
  )
  .promise()
  .then(() => console.log("done"))
  .catch((err) => console.error(err));
Related
I am stuck on the problem of how to write an Excel file to a file stream and download it in the browser. I can only create a new file on the server, but this is not what I want. I don't want to create it on the server (or, if it must be created, I also want to delete it when the user downloads it in the browser).
But I can't achieve the download..
So the general idea is that I read the csv file, than parse the data.
I also read a template Excel file, which I overwrite and write to the file stream. When I call the GET API, the download starts (I will integrate it into an Angular app later).
I am using Exceljs npm package.
I don't have any errors but code is not working as I want
I uploaded whole code in github so you can easily see the code and duplicate my code.
https://github.com/zigax1/mean-generate-download-excel/tree/master
My excel-builder script:
// Express handler that is supposed to stream a generated .xlsx back to
// the client.
export const generateExcel = async (req: Request, res: Response) => {
  try {
    // NOTE(review): csvParse() resolves before its stream finishes, and
    // the ReadStream that generateFile() resolves with is never piped
    // into `res` — confirm the intended wiring.
    await csvParse();
    // Advertise an attachment download...
    res.setHeader("Content-disposition", `attachment;`);
    res.contentType(
      "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
    );
    // ...but the body actually sent is the JSON string "Success",
    // not the workbook bytes.
    return res.status(200).json("Success");
  } catch (err) {
    return res.status(500).json("False");
  }
};
/**
 * Parses ./content/TestCsv.csv into `allParsedData` and kicks off the
 * Excel build once the whole file has been read.
 *
 * Fix: the original async function contained no `await`, so it
 * resolved immediately — callers that `await csvParse()` continued
 * before any row was parsed. The stream work is now wrapped in a
 * Promise that settles on 'end'/'error'.
 */
const csvParse = async () => {
  return new Promise<void>((resolve, reject) => {
    fs.createReadStream("./content/TestCsv.csv")
      .pipe(csv.parse())
      .on("error", (error: any) => {
        console.log("Error");
        // Propagate the failure instead of resolving silently.
        reject(error);
      })
      .on("data", (row: any) => {
        // Each row arrives as a single ';'-separated string; split it
        // into columns before collecting.
        let line: any = String(row);
        line = line.split(";");
        //let parsedData = line[0];
        let parsedData = line;
        allParsedData.push(parsedData);
      })
      .on("end", (rowCount: any) => {
        let test = allParsedData.toString();
        generateFile(test);
        resolve();
      });
  });
};
/**
 * Builds ./content/Test.xlsx from the template workbook and resolves
 * with a ReadStream of the result; the temp file is unlinked once the
 * stream is fully consumed ('close').
 *
 * Fix: the original used `throw` inside .then()/unlink callbacks, which
 * produces unhandled rejections (or crashes the process) instead of
 * rejecting this Promise. Failures now flow through reject/logging.
 *
 * NOTE(review): the `data` parameter is currently unused — presumably
 * it should be written into the workbook before saving; confirm.
 */
const generateFile = (data: any) => {
  return new Promise<fs.ReadStream>((resolve, reject) => {
    const workbook = new Excel.Workbook();
    workbook.xlsx
      .readFile("./utilities/template.xlsx")
      .then(() => workbook.xlsx.writeFile("./content/Test.xlsx"))
      .then(() => {
        const stream = fs.createReadStream("./content/Test.xlsx");
        // Delete the temp file only after the consumer has read it.
        stream.on("close", () => {
          fs.unlink("./content/Test.xlsx", (error) => {
            if (error) {
              // Throwing here would crash the process; log instead.
              console.error(error);
            }
          });
        });
        resolve(stream);
      })
      .catch(reject);
  });
};
Thanks to everyone!
const csv = require('fast-csv');
const fs = require('fs');

/**
 * Writes `data` to `<path>.csv`, sends it to the client with
 * res.download once the write finishes, and deletes the temp file
 * after the download completes.
 */
function exportCSVFile(res, path, data) {
  const file = path + ".csv";
  const out = fs.createWriteStream(file);
  out.on("finish", () => {
    res.download(file, () => {
      fs.unlinkSync(file);
    });
  });
  csv.write(data, {headers: true}).pipe(out);
}
You use this export csv function for your response
I'm trying to make a quick Node script to download MP3s from an RSS feed. At the moment I have this:
// RSS-download script dependencies and the feed to fetch.
const https = require('https');
const xml2js = require('xml2js');
const parser = new xml2js.Parser();
const fs = require('fs');
// Placeholder feed URL.
const URL_TO_PARSE = 'https://some-rss.feed.xml';
// Fetch the RSS feed, buffer the whole body, then parse the XML and
// download every enclosure.
const req = https.get(URL_TO_PARSE, async (res) => {
  let xml = '';
  // Accumulate the response body chunk by chunk.
  res.on('data', (stream) => {
    xml = xml + stream;
  });
  res.on('end', () => {
    parser.parseString(xml, async (err, result) => {
      if (err) {
        console.log(err);
      } else {
        let items = result.rss.channel[0].item;
        // NOTE(review): Promise.all kicks off every download at once —
        // this is exactly the "all at the same time" behavior the
        // question complains about.
        await Promise.all(items.map(async (item) => {
          let title = item.title[0];
          let enclosure = item.enclosure[0];
          let url = enclosure.$.url;
          let filepath = `./${title}`;
          console.log(`Downloading ${title} to ${filepath}`);
          await download_audio_file(url, filepath);
        }));
      }
    });
  });
});
// Download one MP3 to disk.
// NOTE(review): this async function never awaits or returns the
// download — the bare Promise.resolve()/Promise.reject(err) calls
// create throwaway promises, so callers cannot tell when (or whether)
// the file finished. The answer below fixes this.
// NOTE(review): the snippet also appears truncated — the arrow-function
// body opened on the first line is never closed.
const download_audio_file = async (url, filepath) => {
  https.get(url, (res) => {
    const writeStream = fs.createWriteStream(filepath);
    res.pipe(writeStream);
    writeStream.on('finish', () => {
      writeStream.close();
      console.log('File downloaded');
      Promise.resolve();
    });
    writeStream.on('error', (err) => {
      console.log(err);
      Promise.reject(err);
    });
  })
But it currently tries to download each one at the same time. Is there a better way to write this to download just one at a time — and possibly also track the % progress?
I see 2 problems with your code.
The first one is that download_audio_file is not returning a promise that resolves when the file is fully downloaded.
You can fix that with this refactored version:
/**
 * Downloads `url` to `filepath`, resolving once the file has been fully
 * written and rejecting on either a write-stream or request error.
 *
 * Fix: the original attached no 'error' handler to the https request
 * itself, so connection-level failures (DNS, refused socket, TLS) never
 * reached the response callback and left the promise pending forever.
 */
const download_audio_file = async (url, filepath) => {
  const promise = new Promise((resolve, reject) => {
    const request = https.get(url, (res) => {
      const writeStream = fs.createWriteStream(filepath);
      res.pipe(writeStream);
      writeStream.on("finish", () => {
        writeStream.close();
        console.log("File downloaded");
        resolve();
      });
      writeStream.on("error", (err) => {
        console.log(err);
        reject(err);
      });
    });
    // Surface failures that happen before any response arrives.
    request.on("error", reject);
  });
  return promise;
};
Secondly, you are using Promise.all, which awaits all the promises in parallel.
You can replace that code snippet with:
// Fetch the RSS feed, parse it, then download each enclosure one at a
// time (sequential `await` inside for...of instead of Promise.all).
const req = https.get(URL_TO_PARSE, async (res) => {
  let xml = '';
  // Accumulate the response body chunk by chunk.
  res.on('data', (stream) => {
    xml = xml + stream;
  });
  res.on('end', () => {
    parser.parseString(xml, async (err, result) => {
      if (err) {
        console.log(err);
      } else {
        let items = result.rss.channel[0].item;
        for (const item of items) {
          let title = item.title[0];
          let enclosure = item.enclosure[0];
          let url = enclosure.$.url;
          let filepath = `./${title}`;
          console.log(`Downloading ${title} to ${filepath}`);
          await download_audio_file(url, filepath);
        }
      }
    });
  });
});
// Fix: surface connection-level failures (DNS, TLS, refused socket)
// that the response callback never sees.
req.on('error', (err) => console.error(err));
Notice how I replaced the Promise.all with for(const item of items)
As the title says, how do you make a direct download link with a file from mongoDB(GridFsBucket) using express?
The file should be downloadable from memory, as I don't want to save it temporarily on the server.
I have this method:
/**
 * Reads a file out of the 'filesBucket' GridFS bucket and resolves with
 * its full contents as a Buffer.
 *
 * Fixes: `Buffer.from(stream)` does not consume a stream (it produced
 * garbage, not the file bytes), and the original catch referenced
 * `stream`/`error` that were not in scope there. The stream is now
 * consumed chunk by chunk and concatenated, with 'error' rejecting.
 *
 * @param {ObjectId} fileId - GridFS file id to download.
 * @returns {Promise<Buffer>} the file contents.
 */
async function downloadFileFromDB(fileId) {
  const gridfsbucket = new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
    chunkSizeBytes: 1024,
    bucketName: 'filesBucket'
  });
  const stream = gridfsbucket.openDownloadStream(fileId);
  return new Promise((resolve, reject) => {
    const chunks = [];
    stream.on('data', (chunk) => chunks.push(chunk));
    stream.on('end', () => resolve(Buffer.concat(chunks)));
    stream.on('error', (err) => {
      console.log("Some error occurred in download:" + err);
      reject(err);
    });
  });
}
And this route:
// GET /download-file?fileId=... — streams a GridFS file to the client.
// Fix: res.download expects a filesystem *path*; since the bytes are
// already in memory, write the buffer to the response directly.
router.get('/download-file', async (req, res) => {
  const fileId = req.query.fileId;
  const ObjectFileId = new ObjectId(fileId);
  const fileBuffer = await fileFacade.downloadFileFromDB(ObjectFileId);
  res.write(fileBuffer);
  res.end();
});
But res.download wants a path and not a buffer. Also, I'm not sure I can make a buffer directly from the openDownloadStream method.
Can anyone help?
I believe you need to write the data to your res object. I accomplished this like:
// Stream the GridFS file straight into the HTTP response: each chunk is
// written as it arrives, the response is finalized on 'end', and stream
// failures become a 500.
const readStream = gridfs.openDownloadStreamByName(filename);
readStream
  .on("data", (chunk) => res.write(chunk))
  .on("end", () => {
    res.status(200).end();
    mongoClient.close();
  })
  .on("error", (err) => {
    console.log(err);
    res.status(500).send(err);
  });
So, you may just have to do:
// Fix: res.write() returns a boolean (backpressure flag), not the
// response object, so the calls cannot be chained — write, then end.
res.write(fileBuffer);
res.end();
//// Instead of doing:
// res.download(fileBuffer);
I am having trouble sending an error to the front end when a CSV file is uploaded and the numbers already exist in the database. The backend is logging an error that the primary key value already exists, but the code I have written tells the front end that the file uploaded just fine.
Code snippet:
// Upload endpoint.
// NOTE(review): CSVtoPSQL is fire-and-forget here — it returns before
// any row reaches the DB, so primary-key violations are only logged
// server-side while the client still receives a 201. The answer below
// fixes this by awaiting the returned promise.
router.post('/:program/upload', upload.single('testUpload'), (req, res, next) => {
  try {
    CSVtoPSQL(req.params.program, req.file.filename)
    return res.status(201).json({
      message: 'File Uploaded Just fine :)'
    });
  } catch (error) {
    return res.status(500).json({
      message: error
    })
  }
});
// Parse the uploaded CSV and push each row to Postgres.
// NOTE(review): this returns immediately (no promise exposed), so the
// route above cannot await completion, and per-row DB errors such as
// duplicate primary keys are only logged — never reported to the
// caller. The accepted fix below returns a Promise instead.
const CSVtoPSQL = (program, filePath) => {
  let stream = fs.createReadStream(path.resolve(__dirname, '../files', filePath));
  let csvData = [];
  let csvStream = csv
    .parse({ headers: false })
    .on('error', error => console.error(error))
    .on('data', (data) => {
      // Rows are collected as strings for later upload.
      csvData.push(data.toString());
    })
    .on('end', () => {
      csvData.forEach(item => {
        queries.upload(program, item)
          .then(() => {
            console.log('QR Code Added: ' + item);
          }).catch((err) => {
            console.log(`oopsie: ${err}`);
          });
      })
    });
  stream.pipe(csvStream);
}
Pretty confident the issue is with my poor understanding of promises.
As expected, I wasn't handling my promises correctly. I've updated the code a bit and now it responds with 2 arrays of successful uploads and errored uploads.
// Upload endpoint: awaits the full CSV→Postgres pipeline so the client
// receives the per-row success/error summary (201) or a 500 on failure.
router.post(
  '/:program/upload',
  upload.single('testUpload'),
  async (req, res, next) => {
    let payload;
    try {
      payload = await CSVtoPSQL(req.params.program, req.file.filename);
    } catch (error) {
      return res.status(500).json({
        message: error,
      });
    }
    return res.status(201).json(payload);
  }
);
/**
 * Parses the uploaded CSV and uploads each row, resolving with
 * `{ success: [...], error: [...] }` once every row has settled.
 *
 * Fixes: the results object was initialized with a `seccess` typo, so
 * `results.success.push(...)` threw a TypeError on every successful
 * upload and misfiled those rows into the error array; parse-stream
 * errors now reject the promise instead of leaving it pending.
 */
const CSVtoPSQL = (program, filePath) => {
  const stream = fs.createReadStream(path.resolve(__dirname, '../files', filePath));
  const csvData = [];
  return new Promise((resolve, reject) => {
    const results = {
      success: [],
      error: [],
    };
    const csvStream = csv
      .parse({ headers: false })
      .on('error', (error) => {
        console.error(error);
        reject(error);
      })
      .on('data', (data) => {
        csvData.push(data.toString());
      })
      .on('end', async () => {
        // Run all uploads in parallel; each row records its own outcome.
        await Promise.all(
          csvData.map(async (item) => {
            try {
              await queries.upload(program, item);
              results.success.push(item);
              console.log('QR Code Added: ' + item);
            } catch (error) {
              console.log(`oopsie: ${error}`);
              results.error.push(item);
            }
          })
        );
        resolve(results);
      });
    stream.pipe(csvStream);
  });
};
I would like to take some output and append the output to a csv
This is the code I have so far:
// Read ids from testingclean.csv, fetch each fighter via boxrec, and
// append the results to newtest.csv.
async function writeData() {
  const csv = require('csv-parser')
  const results = [];
  fs.createReadStream('C:\\Users\\User\\Documents\\testingclean.csv')
    .pipe(csv())
    .on('data',(data)=> results.push(data))
    .on('end', async () => {
      const cookieJar = await getCookieJar();
      const promises = [];
      // Kick off all lookups in parallel, then wait for all of them.
      results.forEach((data) => {
        promises.push(boxrec.getPersonById(cookieJar,data.id));
      })
      try {
        const fighters = await Promise.all(promises);
        fighters.forEach((fighter)=>{
          // NOTE(review): `boxer` is an implicit global, and appendFile
          // expects a string/Buffer — confirm fighter.output serializes
          // the way you intend.
          boxer = JSON.parse(JSON.stringify(fighter.output));
          fs.appendFile('C:\\Users\\User\\Documents\\newtest.csv',boxer, (err) => {
            if (err) console.error('Could not append data to csv');
            console.log('Data successfully appended');
          })
        });
      } catch (e) {
        console.log(e);
      }
    })
};
// NOTE(review): writeData is async — a synchronous try/catch cannot
// observe its rejections; chain .catch() on the returned promise.
try {
  writeData();
} catch (error) {
  console.log("Error in writeData: " + error);
}
However running this code does not produce the desired csv output.
I am specifically writing to csv because I have read that I cannot append to a json (would ideally want to write data to a json)
If you don't have anything written to the CSV file, you should close it explicitly at the end of your program:
var readStream = fs.createReadStream();
...
readStream.destroy();
Not sure but this should be the full code :
/**
 * Reads ids from testingclean.csv, fetches each fighter via boxrec, and
 * appends the results to newtest.csv, releasing the read stream when
 * processing is complete.
 *
 * Fixes: the original was missing the closing `)` of the 'end' handler
 * (syntax error), called `readsteam.destroy()` synchronously — which
 * aborts the read before any data arrives — and leaked `boxer` as an
 * implicit global. The stream is now destroyed inside the 'end'
 * handler, after processing.
 */
async function writeData() {
  const csv = require('csv-parser');
  const results = [];
  const readStream = fs.createReadStream('C:\\Users\\User\\Documents\\testingclean.csv');
  readStream
    .pipe(csv())
    .on('data', (data) => results.push(data))
    .on('end', async () => {
      const cookieJar = await getCookieJar();
      // Start all lookups in parallel, then wait for every one.
      const promises = results.map((data) => boxrec.getPersonById(cookieJar, data.id));
      try {
        const fighters = await Promise.all(promises);
        fighters.forEach((fighter) => {
          const boxer = JSON.parse(JSON.stringify(fighter.output));
          fs.appendFile('C:\\Users\\User\\Documents\\newtest.csv', boxer, (err) => {
            if (err) console.error('Could not append data to csv');
            console.log('Data successfully appended');
          });
        });
      } catch (e) {
        console.log(e);
      }
      // Release the source only after processing has finished;
      // destroying it synchronously would abort the read mid-flight.
      readStream.destroy();
    });
};
// Fix: writeData is async — a synchronous try/catch can never observe
// its rejection, so hook the returned promise instead.
writeData().catch((error) => {
  console.log("Error in writeData: " + error);
});