Write a file keeping tabs and EOL - node.js

I am trying to write the content of a string in a file in node.js
I have some raml files and I am able to join them. If I print the variable in console, I see it well parsed but as soon as I save in on a file, the file just contains one single line:
// Parse a RAML file and persist the parsed result as pretty-printed JSON.
// Fixes vs. the original:
//  - JSON.stringify(data, null, 2) pretty-prints the output; the default
//    stringify emits one single line, which is why the file had no breaks.
//  - .replace() needs a regex with the global flag; the string '/\n' never
//    matched anything, and /\n/ without /g would only replace the first newline.
var raml = require('raml-parser');
var fs = require('fs');
var path = require('path');
var os = require('os');

raml.loadFile('schema.raml').then(function(data) {
  console.log(data);

  // NOTE(review): this deletes the *input* file before writing the output —
  // confirm that is intentional.
  var filePath = 'schema.raml';
  fs.unlinkSync(filePath);

  // Pretty-print with 2-space indentation, then normalise every line ending
  // to the platform default (CRLF on Windows, LF elsewhere).
  var output = JSON.stringify(data, null, 2).replace(/\n/g, os.EOL);

  fs.writeFile('./new.raml', output, function(err) {
    if (err) {
      return console.log(err);
    }
    console.log('The file was saved!');
  });
}, function(error) {
  console.log('Error parsing: ' + error);
});
I added a replace of EOL to change all the "\n" in the file. If I delete that, the file will contain a literal "\n" at the end of each line.
On console, this is the output:
{ title: 'RAML Flattener',
baseUri: 'http://github.com/joeledwards/node-flat-raml',
version: '1',
mediaType: 'application/json',
protocols: [ 'HTTP' ],
resources:
[ { relativeUri: '/base',
methods: [Object],
resources: [Object],
relativeUriPathSegments: [Object] } ] }

data is a Javascript object; how that is being displayed when you console.log() it doesn't have much to do with how it will end up in the file you are writing.
The problem is that you are using JSON.stringify(), which, by default, will not pretty-print the output string.
Instead, try this:
JSON.stringify(data, null, 2)
This will make your output look like this:
{
"title": "RAML Flattener",
"baseUri": "http://github.com/joeledwards/node-flat-raml",
"version": "1",
"mediaType": "application/json",
"protocols": [
"HTTP"
],
"resources": [
{
"relativeUri": "/base",
"methods": { ... },
"resources": { ... },
"relativeUriPathSegments": { ... }
}
]
}
You may or may not need to call .replace() on its output. If you do, use this (the one you're using isn't valid):
.replace(/\n/g, os.EOL) — note the g flag, so that every newline is replaced, not just the first one.

Related

How to remove line with expression in node.js via replace-in-file library

I have a task, I need to remove a piece of code through the replace-in-files library. let's say there is a code like 'import smth from "smth"'. it is necessary to delete the ENTIRE LINE with the expression and even the remaining empty space.
1.deleting the entire line
2.deleting an empty space in place of this line
I managed to make a text transformation through the library, but I don't know how to work with the code.
// Configuration for replace-in-files: strip every occurrence of "smth"
// from the project's .js files, skipping node_modules and binary files.
const fileSelection = {
  files: './replace/**/*.js',
  optionsForFiles: {
    ignore: ['**/node_modules/**'],
  },
  shouldSkipBinaryFiles: true,
};

const replacement = {
  from: /smth/g, // every match in each file
  to: '',        // delete the matched text
};

const reporting = {
  saveOldFile: false,
  encoding: 'utf8',
  onlyFindPathsWithoutReplace: false,
  returnPaths: true,
  returnCountOfMatchesByPaths: true,
};

const options = { ...fileSelection, ...replacement, ...reporting };
We have a project with a lot of folders: f1/, f2/, f3/, f4/ ...
and in these folders we have files like smth.js.
In smth.js we have lines like
'import library from "library" '
I need to remove these lines without leaving an empty space.
WE HAVE
'import library from "library" '
//code
NEED
//code
code i have right now
const replaceInFile = require('replace-in-files');

// Match the ENTIRE line containing the import, including its trailing
// newline, so that removing it leaves no blank line behind.
//   ^.*       — anything on the same line around the import text
//   \r?\n?    — consume the line's own line break (CRLF or LF)
//   m flag    — ^ anchors at every line start; g — replace all occurrences
const regFrom = /^.*import \{ someFunc, \} from '~utils\/index\.js.*\r?\n?/gm;

const options = {
  files: './replace/**/*.js',
  from: regFrom,
  // Replace with nothing — the original used ' ', which left a stray space.
  to: '',
  optionsForFiles: {
    ignore: ['**/node_modules/**'],
  },
  saveOldFile: false,
  encoding: 'utf8',
  shouldSkipBinaryFiles: true,
  onlyFindPathsWithoutReplace: false,
  returnPaths: true,
  returnCountOfMatchesByPaths: true,
};

// Run the replacement across all matched files and report the outcome.
const modifyFiles = async () => {
  try {
    const {
      changedFiles,
      countOfMatchesByPaths,
      replaceInFilesOptions,
    } = await replaceInFile(options);
  } catch (e) {
    // Bug fix: the original logged an undefined variable `error`,
    // which would itself throw; the caught exception is `e`.
    console.log('Error occurred:', e);
  }
};

modifyFiles();
this code works but a i have problem with empty space

CLIENT_ERROR when uploading Video To LinkedIn API

I followed the LinkedIn docs Here to upload a Video to their Vector Assets API
The video uploaded, but When I checked the status of the Video I keep getting CLIENT_ERROR under recipes.
This is the code I used:
// Register an upload with the LinkedIn Assets API (registerUpload action),
// then PUT the media bytes to the URL LinkedIn returns.
// NOTE(review): relies on `headers` (auth headers) and `module.exports.isVideo`
// being defined elsewhere in this module — confirm both are in scope.
let fileName = 'redacted.mp4'; //This sample file is about 24MB
// NOTE(review): streaming the file is suspected to be the cause of the
// CLIENT_ERROR for videos; the accepted fix below reads the file into a
// buffer with fs.readFile instead.
let fileStream = fs.createReadStream(fileName);
let organisationId = 'redacted';
let isVideo = module.exports.isVideo(fileName);
let fileStats = fs.statSync(fileName);
let fileSizeBytes = fileStats.size;
let mediaUploadInitAction = `https://api.linkedin.com/v2/assets?action=registerUpload`;
// Body of the registerUpload request: the owning organization URN and the
// recipe (feedshare-video or feedshare-image) selected by file type.
let registerUploadRequest = {
"registerUploadRequest": {
"owner": `urn:li:organization:${organisationId}`,
"recipes": [
`urn:li:digitalmediaRecipe:feedshare-${isVideo ? 'video' : 'image'}`
],
"serviceRelationships": [
{
"identifier": "urn:li:userGeneratedContent",
"relationshipType": "OWNER"
}
]
}
};
// Images are uploaded synchronously; videos over ~50 MB (52428800 bytes)
// must request the multipart mechanism and declare their size.
if (!isVideo) {
registerUploadRequest['registerUploadRequest']["supportedUploadMechanism"] = [
"SYNCHRONOUS_UPLOAD"
]
} else {
if (fileSizeBytes > 52428800) { //I was told if the file length is more than 52428800, I should use multipart upload mechanism. My current file size is less than that though.
registerUploadRequest['registerUploadRequest']["supportedUploadMechanism"] = [
"MULTIPART_UPLOAD"
];
registerUploadRequest['registerUploadRequest']['fileSize'] = fileSizeBytes;
}
}
// Ask LinkedIn for an upload URL and per-upload headers.
let { data: mediaUploadRegisterResponse } = await axios.post(mediaUploadInitAction, registerUploadRequest, { headers: headers });
let uploadRegisterData = mediaUploadRegisterResponse.value;
let uploadMechanism = uploadRegisterData['uploadMechanism'];
// Single-request mechanism (present for small/synchronous uploads).
let singleUploadRequest = uploadMechanism['com.linkedin.digitalmedia.uploading.MediaUploadHttpRequest'];
let uploadUrl = singleUploadRequest ? singleUploadRequest['uploadUrl'] : '';
let uploadHeaders = singleUploadRequest ? singleUploadRequest['headers'] : '';
// Multipart alternative returned for large files; unused below.
let multipartUpload = uploadMechanism['com.linkedin.digitalmedia.uploading.MultipartUpload'];
if (singleUploadRequest) { //This always work flawlessly for image uploads but not for video uploads
// PUT the raw bytes; LinkedIn-provided headers are merged with our own.
await axios.put(uploadUrl, fileStream, {
headers: {
'Accept': '*/*',
...uploadHeaders,
...headers,
'Content-Type': 'application/octet-stream'
},
maxContentLength: Infinity,
maxBodyLength: Infinity,
});
// Best-effort cleanup of the local file; errors intentionally ignored.
fs.unlink(fileName, () => { });
}
The above code works flawlessly for Image Uploads. But for Video Uploads, I keep getting back CLIENT_ERROR.
This is the full status message I keep getting back:
{
"recipes": [
{
"recipe": "urn:li:digitalmediaRecipe:feedshare-video",
"status": "CLIENT_ERROR"
}
],
"serviceRelationships": [
{
"relationshipType": "OWNER",
"identifier": "urn:li:userGeneratedContent"
}
],
"mediaTypeFamily": "VIDEO",
"created": 1641646213127,
"id": "C4adhdhahahhdKJZw",
"lastModified": 1641646215307,
"status": "ALLOWED"
}
Please, what can I do to resolve this?
Thank you
For anyone who might experience similar issue in the future. After brute-forcing all possible options, here is what worked for me.
If the video file is less than 52MB don't use fs.createReadStream. Use the code below instead:
// Read the whole video into memory and upload it in a single PUT.
// (Streaming with fs.createReadStream caused CLIENT_ERROR for videos.)
fs.readFile(fileName, async (err, data) => {
  if (err) {
    console.log(`Error Reading LinkedIn Video File ${fileName}`, err);
    return;
  }
  try {
    await axios.put(uploadUrl, data, {
      headers: singleFileUploadHeaders,
      maxContentLength: Infinity,
      maxBodyLength: Infinity,
    });
  } catch (uploadErr) {
    // Bug fix: a rejected axios.put inside this async callback previously
    // became an unhandled promise rejection; surface the failure instead.
    console.log(`Error uploading LinkedIn Video File ${fileName}`, uploadErr);
  }
});
And the video finally uploaded without any CLIENT_ERROR again!!!

NodeJS get Google Sheet list

I have a google sheet with a list (dropdown, or data validation - list from ranges, as Google Sheets call it), like so:
Image of sheet
Imagine that in the list I have 4 values to select from. My goal is not only to get all table values, but also all values that constitute the list ["Beer","Wine","Rum","Martini"].
I've tried 2 different ways to retrieve the info and the list:
a) With sheets.spreadsheets.values.get, I get the table values in a digestible way, but not the content of the dropdown. Instead, the cell comes in as blank ("") [Comment: on Apps Script, you would get this information]
b) With sheets.spreadsheets.getByDataFilter, I get much more than I need and in a horrible format. However, I do not get the dropdown content as an array (as I'd want), but rather as a refence: (userEnteredValue: "=Input!$F$5:$F$7")
The question is, how do I get only the table, including the dropdown content as an array? I know it is possible and easy to do in Google Apps Script (I have it implemented), but not on Node.
Below the code as a reference for other programmers.
// Fetch values from a Google Sheet using a service-account JWT.
var {google} = require("googleapis");
// Service-account credentials downloaded from Google Cloud Console.
let privatekey = require("./client_secret.json");
// configure a JWT auth client
let jwtClient = new google.auth.JWT(
privatekey.client_email,
null,
privatekey.private_key,
['https://www.googleapis.com/auth/spreadsheets',
]);
//authenticate request (fire-and-forget: later calls do not wait for this)
jwtClient.authorize(function (err, tokens) {
if (err) {
console.log(err);
return;
} else {
console.log("Successfully connected!");
}
});
//Google Sheets API
let spreadsheetId = '<SPREADSHEET ID>';
// A1-notation range: sheet "Input", cells A1:B4.
let sheetName = 'Input!A1:B4'
let sheets = google.sheets('v4');
// Express-style handler returning the sheet contents as JSON.
// NOTE(review): BOTH calls below run on every request and each one calls
// res.json(), so the second response will fail with "headers already sent".
// Keep only one of the two branches (see the "// OR" marker).
exports.fetch = (req, res) => {
// Returns cell values only — dropdown (data validation) content comes back
// as an empty string here.
sheets.spreadsheets.values.get({
auth: jwtClient,
spreadsheetId: spreadsheetId,
range: sheetName,
}, function (err, response) {
if (err) {
// NOTE(review): on error no HTTP response is sent; the request hangs.
console.log('The API returned an error: ' + err);
} else {
res.json(response);
}
});
// OR
// Returns full grid data, including dataValidation metadata, but in a
// much more verbose format.
sheets.spreadsheets.getByDataFilter({
auth: jwtClient,
spreadsheetId: spreadsheetId,
"includeGridData": true,
}, function (err, response) {
if (err) {
console.log('The API returned an error: ' + err);
} else {
res.json(response);
}
});
}
What you are interested in is dataValidation
In order to retrieve it, you can use the method spreadsheets.get, setting the parameter fields to sheets/data/rowData/values/dataValidation
If your data validation is set as List of items, the response will look like:
{
"sheets": [
{
"data": [
{
"rowData": [
{
"values": [
{
"dataValidation": {
"condition": {
"type": "ONE_OF_LIST",
"values": [
{
"userEnteredValue": "Beer"
},
{
"userEnteredValue": "Wine"
},
{
"userEnteredValue": "Martini"
}
]
},
"showCustomUi": true
}
}
]
}
]
}
]
}
]
}
If your data validation is set as List form a range the response will look like:
{
"sheets": [
{
"data": [
{
"rowData": [
{
"values": [
{
"dataValidation": {
"condition": {
"type": "ONE_OF_RANGE",
"values": [
{
"userEnteredValue": "=Sheet1!$B$1:$B$3"
}
]
},
"showCustomUi": true
}
}
]
}
]
}
]
}
]
}
In the latter case you would need to call subsequently the method spreadsheets.values.get on =Sheet1!$B$1:$B$3 - that is the range returned as userEnteredValue within the object values nested within the object dataValidation.

Unable to parse xml to json properly

I'm trying to parse a xml to json & i'm using xml2js in nodejs.
Below is my code
var chai = require('chai');
var chaiHttp = require('chai-http');
var request = chai.request;
var should = chai.should();
var expect = chai.expect;
var assert = chai.assert;
var supertest = require('supertest');
var fs = require('fs');
var xmlLocation = "./configdata/xmlDoc.xml";
var xml2js = require('xml2js');
var parser = new xml2js.Parser();
//Plugin for HTTP, etc.
chai.use(chaiHttp);
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0';
//xmlFile = JSON.parse(fs.readFileSync(xmlData, 'utf8'));
describe("Test : ", function () {
  // Reads the XML fixture, parses it to JSON and writes the result to disk.
  it("convert xml to json", function (done) {
    // Bug fix: `r` was an implicit global; declare it locally.
    var r = fs.readFileSync(xmlLocation, 'UTF-8');
    parser.parseString(r, function (err, parsedData) {
      // Bug fix: `throw err` inside an async callback does not fail a mocha
      // test; report the failure through done(err) instead.
      if (err) return done(err);
      // Bug fix: done() was originally called synchronously, before parsing
      // and the file write completed, so the test ended prematurely and
      // write errors were silently ignored. Complete the test only after
      // the write finishes, propagating any write error.
      fs.writeFile("jsonData.json", JSON.stringify(parsedData), function (err) {
        done(err);
      });
    });
  });
})
My sample xml file :
<?xml version="1.0" encoding="UTF-8" ?>
<ALEXA>
<SD TITLE="A" FLAGS="" HOST="davidwalsh.name">
<TITLE TEXT="David Walsh Blog :: PHP, MySQL, CSS, Javascript, MooTools, and Everything Else"/>
<LINKSIN NUM="1102"/>
<SPEED TEXT="1421" PCT="51"/>
</SD>
<SD>
<POPULARITY URL="davidwalsh.name/" TEXT="7131"/>
<REACH RANK="5952"/>
<RANK DELTA="-1648"/>
</SD>
</ALEXA>
I'm getting the below output :
{
"ALEXA": {
"SD": [
{
"$": {
"TITLE": "A",
"FLAGS": "",
"HOST": "davidwalsh.name"
},
"TITLE": [
{
"$": {
"TEXT": "David Walsh Blog :: PHP, MySQL, CSS, Javascript, MooTools, and Everything Else"
}
}
],
"LINKSIN": [
{
"$": {
"NUM": "1102"
}
}
],
"SPEED": [
{
"$": {
"TEXT": "1421",
"PCT": "51"
}
}
]
},
{
"POPULARITY": [
{
"$": {
"URL": "davidwalsh.name/",
"TEXT": "7131"
}
}
],
"REACH": [
{
"$": {
"RANK": "5952"
}
}
],
"RANK": [
{
"$": {
"DELTA": "-1648"
}
}
]
}
]
}
}
'$' is getting added to the parsed json. How to avoid it..??
Looking for a solution. Thanks in advance.
The $ is the place for your attributes with the default config.
As xml2js parses your XML tags (SD, for example) explicitly as arrays (with explicitArray=true — you may have multiple of them either way, and you can only assign one of each key per object in JSON), you need a place to store the attributes; this is what $ is for. You can enforce creating arrays with explicitArray=true (which is the default) or turn this off. Using mergeAttrs you might eventually get the result that you desire.
You can change the attrkey if that would be one solution as well. The same goes for charkey and so on. You can find the whole config options in the README on GitHub at https://github.com/Leonidas-from-XIV/node-xml2js - eventually an option to transform to children might the right for you.
If you don't need attributes at all you can set ignoreAttrs=true. By the way; parser options go as an object into the parser constructor, in your case: new xml2js.Parser({...options});
Libraries like this usually parse everything, sometimes resulting in many properties which you don't need.
I created camaro for this purpose.
The downside is that you have to write your own template file describing the structure you want the XML to be transformed to.
const transform = require('camaro')
const fs = require('fs')

// Source XML document to transform.
const xml = fs.readFileSync('ean.xml', 'utf-8')

// camaro template: keys define the output JSON shape; string values are
// XPath expressions evaluated against the XML. An array value
// ["<xpath>", {...}] maps every node matched by <xpath> through the
// nested template ("#attr" selects an attribute).
const template = {
cache_key: "/HotelListResponse/cacheKey",
hotels: ["//HotelSummary", {
hotel_id: "hotelId",
name: "name",
rooms: ["RoomRateDetailsList/RoomRateDetails", {
rates: ["RateInfos/RateInfo", {
currency: "ChargeableRateInfo/#currencyCode",
non_refundable: "nonRefundable",
price: "ChargeableRateInfo/#total"
}],
room_name: "roomDescription",
room_type_id: "roomTypeCode"
}]
}],
session_id: "/HotelListResponse/customerSessionId"
}
// Apply the template; result is a plain JS object shaped like `template`.
const result = transform(xml, template)
Instead of xml2js, use xml2json, which converts directly from XML to JSON with the exact keys you expect.

Export mongodb collection data to csv file in node js

I have created a mongodb collection in mongolab and want to print all documents in that collecton. I have mongolab and the url of mongolab to get connected with the collection is -
mongodb://user:1234#xyz.mongolab.com:41248/new
The sample document structure is -
{
"_id": "9759572745-Sing",
"details": {
"Gender": "M",
"PreTrainingStatus": "Fresher",
"Religion": "Hindu",
"attendanceInPercentage": "",
"batchHolders": {
"AssessmentDate": "Thu Jul 16 2015",
"CourseFee": "7500",
"isEditable": false
},
"batchID": "282726",
"eid": "",
"whereDidYouHearAboutStar": "---Select---",
"skillInstructorOrTrainerName": "282726",
"specificGovtInstitutetieups": "---Select---",
"isSelected": false,
"isEditable": false
},
"addedOnMs": 1439455766000,
"submittedOnMs": 1439454813000,
"latitude": "27.409566879272",
"longitude": "77.69295501709",
"locationName": "Uttar Pradesh 281006,null"
}
I want to Print it and all nested properties to be displayed in a column.
But I am not able to do so, Please help.
Thank you (In Advance),
Dia
Implementing json2csv library for exporting data to csv file on nodejs
Example
const json2csv = require('json2csv').parse;
//For unique file name
const dateTime = new Date().toISOString().slice(-24).replace(/\D/g,
'').slice(0, 14);
const filePath = path.join(__dirname, "../../../", "public", "exports", "csv-" + dateTime + ".csv");
let csv;
const student = await req.db.collection('Student').find({}).toArray();
// Logging student
// [{id:1,name:"John",country:"USA"},{id:1,name:"Ronny",country:"Germany"}]
const fields = ['id','name','country'];
try {
csv = json2csv(booking_info, {fields});
} catch (err) {
return res.status(500).json({err});
}
fs.writeFile(filePath, csv, function (err) {
if (err) {
return res.json(err).status(500);
}
else {
setTimeout(function () {
fs.unlink(filePath, function (err) { // delete this file after 30 seconds
if (err) {
console.error(err);
}
console.log('File has been Deleted');
});
}, 30000);
res.download(filePath);
}
})
You can use https://www.npmjs.com/package/json2csv
Set nested option value to true
And Also Specify fields that you want from JSON. For nested document you can specify like this details.Gender

Resources