Neutralino JS - Why read all file to string without end of lines (txt) - neutralinojs

I.
I'm trying to make a skeleton of an app to read and write files in Neutralino.js but...
Now I'm stuck trying to read the content of a selected file into a variable.
I think the file is correctly selected, but when using Neutralino.filesystem.readFile it reads the text file without respecting the end-of-line carriage returns.
conteudo_edm contains all the file content, but just as one string; I hoped it would be an array, but there are no line breaks...
Any idea?
// Opens a native file-picker and reads the selected file's contents.
// NOTE(review): Neutralino.filesystem.readFile returns the WHOLE file as a
// single string — it never splits on line endings. To get an array of lines,
// split the content (not the path): conteudo_edm.split(/\r?\n/).
async function lerN() {
// showOpenDialog resolves to an array with the selected path(s).
let entries = await Neutralino.os.showOpenDialog('Abrir um ficheiro', {
filters: [
{ name: 'ISPOL', extensions: ['edm', 'EDM'] },
{ name: 'All files', extensions: ['*'] }
]
});
// Coerces the paths array to a string (implicit Array#toString join).
ficheiro = entries + ''
// Normalize Windows backslashes to forward slashes for display.
ficheiro = ficheiro.replace(/\\/g, "/")
conteudo_edm = await Neutralino.filesystem.readFile(entries[0]); // Here it joins everything into one string!!!
console.log(conteudo_edm)
document.getElementById("lbl_ficheiroaberto").textContent = "Ficheiro aberto : " + ficheiro;
console.log("entries__")
console.log(entries)
console.log("entries__")
// Hands the selected path to the app's filtering routine (defined elsewhere).
filtra_ficheiro(entries[0])
//document.getElementById("lbl_apagar").textContent = "estou a ler: " + ficheiro
}

Try Using Split Instead Of Replace.
This Worked For Me:
// Opens a native file-picker, reads the selected file, and splits its
// contents into an array of lines (conteudo_edm).
//
// Fixes over the posted answer:
//  * `#` is not a comment in JavaScript (it is a syntax error) — use `//`.
//  * The split must be applied to the file CONTENT (conteudo_edm), not to
//    the file PATH (ficheiro): readFile returns the whole file as one
//    string, so splitting the path leaves the content untouched.
async function lerN() {
  // showOpenDialog resolves to an array with the selected path(s).
  let entries = await Neutralino.os.showOpenDialog('Abrir um ficheiro', {
    filters: [
      { name: 'ISPOL', extensions: ['edm', 'EDM'] },
      { name: 'All files', extensions: ['*'] }
    ]
  });
  // Keep the display path as a plain string with forward slashes.
  ficheiro = (entries + '').replace(/\\/g, "/");
  // readFile returns one big string; split on CRLF or LF so conteudo_edm
  // becomes an array of lines.
  conteudo_edm = (await Neutralino.filesystem.readFile(entries[0])).split(/\r?\n/);
  console.log(conteudo_edm)
  document.getElementById("lbl_ficheiroaberto").textContent = "Ficheiro aberto : " + ficheiro;
  console.log("entries__")
  console.log(entries)
  console.log("entries__")
  filtra_ficheiro(entries[0])
}

Related

What is causing a double comma in my csv output when using `",\n`?

I am writing a script that is designed to take in an array and replace a designated row in a csv(buffer) then output a csv(buffer) in nodejs. However, I have found that whenever I have the following combination of characters: ",\n", it is doubling the comma. I have tried using \r instead or \n, but the system I am importing the csv has issues with the \r. I also found that by adding an extra whitespace: ", \n it prevents the double comma, but again, the system I'm importing the final result into won't accept the extra space. Does anyone know what is causing the extra comma and/or a way to not get the extra comma?
Script that replaces CSV row:
// Node-RED input handler: replaces (or appends) one row of a CSV payload and
// re-emits the rebuilt CSV as a Buffer.
node.on('input', function(msg) {
node.rowNumber = msg.rowNumber || config.rowNumber || 0; //Row being replaced.
node.newRow = msg.newRow || config.newRow; //New Row Array or Comma Separated String
var payload = msg.file || config.file || RED.util.getMessageProperty(msg, "payload"); //File path or buffer.
// If payload is a path rather than a Buffer, read the file from disk.
// NOTE(review): replace('\\', '/') with a string pattern only replaces the
// FIRST backslash; a global regex (/\\/g) is needed for a full Windows path.
if (!Buffer.isBuffer(payload)) {
payload = payload.replace('\\', '/');
payload = fs.readFileSync(payload);
}
// Accept the replacement row as an array or a comma-separated string.
// NOTE(review): a plain split(',') breaks quoted CSV fields that contain commas.
if (!Array.isArray(this.newRow)) {
node.newRow = node.newRow.split(',');
}
var dataArray = [];
var csvArr = [];
// Feed the buffer through the csv parser via an in-memory readable stream.
const readable = new Stream.Readable()
readable._read = () => {}
readable.push(payload)
readable.push(null)
readable.pipe(csv())
.on('data', function (data) {
dataArray.push(data);
})
.on('end', function(){
// Rebuild a 2-D array: header row first, then each record's values.
csvArr.push(Object.keys(dataArray[0]));
dataArray.forEach((item, i) => {
csvArr.push(_.values(item));
});
// rowNumber 0 appends; otherwise replace the 1-indexed row in place.
if (node.rowNumber == 0) {
csvArr.push(node.newRow);
}
else {
csvArr.splice(node.rowNumber - 1, 1, node.newRow);
}
// NOTE(review): join('\n') stringifies each inner array via Array#toString,
// which comma-joins fields WITHOUT re-quoting/escaping them. Fields that
// themselves contain `",` or newlines are emitted verbatim — the likely
// source of the doubled commas the question describes. Re-serialize with a
// real CSV writer to fix this properly.
var finalCSV = csvArr.join('\n');
msg.payload = Buffer.from(finalCSV);
node.send(msg); //Returns the msg object
});
});
Input:
[
`""{
""""actions"""":{
""""validation"""":[
],
""""reconciliation"""":[
]
},
""""enforce_all_required_fields"""":"""""""",
""""form_history"""":""""12c2acda35980131f98acf2a39c1aafe"""",
""""form_id"""":""""228"""",
""""options"""":[
],
""""record_action"""":""""both"""",
""""secondary_form_history"""":"""""""",
""""secondary_form_id"""":""""0"""",
""""secondary_form_name"""":"""""""",
""""secondary_is_tier1_form"""":"""""""",
""""selected_columns"""":[
""""field_9326"""",
""""field_3742_first"""",
""""field_3742_last"""",
""""field_9325"""",
""""field_9327"""",
],
""""skip_headers"""":"""""""",
""""target_match_type"""":""""""""
}""`
]
Undesired output:
"{
""actions"":{
""validation"":[
],
""reconciliation"":[
]
},
""enforce_all_required_fields"":"""",,
""form_history"":""12c2acda35980131f98acf2a39c1aafe"",,
""form_id"":""228"",,
""options"":[
],
""record_action"":""both"",,
""secondary_form_history"":"""",,
""secondary_form_id"":""0"",,
""secondary_form_name"":"""",,
""secondary_is_tier1_form"":"""",,
""selected_columns"":[
""field_9326"",,
""field_3742_first"",,
""field_3742_last"",,
""field_9325"",,
""field_9327"",,
],
""skip_headers"":"""",,
""target_match_type"":""""
}"
Notice the double commas?

Node.js array of interface not populated correctly

I am trying to read 4 files into an array of objects. However, the object values in the array are all the last object. I would like to know what I did wrong to cause this behavior.
Code (the code has been simplified, so it may not make good sense):
import * as fs from "fs";
// Shape of one file record: sequence id, file name, and raw file contents.
interface axObj {
id: String;
name: String;
body: String;
};
const logger = console;
// Directory holding the JSON files to load (absolute, machine-specific path).
const mypath = "/Users/autorun/Documents/Projects/Axway/Projects/axDeploy/data/rtczip/PD_only/";
// Files to read, in the order they should appear in the result array.
const files:String[] = [
"PD.DEVOPS.TEST.1.FS.json",
"PD.DEVOPS.TEST.1.SFTP.json",
"PD.DEVOPS.TEST.1.json",
"PD.DEVOPS.TEST.1_AS2.json"
];
/**
 * Reads each file listed in `files` into its own axObj record and collects
 * them in order.
 *
 * Fix: the original declared a single `pdObj` OUTSIDE the loop and pushed
 * that same object reference on every iteration; since push stores a
 * reference (not a copy), every array slot ended up pointing at the object
 * holding the LAST file's data. Creating a fresh object per iteration gives
 * each array entry its own state. (Also removes the unused, shadowed
 * `var data` declaration.)
 */
function doJobsSeqCall() {
  var pdObjArray: axObj[] = [];
  var i = 0;
  for (const file of files) {
    logger.debug("\nReading file: " + mypath + file);
    const data = fs.readFileSync(mypath + file, "utf8");
    if (!data) {
      throw "No data to post";
    }
    // A NEW object every pass — this is the essential change.
    const pdObj: axObj = {
      id: String(i + 1),
      name: file,
      body: data
    };
    pdObjArray.push(pdObj);
    logger.debug("Setting up axObj [" + pdObjArray[i].id + "]");
    logger.debug("Data file is valid with size: [" + pdObjArray[i].body.length + "] file: [" + pdObjArray[i].name + "]");
    i++;
  }
  // Each entry now retains its own file name and contents.
  logger.debug("\n\nChecking pdObjArray Content");
  i = 1;
  for (let iAxObj of pdObjArray) {
    console.log("axSeqCall index: [" + i++ + "] name: [" + iAxObj.name + "]");
  }
};
// Entry point: run the job and exit non-zero on any failure.
try {
doJobsSeqCall();
} catch (err) {
logger.error(`Error: ${err}`);
process.exit(-1);
};
Result in the log:
Reading file: /Users/autorun/Documents/Projects/Axway/Projects/axDeploy/data/rtczip/PD_only/PD.DEVOPS.TEST.1.FS.json
Setting up axObj [1]
Data file is valid with size: [754] file: [PD.DEVOPS.TEST.1.FS.json]
Reading file: /Users/autorun/Documents/Projects/Axway/Projects/axDeploy/data/rtczip/PD_only/PD.DEVOPS.TEST.1.SFTP.json
Setting up axObj [2]
Data file is valid with size: [1625] file: [PD.DEVOPS.TEST.1.SFTP.json]
Reading file: /Users/autorun/Documents/Projects/Axway/Projects/axDeploy/data/rtczip/PD_only/PD.DEVOPS.TEST.1.json
Setting up axObj [3]
Data file is valid with size: [1507] file: [PD.DEVOPS.TEST.1.json]
Reading file: /Users/autorun/Documents/Projects/Axway/Projects/axDeploy/data/rtczip/PD_only/PD.DEVOPS.TEST.1_AS2.json
Setting up axObj [4]
Data file is valid with size: [874] file: [PD.DEVOPS.TEST.1_AS2.json]
Checking pdObjArray Content
axSeqCall index: [1] name: [PD.DEVOPS.TEST.1_AS2.json]
axSeqCall index: [2] name: [PD.DEVOPS.TEST.1_AS2.json]
axSeqCall index: [3] name: [PD.DEVOPS.TEST.1_AS2.json]
axSeqCall index: [4] name: [PD.DEVOPS.TEST.1_AS2.json]
Issue:
I don't expect all of the names to be [PD.DEVOPS.TEST.1_AS2.json] in the "Checking pdObjArray Content" section. I expect the 4 different file names — "PD.DEVOPS.TEST.1.FS.json", "PD.DEVOPS.TEST.1.SFTP.json", "PD.DEVOPS.TEST.1.json", "PD.DEVOPS.TEST.1_AS2.json" — there.
Please help and assist.
for (const file of files) or for (let file of files)
You can't use the var keyword in the for loop if you have to access the file inside asynchronous call. It won't capture the exact value of file.
Please try to change the var in the first for loop inside doJobsSeqCall function and try it again.
for (var file of files) {
var pdObj: axObj = {
id: "",
name: "PD.Object",
body: ""
};
...
}
Please declare variable pdObj in for loop.
Why?:
if pdObj outside for loop,
pdObj = ...
pdObjArray.push(pdObj);
The code above appends the same reference variable pdObj to pdObjArray.
So pdObjArray contains the same reference in every slot.
This means it shows only the last value.
But, if pdObj inside for loop, pdObj is created every for loop, so, different reference object is appended to pdObjArray.
Or you can use Object clone before pdObjArray.push(pdObj);
pdObjArray.push(JSON.parse(JSON.stringify(pdObj)));
What is the most efficient way to deep clone an object in JavaScript?
As files is an array of string, you can try to do foreach and then do all the logic in it.
files.forEach(file => {
//logic here
})
check out the example on Mozilla's site
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/forEach

How to open a Blob object in the browser?

I currently have a blob object which I would like to give the user the option to open.
I am currently using js-xlsx from the SheetsJS library. I have successfully created an excel sheet with the given data I need in it.
I convert the excel sheet object into a Blob and use FileSaver.js to successfully give the user the option to save the excel sheet.
saveAs(new Blob([s2ab(wbout)], {type:"application/octet-stream"}), 'mysheet.xlsx');
// Convert a binary string to an ArrayBuffer by copying the low byte of each
// UTF-16 code unit into a Uint8Array view over the buffer.
function s2ab(s: any) {
  const buffer = new ArrayBuffer(s.length);
  const bytes = new Uint8Array(buffer);
  for (let idx = 0; idx < s.length; idx += 1) {
    bytes[idx] = s.charCodeAt(idx) & 0xFF;
  }
  return buffer;
}
Here is the whole function:
// Builds an .xlsx workbook from this.slotData with SheetJS and hands it to
// FileSaver's saveAs() so the browser offers to save the file.
exportExcel() {
var wb = XLSX.utils.book_new();
wb.Props = {
Title: "SheetJS",
Subject: "CCDB",
Author: "CCDB",
// NOTE(review): JS Date months are 0-indexed — (2017,12,19) rolls over to
// 19 Jan 2018, not December 2017.
CreatedDate: new Date(2017,12,19)
};
wb.SheetNames.push("Test Sheet");
// NOTE(review): unshift mutates this.slotData in place — the title/header
// rows stay prepended if exportExcel() is called a second time.
var ws_data = this.slotData;
ws_data.unshift(this.headers);
ws_data.unshift(['Slots :: CCDB']);
var ws = XLSX.utils.aoa_to_sheet(ws_data);
// Column widths (wch = width in characters).
var wscols = [
{wch:8},
{wch:25},
{wch:25},
{wch:15},
{wch: 10},
{wch: 10},
{wch: 35},
{},
{}
];
ws['!cols'] = wscols;
// Merge the title cell across the first row (columns 0-8).
ws['!merges'] = [{ s: { r: 0, c: 0 }, e: { r: 0, c: 8 } }];
// XLSX.utils.
wb.Sheets["Test Sheet"] = ws;
// Serialize to a binary string, then repackage as an ArrayBuffer for Blob.
var wbout = XLSX.write(wb, {bookType: 'xlsx', type: 'binary'});
// Copies the low byte of each UTF-16 code unit into a byte buffer.
function s2ab(s: any) {
var buf = new ArrayBuffer(s.length);
var view = new Uint8Array(buf);
for (var i=0; i<s.length; i++) view[i] = s.charCodeAt(i) & 0xFF;
return buf;
}
saveAs(new Blob([s2ab(wbout)], {type:"application/octet-stream"}), 'Slots CCDB.xlsx');
},
The file saves correctly, but it is not exactly the dialogue box I want.
I want to give the user the option to OPEN the file, ie. "in Excel", like this:
I am currently able to open a CSV file like this, by converting the data into a url and passing it into window.open().
exportCSV() {
let csv = 'data:text/csv;charset=utf-8,';
const csvContent = Papa.unparse({
fields: this.headers,
data: this.slotData,
});
csv += csvContent;
const encodedUri = encodeURI(csv);
window.open(encodedUri);
},
I just can't seem to get this to work with a .xlsx file though.
Anyone have any ideas?
Okay, I have figured it out.
Turns out I just had to change the type to 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' for the blob to be opened as a .xlsx file!
saveAs(new Blob([s2ab(wbout)], {
type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
}), 'Slots CCDB.xlsx');

Renaming single file names while copying entire folder using copyTpl

My yeoman generator copies files from template to destination path:
// Copy every template file to the destination, expanding EJS tags
// (e.g. <%= appName %>) inside file CONTENTS — file NAMES are not templated.
this.fs.copyTpl(
this.templatePath(),
this.destinationPath(), {
appName: this.props.appName
});
During project generation, I need to assign value of this.props.appName to some of filenames.
Unfortunately I can't do this that way like I could do inside this files:
<%=appName%>-project.sln
All files that need to be renamed have appTemplate in their names, so what I need to do is simply replace appTemplate with value of this.props.appName.
Can I somehow configure copyTpl to rename some of files while copying them to another destination?
OK, I found a solution. According to yeoman docs:
Any generator author can register a transformStream to modify the file path and/or the content.
Using this method:
this.registerTransformStream();
What that means is I can pipe all generated files through some script:
var rename = require("gulp-rename");
//other dependencies...
module.exports = yeoman.Base.extend({
//some other things generator do...
writing: function() {
// Capture `this` for use inside the rename callback (pre-arrow-function style).
var THAT = this;
// Pipe every file the generator writes through gulp-rename, replacing the
// placeholder token in both the file name and the directory path.
this.registerTransformStream(rename(function(path) {
path.basename = path.basename.replace(/(666replacethat666)/g, THAT.props.appName);
path.dirname = path.dirname.replace(/(666replacethat666)/g, THAT.props.appName);
}));
// Copy all templates, expanding EJS tags (<%= appName %>) in file contents;
// the transform stream above then fixes up the output paths.
this.fs.copyTpl(
this.templatePath(),
this.destinationPath(), {
appName: this.props.appName
});
}
});
This script will pipe all files through gulp-rename, changing 666replacethat666 to something more intelligent.
If you cannot use registerTransformStream because you are using the composeWith() feature in Yeoman (which disconnects transform stream registrations), you can use the processDestinationPath, which works when you select multiple files (not when you specify a specific file in the first argument, for some reason).
// processDestinationPath lets you rewrite each output path at copy time;
// note (per the surrounding answer) it is only invoked when the source
// pattern matches MULTIPLE files (a glob), not a single explicit file.
this.fs.copyTpl(
this.templatePath("**/{.*,*}"),
this.destinationPath(),
{ /* usually your prompt answers are here */ },
{},
{
processDestinationPath: (filePath: string) =>
filePath.replace(/somedir\/a-file.js/g, 'newdir/better-filename.js'),
},
);
Source to documentation options: https://yeoman.github.io/generator/actions_fs.html#.copyTemplate
Which is based on https://github.com/SBoudrias/mem-fs-editor#copyfrom-to-options-context-templateoptions-
registerTransformStream with gulp-rename is still an issue. However, I get it working with glob.
const glob = require('glob');
writing() {
const files = glob.sync('**', { dot: true, nodir: true, cwd: this.templatePath() })
for (let i in files) {
this.fs.copyTpl(
this.templatePath(files[i]),
this.destinationPath( this.props.destinationFolderPath + '\\' + files[i].replace(/__fileName__/g,this.props.fileName)),
this.props
)
}
}
After copy, iterate over the paths of the output dir and regex replace all occurrences.
// Describe the rename needed for one directory entry: returns
// { oldPath, newPath } when replacing `match` with `replace` in the entry
// name changes it, or undefined when the entry needs no rename.
const getReplacement = (base, pathRel, match, replace) => {
  const renamedRel = pathRel.replace(match, replace);
  const oldPath = path.join(base, pathRel);
  const newPath = path.join(base, renamedRel);
  if (oldPath === newPath) {
    return undefined;
  }
  return { oldPath, newPath };
};
// Walk `base` depth-first and collect every rename implied by replacing
// `match` with `replace` in entry names. A directory's children are
// collected BEFORE the directory itself, so the renames can be applied in
// order without invalidating child paths.
const getReplacementsRecursive = (base, match, replace, replacements = []) => {
  for (const entry of fs.readdirSync(base)) {
    if (fs.statSync(path.join(base, entry)).isDirectory()) {
      replacements = getReplacementsRecursive(path.join(base, entry), match, replace, replacements);
    }
    const replacement = getReplacement(base, entry, match, replace);
    if (replacement) {
      replacements.push(replacement);
    }
  }
  return replacements;
};
// Apply every rename found under `dir` (children before their parents).
function replaceMatches(dir, match, replace) {
  const renames = getReplacementsRecursive(dir, match, replace);
  for (const { oldPath, newPath } of renames) {
    fs.renameSync(oldPath, newPath);
  }
}
module.exports = class extends Generator {
// ...
writing() {
// don't forget to set the output directory
let OUTPUT_DIR = "./out";
// this.fs.copyTpl(...);
// setTimeout is used to give some time for the copyTpl to finish
// NOTE(review): a fixed 1-second wait is a race, not a guarantee — if the
// copy takes longer, the renames run against a half-written tree. Prefer
// hooking the generator's completion rather than sleeping.
setTimeout(
() => {
var match = new RegExp( "666replacethat666", 'g' );
replaceMatches(OUTPUT_DIR, match, this.props.appName);
}, 1000);
}
}

Using Stored Data to Define Sub Menu Entries

My extension should use the user's options to build submenus under the main extension context menu entry. The options are stored in a table, where each line is defining a submenu. The whole table is stored as a json string in chrome.local.storage with the key jsondata.
The manifest is:
"background": {
"persistent": true,
"scripts": [ "js/storage.js", "js/backgroundlib.js", "js/background.js" ]
},
...
"permissions": [ "storage", "contextMenus", "http://*/*", "https://*/*", "tabs", "clipboardRead", "clipboardWrite" ],
...
In the background script, I'm trying to get the data using:
// Attempts to load the stored option table and build the submenus from it.
// NOTE(review): per the accepted answer further down, `storage.area.get`
// should be `chrome.storage.local.get`, and the window 'load' wrapper is
// unnecessary in a background script — which is why build_submenu never runs.
window.addEventListener('load', function () {
var key = 'jsondata';
storage.area.get(key, function (items){
console.log(items[key]);
build_submenu(items[key]);});
});
// Placeholder: intended to call chrome.contextMenus.create() for each row
// of the option table passed in as a JSON string.
function build_submenu(json) {
console.log("build_submenu: " + json);
}
and build_submenu should then call multiple chrome.contextMenus.create({... }) to add the submenus.
For now, I can't get build_submenu being called. Am I trying to do something that is not possible or am I just missing something obvious?
Thanks, F.
Replace storage.area.get with chrome.storage.local.get.
Another suggestion would be removing the outer window.onload listener, since you are using background scripts and window.onload makes no sense.
OK, I finally got this, that works:
manifest.json
"background": {
"persistent": false,
"scripts": [ "js/storage.js", "js/backgroundlib.js", "js/background.js" ]
},
in background.js, The context menu is build in the callback function when reading from storage. This reading is called when onInstalled is fired.
I use a global var that is saved onSuspend and read again onStartup, and that associates each submenu id with the corresponding row from the user's options. The onClick listener tests whether the global variable is defined; if not, it is read again from storage.
// Maps submenu item id -> its row from the user's option table. Persisted to
// storage on suspend and restored on startup (see the listeners below).
var regex = new Object();
// Build the context menus once, when the extension is installed or updated.
chrome.runtime.onInstalled.addListener( function () {
console.log("onInstalled called");
var key = 'jsondata';
storage.area.get(key, function (items){ get_jsondata(items[key]);});
// Parses the stored option table (a JSON string) and creates one parent
// menu per action (copy/paste) plus one child entry per option row.
function get_jsondata(value){
var data = JSON.parse(value);
var fcb =[ {fcb_context: "fcb_copy", title:"Copy filtered", context: ["selection", "link"]}, {fcb_context:"fcb_paste", context:["editable"], title:"Paste filtered"}];
for (var i=0; i<fcb.length; i++) {
var menu = fcb[i];
// Parent entry for this action.
chrome.contextMenus.create({
//title: "Look up: %s",
title: menu.title,
id: menu.fcb_context,
contexts: menu.context,
});
var last = data.length;
//var sel = info.selectionText;
// One child entry per option row, keyed "<action>_<rowIndex>".
for (var j=0; j<last; j++){
chrome.contextMenus.create({
title: data[j].name,
contexts: menu.context,
id: menu.fcb_context + "_" + j,
parentId: menu.fcb_context,
//onclick: function(info, tab){ run_cmd( data[j].regex, info, menu.fcb_context ); }
});
// Remember which option row belongs to this submenu id for the click handler.
regex[ menu.fcb_context + "_" + j] = data[j];
//console.log(regex[menu.fcb_context + "_" + j]);
}// for j
} // for i
}//get_jsondata
}); //add listener
// Dispatch a context-menu click: look up the clicked item's option row and
// run the command. If the event page was reloaded and the in-memory map is
// empty, restore it from storage first.
chrome.contextMenus.onClicked.addListener(function(info, tabs){
  // FIX: `typeof regex === "undefined"` could never be true — `regex` is
  // always initialized to an object at the top of the file — so the storage
  // fallback never ran after a reload. Test for an EMPTY map instead.
  if (Object.keys(regex).length === 0) {
    storage.area.get("regex", function(items){
      regex = JSON.parse(items["regex"]);
      console.log("get " + items["regex"] + " from storage");
      run_cmd( regex, info );
    });
  } else {
    console.log("regex was defined... " + JSON.stringify(regex));
    run_cmd( regex, info );
  }
});
// Persist the id -> option-row map before the event page is unloaded.
chrome.runtime.onSuspend.addListener(() => {
  const serialized = JSON.stringify(regex);
  console.log("onSuspend called saving " + serialized);
  storage.area.set({ "regex": serialized }, () => {
    console.log("regex saved");
  });
});
// Restore the id -> option-row map when the browser starts.
chrome.runtime.onStartup.addListener(() => {
  console.log("onStartup called");
  storage.area.get("regex", (items) => {
    regex = JSON.parse(items["regex"]);
    console.log("get " + items["regex"] + " from storage");
  });
});
// Injects the content script that copies the filtered selection.
// Fix: removed the unused local `var sel = info.selectionText;` — the
// selection is read by the injected script, not here.
function getSelectedText(info){
  chrome.tabs.executeScript(null, {file:"js/script.js"});
}

// Injects the content script that pastes the filtered clipboard text.
function pasteFilteredText(info){
  chrome.tabs.executeScript(null, {file:"js/script.js"});
}
// Debug handler: shows which option row, selection, and parent menu a
// context-menu click resolved to.
function run_cmd(regex, info){
  const { menuItemId, selectionText, parentMenuItemId } = info;
  const data = regex[menuItemId];
  const selectedText = selectionText ? selectionText : "";
  alert("run_cmd regex " + data.regex + " sel " + selectedText + " fcb_context: " + parentMenuItemId);
}
Thanks for pointing me what is superfluous or missing.

Resources