Make Initialization Asynchronous in node.js

I am trying to initialize a key class in a node.js program, but the instructions are running in arbitrary order and therefore it is initializing wrong. I've tried both making initialization happen in the definition and in a separate function; neither works. Is there something that I'm missing?
Current code:
class BotState {
    constructor() {
        this.bios = {};
        this.aliases = {};
        this.stories = {};
        this.nextchar = 0;
    }
}
var ProgramState = new BotState();
BotState.prototype.Initialize = function() {
    this.bios = {};
    var aliases = {};
    var nextchar = 0;
    this.nextchar = 0;
    fs.readdir(biosdir, function (err, files) {
        if (err) throw err;
        for (var file in files) {
            fs.readFile(biosdir + file + ".json", {flag: 'r'}, (err, data) => {
                if (err) throw err;
                var bio = JSON.parse(data);
                var index = bio["charid"];
                this.bios[index] = bio;
                for (var alias in bio["aliaslist"]) {
                    this.aliases[bio["aliaslist"][alias].toLowerCase()] = index;
                }
                if (index >= nextchar) {
                    nextchar = index + 1;
                }
            })
        }
        this.stories = {};
        this.nextchar = Math.max(Object.keys(aliases).map(key => aliases[key])) + 1;
    });
}
ProgramState.Initialize();
Is there some general way to make node.js just... run commands in the order they're written, as opposed to some arbitrary one?
(Apologies if the code is sloppy; I was more concerned with making it do the right thing than making it look nice.)

You are running an asynchronous operation inside a loop, so the loop keeps going while the asynchronous operations finish in an unpredictable order, and you end up processing them in that unpredictable order. The simplest way to control your loop is to switch to the promise-based version of the fs module and then use async/await so the for loop pauses and waits for each asynchronous operation to complete. You can do that like this:
const fsp = require('fs').promises;

class BotState {
    constructor() {
        this.bios = {};
        this.aliases = {};
        this.stories = {};
        this.nextchar = 0;
    }
}

var ProgramState = new BotState();

BotState.prototype.Initialize = async function() {
    this.bios = {};
    this.nextchar = 0;
    let aliases = {};
    let nextchar = 0;
    const files = await fsp.readdir(biosdir);
    for (const file of files) {
        const data = await fsp.readFile(biosdir + file + ".json", {flag: 'r'});
        const bio = JSON.parse(data);
        const index = bio.charid;
        const list = bio.aliaslist;
        this.bios[index] = bio;
        for (const alias of list) {
            this.aliases[alias.toLowerCase()] = index;
        }
        if (index >= nextchar) {
            nextchar = index + 1;
        }
    }
    this.stories = {};
    // there is something wrong with this line of code because you NEVER
    // put any data in the variable aliases
    this.nextchar = Math.max(Object.keys(aliases).map(key => aliases[key])) + 1;
}

ProgramState.Initialize();
ProgramState.Initialize();
Note, there's a problem with your usage of the aliases local variable because you never put anything in that data structure, yet you're trying to use it in the last line of the function. I don't know what you're trying to accomplish there so you will have to fix that.
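If the intent was for aliases to map each alias to its character id so that last line can compute the next free id, here is a standalone sketch of that idea with made-up sample data; note that Math.max also needs its arguments spread rather than passed as a single array:
const aliases = {};
const sampleBios = [
    { charid: 0, aliaslist: ["Alice", "Al"] },
    { charid: 3, aliaslist: ["Bob"] },
];
for (const bio of sampleBios) {
    for (const alias of bio.aliaslist) {
        aliases[alias.toLowerCase()] = bio.charid;
    }
}
// Spread the values so Math.max receives numbers, not a single array (which yields NaN)
const nextchar = Math.max(...Object.values(aliases)) + 1;
console.log(nextchar); // 4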
Also, note that you should never use for/in to iterate an array. It iterates the properties of an object, which can include more than just the array elements. for/of is made precisely for iterating an iterable such as an array, and it also saves the array dereference, since it gives you each value rather than each index.
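For example, iterating the same array both ways shows the difference:
const files = ["a.json", "b.json"];

for (const key in files) {
    console.log(key); // "0", "1" (string indices, plus any enumerable properties)
}

for (const file of files) {
    console.log(file); // "a.json", "b.json" (the values themselves)
}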

Related

How to make global function in nodejs expressjs?

I have a function to capitalize the first letter of every word in Node.js/Express, like this:
function titleCase(str) {
var splitStr = str.toLowerCase().split(' ');
for (var i = 0; i < splitStr.length; i++) {
// You do not need to check if i is larger than splitStr length, as your for does that for you
// Assign it back to the array
splitStr[i] = splitStr[i].charAt(0).toUpperCase() + splitStr[i].substring(1);
}
// Directly return the joined string
return splitStr.join(' ');
}
But how can I make that function global, so I can use it in my controller?
I have tried to make function.js file:
module.exports = {
titleCase: function(str) {
var splitStr = str.toLowerCase().split(' ');
for (var i = 0; i < splitStr.length; i++) {
// You do not need to check if i is larger than splitStr length, as your for does that for you
// Assign it back to the array
splitStr[i] = splitStr[i].charAt(0).toUpperCase() + splitStr[i].substring(1);
}
// Directly return the joined string
return splitStr.join(' ');
}
}
and require function.js file in server.js like this:
require('./app/helpers/function');
But it's still not working.
There are several ways to do this, just make sure the syntax is right.
If you don't know the difference between these two things below or when to use them, you should read about it.
Simplest way:
exports.titleCase = function(str) {
// function here
}
Module.exports way:
function titleCase(str) {
// function here
}
module.exports = {
titleCase,
}
And require for both:
// Require
const {titleCase} = require('./app/helpers/function');
// => titleCase("something");
// or
const func = require('./app/helpers/function');
// => func.titleCase("something")
global['titleCase'] = require('./app/helpers/function');
Please update your server.js with global['titleCase'] = require('./app/helpers/function'); instead of require('./app/helpers/function');
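Also note that with the module.exports form above, require() returns an object whose titleCase property is the function, so if you want to call titleCase(...) directly, a sketch along these lines (same file path as in the question) attaches the function itself to global:
// server.js
const helpers = require('./app/helpers/function');
global.titleCase = helpers.titleCase;

// later, in any controller loaded after this line:
console.log(titleCase('hello world')); // "Hello World"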

Async/Await Node-Postgres Queries Within ForEach Loops

EDIT: I'm using node v8.0.0
I just started learning how to access SQL databases with node-postgres, and I'm having a little bit of trouble accessing multiple databases to collect the data in a workable format, particularly when executing multiple queries within forEach loops. After a few tries, I'm now trying async/await, but I get the following error:
await client.connect()
^^^^^^
SyntaxError: Unexpected identifier
When I tried using a pool or calling .query sequentially, I would get something along the lines of
1
[]
could not connect to postgres Error: Connection terminated
Here is an abbreviated version of my code:
const { Client } = require('pg');
const moment = require('moment');
const _ = require('lodash');
const turf = require('@turf/turf');
const connString = // connection string
var collected = []
const CID = 300
const snaptimes = // array of times
var counter=0;
const client = new Client(connString);
function createArray(i,j) {
// return array of i arrays of length j
}
await client.connect()
snaptimes.forEach(function(snaptime){
var info = {}; // an object of objects
// get information at given snaptime from database 1
const query1 = // parametrized query selecting two columns from database 1
const result1 = await client.query(query1, [CID,snaptime]);
var x = result1.rows;
for (var i = 0; i < x.length; i++) {
// store data from database 1 into info
// each row is an object with two fields
}
// line up subjects on the hole
const query2 = // parametrized query grabbing JSON string from database 2
const result2 = await client.query(query2, [CID,snaptime]);
const raw = result2.rows[0].JSON_col;
const line = createArray(19,0); // an array of 19 empty arrays
for (var i = 0; i < raw.length; i++) {
// parse JSON object and record data into line
}
// begin to collect data
var n = 0;
var g = 0;
// walk down the line
for (var i = 18; i > 0; i--) {
// if no subjects are found at spot i, do nothing, except maybe update g
if ((line[i] === undefined || line[i].length == 0) && g == 0){
g = i;
} else if (line[i] !== undefined && line[i].length != 0) {
// collect data for each subject if subjects are found
line[i].forEach(function(subject){
const query3 = // parametrized query grabbing data for each subject
const result3 = await client.query(query3,[CID,subject,snaptime]);
x = result3.rows;
const y = moment(x[0].end_time).diff(moment(snaptime),'minutes');
var yhat = 0;
// the summation over info depends on g
if (g===0){
for (var j = i; j <= 18; j++){
yhat = moment.duration(info[j].field1).add(yhat,'m').asMinutes();
}
} else {
for (var j = i; j <= 18; j++){
if (i<j && j<g+1) {
yhat = moment.duration(info[j].field2).add(yhat,'m').asMinutes();
} else {
yhat = moment.duration(info[j].field1).add(yhat,'m').asMinutes();
}
}
}
collected.push([y,yhat,n,i]);
});
}
n+=line[i].length;
g=0;
}
// really rough work-around I once used for printing results after a forEach of queries
counter++;
if (counter===snaptimes.length){
console.log(counter);
console.log(collected);
client.end();
}
});
The problem is caused by your forEach callback not being async:
snaptimes.forEach(function(snaptime){
should be:
snaptimes.forEach(async function (snaptime) {
for the await to be recognizable at all.
Keep in mind that an async function returns immediately with a promise, which eventually gets resolved by the async function's return statement (or rejected by an uncaught exception raised inside it).
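That also means forEach itself never waits on those promises; if the order matters, a plain for...of loop inside an async function does wait. A small illustration:
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

[1, 2, 3].forEach(async n => {
    await delay(30 - n * 10);
    console.log('forEach item', n); // items may complete out of order
});
console.log('forEach loop already finished'); // prints before any of the items above

(async () => {
    for (const n of [1, 2, 3]) {
        await delay(10);
        console.log('for...of item', n); // always 1, 2, 3 in order
    }
})();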
But also make sure your Node version supports async/await:
Since Node 7.6 it can be used with no --harmony flag.
In Node 7.x before 7.6 you have to use the --harmony flag.
It was not available in Node before 7.0.
See: http://node.green/#ES2017-features-async-functions
Also note that you can use await only inside of functions declared with the async keyword. If you want to use it in the top level of your script or module then you need to wrap it in an immediately invoked function expression:
// cannot use await here
(async () => {
// can use await here
})();
// cannot use await here
Example:
const f = () => new Promise(r => setTimeout(() => r('x'), 500));
let x = await f();
console.log(x);
prints:
$ node t1.js
/home/rsp/node/test/prom-async/t1.js:3
let x = await f();
^
SyntaxError: Unexpected identifier
but this:
const f = () => new Promise(r => setTimeout(() => r('x'), 500));
(async () => {
let x = await f();
console.log(x);
})();
prints:
$ node t2.js
x
after 0.5s delay, as expected.
On versions of Node that don't support async/await the first (incorrect) example will print:
$ ~/opt/node-v6.7.0/bin/node t1.js
/home/rsp/node/test/prom-async/t1.js:3
let x = await f();
^
SyntaxError: Unexpected identifier
and the second (correct) example will print a different error:
$ ~/opt/node-v6.7.0/bin/node t2.js
/home/rsp/node/test/prom-async/t2.js:3
(async () => {
^
SyntaxError: Unexpected token (
It's useful to know because Node versions that don't support async/await will not give you a meaningful error like "async/await not supported" or something like that, unfortunately.
Make sure you put the await inside an async function, like:
async function f() {
return await Promise.resolve('')
}
It is supported by default since Node 7.6.0; before 7.6.0 you have to use the --harmony flag.
Run node -v first to check your version.
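If you would rather have the script itself fail fast with a clear message on an older runtime, a small optional guard can check process.version at startup, for example:
// Optional guard: bail out early on Node versions older than 7.6
const [major, minor] = process.versions.node.split('.').map(Number);
if (major < 7 || (major === 7 && minor < 6)) {
    throw new Error('Node ' + process.versions.node + ' needs --harmony (or an upgrade) for async/await');
}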
First of all, you don't know enough about async/await just yet. Don't worry, it's actually quite easy, but you do need to read the documentation to be able to use it.
More to the point, the problem with your code is that you can only await inside async functions; you're doing that outside of any function.
First of all, here's the solution that is closest to the code you wrote:
const { Client } = require('pg');
const moment = require('moment');
const _ = require('lodash');
const turf = require('@turf/turf');
const connString = // connection string
var collected = []
const CID = 300
const snaptimes = // array of times
var counter=0;
const client = new Client(connString);
function createArray(i,j) {
// return array of i arrays of length j
}
async function processSnaptime (snaptime) {
var info = {}; // an object of objects
// get information at given snaptime from database 1
const query1 = // parametrized query selecting two columns from database 1
const result1 = await client.query(query1, [CID,snaptime]);
var x = result1.rows;
for (var i = 0; i < x.length; i++) {
// store data from database 1 into info
// each row is an object with two fields
}
// line up subjects on the hole
const query2 = // parametrized query grabbing JSON string from database 2
const result2 = await client.query(query2, [CID,snaptime]);
const raw = result2.rows[0].JSON_col;
const line = createArray(19,0); // an array of 19 empty arrays
for (var i = 0; i < raw.length; i++) {
// parse JSON object and record data into line
}
// begin to collect data
var n = 0;
var g = 0;
// walk down the line
for (var i = 18; i > 0; i--) {
// if no subjects are found at spot i, do nothing, except maybe update g
if ((line[i] === undefined || line[i].length == 0) && g == 0){
g = i;
} else if (line[i] !== undefined && line[i].length != 0) {
// collect data for each subject if subjects are found
for (const subject of line[i]) { // for...of instead of forEach, so the awaits below stay inside the async function
const query3 = // parametrized query grabbing data for each subject
const result3 = await client.query(query3,[CID,subject,snaptime]);
x = result3.rows;
const y = moment(x[0].end_time).diff(moment(snaptime),'minutes');
var yhat = 0;
// the summation over info depends on g
if (g===0){
for (var j = i; j <= 18; j++){
yhat = moment.duration(info[j].field1).add(yhat,'m').asMinutes();
}
} else {
for (var j = i; j <= 18; j++){
if (i<j && j<g+1) {
yhat = moment.duration(info[j].field2).add(yhat,'m').asMinutes();
} else {
yhat = moment.duration(info[j].field1).add(yhat,'m').asMinutes();
}
}
}
collected.push([y,yhat,n,i]);
}
}
n+=line[i].length;
g=0;
}
// really rough work-around I once used for printing results after a forEach of queries
counter++;
if (counter===snaptimes.length){
console.log(counter);
console.log(collected);
}
}
async function run () {
for (let snaptime of snaptimes) {
await processSnaptime(snaptime);
}
}
/* to run all of them concurrently:
function run () {
let procs = [];
for (let snaptime of snaptimes) {
procs.push(processSnaptime(snaptime));
}
return Promise.all(procs);
}
*/
client.connect().then(run).then(() => client.end());
client.connect returns a promise and I use then to call run once it's resolved. When that part is over, client.end() can be called safely.
run is an async function, therefore it can use await to make the code more readable. The same goes for processSnaptime.
Of course I can't actually run your code, so I can only hope I didn't make any mistakes.
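One thing the chain above does not do is surface failures, so an optional variation (assuming you want the error logged and the connection closed either way; Node 8 predates Promise.prototype.finally, hence the extra then) could be:
client.connect()
    .then(run)
    .catch(err => console.error('processing failed:', err))
    .then(() => client.end()); // runs whether run() succeeded or failed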

execute variable number of mongo queries in node, return single result

Ooof. Ever have one of those days where you know you're close, but you just can't quite get it?
I am writing a hangman puzzle solver. This is running in a service written with Node/hapi, backed by MongoDB.
So I have a function:
solvePuzzle(puzzle, alreadyCalled);
The args are the puzzle itself, with solved letters as literals, and unsolved as ?s, like so:
?O?N? ?O ?H? ?TO??
and alreadyCalled being simply a list of letters called but incorrect. After some mucking about, a RegEx is created for each word, which is then sent to a function that queries a wordlist stored in mongo for matches.
Everything is functioning as it should, and if I create a dummy wordlist as a simple array, everything works fine and I get a list of matches.
The return format is an array of objects like so: (I use array indices to preserve word order when displaying possible solutions)
matches[0]['?O?N?'] = ['GOING', 'DOING', 'BOING'];
So on to the actual PROBLEM. I split the whole puzzle into words, and run a for loop over them, calling the function which performs the mongo query for each one. Problem is, that function call seems to be returning before the query has actually run. The console logs interspersed throughout seem to bear this theory out.
I tried having the query function return a promise, but that has only served to muddy the waters further. I feel like I'm close but yeah - I dunno. Here was my original non-promise code:
function solvePuzzle(puzzle, called) {
// first build the exclusion match pattern
//console.log('solvePuzzle: building match pattern');
var x = buildMatchPattern(puzzle, called);
// split the puzzle into words
//console.log('solvePuzzle: tokenizing puzzle');
var words = tokenize(puzzle.toUpperCase());
//console.log('solvePuzzle:', words);
var results = [];
for(var i = 0; i < words.length; i++) {
console.log('solvePuzzle: matching ' + words[i]);
results[i] = {};
results[i][words[i]] = matchWord(words[i], x);
}
console.log('solvePuzzle: matches: ', results);
return results;
}
function matchWord(word, exclude) {
var pattern = '^';
var letters = word.toUpperCase().split('');
var matches = new Array();
var query = {};
//console.log('matchWord:', letters);
for(var i = 0; i < letters.length; i++) {
if(letters[i] !== '?') {
pattern += letters[i];
}
else {
pattern += exclude;
}
}
pattern += '$';
var re = new RegExp(pattern);
//console.log('matchWord:', re);
query.word = {"$regex" : re, "$options": "i"};
//console.log("matchWord query:", JSON.stringify(query));
db.wordlist.find(query, function (err, words) {
if(err) {
console.error('error:', err);
}
for(let i = 0; i < words.length; i++) {
if(words[i] !== null) {
console.log('loop:', words[i].word);
matches.push(words[i].word);
}
}
console.log('matchWord:', matches.length);
if(matches.length < 1) {
console.log('matchWord: found no matches');
matches.push('No Matches Found');
}
return matches;
});
}
So my console output was basically:
solvePuzzle: matching ?O?N?
solvePuzzle: matches: [] <---- problem
loop: 'going'
loop: 'doing'
etc etc.
.
.
matchWord: 5 (number of matches found);
So as you can see, the call to matchWord is returning before the actual query is running. So I have never done a hapi service backed by mongo. How can I structure this code so it loops over all the words, queries mongo for each one, and returns a single array as result?
TIA.
In Node, database calls are asynchronous, so you can't use return like this.
You need to use a Promise (native in Node.js).
This code should work:
function solvePuzzle(puzzle, called) {
var results = [];
// first build the exclusion match pattern
var x = buildMatchPattern(puzzle, called);
// split the puzzle into words
var words = tokenize(puzzle.toUpperCase());
// an array to store the words index
var indexes = Array.apply(null, {
length: words.length
}).map(Number.call, Number); // looks like [0, 1, 2, 3, ...]
// create a Promise for each word in words
var promises = indexes.map(function(index) {
return new Promise(function(resolve, reject) {
console.log('solvePuzzle: matching ' + words[index]);
results[index] = {};
var pattern = '^';
var letters = words[index].toUpperCase().split('');
var matches = new Array();
var query = {};
for (var i = 0; i < letters.length; i++) {
if (letters[i] !== '?') {
pattern += letters[i];
} else {
pattern += x; // the exclusion pattern built by buildMatchPattern above
}
}
pattern += '$';
var re = new RegExp(pattern);
query.word = {
"$regex": re,
"$options": "i"
};
db.wordlist.find(query, function(err, wordsRes) {
if (err) {
console.error('error:', err);
return reject(err); // if the request failed, reject and stop here
}
for (let i = 0; i < wordsRes.length; i++) {
if (wordsRes[i] !== null) {
console.log('loop:', wordsRes[i].word);
matches.push(wordsRes[i].word);
}
}
console.log('matchWord:', matches.length);
if (matches.length < 1) {
console.log('matchWord: found no matches');
matches.push('No Matches Found');
}
results[index][words[index]] = matches;
resolve(); // request successfull
});
});
});
// when all promises have resolved, return the results
return Promise.all(promises).then(function() {
console.log('solvePuzzle: matches: ', results);
return results;
});
}
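Because solvePuzzle now hands back a promise rather than the array itself, the caller consumes the result asynchronously as well; the arguments below are just illustrative:
solvePuzzle('?O?N? ?O ?H? ?TO??', ['X', 'Z']).then(function (results) {
    // e.g. results[0]['?O?N?'] -> ['GOING', 'DOING', ...]
    console.log(results);
}).catch(function (err) {
    console.error('puzzle lookup failed:', err);
});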

How do I replace a string in a PDF file using NodeJS?

I have a template PDF file, and I want to replace some marker strings to generate new PDF files and save them. What's the best/simplest way to do this? I don't need to add graphics or anything fancy, just a simple text replacement, so I don't want anything too complicated.
Thanks!
Edit: Just found HummusJS, I'll see if I can make progress and post it here.
I found this question by searching, so I think it deserves the answer. I found the answer by BrighTide here: https://github.com/galkahana/HummusJS/issues/71#issuecomment-275956347
Basically, there is this very powerful Hummus package, which uses a library written in C++ (cross-platform, of course). I think the answer given in that GitHub comment can be turned into a function like this:
var hummus = require('hummus');
/**
* Returns a byteArray string
*
* @param {string} str - input string
*/
function strToByteArray(str) {
var myBuffer = [];
var buffer = new Buffer(str);
for (var i = 0; i < buffer.length; i++) {
myBuffer.push(buffer[i]);
}
return myBuffer;
}
function replaceText(sourceFile, targetFile, pageNumber, findText, replaceText) {
var writer = hummus.createWriterToModify(sourceFile, {
modifiedFilePath: targetFile
});
var sourceParser = writer.createPDFCopyingContextForModifiedFile().getSourceDocumentParser();
var pageObject = sourceParser.parsePage(pageNumber);
var textObjectId = pageObject.getDictionary().toJSObject().Contents.getObjectID();
var textStream = sourceParser.queryDictionaryObject(pageObject.getDictionary(), 'Contents');
//read the original block of text data
var data = [];
var readStream = sourceParser.startReadingFromStream(textStream);
while(readStream.notEnded()){
Array.prototype.push.apply(data, readStream.read(10000));
}
var string = new Buffer(data).toString().replace(findText, replaceText);
//Create and write our new text object
var objectsContext = writer.getObjectsContext();
objectsContext.startModifiedIndirectObject(textObjectId);
var stream = objectsContext.startUnfilteredPDFStream();
stream.getWriteStream().write(strToByteArray(string));
objectsContext.endPDFStream(stream);
objectsContext.endIndirectObject();
writer.end();
}
// replaceText('source.pdf', 'output.pdf', 0, /REPLACEME/g, 'My New Custom Text');
UPDATE:
The version used at the time of writing an example was 1.0.83, things might change recently.
UPDATE 2:
Recently I got an issue with another PDF file which had a different font. For some reason the text got split into small chunks, i.e. string QWERTYUIOPASDFGHJKLZXCVBNM1234567890- got represented as -286(Q)9(WER)24(T)-8(YUIOP)116(ASDF)19(GHJKLZX)15(CVBNM1234567890-)
I had no idea what else to do other than make up a regex. So instead of this one line:
var string = new Buffer(data).toString().replace(findText, replaceText);
I have something like this now:
var string = Buffer.from(data).toString();
var characters = REPLACE_ME;
var match = [];
for (var a = 0; a < characters.length; a++) {
match.push('(-?[0-9]+)?(\\()?' + characters[a] + '(\\))?');
}
string = string.replace(new RegExp(match.join('')), function(m, m1) {
// m1 holds the first item which is a space
return m1 + '( ' + REPLACE_WITH_THIS + ')';
});
Building on Alex's (and others') solution, I noticed an issue where some non-text data were becoming corrupted. I tracked this down to the PDF text being encoded/decoded as utf-8 instead of as a binary string. Anyway, here's a modified solution that:
Avoids corrupting non-text data
Uses streams instead of files
Allows multiple patterns/replacements
Uses the MuhammaraJS package which is a maintained fork of HummusJS (should be able to swap in HummusJS just fine as well)
Is written in TypeScript (feel free to remove the types for JS)
import muhammara from "muhammara";
interface Pattern {
searchValue: RegExp | string;
replaceValue: string;
}
/**
* Modify a PDF by replacing text in it
*/
const modifyPdf = ({
sourceStream,
targetStream,
patterns,
}: {
sourceStream: muhammara.ReadStream;
targetStream: muhammara.WriteStream;
patterns: Pattern[];
}): void => {
const modPdfWriter = muhammara.createWriterToModify(sourceStream, targetStream, { compress: false });
const numPages = modPdfWriter
.createPDFCopyingContextForModifiedFile()
.getSourceDocumentParser()
.getPagesCount();
for (let page = 0; page < numPages; page++) {
const copyingContext = modPdfWriter.createPDFCopyingContextForModifiedFile();
const objectsContext = modPdfWriter.getObjectsContext();
const pageObject = copyingContext.getSourceDocumentParser().parsePage(page);
const textStream = copyingContext
.getSourceDocumentParser()
.queryDictionaryObject(pageObject.getDictionary(), "Contents");
const textObjectID = pageObject.getDictionary().toJSObject().Contents.getObjectID();
let data: number[] = [];
const readStream = copyingContext.getSourceDocumentParser().startReadingFromStream(textStream);
while (readStream.notEnded()) {
const readData = readStream.read(10000);
data = data.concat(readData);
}
const pdfPageAsString = Buffer.from(data).toString("binary"); // key change 1
let modifiedPdfPageAsString = pdfPageAsString;
for (const pattern of patterns) {
modifiedPdfPageAsString = modifiedPdfPageAsString.replaceAll(pattern.searchValue, pattern.replaceValue);
}
// Create what will become our new text object
objectsContext.startModifiedIndirectObject(textObjectID);
const stream = objectsContext.startUnfilteredPDFStream();
stream.getWriteStream().write(strToByteArray(modifiedPdfPageAsString));
objectsContext.endPDFStream(stream);
objectsContext.endIndirectObject();
}
modPdfWriter.end();
};
/**
* Create a byte array from a string, as muhammara expects
*/
const strToByteArray = (str: string): number[] => {
const myBuffer = [];
const buffer = Buffer.from(str, "binary"); // key change 2
for (let i = 0; i < buffer.length; i++) {
myBuffer.push(buffer[i]);
}
return myBuffer;
};
And then to use it:
/**
* Fill a PDF with template data
*/
export const fillPdf = async (sourceBuffer: Buffer): Promise<Buffer> => {
const sourceStream = new muhammara.PDFRStreamForBuffer(sourceBuffer);
const targetStream = new muhammara.PDFWStreamForBuffer();
modifyPdf({
sourceStream,
targetStream,
patterns: [{ searchValue: "home", replaceValue: "emoh" }], // TODO use actual patterns
});
return targetStream.buffer;
};
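And a small end-to-end sketch in plain JavaScript (the file names are placeholders) that reads a template from disk, runs it through fillPdf, and writes the result back out:
const { promises: fsp } = require('fs');

async function run() {
    const template = await fsp.readFile('template.pdf'); // placeholder input path
    const filled = await fillPdf(template);              // fillPdf from the snippet above
    await fsp.writeFile('output.pdf', filled);           // placeholder output path
}

run().catch(err => console.error('PDF fill failed:', err));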
There is another Node.js package, asposepdfcloud, the Aspose.PDF Cloud SDK for Node.js. You can use it to replace text in your PDF document conveniently. Its free plan offers 150 credits monthly. Here is sample code to replace text in a PDF document; don't forget to install asposepdfcloud first.
const { PdfApi } = require("asposepdfcloud");
const { TextReplaceListRequest }= require("asposepdfcloud/src/models/textReplaceListRequest");
const { TextReplace }= require("asposepdfcloud/src/models/textReplace");
// Get App key and App SID from https://aspose.cloud
pdfApi = new PdfApi("xxxxx-xxxxx-xxxx-xxxxxxxxxxx", "xxxxxxxxxxxxxxxxxxxxxb");
var fs = require('fs');
const name = "02_pages.pdf";
const remoteTempFolder = "Temp";
//const localTestDataFolder = "C:\\Temp";
//const path = remoteTempFolder + "\\" + name;
//var data = fs.readFileSync(localTestDataFolder + "\\" + name);
const textReplace= new TextReplace();
textReplace.oldValue= "origami";
textReplace.newValue= "aspose";
textReplace.regex= false;
const textReplace1= new TextReplace();
textReplace1.oldValue= "candy";
textReplace1.newValue= "biscuit";
textReplace1.regex= false;
const trr = new TextReplaceListRequest();
trr.textReplaces = [textReplace,textReplace1];
// Upload File
//pdfApi.uploadFile(path, data).then((result) => {
// console.log("Uploaded File");
// }).catch(function(err) {
// Deal with an error
// console.log(err);
//});
// Replace text
pdfApi.postDocumentTextReplace(name, trr, null, remoteTempFolder).then((result) => {
console.log(result.body.code);
}).catch(function(err) {
// Deal with an error
console.log(err);
});
P.S.: I'm a developer evangelist at Aspose.

How to get an object that was changed in angularjs?

I use this function to watch an array of objects for changes:
$scope.$watch('Data', function (newVal) { /*...*/ }, true);
How can I get an object in which property has been changed so that I can push it in an array?
For example:
var myApp = angular.module("myApp", []);
myApp.factory("Data", function(){
var Data = [{id:1, property: "Random"}, {id:2, property: "Random again"}];
return Data;
});
var myBigArray = [];
function tableCtrl($scope, Data){
$scope.TheData = Data;
$scope.$watch("TheData", function() {
//Here an object should be pushed
myBigArray.push(">>Object in which property has been changed <<<");
}, true);
}
I don't see a way currently in Angular to get the changed object... I suspect you might need to traverse the new array and try to find the differences with the old array...
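For instance, a rough sketch of that comparison inside the watcher, using the oldVal argument that $watch also passes, the myBigArray from the question, and assuming the objects can be matched up by id:
$scope.$watch('TheData', function (newVal, oldVal) {
    if (!oldVal || newVal === oldVal) return; // skip the initial call
    angular.forEach(newVal, function (item) {
        var previous = oldVal.filter(function (old) { return old.id === item.id; })[0];
        // collect items whose properties differ from their previous copy
        if (previous && !angular.equals(item, previous)) {
            myBigArray.push(item);
        }
    });
}, true);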
Edit: Note that the solution below turns out to be bad practice, as it adds a lot of watchers, which is something you do not want because of the performance penalty.
=======
I eventually came up with this solution:
items.query(function (result) {
_(result).each(function (item, i) {
$scope.items.push(item);
$scope.$watch('items[' + i + ']' , function(){
console.log(item); // This is the item that changed.
}, true);
});
});
There is still no option like this for $watch, but you can use a jQuery plugin for that: http://archive.plugins.jquery.com/project/jquery-diff
I implemented undo/redo with AngularJS using $watch; maybe this can help:
//History Manager Factory
.factory('HistoryManager', function () {
return function(scope) {
this.container = Array();
this.index = -1;
this.lock = false;
//Insert new step into array of steps
this.pushDo = function() {
//we make sure that we have real changes by converting to json,
//and getting rid of all hash changes
if(this.container.length == 0 || (angular.toJson(scope.widgetSlider) != angular.toJson(this.container[this.index][0]))) {
//check that the current change didn't come from an "undo"
if(this.lock) {
return;
}
//Cutting array, from current index, because of new change added
if(this.index < this.container.length-1) {
this.container = this.container.slice(0, this.index+1);
}
var currentStepSlider = angular.copy(scope.widgetSlider);
var selectedWidgetIndex = scope.widgetSlider.widgets.indexOf(scope.widgetCurrent);
//Initialising index, because of new "Do" added
this.index = this.container.length;
this.container.push([currentStepSlider, selectedWidgetIndex]);
if (this.onDo) {
this.onDo();
}
}
}
//Upon undo returns previous do
this.undo = function() {
this.lock = true;
if(this.index>0){
this.index--;
scope.widgetSlider = angular.copy(this.container[this.index][0]);
var selectedWidgetIndex = this.container[this.index][1];
scope.widgetCurrent = scope.widgetSlider.widgets[selectedWidgetIndex];
}
this.lock = false;
}
//Upon redo returns next do
this.redo = function() {
if(this.index < this.container.length-1) {
this.index++;
scope.widgetSlider = angular.copy(this.container[this.index][0]);
var selectedWidgetIndex = this.container[this.index][1];
scope.widgetCurrent = scope.widgetSlider.widgets[selectedWidgetIndex];
}
}
}
})
;
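A hypothetical usage sketch (the module reference, controller name, and undo/redo bindings are invented for illustration): create the manager with the scope and record a step from the same deep $watch the factory relies on:
myApp.controller('EditorCtrl', function ($scope, HistoryManager) {
    var history = new HistoryManager($scope);

    // record a history step whenever the watched slider model changes
    $scope.$watch('widgetSlider', function () {
        history.pushDo();
    }, true);

    $scope.undo = function () { history.undo(); };
    $scope.redo = function () { history.redo(); };
});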
