I receive the below object in my service but when I am parsing this object I get the error
SyntaxError: Unexpected token : in JSON at position 603069
Code:
// Accumulate raw request-body chunks, then try to turn the
// "key=value&key=value"-shaped payload into JSON by string replacement.
// NOTE(review): this snippet is truncated as posted — the "end" handler is
// never closed.
var data1 = [];
// Process a chunk of data. This may be called multiple times.
req
.on("data", function(chunk) {
// Append to buffer
data1.push(chunk);
})
.on("end", function() {
var buffer = Buffer.concat(data1);
console.info("Buffer Data Request Body: " + buffer);
buffer = buffer.toString("utf8");
var partsOfStr = buffer.split("&");
//This line gives error
// NOTE(review): /=/g also rewrites the "=" padding characters inside the
// base64 "data" value, corrupting the text before JSON.parse — which is
// exactly the failure the accepted answer below explains.
var obj = JSON.parse(
decodeURI(buffer.replace(/&/g, '","').replace(/=/g, '":"'))
);
Object:
{
"type" : "NewThreadVoice",
"incidentId": "398115",
"channel" : "Mobile",
"data": a huge base 64 string
"fileName": "1.aac",
"contentType" : "aac",
"contactId" : "954344"
}
When I reduce the base64 (value of data) to half it works.
A base64 string does not necessarily contain only one "=" character. This character is used for padding (for more information, see "Why does a base64 encoded string have an = sign at the end").
For example, the codification of home in base64 is aG9tZQ==. Using your code ( .replace(/=/g, '":"') ), this will be transformed into aG9tZQ":"":"
You should use .replace(/=+/g, '":"') for replacing all consecutive = chars.
Related
I am trying to encode a text to Base64 using NodeJS, and then while getting the data I am decoding it back from Base64. Now I need to change the data into JSON so I can fill the relevant fields, but it's not converting to JSON.
Here is my code:
// Read the settings file, round-trip it through base64, flatten whitespace,
// and send the raw text back. NOTE(review): `base64` and `res` are defined
// elsewhere in the poster's code and are not visible in this excerpt.
fs.readFile('./setting.txt', 'utf8', function(err, data) {
if (err) throw err;
var encodedData = base64.encode(data);
// Newlines are stripped first, then all remaining whitespace is normalized
// to single spaces — the KEY=value pairs end up on one line.
var decoded = base64.decode(encodedData).replace(/\n/g, '').replace(/\s/g, " ");
// The decoded text is plain "KEY=value" lines, not JSON — JSON.parse on it
// (attempted later by the poster) cannot succeed without restructuring.
return res.status(200).send(decoded);
});
In setting.txt I have the following text:
LENGTH=1076
CRC16=28653
OFFSET=37
MEASUREMENT_SAMPLING_RATE=4
MEASUREMENT_RANGE=1
MEASUREMENT_TRACE_LENGTH=16384
MEASUREMENT_PRETRIGGER_LENGTH=0
MEASUREMENT_UNIT=2
MEASUREMENT_OFFSET_REMOVER=1
This decodes the result properly, but when I use JSON.parse(JSON.stringify(decoded)) it's not converting to JSON.
Can someone help me with it.
Try below snippet
// Round-trip a JS object through base64: stringify → base64-encode → decode
// back to the JSON text.
// Fixes vs. the original: `new Buffer(...)` is deprecated (use Buffer.from),
// and the encodings were mismatched — "base64" was passed as the *input*
// encoding of a plain JSON string, and the second line did an ascii→ascii
// no-op instead of decoding the base64.
let base64Json = Buffer.from(JSON.stringify({}), 'utf8').toString('base64');
let json = Buffer.from(base64Json, 'base64').toString('utf8');
What does base-64 encoding/decoding have to do with mapping a list of tuples (key/value pairs) like this:
LENGTH=1076
CRC16=28653
OFFSET=37
MEASUREMENT_SAMPLING_RATE=4
MEASUREMENT_RANGE=1
MEASUREMENT_TRACE_LENGTH=16384
MEASUREMENT_PRETRIGGER_LENGTH=0
MEASUREMENT_UNIT=2
MEASUREMENT_OFFSET_REMOVER=1
into JSON?
If you want to "turn it (the above) into JSON", you need to:
Decide on what its JSON representation should be, then
Parse it into its component bits, convert that into an appropriate data struct, and then
use JSON.stringify() to convert it to JSON.
For instance:
/**
 * Convert a "KEY=value"-per-line document into a JSON string.
 * Values that parse as numbers become JSON numbers; everything else stays
 * a string.
 *
 * Fixes vs. the original: `const n = Number(n)` was a self-reference and
 * threw a ReferenceError (temporal dead zone) — it must read `v`; and
 * `n === NaN` is always false (NaN never equals itself) — use
 * Number.isNaN(n) instead.
 *
 * @param {string} document - text with one KEY=value pair per line
 * @returns {string} JSON object mapping each key to its (possibly numeric) value
 */
function jsonify( document ) {
  const tuples = document
    .split( /\n|\r\n?/ )
    // split('=', 2) keeps at most the first value segment; values that
    // themselves contain '=' are truncated — acceptable for this format.
    .map( x => x.split( '=', 2 ) )
    .map( ([k, v]) => {
      const n = Number(v);
      return [ k, Number.isNaN(n) ? v : n ];
    });
  const obj = Object.fromEntries(tuples);
  const json = JSON.stringify(obj);
  return json;
}
I am writing a script that is designed to take in an array and replace a designated row in a csv(buffer) then output a csv(buffer) in nodejs. However, I have found that whenever I have the following combination of characters: ",\n", it is doubling the comma. I have tried using \r instead or \n, but the system I am importing the csv has issues with the \r. I also found that by adding an extra whitespace: ", \n it prevents the double comma, but again, the system I'm importing the final result into won't accept the extra space. Does anyone know what is causing the extra comma and/or a way to not get the extra comma?
Script that replaces CSV row:
// Node-RED-style handler: replaces one row of a CSV (given as a file path or
// Buffer) with a caller-supplied row, then emits the rebuilt CSV as a Buffer.
// External names (node, msg, config, RED, fs, Stream, csv, _) come from the
// surrounding module and are not visible in this excerpt.
node.on('input', function(msg) {
node.rowNumber = msg.rowNumber || config.rowNumber || 0; //Row being replaced.
node.newRow = msg.newRow || config.newRow; //New Row Array or Comma Separated String
var payload = msg.file || config.file || RED.util.getMessageProperty(msg, "payload"); //File path or buffer.
if (!Buffer.isBuffer(payload)) {
// Non-Buffer payload is treated as a file path; normalize the first
// backslash only (replace with a string pattern is single-occurrence).
payload = payload.replace('\\', '/');
payload = fs.readFileSync(payload);
}
// NOTE(review): reads this.newRow but writes node.newRow — presumably the
// same object inside a Node-RED node, but worth confirming.
if (!Array.isArray(this.newRow)) {
node.newRow = node.newRow.split(',');
}
var dataArray = [];
var csvArr = [];
// Feed the buffer through the csv parser via a minimal readable stream.
const readable = new Stream.Readable()
readable._read = () => {}
readable.push(payload)
readable.push(null)
readable.pipe(csv())
.on('data', function (data) {
dataArray.push(data);
})
.on('end', function(){
// Rebuild rows: header row first, then the values of each parsed record.
csvArr.push(Object.keys(dataArray[0]));
dataArray.forEach((item, i) => {
csvArr.push(_.values(item));
});
if (node.rowNumber == 0) {
csvArr.push(node.newRow);
}
else {
// NOTE(review): with the header at index 0, rowNumber 1 replaces the
// header itself — confirm the intended 1-based data-row indexing.
csvArr.splice(node.rowNumber - 1, 1, node.newRow);
}
// NOTE(review): join('\n') stringifies each row array, joining cells with
// ',' WITHOUT CSV quoting/escaping — a cell whose content ends at a
// quoted comma yields the doubled-comma output described above. A proper
// CSV serializer (quoting cells containing , " or newlines) would avoid it.
var finalCSV = csvArr.join('\n');
msg.payload = Buffer.from(finalCSV);
node.send(msg); //Returns the msg object
});
});
Input:
[
`""{
""""actions"""":{
""""validation"""":[
],
""""reconciliation"""":[
]
},
""""enforce_all_required_fields"""":"""""""",
""""form_history"""":""""12c2acda35980131f98acf2a39c1aafe"""",
""""form_id"""":""""228"""",
""""options"""":[
],
""""record_action"""":""""both"""",
""""secondary_form_history"""":"""""""",
""""secondary_form_id"""":""""0"""",
""""secondary_form_name"""":"""""""",
""""secondary_is_tier1_form"""":"""""""",
""""selected_columns"""":[
""""field_9326"""",
""""field_3742_first"""",
""""field_3742_last"""",
""""field_9325"""",
""""field_9327"""",
],
""""skip_headers"""":"""""""",
""""target_match_type"""":""""""""
}""`
]
Undesired output:
"{
""actions"":{
""validation"":[
],
""reconciliation"":[
]
},
""enforce_all_required_fields"":"""",,
""form_history"":""12c2acda35980131f98acf2a39c1aafe"",,
""form_id"":""228"",,
""options"":[
],
""record_action"":""both"",,
""secondary_form_history"":"""",,
""secondary_form_id"":""0"",,
""secondary_form_name"":"""",,
""secondary_is_tier1_form"":"""",,
""selected_columns"":[
""field_9326"",,
""field_3742_first"",,
""field_3742_last"",,
""field_9325"",,
""field_9327"",,
],
""skip_headers"":"""",,
""target_match_type"":""""
}"
Notice the double commas?
Here is an example of some JSON I am working with:
{"name":"John","attributes":"{\"key\":\"value\"}"}
Here it is again, in a more readable format:
{
"name": "John",
"attributes": "{\"key\":\"value\"}"
}
Notice above that the doublequotes surrounding key and value are escaped. That's necessary, and is valid JSON (checked at jsonlint.com).
I am trying to get the value of "name", but the escaped doublequotes are causing an error.
What am I doing wrong in my node.js code?
// Fix: inside a JS string literal, \" is consumed by the *JavaScript* parser
// and reaches JSON.parse as a bare quote, producing invalid JSON ("Unexpected
// token k"). The backslash itself must be escaped (\\") so the JSON text
// contains the \" escape that makes the nested quotes valid.
var theString = '{"name":"John","attributes":"{\\"key\\":\\"value\\"}"}';
var theJSON = JSON.parse(theString);
var theName = theJSON.name;
console.log("name = " + theName);
Below is the output. The error occurs on the 2nd line of my code, where I "JSON.parse()" the string. JSON.parse seems to be removing the backslashes, turning valid JSON into invalid JSON.
undefined:1
{"name":"John","attributes":"{"key":"value"}"}
^
SyntaxError: Unexpected token k in JSON at position 31
Since that part of the data is JSON-within-JSON, you'd parse the JSON, then parse the JSON on the attributes property:
// JSON-within-JSON: parse the outer document, then parse the JSON-encoded
// string stored in its `attributes` property, replacing it in place.
// (`json` is the raw JSON text, supplied by the surrounding code.)
const obj = JSON.parse(json);
obj.attributes = JSON.parse(obj.attributes);
Live Example:
// Browser demo: the JSON text lives in the <pre id="json"> element below;
// parse the outer object, then parse the nested attributes string.
const json = document.getElementById("json").textContent;
const obj = JSON.parse(json);
obj.attributes = JSON.parse(obj.attributes);
console.log(obj);
<pre id="json">{
"name": "John",
"attributes": "{\"key\":\"value\"}"
}</pre>
I parse the xml using the following code
// Convert an XML file with the third-party xml-js package. NOTE: xml2json
// returns a JSON *string*, not an object — see the answers below.
var convert = require('xml-js');
var xml = require('fs').readFileSync('./2B2DE7DD-FD11-4F2C-AF0D-A244E5977CBA.xml', 'utf8');
result = convert.xml2json(xml, { spaces: 4});
The result throws the following JSON
{
"declaration": {
"attributes": {
"version": "1.0",
"encoding": "utf-8"
}
}
}
However, if I try accessing "declaration" using result["declaration"], the console returns undefined.
Should i use another parser or is there something wrong with getting the value.
Please use xml2js instead of xml2json if you want it return object.
// xml-js offers both: xml2js yields a JS object directly; xml2json a string.
result = convert.xml2js(xml, options); // to convert xml text to javascript object
result = convert.xml2json(xml, options); // to convert xml text to json text
The data type of result is String, not JavaScript object. That is, the convert.xml2json(xml, { spaces: 4}); statement will return a JSON String, not JS object.
To access declaration, you need to parse the JSON string to object:
// xml2json returns a JSON string; JSON.parse it before accessing properties
// such as result["declaration"].
var convert = require('xml-js');
var xml = require('fs').readFileSync('./2B2DE7DD-FD11-4F2C-AF0D-A244E5977CBA.xml', 'utf8');
result = convert.xml2json(xml, { spaces: 4});
result = JSON.parse(result);
When I parse this little piece of JSON:
{ "value" : 9223372036854775807 }
This is what I get:
{ hello: 9223372036854776000 }
Is there any way to parse it properly?
Not with built-in JSON.parse. You'll need to parse it manually and treat values as string (if you want to do arithmetics with them there is bignumber.js) You can use Douglas Crockford JSON.js library as a base for your parser.
EDIT2 ( 7 years after original answer ) - it might soon be possible to solve this using standard JSON api. Have a look at this TC39 proposal to add access to source string to a reviver function - https://github.com/tc39/proposal-json-parse-with-source
EDIT1: I created a package for you :)
// Demo: compare Node's built-in JSON (which rounds integers beyond
// Number.MAX_SAFE_INTEGER) against the json-bigint package, which preserves
// the digits exactly. Fixes the "bult-in" typo in the log message.
var JSONbig = require('json-bigint');

var json = '{ "value" : 9223372036854775807, "v2": 123 }';
console.log('Input:', json);
console.log('');

console.log('node.js built-in JSON:')
var r = JSON.parse(json);
// 9223372036854775807 exceeds 2^53-1, so built-in parsing rounds it.
console.log('JSON.parse(input).value : ', r.value.toString());
console.log('JSON.stringify(JSON.parse(input)):', JSON.stringify(r));

console.log('\n\nbig number JSON:');
var r1 = JSONbig.parse(json);
console.log('JSON.parse(input).value : ', r1.value.toString());
console.log('JSON.stringify(JSON.parse(input)):', JSONbig.stringify(r1));
Output:
Input: { "value" : 9223372036854775807, "v2": 123 }
node.js built-in JSON:
JSON.parse(input).value : 9223372036854776000
JSON.stringify(JSON.parse(input)): {"value":9223372036854776000,"v2":123}
big number JSON:
JSON.parse(input).value : 9223372036854775807
JSON.stringify(JSON.parse(input)): {"value":9223372036854775807,"v2":123}
After searching something more clean - and finding only libs like jsonbigint, I just wrote my own solution. Is not the best, but it solves my problem. For those that are using Axios you can use it on transformResponse callback (this was my original problem - Axios parses the JSON and all bigInts cames wrong),
// Recover exact big-integer digits from the raw JSON text inside a reviver:
// when a parsed number has already lost precision, re-read its digits with a
// regex against the source string and return them as a string (or BigInt).
// Fix vs. the original: the pattern required a trailing "," after the value,
// so .match() returned null (TypeError) whenever the big number was the LAST
// property; match the digits directly instead, tolerating whitespace.
const jsonStr = `{"myBigInt":6028792033986383748, "someStr":"hello guys", "someNumber":123}`
const result = JSON.parse(jsonStr, (key, value) => {
  if (typeof value === 'number' && !Number.isSafeInteger(value)) {
    // Pull the original digit run that follows `"key":` in the source text.
    let strBig = jsonStr.match(new RegExp(`"${key}"\\s*:\\s*(-?\\d+)`))[1]
    return strBig //should be BigInt(strBig) - BigInt function is not working in this snippet
  }
  return value
})
console.log({
  "original": JSON.parse(jsonStr),
  "handled": result
})
A regular expression is difficult to get right for all cases.
Here is my attempt, but all I'm giving you is some extra test cases, not the solution. Likely you will want to replace a very specific attribute, and a more generic JSON parser (that handles separating out the properties, but leaves the numeric properties as strings) and then you can wrap that specific long number in quotes before continuing to parse into a javascript object.
// Attempt: wrap any long integer that follows a ':' in quotes so JSON.parse
// keeps the digits as a string; BigInt() can then convert losslessly.
let str = '{ "value" : -9223372036854775807, "value1" : "100", "strWNum": "Hi world: 42 is the answer", "arrayOfStrWNum": [":42, again.", "SOIs#1"], "arrayOfNum": [100,100,-9223372036854775807, 100, 42, 0, -1, 0.003] }'
// NOTE(review): the pattern only matches numbers preceded by ':', so big
// integers inside arrays (preceded by ',' or '[') still lose precision —
// the author presents this as extra test cases, not a complete solution.
let data = JSON.parse(str.replace(/([:][\s]*)(-?\d{1,90})([\s]*[\r\n,\}])/g, '$1"$2"$3'));
console.log(BigInt(data.value).toString());
console.log(data);
you can use this code for change big numbers to strings and later use BigInt(data.value)
// Quote every standalone digit run (bounded by non-quote, non-digit chars)
// so JSON.parse keeps the digits as strings; convert with BigInt(data.value).
let str = '{ "value" : -9223372036854775807, "value1" : "100" }'
// NOTE(review): this also quotes small numbers and digit runs inside string
// values — confirm against the real payloads before using it broadly.
let data = JSON.parse(str.replace(/([^"^\d])(-?\d{1,90})([^"^\d])/g, '$1"$2"$3'));
console.log(BigInt(data.value).toString());
console.log(data);