Say there are arrays like:
const arr1 = [
{ "id": "1", "type": "sales" },
{ "id": "2", "type": "finance" }
]
const arr2 = [
{ "type": "sales" },
{ "id": "2", "type": "finance" }
]
const arr3 = [
{ "id": "1", "type": "sales" },
{ "type": "sales" },
{ "type": "finance" }
]
As you can see, id is optional. I need to merge the arrays in such a way that uniqueness is based on id if present, or else on the rest of the object.
For example, here the merged array would be:
[
{ "id": "1", "type": "sales" },
{ "type": "sales" },
{ "id": "2", "type": "finance" },
{ "type": "finance" }
]
Lodash has _.unionBy, but it can't express this optional uniqueness:
const result = _.unionBy(arr1, arr2, arr3, 'id')
Probably I have to iterate through each array, but I was wondering if there is a simpler alternative.
Instead of _.unionBy you can use _.unionWith, which accepts a custom comparator. The comparison logic is:
Compare by ID if both items have an ID.
Compare by type if both items do not have an ID.
Consider them different if one has an ID and the other does not.
const arr1 = [
{ "id": "1", "type": "sales" },
{ "id": "2", "type": "finance" }
]
const arr2 = [
{ "type": "sales" },
{ "id": "2", "type": "finance" }
]
const arr3 = [
{ "id": "1", "type": "sales" },
{ "type": "sales" },
{ "type": "finance" }
]
const result = _.unionWith(arr1, arr2, arr3, (item1, item2) => {
const item1HasId = _.has(item1, "id");
const item2HasId = _.has(item2, "id");
if (item1HasId && item2HasId) //if both have ID...
return item1.id === item2.id; // ...compare by ID
if (!item1HasId && !item2HasId) //if neither has ID...
return item1.type === item2.type; // ...compare by type
return false; // otherwise not equal
});
console.log(result);
<script src="https://cdn.jsdelivr.net/npm/lodash@4.17.21/lodash.min.js"></script>
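If the objects can carry more properties than type, here is a sketch of a more general comparator that falls back to deep equality of the whole object when neither item has an id (assuming lodash's _.isEqual semantics match your "entire rest of the object" requirement):
const result = _.unionWith(arr1, arr2, arr3, (item1, item2) => {
  const item1HasId = _.has(item1, "id");
  const item2HasId = _.has(item2, "id");
  if (item1HasId && item2HasId)
    return item1.id === item2.id; // compare by ID
  if (!item1HasId && !item2HasId)
    return _.isEqual(item1, item2); // deep-compare the whole object
  return false; // one has an ID, the other does not
});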
I want to duplicate a document from a MongoDB model in NodeJS; below is the structure:
{
"_id": "62fe22f4b3c0fabfd1222d40", // this needs to be replaced in duplicated document
"id": 1, // this is auto increment field, needs to be generated new auto increment field
"type": "donut",
"name": "Cake",
"ppu": 0.55,
"batters":
{
"batter":
[
{ "id": "1001", "type": "Regular" },
{ "id": "1002", "type": "Chocolate" },
{ "id": "1003", "type": "Blueberry" },
{ "id": "1004", "type": "Devil's Food" }
]
},
"topping":
[
{ "id": "5001", "type": "None" },
{ "id": "5002", "type": "Glazed" },
{ "id": "5005", "type": "Sugar" },
{ "id": "5007", "type": "Powdered Sugar" },
{ "id": "5006", "type": "Chocolate with Sprinkles" },
{ "id": "5003", "type": "Chocolate" },
{ "id": "5004", "type": "Maple" }
]
}
I referred to the Duplicate a document in MongoDB using a new _id post; however, I am not sure how to reset the auto-increment id.
Assuming you use a NodeJS application, you could do the following (provided your id auto-increment is handled when the new document is created):
const { _id, id, __v, ...newMyObj } = await MyModel.findOne({ id }).lean();
await MyModel.create(newMyObj);
I was able to fix it with the code below:
const myDoc = await myModel.findOne({ id }).exec();
if (myDoc) {
const myDocObj = myDoc.toObject();
delete myDocObj._id;
delete myDocObj.id;
// ...continue deleting more unwanted props
return new myModel(myDocObj);
}
return null;
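For regenerating the auto-increment id itself, a common pattern is a separate counters collection. Below is a minimal sketch, assuming a hypothetical Counter model with a seq field; your auto-increment setup may differ:
// hypothetical counters collection, one document per sequence: { _id, seq }
const counter = await Counter.findOneAndUpdate(
  { _id: "myModel" },          // name of the sequence (hypothetical)
  { $inc: { seq: 1 } },        // atomically bump the counter
  { new: true, upsert: true }  // create on first use, return the updated doc
);
myDocObj.id = counter.seq;     // assign the freshly generated id before saving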
I've got a JSON array (inside the result key of the response below):
{
"jsonrpc": "2.0",
"id": 1,
"result": [
{
"_id": "6125496b64ec43ef984a48ad",
"imdb_id": "tt10872600",
"name": "Spider-Man: No Way Home",
"type": "movie",
"year": "2021"
},
{
"_id": "61f3e33b64ec43ef98553e07",
"imdb_id": "tt13634480",
"name": "The Ice Age Adventures of Buck Wild",
"type": "movie",
"year": "2022"
},
{
"_id": "5c4f384dd582b25756275494",
"imdb_id": "tt9032400",
"name": "Eternals",
"type": "movie",
"year": "2021"
}
]
}
I want to make a function to search through this array by imdb_id or name (not both).
How would I go about doing this?
My poor attempt:
const json = require('./test.json');
function Search(file){
for (let i = 0; i < file.length; i++) {
console.log(file[i].results[i].imdb_id);
}
}
Search(json);
At its simplest, you could leverage the built-in Array.prototype.filter, something like this:
const idOrNameEquals = searchStr => item => item.imdb_id === searchStr || item.name === searchStr
const json = require('./test.json')
const foundItems = json.result.filter(idOrNameEquals('Eternals'))
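The same predicate then also covers lookups by imdb_id, for example:
const byId = json.result.filter(idOrNameEquals('tt9032400')) // the Eternals entry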
Try using filter:
function SearchJson(json, imdb_id, name) {
  return json.result.filter(function(item) {
    return imdb_id == null ? item.name == name : item.imdb_id == imdb_id;
  });
}
Test:
var result= SearchJson(json,null, "Eternals");
Result:
[
{
"_id": "5c4f384dd582b25756275494",
"imdb_id": "tt9032400",
"name": "Eternals",
"type": "movie",
"year": "2021"
}
]
I am working on a NodeJS-based application and am trying to figure out an easy method to get my expected result. I tried a few ways, but after reframing the JSON content I was not able to add the key state back.
My JSON content is in this format:
[
{
"item": {
"property1": "aa",
"property2": "22"
},
"state": {
"item": {
"state": "AS",
"country": "US",
"reason": "1"
}
},
"province": {
"item": {
"name": "AS",
"method": "table",
"conf": "3"
}
}
},
...
]
The requirement is to format state as below:
{
...
"state": [
{
"item": {
"state": "AS",
"country": "US",
"reason": "1"
}
}
],
...
}
I want to format the state content: it is expected to be an array. After formatting state, the original JSON content needs to be updated with the new state content. The issue is that after formatting I am not able to add the key state back to the original JSON content. The same is expected for province.
Thanks in advance.
Below is the code I had written, but the expected result is not obtained. Can anyone help me identify where I have gone wrong?
var testJson = JSON.parse(jsonData);
var tempFinal = [];
for(let i =0; i<testJson.length; i++) {
let itemData = {};
itemData.item = testJson[i].item;
tempFinal.push(itemData);
var state = [];
state.push( testJson[i].state);
let stateData = {};
stateData.state = state;
tempFinal.push(stateData);
var service = [];
service.push( testJson[i].service);
let serviceData = {};
serviceData.service = service;
tempFinal.push(serviceData);
}
The result obtained is
[
{
"item": {
"protocol": "tcp",
"portid": "22"
}
},
{
"state": [
{
"item": {
"state": "filtered",
"reason": "no-response",
"reason_ttl": "0"
}
}
]
},
{
"service": [
{
"item": {
"name": "ssh",
"method": "table",
"conf": "3"
}
}
]
}
]
Expected is
[
{
"item": {
"a": "a",
"a": "a"
},
"state": [
{
"item": {
"c": "b",
"e": "f",
"reason_ttl": "0"
}
}
],
"service": [
{
"item": {
"name": "q",
"method": "table",
"d": "e"
}
}
]
}
]
You can try this one:
// jsonData is your original JSON data
jsonData.forEach(obj => {
  // wrap the existing state object in an array
  obj.state = [obj.state];
});
// finally, print it
console.log(jsonData);
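If province (or service, as in your expected output) needs the same treatment, the same pattern applies. A sketch, assuming the key exists on every element:
jsonData.forEach(obj => {
  obj.province = [obj.province]; // wrap province the same way as state
});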
I have an Avro file which contains records; their fields (which have union types) contain other records, which in turn have fields with union types. Some of these types have a property connect.name, which I need to check for equality with io.debezium.time.NanoTimestamp. I'm doing this in Apache NiFi using an ExecuteScript processor with a Groovy script.
A shortened example of the Avro schema:
{
"type": "record",
"name": "Envelope",
"namespace": "data.none.bpm.pruitsmdb_nautilus_dbo.fast_frequency_tables.avro.test",
"fields": [
{
"name": "before",
"type": [
"null",
{
"type": "record",
"name": "Value",
"fields": [
{
"name": "Id",
"type": {
"type": "string",
"connect.parameters": {
"__debezium.source.column.type": "UNIQUEIDENTIFIER",
"__debezium.source.column.length": "36"
}
}
},
{
"name": "CreatedOn",
"type": [
"null",
{
"type": "long",
"connect.version": 1,
"connect.parameters": {
"__debezium.source.column.type": "DATETIME2",
"__debezium.source.column.length": "27",
"__debezium.source.column.scale": "7"
},
"connect.name": "io.debezium.time.NanoTimestamp"
}
],
"default": null
},
{
"name": "CreatedById",
"type": [
"null",
{
"type": "string",
"connect.parameters": {
"__debezium.source.column.type": "UNIQUEIDENTIFIER",
"__debezium.source.column.length": "36"
}
}
],
"default": null
}
],
"connect.name": "data.none.bpm.pruitsmdb_nautilus_dbo.fast_frequency_tables.avro.test.Value"
}
],
"default": null
},
{
"name": "after",
"type": [
"null",
"Value"
],
"default": null
},
{
"name": "source",
"type": {
"type": "record",
"name": "Source",
"namespace": "io.debezium.connector.sqlserver",
"fields": [
{
"name": "version",
"type": "string"
},
{
"name": "ts_ms",
"type": "long"
},
{
"name": "snapshot",
"type": [
{
"type": "string",
"connect.version": 1,
"connect.parameters": {
"allowed": "true,last,false"
},
"connect.default": "false",
"connect.name": "io.debezium.data.Enum"
},
"null"
],
"default": "false"
}
],
"connect.name": "io.debezium.connector.sqlserver.Source"
}
},
{
"name": "op",
"type": "string"
},
{
"name": "ts_ms",
"type": [
"null",
"long"
],
"default": null
}
],
"connect.name": "data.none.bpm.pruitsmdb_nautilus_dbo.fast_frequency_tables.avro.test.Envelope"
}
My Groovy code, which seems to be checking the top-level records only; I'm also not sure whether I'm checking the property connect.name correctly:
reader.forEach{ GenericRecord record ->
record.getSchema().getFields().forEach{ Schema.Field field ->
try {
field.schema().getTypes().forEach{ Schema typeSchema ->
if(typeSchema.getProp("connect.name") == "io.debezium.time.NanoTimestamp"){
record.put(field.name(), Long(record.get(field.name()).toString().substring(0, 13)))
typeSchema.addProp("logicalType", "timestamp-millis")
}
}
} catch(Exception ex){
println("Catching the exception")
}
}
writer.append(record)
}
My question is: how do I traverse all nested records (there are top-level record fields of type "record", with more records inside) in the Avro file? And when traversing their fields, how do I correctly check whether one of their types (which may be part of a union) has the property connect.name == io.debezium.time.NanoTimestamp, and if so, perform a transformation on the field value and add a logicalType property to the field's type?
I think you are looking for recursion here: there should be a function that accepts a Record as a parameter. When you hit a field that is a nested record, you call this function recursively.
Jiri's suggestion worked; a recursive function did the job. Here's the full code:
import org.apache.avro.*
import org.apache.avro.file.*
import org.apache.avro.generic.*
//define input and output files
DataInputStream inputStream = new File('input.avro').newDataInputStream()
DataOutputStream outputStream = new File('output.avro').newDataOutputStream()
DataFileStream<GenericRecord> reader = new DataFileStream<>(inputStream, new GenericDatumReader<GenericRecord>())
DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<GenericRecord>())
def contentSchema = reader.schema //source Avro schema
def records = [] //list will be used to temporary store the processed records
//function which is traversing through all records (including nested ones)
def convertAvroNanosecToMillisec(record){
record.getSchema().getFields().forEach{ Schema.Field field ->
if (record.get(field.name()) instanceof org.apache.avro.generic.GenericData.Record){
convertAvroNanosecToMillisec(record.get(field.name()))
}
if (field.schema().getType().getName() == "union"){
field.schema().getTypes().forEach{ Schema unionTypeSchema ->
if(unionTypeSchema.getProp("connect.name") == "io.debezium.time.NanoTimestamp"){
record.put(field.name(), Long.valueOf(record.get(field.name()).toString().substring(0, 13)))
unionTypeSchema.addProp("logicalType", "timestamp-millis")
}
}
} else {
if(field.schema().getProp("connect.name") == "io.debezium.time.NanoTimestamp"){
record.put(field.name(), Long.valueOf(record.get(field.name()).toString().substring(0, 13)))
field.schema().addProp("logicalType", "timestamp-millis")
}
}
}
return record
}
//reading all records from incoming file and adding to the temporary list
reader.forEach{ GenericRecord contentRecord ->
records.add(convertAvroNanosecToMillisec(contentRecord))
}
//creating a file writer object with adjusted schema
writer.create(contentSchema, outputStream)
//adding records to the output file from the temporary list and closing the writer
records.forEach{ GenericRecord contentRecord ->
writer.append(contentRecord)
}
writer.close()
I have an array of schools like this:
{
"schools": [
{
"name": "S1",
"id": 1
},
{
"name": "S2",
"id": 2
},
{
"name": "S3",
"id": 3
}
]
}
and each school has a schedule. To get the schedules I iterate the schools array in a promise, and when I get the response I end up with an object like this:
{
"schools": [
{
"name": "S1",
"id": 1
},
{
"name": "S2",
"id": 2
},
{
"name": "S3",
"id": 3
}
],
"schedules": [
[],
[
{
"id_schedule": 58,
"hour1": "13:00:00",
"hour2": "20:00:00",
"id_schools_schedule": 2
}
],
[
{
"id_schedule": 59,
"hour1": "06:30:00",
"hour2": "22:30:00",
"id_schools_schedule": 3
}
]
]
}
I want to know: how do I assign the response to each item?
This is my code:
for (var i =0; i < datosRes.schools.length; i++){
array_horarios.push(ObtHorario(datosRes.schools, i))
}
Promise.all(array_horarios).then(response => {
datosRes.horarios = response;
eq.local = data;
}).catch(err => {
return res.json(200,{"datos":datosRes});
})
function ObtHorario(schools, i){
return new Promise(function(resolve, reject){
var id_school = schools[i].id;
Mod_Schedule.obtSchedule(id_school,function(error, data){
if(error || data.error){
errorDB = {"error_log": error, "error_data": data.error};
reject(errorDB)
}else{
resolve(data)
}
})
})
}
What am I doing wrong? I get the response, but I want to add the schedules to each item of schools.
Thanks in advance.
First things first:
You can send ONE response to ONE request, so sending multiple responses is not possible.
Here's what you can do: get the array of schools together with their schedules.
If you are using MongoDB, you can use an aggregate query:
db.schools.aggregate([
{
$match: {} // Your condition to match schools here
},
{
$lookup: {
from:"schedules",
localField: "id",
foreignField: "id_schools_schedule",
as: "schedulesData"
}
},
]);
Here you will get data something like this:
[
{
"name": "S1",
"id": 1,
"schedulesData": []
},
{
"name": "S2",
"id": 2,
"schedulesData": [{
"id_schedule": 58,
"hour1": "13:00:00",
"hour2": "20:00:00",
"id_schools_schedule": 2
}]
},
{
"name": "S3",
"id": 3,
"schedulesData": [
{
"id_schedule": 59,
"hour1": "06:30:00",
"hour2": "22:30:00",
"id_schools_schedule": 3
}
]
}
]
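As for attaching each schedule to its school in the original promise-based code: Promise.all preserves input order, so response[i] corresponds to schools[i]. A minimal sketch of the then handler, keeping the question's variable names:
Promise.all(array_horarios).then(response => {
  datosRes.schools.forEach((school, i) => {
    school.schedules = response[i]; // attach each school's schedule array
  });
  return res.json(200, { "datos": datosRes });
}).catch(err => {
  return res.json(200, { "datos": datosRes });
});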