For a project I'm working on, we get data delivered in an Excel sheet, which I convert to CSV through Excel.
These files contain measurements with different categories but the same ID.
Example:
readingId; category; result;
1 ; cat 1 ; A
1 ; cat 2 ; B
2 ; cat 1 ; C
I then converted the CSV to JSON and wrote a function to output the data as separate objects:
const fs = require('fs');
const path = require('path');
exports.convertJson = (file) => {
const rawData = fs.readFileSync(file);
const jsonData = JSON.parse(rawData);
const rawOutput = [];
for (const output of jsonData) {
rawOutput.push({
locationId: output.Meetlocatienummer,
date: output.Aanmaakdatum_score,
subCategorie: output.Bestekspost,
score: output.Score,
scoreNumber: output.Cijfer,
categories: output.Categorie,
coordinates: output.Coordinaten,
neighbourhoodIndex: output.BUURTCODE,
quality: output.KWALITEIT,
district: output.STADSDEEL,
distrcitIndex: output.STADSDLCD,
street: output.STRAATNAAM,
neighbourhood: output.WIJK,
cluster: output.Cluster,
});
}
return rawOutput;
};
This outputs the following results:
[
{
locationId: 10215,
date: undefined,
subCategorie: 'Meubilair-afvalbak-vullingsgraad',
score: '',
scoreNumber: 8,
categories: 'Meubilair',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 10215,
date: undefined,
subCategorie: 'Meubilair-container-bijgeplaatst afval rondom container',
score: 'A+',
scoreNumber: 10,
categories: 'Meubilair',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 10215,
date: undefined,
subCategorie: 'Riolering-kolk-belemmering inlaat',
score: 'A+',
scoreNumber: 10,
categories: 'Riolering',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 10215,
date: undefined,
subCategorie: 'Verharding-open verharding-elementenverharding-onkruid',
score: 'A',
scoreNumber: 8,
categories: 'Verharding',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 10215,
date: undefined,
subCategorie: 'Verharding-natuurlijk afval',
score: 'A',
scoreNumber: 8,
categories: 'Verharding',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 10215,
date: undefined,
subCategorie: 'Verharding-uitwerpselen',
score: 'A+',
scoreNumber: 10,
categories: 'Verharding',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 10215,
date: undefined,
subCategorie: 'Verharding-zwerfafval grof',
score: 'A',
scoreNumber: 8,
categories: 'Verharding',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 10215,
date: undefined,
subCategorie: 'Verharding-veegvuil goten',
score: 'A',
scoreNumber: 8,
categories: 'Verharding',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 10215,
date: undefined,
subCategorie: 'Verharding-onkruid rondom obstakels',
score: 'B',
scoreNumber: 6,
categories: 'Verharding',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 10215,
date: undefined,
subCategorie: 'Verharding-grof vuil',
score: 'A+',
scoreNumber: 10,
categories: 'Verharding',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 10215,
date: undefined,
subCategorie: 'Verharding-zwerfafval fijn',
score: 'A',
scoreNumber: 8,
categories: 'Verharding',
coordinates: '52.072843, 4.287723',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Xaverystraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
},
{
locationId: 7466,
date: undefined,
subCategorie: 'Meubilair-afvalbak-vullingsgraad',
score: 'B',
scoreNumber: 6,
categories: 'Meubilair',
coordinates: '52.072647, 4.288656',
neighbourhoodIndex: 10,
quality: 'residentiekwaliteit',
district: 'Segbroek',
distrcitIndex: 3,
street: 'Jan Krosstraat',
neighbourhood: 'Regentessekwartier',
cluster: 'WRF'
}
]
In the end I would like to write this information to MongoDB, and I had the following schema in mind to reduce the large amount of duplicated data:
{
locationId: output.Meetlocatienummer,
date: output.Aanmaakdatum_score,
subCategories: [
{
subCategory: output.Bestekspost,
score: output.Score,
scoreNumber: output.Cijfer,
},
],
categories: [{ category: output.Categorie }],
coordinates: output.Coordinaten,
neighbourhoodIndex: output.BUURTCODE,
quality: output.KWALITEIT,
district: output.STADSDEEL,
distrcitIndex: output.STADSDLCD,
street: output.STRAATNAAM,
neighbourhood: output.WIJK,
cluster: output.Cluster,
}
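For reference, here is a minimal sketch of what that shape could look like as a Mongoose schema - the model name and field types are my own assumptions, derived from the sample output above:
const mongoose = require('mongoose');
const locationSchema = new mongoose.Schema({
  locationId: Number,
  date: Date,
  subCategories: [
    {
      subCategory: String,
      score: String,
      scoreNumber: Number,
    },
  ],
  categories: [{ category: String }],
  coordinates: String,
  neighbourhoodIndex: Number,
  quality: String,
  district: String,
  distrcitIndex: Number,
  street: String,
  neighbourhood: String,
  cluster: String,
});
// 'Location' is a hypothetical model name
module.exports = mongoose.model('Location', locationSchema);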
This project is a hobby project while learning NodeJS. The actual data are readings of how polluted with litter the streets of the city I work for are. It's tedious to read through thousands of lines in Excel to find the hotspots of the city, and just reading some scores and graphs is a bit boring, so I thought it would be nice to import the data into Leaflet through NodeJS.
The actual backend will contain more functionality as I learn Node and maybe, in the future, React. That's why I try to write it myself rather than importing the data into Google Maps, which works OK but lacks detailed category filtering.
I hope my idea is a bit clear and someone can point me in the right direction.
Edit 1
I got a bit further with lodash.
return _(rawOutput)
.groupBy('locationId')
.map((obj) => _.assignWith({}, ...obj, (val1, val2) => val1 || val2))
.value();
I found the above snippet, and now I only get one output per unique locationId, but I'm stuck on constructing the final output with the subcategories.
I was also playing around a bit with csv-parser to go directly from the CSV to a proper JSON output, which would be ideal because then I don't have to convert it manually.
I'll get back to it tomorrow :-)
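In case it helps, here is a minimal sketch of how that lodash approach could be extended to also build the subCategories array - this assumes the flat rawOutput shape shown earlier and is untested against the real data:
const _ = require('lodash');
const condense = (rawOutput) =>
  _(rawOutput)
    .groupBy('locationId')
    .map((records) => ({
      // copy the shared fields from the first record in the group
      ..._.omit(records[0], ['subCategorie', 'score', 'scoreNumber']),
      // collect the per-reading fields into an array
      subCategories: records.map((r) => ({
        subCategory: r.subCategorie,
        score: r.score,
        scoreNumber: r.scoreNumber,
      })),
      // de-duplicate the category names
      categories: _.uniq(records.map((r) => r.categories)),
    }))
    .value();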
If you take that JSON and mongoimport it into MongoDB, you can use the following pipeline to transform it -- although honestly, a little Python script on the outside could construct the structure just as easily, and then you would still import the condensed data.
db.foo.aggregate([
{$group: {_id: "$locationId",
subCategories: {$push: {subCategory: "$subCategorie", score:"$score", scoreNumber: "$scoreNumber"}},
categories: {$push: "$categories"},
// Just take the first occurrence of each of these since they are claimed
// to be the same.
date: {$first: "$date"},
neighbourhoodIndex: {$first: "$neighbourhoodIndex"},
quality: {$first: "$quality"},
district: {$first: "$district"},
distrcitIndex: {$first: "$distrcitIndex"},
street: {$first: "$street"},
neighbourhood: {$first: "$neighbourhood"},
cluster: {$first: "$cluster"},
coordinates: {$first: "$coordinates"}
}}
// Now that we have a single doc with locationId x and a coordinate, convert
// the string lat,long "52.072843, 4.287723" into a GeoJSON Point which is
// a long,lat array of doubles. We convert by using $addFields to
// overwrite the original coordinates field:
,{$addFields: {"coordinates": {$let: {
vars: {pt: {$split:["$coordinates",","]}},
in: {"type": "Point", "coordinates": [
{$toDouble: {$trim: {input:{$arrayElemAt:["$$pt",1]}}}},
{$toDouble: {$trim: {input:{$arrayElemAt:["$$pt",0]}}}}
]
}
}}
}}
// Put the whole transformed thing into a new collection named "foo2":
,{$out: "foo2"}
]);
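For the sample data above, the condensed document in foo2 would look roughly like this (abridged):
{
  _id: 10215,
  subCategories: [
    { subCategory: 'Meubilair-afvalbak-vullingsgraad', score: '', scoreNumber: 8 },
    // ...one entry per reading at this location
  ],
  categories: [ 'Meubilair', 'Meubilair', 'Riolering', /* ... */ ],
  coordinates: { type: 'Point', coordinates: [ 4.287723, 52.072843 ] },
  neighbourhoodIndex: 10,
  quality: 'residentiekwaliteit',
  district: 'Segbroek',
  distrcitIndex: 3,
  street: 'Xaverystraat',
  neighbourhood: 'Regentessekwartier',
  cluster: 'WRF'
}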
Alright, in the end the code from Buzz Moschetti was exactly what I needed to get rid of the duplicated data. I hadn't heard about aggregates yet, so thanks for that.
I ended up using the CSV Parse library to convert the CSV to JSON, dropping that into the database and then querying out the duplicates with the code from Buzz.
I haven't written the code yet to write the cleaned-up data back to the database, but that shouldn't be too hard, so I'll just post what I have now as a reference for others.
First of all, I wrote a CSV helper for the conversion.
const fs = require('fs');
const { parse } = require('csv');
const moment = require('moment');
exports.processFile = async (filePath) => {
const records = [];
const input = fs.createReadStream(filePath);
const parser = parse({
// CSV options
bom: true,
delimiter: ';',
cast: (value, context) => {
if (context.header) return value;
// Convert the date column (moment format tokens are case-sensitive:
// DD = day, MM = month, YYYY = year, H = hour)
if (context.column === 'date') {
return moment(value, 'DD-MM-YYYY H:mm').toDate();
}
// Convert coordinates to GeoJSON: a Point is [longitude, latitude] as
// numbers, while the CSV holds "latitude, longitude", so swap and cast
if (context.column === 'coordinates') {
const [lat, lng] = value.split(',').map((c) => parseFloat(c));
return {
type: 'Point',
coordinate: [lng, lat],
};
}
// Output rest of the fields
return String(value);
},
columns: [
'locationId', // meetlocatienummer
'date', // aanmaakdatum score
'subCategory', //bestekpost
'category', // categorie
'score', // score
'coordinates', //coordinaten
undefined, // buurt
undefined, // buurtcode
undefined, // gebied
undefined, // id
'quality', //kwaliteit
undefined, // stadsdeel
'districtIndex', //stadsdlcd
'street', //straatnaam
undefined, //vaknr
'neighbourhood', //wijk
undefined, //wijkcode
'cluster', //cluster
'scoreNumber', //cijfer
undefined, // week
undefined, // maand
undefined, // jr-mnd
undefined, // jaar
],
trim: true,
from_line: 2,
skip_records_with_empty_values: true,
});
input.pipe(parser).on('error', (err) => {
input.close();
});
for await (const record of parser) {
// Skip all lines without complete coordinates (no longitude parsed)
if (record.coordinates.coordinate[0] === undefined) {
continue;
}
// Push filename to the record object
record.fileName = filePath;
// Push records for final output
records.push(record);
//console.log('Records converted');
}
return records;
};
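A quick usage sketch of that helper (the require path and file name are illustrative):
const convert = require('./helpers/csv-convert');
(async () => {
  const records = await convert.processFile('./uploads/readings.csv');
  console.log(`Parsed ${records.length} records`);
})();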
I'm uploading the file with the Multer library. Here's the POST action in my import-data controller. After the file has been uploaded, the conversion starts. If an error occurs, the file gets deleted again and no records are written to the database. If the conversion succeeds, the records are written to importdatas in MongoDB. These are still the 'dirty' records, so lots of duplicates, but without the useless data, which gets filtered out by the CSV Parse helper (basically all the data without coordinates).
exports.postImportData = (req, res, next) => {
const uploadedCSV = req.file;
//console.log(uploadedCSV);
// Load imported CSV files from DB to be able to delete them
ImportedCSVFile.find()
.sort({ date: -1 })
.then((result) => {
// Check if there are already files imported
let hasFiles = null;
if (result.length > 0) {
hasFiles = 1;
}
// If there are any errors with the file being uploaded
if (req.fileValidationError) {
return res.render('admin/import-data/import-data', {
pageTitle: 'Importeer data',
path: '/admin/import-data',
files: result,
activeAdmin: true,
errorMessage: req.fileValidationError,
validationErrors: [],
hasFiles,
});
}
// If there's no file uploaded
if (!uploadedCSV) {
return res.render('admin/import-data/import-data', {
pageTitle: 'Importeer data',
path: '/admin/import-data',
files: result,
activeAdmin: true,
errorMessage: 'Geen bestand geselecteerd',
validationErrors: [],
hasFiles,
});
}
(async () => {
const csvFile = await fileHelper.hasFile(uploadedCSV);
try {
const records = await convert.processFile(csvFile);
// Write all CSV data to importdatas in MongoDB
await ImportData.insertMany(records)
.then((result) => {
console.log('Data imported');
// Push info about the uploaded file into 'importedcsvfiles'
const importedCSVFile = new ImportedCSVFile({
filePath: csvFile, // reuse the path resolved above instead of calling hasFile again
originalName: uploadedCSV.originalname,
});
return (
importedCSVFile
.save() // Save all CSV data into 'importedcsvfiles' in MongoDB
.then((result) => {
res.redirect('/admin/import-data');
})
// Catch save filepath error
.catch((err) => {
console.log('save failed');
const error = new Error(err);
error.httpStatusCode = 500;
return next(error);
})
);
})
// Catch insert CSV data into DB error
.catch((err) => {
console.log('insert many failed');
const error = new Error(err);
error.httpStatusCode = 500;
return next(error);
});
} catch (err) {
// console.log(error);
fileHelper.removeFile(csvFile);
return res.render('admin/import-data/import-data', {
pageTitle: 'Importeer data',
path: '/admin/import-data',
files: result,
activeAdmin: true,
errorMessage:
'Het geselecteerde bestand heeft niet de juiste indeling. Neem contact op met de beheerder.',
validationErrors: [],
hasFiles,
});
}
})();
});
};
I also wrote a delete option which removes the CSV file and all the database records that are linked to that file:
exports.postDeleteData = (req, res, next) => {
const dataId = req.body.dataId;
ImportedCSVFile.findById(dataId)
.then((result) => {
// console.log('FilePath:');
// console.log(result.filePath);
const filePath = result.filePath;
const deleteData = async () => {
await ImportData.deleteMany({ filePath: filePath })
.then((result) => {})
.catch((err) => {
const error = new Error(err);
error.httpStatusCode = 500;
return next(error);
});
await ImportedCSVFile.findByIdAndDelete(dataId)
.then((result) => {
console.log('Data deleted');
fileHelper.removeFile(filePath);
res.redirect('/admin/import-data');
})
.catch((err) => {
console.log('here');
const error = new Error(err);
error.httpStatusCode = 500;
return next(error);
});
};
return deleteData();
})
.catch((err) => {
const error = new Error(err);
error.httpStatusCode = 500;
return next(error);
});
};
And for now, the aggregate code from Buzz to clean up the data and drop it into Leaflet, so I get one point with all the different categories:
const { ImportData, OutputData } = require('../models/importData.model');
// Main controller for the homepage
exports.getMainController = (req, res, next) => {
ImportData.aggregate([
{
$group: {
_id: '$coordinates',
subCategories: {
$push: {
subCategory: '$subCategory',
score: '$score',
scoreNumber: '$scoreNumber',
},
},
categories: { $push: '$category' },
// Just take the first occurrence of each of these since they are claimed
// to be the same.
date: { $first: '$date' },
quality: { $first: '$quality' },
districtIndex: { $first: '$districtIndex' },
street: { $first: '$street' },
neighbourhood: { $first: '$neighbourhood' },
cluster: { $first: '$cluster' },
},
},
]).exec((err, locations) => {
if (err) {
return next(err);
}
//console.log(locations);
res.render('index.ejs', {
pageTitle: 'Kaart',
path: '/kaart',
activeAdmin: true,
data: locations,
errorMessage: null,
});
});
};
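For completeness, a rough sketch of how the rendered data could end up on the Leaflet map inside index.ejs - this is not from the original project; it assumes Leaflet is loaded on the page and that _id is the GeoJSON-style coordinates object produced by the CSV helper above:
// Client-side script in index.ejs; `data` comes from res.render above
const locations = <%- JSON.stringify(data) %>;
const map = L.map('map').setView([52.072843, 4.287723], 14);
L.tileLayer('https://tile.openstreetmap.org/{z}/{x}/{y}.png').addTo(map);
locations.forEach((loc) => {
  const [lng, lat] = loc._id.coordinate; // stored as [longitude, latitude]
  L.marker([lat, lng])
    .addTo(map)
    .bindPopup(loc.subCategories.map((s) => `${s.subCategory}: ${s.score}`).join('<br>'));
});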
For now I just query this data like I said in the beginning. As I'm still learning a lot about JavaScript and Node, I have now started to build a frontend with React. Once I get going there, I will convert all this code to an API and finish this part of the project.
Related
In my server I use both MongoDB and Neo4j, and when I update a Mongo product record I also update the Neo4j product node and its relationship to a category node. I use a Mongo transaction, so if anything fails, consistency is maintained across the two DBs.
In my Mongo update method I can either update the rating or the whole record, so I set up tests to do both.
I pass the JS object returned from the updated Mongo record to the Neo4j update method. In the first test I modify the category parameter, so in Neo4j it first deletes the relationship to the old category, then updates the product node and creates a relationship to the new category.
The first test passes (update category), but when the second test runs (updating rating) it fails, as the Neo4j method throws a Cannot read properties of undefined (reading 'get') error.
I actually modified the Cypher, as my first draft (the commented-out one) wasn't updating the relationship to the category node, and with it both tests passed.
Thinking that the problem might be that in the second test there is no difference in category, I duplicated the first test, and indeed the second time it doesn't pass either.
Can you please spot what I am doing wrong?
Many thanks.
Neo4j
exports.updateProductById = async (product) => {
console.log('Neo4j updateProductById product is : ', product);
const driver = neo4j.getDriver();
const session = driver.session();
const json = JSON.stringify(product);
const res = await session.executeWrite(tx => tx.run(
// `
// with apoc.convert.fromJsonMap($json) as json
// match (p:Product {id: json.id}) set p = json
// with p, json
// match (s:Shop)-[r:SELLS]->(p)-[r2:IN_CATEGORY]->(c:Category)
// set r.productId = json.id, c.name = json.category
// RETURN p as product,s as shop, r as relSells, r2 as relCategory
// `
`
with apoc.convert.fromJsonMap($json) as json
match (p:Product {id: json.id})
match (s:Shop)-[r:SELLS]->(p)-[rOld:IN_CATEGORY]->(c:Category)
where c.name = p.category
set p = json
delete rOld
merge (p)-[rNew:IN_CATEGORY]->(cat: category {name : json.category})
set rNew.productId = json.id
RETURN p as product, s as shop, r as relSells, rNew as relCategory, rOld as relOld, c as catOld, cat as category
`
,{json: json }
)).catch((error) => {
console.log('Neo4j updateProductById error: ', error);
});
await session.close();
console.log(`Neo4j updateProductById modified ${res.records.length} products`);
const updatedProduct = res.records[0].get('product');
const shop = res.records[0].get('shop');
const relSells = res.records[0].get('relSells');
const relCategory = res.records[0].get('relCategory');
const category = res.records[0].get('category');
const catOld = res.records[0].get('catOld');
const relOld = res.records[0].get('relOld');
console.log('Neo4j updateProductById modified product is: ', updatedProduct);
console.log('Neo4j updateProductById shop is: ', shop);
console.log('Neo4j updateProductById modified relSells is: ', relSells);
console.log('Neo4j updateProductById modified relCategory is: ', relCategory);
console.log('Neo4j updateProductById modified category is: ', category);
console.log('Neo4j updateProductById old category is: ', catOld);
console.log('Neo4j updateProductById old relCategory is: ', relOld);
return updatedProduct;
}
Whole record update console logs
Mongoose updateProductById: {
_id: new ObjectId("63417b1e6073ddba42d0ddf3"),
createdOnDate: 1638894572905,
name: 'someNewName',
brand: 'someCategory',
price: 12,
description: 'description',
category: 'Bikes',
city: 'Bologna',
region: 'Emilia-Romagna',
country: 'Italy',
vendor: 'fixit',
vendorId: 'test2',
barcode: 'some',
imageUrl: 'https://firebasestorage.googleapis.com/v0/b/fix-it-b4b00.appspot.com/o/Products%2F61af8bec02edbe24ce034963?alt=media&token=a891dc05-407e-43d2-ab2b-0f49226249a9',
fullImages: [],
thumbNails: [],
minimumStock: 10,
availableQuantity: 10,
soldQuantity: 0,
isPromotion: false,
totalRating: 0,
ratings: 0,
createdAt: 2022-10-08T13:29:02.880Z,
updatedAt: 2022-10-08T13:29:02.961Z,
__v: 0,
averageRating: 0,
id: '63417b1e6073ddba42d0ddf3'
}
Neo4j updateProductById product is : {
_id: new ObjectId("63417b1e6073ddba42d0ddf3"),
createdOnDate: 1638894572905,
name: 'someNewName',
brand: 'someCategory',
price: 12,
description: 'description',
category: 'Bikes',
city: 'Bologna',
region: 'Emilia-Romagna',
country: 'Italy',
vendor: 'fixit',
vendorId: 'test2',
barcode: 'some',
imageUrl: 'https://firebasestorage.googleapis.com/v0/b/fix-it-b4b00.appspot.com/o/Products%2F61af8bec02edbe24ce034963?alt=media&token=a891dc05-407e-43d2-ab2b-0f49226249a9',
fullImages: [],
thumbNails: [],
minimumStock: 10,
availableQuantity: 10,
soldQuantity: 0,
isPromotion: false,
totalRating: 0,
ratings: 0,
createdAt: 2022-10-08T13:29:02.880Z,
updatedAt: 2022-10-08T13:29:02.961Z,
__v: 0,
averageRating: 0,
id: '63417b1e6073ddba42d0ddf3'
}
Neo4j updateProductById modified 1 products
Neo4j updateProductById modified product is: Node {
identity: Integer { low: 2, high: 0 },
labels: [ 'Product' ],
properties: {
country: 'Italy',
isPromotion: false,
city: 'Bologna',
vendorId: 'test2',
description: 'description',
fullImages: [],
soldQuantity: Integer { low: 0, high: 0 },
createdAt: '2022-10-08T13:29:02.880Z',
price: Integer { low: 12, high: 0 },
ratings: Integer { low: 0, high: 0 },
vendor: 'fixit',
averageRating: Integer { low: 0, high: 0 },
__v: Integer { low: 0, high: 0 },
imageUrl: 'https://firebasestorage.googleapis.com/v0/b/fix-it-b4b00.appspot.com/o/Products%2F61af8bec02edbe24ce034963?alt=media&token=a891dc05-407e-43d2-ab2b-0f49226249a9',
minimumStock: Integer { low: 10, high: 0 },
id: '63417b1e6073ddba42d0ddf3',
brand: 'someCategory',
barcode: 'some',
updatedAt: '2022-10-08T13:29:02.961Z',
thumbNails: [],
availableQuantity: Integer { low: 10, high: 0 },
totalRating: Integer { low: 0, high: 0 },
createdOnDate: Integer { low: -1782934167, high: 381 },
name: 'someNewName',
_id: '63417b1e6073ddba42d0ddf3',
category: 'Bikes',
region: 'Emilia-Romagna'
},
elementId: '2'
}
Neo4j updateProductById shop is: Node {
identity: Integer { low: 1, high: 0 },
labels: [ 'Shop' ],
properties: { id: 'test2' },
elementId: '1'
}
Neo4j updateProductById modified relSells is: Relationship {
identity: Integer { low: 5, high: 0 },
start: Integer { low: 1, high: 0 },
end: Integer { low: 2, high: 0 },
type: 'SELLS',
properties: { productId: '63417b1e6073ddba42d0ddf3' },
elementId: '5',
startNodeElementId: '1',
endNodeElementId: '2'
}
Neo4j updateProductById modified relCategory is: Relationship {
identity: Integer { low: 1, high: 0 },
start: Integer { low: 2, high: 0 },
end: Integer { low: 8, high: 0 },
type: 'IN_CATEGORY',
properties: { productId: '63417b1e6073ddba42d0ddf3' },
elementId: '1',
startNodeElementId: '2',
endNodeElementId: '8'
}
Neo4j updateProductById modified category is: Node {
identity: Integer { low: 8, high: 0 },
labels: [ 'category' ],
properties: { name: 'Bikes' },
elementId: '8'
}
Neo4j updateProductById old category is: Node {
identity: Integer { low: 4, high: 0 },
labels: [ 'Category' ],
properties: { name: 'Safety and locks' },
elementId: '4'
}
Neo4j updateProductById old relCategory is: Relationship {
identity: Integer { low: 0, high: 0 },
start: Integer { low: -1, high: -1 },
end: Integer { low: -1, high: -1 },
type: '',
properties: {},
elementId: '0',
startNodeElementId: '-1',
endNodeElementId: '-1'
}
Mongoose updateProductById Neo4j updated product
transacion wasn't aborted
committing session
ending session
Update rating console logs
Mongoose updateProductById rating: {
_id: new ObjectId("63417b1e6073ddba42d0ddf3"),
createdOnDate: 1638894572905,
name: 'someNewName',
brand: 'someCategory',
price: 12,
description: 'description',
category: 'Bikes',
city: 'Bologna',
region: 'Emilia-Romagna',
country: 'Italy',
vendor: 'fixit',
vendorId: 'test2',
barcode: 'some',
imageUrl: 'https://firebasestorage.googleapis.com/v0/b/fix-it-b4b00.appspot.com/o/Products%2F61af8bec02edbe24ce034963?alt=media&token=a891dc05-407e-43d2-ab2b-0f49226249a9',
fullImages: [],
thumbNails: [],
minimumStock: 10,
availableQuantity: 10,
soldQuantity: 0,
isPromotion: false,
totalRating: 5,
ratings: 1,
createdAt: 2022-10-08T13:29:02.880Z,
updatedAt: 2022-10-08T13:29:03.096Z,
__v: 0,
averageRating: 5,
id: '63417b1e6073ddba42d0ddf3'
}
Neo4j updateProductById product is : {
_id: new ObjectId("63417b1e6073ddba42d0ddf3"),
createdOnDate: 1638894572905,
name: 'someNewName',
brand: 'someCategory',
price: 12,
description: 'description',
category: 'Bikes',
city: 'Bologna',
region: 'Emilia-Romagna',
country: 'Italy',
vendor: 'fixit',
vendorId: 'test2',
barcode: 'some',
imageUrl: 'https://firebasestorage.googleapis.com/v0/b/fix-it-b4b00.appspot.com/o/Products%2F61af8bec02edbe24ce034963?alt=media&token=a891dc05-407e-43d2-ab2b-0f49226249a9',
fullImages: [],
thumbNails: [],
minimumStock: 10,
availableQuantity: 10,
soldQuantity: 0,
isPromotion: false,
totalRating: 5,
ratings: 1,
createdAt: 2022-10-08T13:29:02.880Z,
updatedAt: 2022-10-08T13:29:03.096Z,
__v: 0,
averageRating: 5,
id: '63417b1e6073ddba42d0ddf3'
}
Neo4j updateProductById modified 0 products
Mongoose updateProductById Neo4j error: TypeError: Cannot read properties of undefined (reading 'get')
at exports.updateProductById (/Users/vincenzocalia/server-node/api/src/neo4j/product_neo4j.js:132:43)
at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
at async exports.updateProductById (/Users/vincenzocalia/server-node/api/src/controllers/product.controller.js:840:9)
Transaction error is: TypeError: Cannot read properties of undefined (reading 'get')
at exports.updateProductById (/Users/vincenzocalia/server-node/api/src/neo4j/product_neo4j.js:132:43)
at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
at async exports.updateProductById (/Users/vincenzocalia/server-node/api/src/controllers/product.controller.js:840:9)
transacion was aborted
ending session
Found the problem: I was using MERGE on the entire pattern, where (cat:Category) might need to be created while (p) already existed:
merge (p)-[rNew:IN_CATEGORY]->(cat: Category {name : json.category})
As mentioned in the docs:
When using MERGE on full patterns, the behavior is that either the whole pattern matches, or the whole pattern is created. MERGE will not partially use existing patterns — it is all or nothing. If partial matches are needed, this can be accomplished by splitting a pattern up into multiple MERGE clauses.
So after splitting it as follows it worked as intended.
`
with apoc.convert.fromJsonMap($json) as json
match (p:Product {id: json.id})
match (s:Shop)-[r:SELLS]->(p)-[rOld:IN_CATEGORY]->(c:Category)
where c.name = p.category
set p = json
delete rOld
merge (cat: Category {name : json.category})
merge (p)-[rNew:IN_CATEGORY]->(cat)
set rNew.productId = json.id
RETURN p as product, s as shop, r as relSells, rNew as relCategory, rOld as relOld, c as catOld, cat as category
`
Hope this will help others starting with Neo4j.
Cheers
I have Node/Express code that generates an Excel file and saves it to a temporary folder. Ultimately I intend to move this code to Google Cloud Functions once it works. The problem is, even though the Excel file does get created with data (I can see it in the temp folder), res.download(filename) via HTTP downloads a file with zero bytes. What should I be doing to download the file with data?
Sample code follows below. Please ignore the significance of the sample data, the commented-out code, and the console.logs.
// import * as functions from 'firebase-functions';
import * as Excel from 'exceljs';
import * as express from 'express';
import * as os from 'os';
import * as path from 'path';
const app = express();
const tempFilePath = path.join(os.tmpdir(), "excel.xlsx");
interface Country {
id: number;
country: string;
capital: string;
population: number;
}
interface Heading {
header: string;
key: string;
width: number;
}
interface Align { col: string; align: string; }
function addBottomBorder(worksheet: any, cols: number, rowNo: number) {
for (let i = 0; i < cols; i++) {
const col = String.fromCharCode(65 + i) + rowNo;
worksheet.getCell(col).border = {
bottom: {style:'thin'},
};
}
}
function hiliteRow(worksheet: any, row: number, color: string, cols: number) {
for (let i = 0; i < cols; i++) {
const col = String.fromCharCode(65 + i) + row;
worksheet.getCell(col).fill = {
type: 'pattern',
pattern: 'solid',
fgColor: { argb: color }
};
}
}
function createHeadings(worksheet: any, headings: Heading[]) {
worksheet.columns = headings;
hiliteRow(worksheet, 1, '808B96', headings.length);
addBottomBorder(worksheet, headings.length, 1);
// add filters
const lastColumn = String.fromCharCode(64 + headings.length);
worksheet.autoFilter = { from: 'A1', to: `${lastColumn}1`, };
}
async function generate(data: Country[], headings: Heading[], sheetname: string, alignments: Align[]) {
const workbook = new Excel.Workbook();
const worksheet: any = workbook.addWorksheet(sheetname);
createHeadings(worksheet, headings);
for (const alignObj of alignments) {
worksheet.getColumn(alignObj.col).alignment = { vertical: 'middle', horizontal: alignObj.align };
}
data.forEach((r: any , i: number) => {
worksheet.addRow(r);
if (i % 2 !== 0) {
hiliteRow(worksheet, i + 2, 'D6DBDF', headings.length)
}
});
addBottomBorder(worksheet, headings.length, data.length + 1);
console.log(tempFilePath);
workbook.xlsx.writeFile(tempFilePath).then( result => {
console.log('...ready', result)
return tempFilePath;
})
.catch( err => {
return err;
})
}
app.get('/', async (req, res) => {
const alignments = [
{ col: 'A', align: 'left'},
{ col: 'D', align: 'right'},
];
const columns = [
{ header: 'ID', key: 'id', width: 4 },
{ header: 'Country', key: 'country', width: 10 },
{ header: 'Capital', key: 'capital', width: 22 },
{ header: 'Population', key: 'population', width: 9 }
];
const data = [{
id: 1,
country: 'USA',
capital: 'Washington DC',
population: 325
}, {
id: 2,
country: 'UK',
capital: 'London',
population: 66
}, {
id: 3,
country: 'Italy',
capital: 'Rome',
population: 60.59
}, {
id: 4,
country: 'China',
capital: 'Beijing',
population: 1386
}, {
id: 5,
country: 'Canada',
capital: 'Ottawa',
population: 36.7
}, {
id: 6,
country: 'UK',
capital: 'London',
population: 66
}, {
id: 7,
country: 'Italy',
capital: 'Rome',
population: 60.59
}, {
id: 8,
country: 'China',
capital: 'Beijing',
population: 1386
}, {
id: 9,
country: 'Canada',
capital: 'Ottawa',
population: 36.7
}
];
const sheetname = 'countries';
try {
generate(data, columns, sheetname, alignments).then( notImportant => {
console.log(notImportant);
// !!!!!!!!! - a file with zero bytes is downloaded
// !!!!!!!!! - same result with res.sendFile(...)
res.status(200).download(tempFilePath);
})
} catch (error) {
res.status(500).send(error);
}
})
app.listen(3001, () => {
console.log('...listening')
})
// exports.getExcel = functions.https.onRequest(app);
The solution for me was to make two endpoints: one to generate the file, and a second to download it.
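A minimal sketch of that split, assuming the same exceljs setup as above (the endpoint paths and sheet contents are illustrative). The key point is that the generate endpoint awaits workbook.xlsx.writeFile before responding, so the file is fully flushed to disk before anything tries to download it:
import * as Excel from 'exceljs';
import * as express from 'express';
import * as os from 'os';
import * as path from 'path';
const app = express();
const tempFilePath = path.join(os.tmpdir(), 'excel.xlsx');
// Endpoint 1: build the workbook and wait until it is fully written to disk
app.get('/generate', async (req, res) => {
  const workbook = new Excel.Workbook();
  const worksheet = workbook.addWorksheet('countries');
  worksheet.addRow(['generated']);
  await workbook.xlsx.writeFile(tempFilePath); // note the await
  res.status(200).send('file ready');
});
// Endpoint 2: serve the already-written file
app.get('/download', (req, res) => {
  res.download(tempFilePath);
});
app.listen(3001);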
I have the following setup.
The me object is built from incoming request data, but since it's not used in this example I'm skipping that info.
I've added the findOneAndUpdate I'm trying to run. Why is an insert being done on findOneAndUpdate instead of an update?
app.js
mongoose.connect("mongodb://localhost/time_report");
var reportSchema = new mongoose.Schema({
name: String,
month: String,
date: String,
hours: Number,
dayNumber: Number,
day: String
});
var time = mongoose.model("time", reportSchema);
Query:
app.get("/test", function(req, res) {
var me = new time({
name: info[i + 3],
month: info[i + 1],
date: info[i + 2],
hours: info[i],
dayNumber: info[i + 4],
day: info[i + 5]
});
time.findOneAndUpdate(query, update, options, function(err, data) {
if (err) {
console.log(err);
}
console.log("data: " + data);
found = true;
});
}
..... More stuff and listener
In MongoDB:
{ "_id" : ObjectId("598d71b20f47692f48eb7393"), "name" : "jocke", "month" : "2017-08", "date" : "2017-08-01", "hours" : 8, "dayNumber" : 2, "day" : "Tuesday", "__v" : 0 }
Mongoose debug:
Mongoose: times.findAndModify({ date: '2017-08-31' }, [], { '$set': { hours: null } }, { new: false, upsert: false, fields: {} })
Mongoose: times.insert({ name: '', month: '2017-08', date: '2017-08-31', hours: null, dayNumber: 4, day: 'Thursday', _id: ObjectId("598d7ef69386343b3035192f"), __v: 0 })
If you have the Mongoose model exported, just use the code below:
var time = require('./models/time');
app.get('/test', function(req, res){
// change the search object as per your need like month=2018/07
time.findOne(req.body, function(err, doc){
if(err) return handleError(err);
res.send(doc)
})
})
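For reference, a sketch of the findOneAndUpdate call with its options spelled out - the query and update values are adapted from the debug output above, and hours: 8 is illustrative. With upsert: false, this call by itself will never insert:
time.findOneAndUpdate(
  { date: '2017-08-31' },         // query
  { $set: { hours: 8 } },         // update
  { new: true, upsert: false },   // return the updated doc, never insert
  function (err, doc) {
    if (err) return console.log(err);
    console.log('data: ' + doc);
  }
);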
I'm getting data from MongoDB via Mongoose. Then I try to pass the data to Jade via res.render, but I always get the error TypeError: Cannot read property 'name' of null, even though the property exists and is valid.
The code:
router.get('/item/:sku', (req, res, next) => {
Item.getItem(req.params.sku, (err, data) => {
console.log(data)
if(err)
next(err)
res.render('item', {
title: data.name,
data: data
})
})
})
Output:
{ sale: false,
createdAt: Fri Apr 29 2016 01:11:48 GMT+0700 (KRAT),
ordered: 0,
views: 0,
tags: [ 'tag1', 'tag2', 'tag3', 'tag4' ],
images: [ 'squirrel.jpg' ],
__v: 0,
backImage: '',
category: 57223f8ac80eb66928eae23e,
sku: 'SCH-002',
salePrice: 0,
price: 250,
description: 'test description',
name: 'Squirrel',
_id: 57225264b83ae185f3b2f4dc }
GET /item/SCH-002 200 1398.341 ms - 19177
/Users/lee/Projects/shepki/routes/index.js:30
title: data.name,
TypeError: Cannot read property 'name' of null
at /Users/lee/Projects/shepki/routes/index.js:30:18
at /Users/lee/Projects/shepki/modules/item.js:55:5
But if I remove title and just pass the whole data, it's OK. I don't understand what's wrong.
Thanks.
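Not from the original thread, but a common gotcha with this shape of code is falling through after next(err); a guarded sketch of the same route:
router.get('/item/:sku', (req, res, next) => {
  Item.getItem(req.params.sku, (err, data) => {
    if (err) return next(err); // return, so we don't fall through to res.render
    if (!data) return next(new Error('Item not found')); // guard against a null doc
    res.render('item', {
      title: data.name,
      data: data,
    });
  });
});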
I need to get the id of the inserted/updated record when using .upsert() in Sequelize.
Right now .upsert() returns a boolean indicating whether the row was created or updated.
return db.VenueAddress.upsert({
addressId:address.addressId,
venueId: venue.venueId,
street: address.street,
zipCode: address.zipCode,
venueAddressDeletedAt: null
}).then(function(test){
//test comes back here as true or false; how can I get the inserted id so I can insert data into other tables using this new id?
});
I don't think that returning the upserted record was available when the OP asked this question, but it has since been implemented with this PR. As of Sequelize v4.32.1, you can pass a boolean returning as a query param to select between returning an array with the record and a boolean, or just a boolean indicating whether or not a new record was created.
You do still need to provide the id of the record you want to upsert or a new record will be created.
For example:
const [record, created] = await Model.upsert(
{ id: 1, name: 'foo' }, // Record to upsert
{ returning: true } // Return upserted record
);
I wanted upsert to return the created or updated object. It doesn't, because apparently only PostgreSQL supports that directly.
So I created a naive implementation that will do that - probably in a non-performant way, and possibly with all sorts of race conditions:
Sequelize.Model.prototype.findCreateUpdate = function(findWhereMap, newValuesMap) {
return this.findOrCreate({
where: findWhereMap,
defaults: findWhereMap
})
.spread(function(newObj, created) {
// set:
for(var key in newValuesMap) {
newObj[key] = newValuesMap[key];
}
return newObj.save();
});
};
Usage when trying to create/update a move in a game (contrived example alert!):
models.Game
.findOne({where: {gameId: gameId}})
.then(function(game) {
return db.Move.findCreateUpdate(
{gameId: gameId, moveNum: game.moveNum+1},
{startPos: 'kr4', endPos: 'Kp2'}
);
});
This is what worked for me:
Model.upsert({
title:your title,
desc:your description,
location:your locations
}).then(function (test) {
if(test){
res.status(200);
res.send("Successfully stored");
}else{
res.status(200);
res.send("Successfully inserted");
}
})
It checks the DB based on your primary key. If it finds a match, it updates the data; otherwise it creates/inserts a new row.
I know this is an old post, but in case this helps anyone:
const upsert = async (model: any, values: any, condition: any): Promise<any> => {
const obj = await model.findOne({ where: condition })
if (obj) {
// only update if a value is different from the queried object from the db
for (var key in values) {
const val = values[key]
if (parseFloat(obj[key]) !== val) {
obj.isUpdatedRecord = true
return obj.update(values)
}
}
obj.isUpdatedRecord = false
return obj
} else {
// insert
const merged = { ...values, ...condition }
return model.create(merged)
}
}
It isn't using upsert, but .bulkCreate has an updateOnDuplicate parameter, which allows you to update certain fields (instead of creating a new row) in the event that the primary key already exists.
MyModel.bulkCreate(
newRows,
{
updateOnDuplicate: ["venueId", ...]
}
)
I believe this returns the resulting objects, and so I think this might enable the functionality you're looking for?
janmeier said:
This is only supported by postgres, so to keep the API consistent across dialects this is not possible.
please see : https://github.com/sequelize/sequelize/issues/3354
I believe my solution is the most up to date, with the most minimal coding.
const SequelizeModel = require('sequelize/lib/model')
SequelizeModel.upsert = function() {
return this.findOne({
where: arguments[0].where
}).then(obj => {
if(obj) {
// return the update promise so the caller receives the updated record
return obj.update(arguments[0].defaults)
}
return this.create(arguments[0].defaults)
})
}
I know this is an old post, but in case this helps anyone: you can get the returned id, or any other value, in this way, based on the OP's data.
var data = {
addressId:address.addressId,
venueId: venue.venueId,
street: address.street,
zipCode: address.zipCode,
venueAddressDeletedAt: null
}
const result = await db.VenueAddress.upsert(data, { returning: true });
console.log('resulttttttttttttttttt =>', result)
res.status(200).json({ message: 'Your success message', data: result[0].id});
Notice how I passed { returning: true } and got the value from the result data.
Super old, but if it helps someone:
const [city, created] = await City.upsert({
id: 5,
cityName: "Glasgow",
population: 99999,
});
created is the boolean saying whether the item was created, and in city you have the whole item, where you can get your id.
No need of returning, and this is db agnostic :)
The only solution for SQLite in Sequelize 6.14.0 is to query the inserted row again
I haven't found a solution that works besides a new SELECT query.
It does work in PostgreSQL however.
Presumably, this is because RETURNING was only implemented relatively recently in SQLite 3.35.0 from 2021: https://www.sqlite.org/lang_returning.html and Sequelize doesn't use that version yet.
I've tried both:
Model.upsert with returning: true did not work on SQLite. BTW, as mentioned at https://sequelize.org/api/v6/class/src/model.js~model#static-method-upsert, returning already defaults to true now, so you don't need to pass it explicitly
Model.bulkCreate with updateOnDuplicate
In both of those cases, some dummy value is returned when the object is present, not the one that is actually modified.
Minimal runnable examples from https://cirosantilli.com/sequelize
update_on_duplicate.js
#!/usr/bin/env node
const assert = require('assert')
const path = require('path')
const { DataTypes, Sequelize } = require('sequelize')
let sequelize
if (process.argv[2] === 'p') {
sequelize = new Sequelize('tmp', undefined, undefined, {
dialect: 'postgres',
host: '/var/run/postgresql',
})
} else {
sequelize = new Sequelize({
dialect: 'sqlite',
storage: 'tmp.sqlite',
})
}
function assertEqual(rows, rowsExpect) {
assert.strictEqual(rows.length, rowsExpect.length)
for (let i = 0; i < rows.length; i++) {
let row = rows[i]
let rowExpect = rowsExpect[i]
for (let key in rowExpect) {
assert.strictEqual(row[key], rowExpect[key])
}
}
}
;(async () => {
const Integer = sequelize.define('Integer',
{
value: {
type: DataTypes.INTEGER,
unique: true, // mandatory
},
name: {
type: DataTypes.STRING,
},
inverse: {
type: DataTypes.INTEGER,
},
},
{
timestamps: false,
}
);
await Integer.sync({ force: true })
await Integer.create({ value: 2, inverse: -2, name: 'two' });
await Integer.create({ value: 3, inverse: -3, name: 'three' });
await Integer.create({ value: 5, inverse: -5, name: 'five' });
let rows
// Initial state.
rows = await Integer.findAll({ order: [['id', 'ASC']]})
assertEqual(rows, [
{ id: 1, value: 2, name: 'two', inverse: -2 },
{ id: 2, value: 3, name: 'three', inverse: -3 },
{ id: 3, value: 5, name: 'five', inverse: -5 },
])
// Update.
rows = await Integer.bulkCreate(
[
{ value: 2, name: 'TWO' },
{ value: 3, name: 'THREE' },
{ value: 7, name: 'SEVEN' },
],
{ updateOnDuplicate: ["name"] }
)
// PostgreSQL runs the desired:
//
// INSERT INTO "Integers" ("id","value","name") VALUES (DEFAULT,2,'TWO'),(DEFAULT,3,'THREE'),(DEFAULT,7,'SEVEN') ON CONFLICT ("value") DO UPDATE SET "name"=EXCLUDED."name" RETURNING "id","value","name","inverse";
//
// but "sequelize": "6.14.0" "sqlite3": "5.0.2" does not use the desired RETURNING which was only added in 3.35.0 2021: https://www.sqlite.org/lang_returning.html
//
// INSERT INTO `Integers` (`id`,`value`,`name`) VALUES (NULL,2,'TWO'),(NULL,3,'THREE'),(NULL,7,'SEVEN') ON CONFLICT (`value`) DO UPDATE SET `name`=EXCLUDED.`name`;
//
// so not sure how it returns any IDs at all, is it just incrementing them manually? In any case, those IDs are
// all wrong as they don't match the final database state. Likely RETURNING will be added at some point.
//
// * https://stackoverflow.com/questions/29063232/sequelize-upsert
// * https://github.com/sequelize/sequelize/issues/7478
// * https://github.com/sequelize/sequelize/issues/12426
// * https://github.com/sequelize/sequelize/issues/3354
if (sequelize.options.dialect === 'postgres') {
assertEqual(rows, [
{ id: 1, value: 2, name: 'TWO', inverse: -2 },
{ id: 2, value: 3, name: 'THREE', inverse: -3 },
// The 6 here seems to be because the new TWO and THREE initially take up dummy rows,
// but are finally restored to final values.
{ id: 6, value: 7, name: 'SEVEN', inverse: null },
])
} else {
assertEqual(rows, [
// These IDs are just completely wrong as mentioned at: https://github.com/sequelize/sequelize/issues/12426
// Will be fixed when one day they use RETURNING.
{ id: 4, value: 2, name: 'TWO', inverse: undefined },
{ id: 5, value: 3, name: 'THREE', inverse: undefined },
{ id: 6, value: 7, name: 'SEVEN', inverse: undefined },
])
}
// Final state.
rows = await Integer.findAll({ order: [['id', 'ASC']]})
assertEqual(rows, [
{ id: 1, value: 2, name: 'TWO', inverse: -2 },
{ id: 2, value: 3, name: 'THREE', inverse: -3 },
{ id: 3, value: 5, name: 'five', inverse: -5 },
{ id: 6, value: 7, name: 'SEVEN', inverse: null },
])
})().finally(() => { return sequelize.close() });
upsert.js
#!/usr/bin/env node
const assert = require('assert')
const path = require('path')
const { DataTypes, Sequelize } = require('sequelize')
let sequelize
if (process.argv[2] === 'p') {
sequelize = new Sequelize('tmp', undefined, undefined, {
dialect: 'postgres',
host: '/var/run/postgresql',
})
} else {
sequelize = new Sequelize({
dialect: 'sqlite',
storage: 'tmp.sqlite',
})
}
function assertEqual(rows, rowsExpect) {
assert.strictEqual(rows.length, rowsExpect.length)
for (let i = 0; i < rows.length; i++) {
let row = rows[i]
let rowExpect = rowsExpect[i]
for (let key in rowExpect) {
assert.strictEqual(row[key], rowExpect[key])
}
}
}
;(async () => {
const Integer = sequelize.define('Integer',
{
value: {
type: DataTypes.INTEGER,
unique: true,
},
name: {
type: DataTypes.STRING,
},
inverse: {
type: DataTypes.INTEGER,
},
},
{
timestamps: false,
}
);
await Integer.sync({ force: true })
await Integer.create({ value: 2, inverse: -2, name: 'two' });
await Integer.create({ value: 3, inverse: -3, name: 'three' });
await Integer.create({ value: 5, inverse: -5, name: 'five' });
let rows
// Initial state.
rows = await Integer.findAll({ order: [['id', 'ASC']]})
assertEqual(rows, [
{ id: 1, value: 2, name: 'two', inverse: -2 },
{ id: 2, value: 3, name: 'three', inverse: -3 },
{ id: 3, value: 5, name: 'five', inverse: -5 },
])
// Update.
rows = [(await Integer.upsert({ value: 2, name: 'TWO' }))[0]]
if (sequelize.options.dialect === 'postgres') {
assertEqual(rows, [
{ id: 1, value: 2, name: 'TWO', inverse: -2 },
])
} else {
// Unexpected ID returned due to the lack of RETURNING, we wanted it to be 1.
assertEqual(rows, [
{ id: 3, value: 2, name: 'TWO', inverse: undefined },
])
}
rows = [(await Integer.upsert({ value: 3, name: 'THREE' }))[0]]
if (sequelize.options.dialect === 'postgres') {
assertEqual(rows, [
{ id: 2, value: 3, name: 'THREE', inverse: -3 },
])
} else {
assertEqual(rows, [
{ id: 3, value: 3, name: 'THREE', inverse: undefined },
])
}
rows = [(await Integer.upsert({ value: 7, name: 'SEVEN' }))[0]]
if (sequelize.options.dialect === 'postgres') {
assertEqual(rows, [
{ id: 6, value: 7, name: 'SEVEN', inverse: null },
])
} else {
assertEqual(rows, [
{ id: 6, value: 7, name: 'SEVEN', inverse: undefined },
])
}
// Final state.
rows = await Integer.findAll({ order: [['value', 'ASC']]})
assertEqual(rows, [
{ id: 1, value: 2, name: 'TWO', inverse: -2 },
{ id: 2, value: 3, name: 'THREE', inverse: -3 },
{ id: 3, value: 5, name: 'five', inverse: -5 },
{ id: 6, value: 7, name: 'SEVEN', inverse: null },
])
})().finally(() => { return sequelize.close() });
package.json
{
"name": "tmp",
"private": true,
"version": "1.0.0",
"dependencies": {
"pg": "8.5.1",
"pg-hstore": "2.3.3",
"sequelize": "6.14.0",
"sql-formatter": "4.0.2",
"sqlite3": "5.0.2"
}
}
In both of those examples, we see that PostgreSQL runs the desired:
INSERT INTO "Integers" ("id","value","name") VALUES (DEFAULT,2,'TWO'),(DEFAULT,3,'THREE'),(DEFAULT,7,'SEVEN') ON CONFLICT ("value") DO UPDATE SET "name"=EXCLUDED."name" RETURNING "id","value","name","inverse";
which works due to RETURNING, but Sequelize does not use the desired RETURNING for SQLite:
INSERT INTO `Integers` (`id`,`value`,`name`) VALUES (NULL,2,'TWO'),(NULL,3,'THREE'),(NULL,7,'SEVEN') ON CONFLICT (`value`) DO UPDATE SET `name`=EXCLUDED.`name`;
Tested on Ubuntu 21.10, PostgreSQL 13.5.
I myself resolved it as follows:
return db.VenueAddress.upsert({
addressId:address.addressId,
venueId: venue.venueId,
street: address.street,
zipCode: address.zipCode,
venueAddressDeletedAt: null
},{individualHooks: true}).then(function(test){
// note individualHooks
});