I need to generate a map on the server side using Node.js and then create an image of that map. I'm using leaflet-headless to create the map and generate the image.
This is the code:
const path = require('path');
const L = require('leaflet-headless');

const document = global.document;

let createMap = (lanLat) => {
  const element = document.createElement('div');
  element.id = 'map-leaflet-image';
  document.body.appendChild(element);

  const filename = path.join(__dirname, '/leaflet-image.png');
  const map = L.map(element.id).setView([0, 0], 3);

  L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
    attribution: '© OpenStreetMap contributors'
  }).addTo(map);

  map.saveImage(filename, () => {
    console.log('done');
  });
};
This works and the image is saved, but when I change the setView() parameters to setView([0, 0], 1) (zooming out) I receive an error message:
return prev.apply(ctx, arguments);
Error: Image given has not completed loading
at Error (native)
at CanvasRenderingContext2D.ctx.(anonymous function) [as drawImage]
Any thoughts?
In case this interests someone: the problem was in the map.saveImage() function, which uses the leaflet-image lib under the hood.
It happened in a weirdly specific scenario: a marker with certain coordinates, when added to the map together with any other marker(?!), caused the error. I removed that marker and it worked.
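For anyone debugging a similar failure, here is a minimal sketch (my assumption: the leaflet-image package that leaflet-headless delegates to can be required directly) that renders the map with leaflet-image itself, so the underlying error surfaces in your own callback rather than deep inside saveImage():
// Sketch only: render the map via leaflet-image so tile/marker loading errors
// (like "Image given has not completed loading") reach our own error handler.
const fs = require('fs');
const leafletImage = require('leaflet-image');

leafletImage(map, (err, canvas) => {
  if (err) {
    console.error('leaflet-image failed:', err);
    return;
  }
  // With leaflet-headless the canvas is a node-canvas instance,
  // so it can be streamed straight to a PNG file.
  canvas.pngStream().pipe(fs.createWriteStream(filename));
});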
I am working on the Ionic Angular platform. I get a shapefile that has been converted to a Base64 string. I am converting it to a File and then trying to convert it to an ArrayBuffer using FileReader so the data matches what L.shapefile expects:
https://github.com/calvinmetcalf/leaflet.shapefile
EDIT: I fixed the previous problem; the current problem is the data (ArrayBuffer) passed to leaflet.shapefile, which returns the error.
Or if you got the zip some other way (like the File API) then with the arrayBuffer you can call
const geojson = await shp(buffer);
Source: https://www.npmjs.com/package/shpjs
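Building on that, here is a minimal sketch (my own assumption, not tested against this exact file) that decodes the Base64 payload straight into an ArrayBuffer and hands it to shpjs, skipping the File/FileReader round trip; shape_data, shp, and L are the names used in the component below, and addShapefile is a hypothetical helper:
// Sketch: base64 zip -> Uint8Array -> shpjs -> GeoJSON layer on the map.
async function addShapefile(shape_data, map) {
  const base64 = shape_data.split(',')[1]; // strip the "data:application/zip;base64," prefix
  const binary = atob(base64);
  const bytes = new Uint8Array(binary.length);
  for (let i = 0; i < binary.length; i++) {
    bytes[i] = binary.charCodeAt(i);
  }
  const geojson = await shp(bytes.buffer); // shpjs accepts an ArrayBuffer of the zipped shapefile
  L.geoJSON(geojson).addTo(map);
}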
I tried working in this direction to fit the shapefile into L.shapefile as follows:
import * as L from "leaflet";
import * as shp from "shpjs";

const l1 = require('../../assets/leaflet.shpfile.js');

export class Tab7Page {
  map: L.Map;

  async ngOnInit() {
    this.map = L.map("map", {
      center: [49.7, 8.12],
      zoom: 15,
      renderer: L.canvas(),
    });

    L.tileLayer(
      "https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}",
      {
        // maxZoom: 12,
        attribution:
          "Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community",
      }
    ).addTo(this.map);

    // this.map.setView(layer.getBounds()['_northEast'], 14);
    setTimeout(() => {
      this.map.invalidateSize();
    }, 1);
var shape_data = "data:application/zip;base64,UEsDBAoAAAAAAHZwplQAAAAAAAAAAAAAAAAHAAAAbGF5ZXJzL1BLAwQKAAAAAAB2cKZUD7eKbDwBAAA8AQAAEgAAAGxheWVycy9QT0xZR09OLnNocAAAJwoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJ7oAwAABQAAAAAAAFCyGiBAeNAJLjeZSEAAAACYlB0gQMAkzNp mUhAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAaAUAAAAAAABQshogQHjQCS43mUhAAAAAmJQdIEDAJMzafplIQAEAAAAKAAAAAAAAAAAAAJjEGiBAwCTM2n6ZSEAAAABQshogQKOtBVtUmUhAAAAAeLYbIEC8k8/CPplIQAAAABBpHCBAsUVS7DyZSED/// v9RwgQMXhbjg/mUhAAAAA6IAdIEB40AkuN5lIQAAAAJiUHSBAeeTrB2mZSEAAAACo7hwgQFKQcddwmUhAAAAAcNwbIEDbe339cZlIQAAAAJjEGiBAwCTM2n6ZSEBQSwMECgAAAAAAdnCmVObu5cZsAAAAbAAAABIAAABsYXllcnMvUE9MWUdPTi5zaHgAACcKAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA26AMAAAUAAAAAAABQshogQHjQCS43mUhAAAAAmJQdIEDAJMzafplIQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMgAAAGhQSwMECgAAAAAAdnCmVGt60SP0AgAA9AIAABIAAABsYXllcnMvUE9MWUdPTi5kYmYDegQGAQAAAMEAMwIAAAAAAAAAAAAAAAAAAAAAAAAAAHN0cm9rZQAAAAAAQwAAAAD AAAAAAAAAAAAAAAAAAAAc3Ryb2tlLXcAAABOAAAAABIAAAAAAAAAAAAAAAAAAABzdHJva2UtbwAAAE4AAAAAEgAAAAAAAAAAAAAAAAAAAGZpbGwAAAAAAAAAQwAAAAD AAAAAAAAAAAAAAAAAAAAZmlsbC1vcGEAAABOAAAAABIAAAAAAAAAAAAAAAAAAAAAICNjNWQ1NzMgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAyICAgICAgICAgICAgICAgICAxI2RiZDc5MCAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAwLhpQSwMECgAAAAAAdnCmVOPBUWqPAAAAjwAAABIAAABsYXllcnMvUE9MWUdPTi5wcmpHRU9HQ1NbIkdDU19XR1NfMTk4NCIsREFUVU1bIkRfV0dTXzE5ODQiLFNQSEVST0lEWyJXR1NfMTk4NCIsNjM3ODEzNywyOTguMjU3MjIzNTYzXV0sUFJJTUVNWyJHcmVlbndpY2giLDBdLFVOSVRbIkRlZ3JlZSIsMC4wMTc0NTMyOTI1MTk5NDMyOTVdXVBLAQIUAAoAAAAAAHZwplQAAAAAAAAAAAAAAAAHAAAAAAAAAAAAEAAAAAAAAABsYXllcnMvUEsBAhQACgAAAAAAdnCmVA 3imw8AQAAPAEAABIAAAAAAAAAAAAAAAAAJQAAAGxheWVycy9QT0xZR09OLnNocFBLAQIUAAoAAAAAAHZwplTm7uXGbAAAAGwAAAASAAAAAAAAAAAAAAAAAJEBAABsYXllcnMvUE9MWUdPTi5zaHhQSwECFAAKAAAAAAB2cKZUa3rRI/QCAAD0AgAAEgAAAAAAAAAAAAAAAAAtAgAAbGF5ZXJzL1BPTFlHT04uZGJmUEsBAhQACgAAAAAAdnCmVOPBUWqPAAAAjwAAABIAAAAAAAAAAAAAAAAAUQUAAGxheWVycy9QT0xZR09OLnByalBLBQYAAAAABQAFADUBAAAQBgAAAAA="
    var shape_fileName = "TestFile";
    var file = this.dataURLtoFile(shape_data, shape_fileName);
    // console.log("Blob retrieved successfully..", blob);
    // this.handleZipFile(file);
    var reader = new FileReader();
    reader.onload = () => { // arrow function so `this` still refers to the component
      if (reader.readyState != 2 || reader.error) {
        console.error("that's the error side");
        return;
      } else {
        shp(reader.result).then((geojson) => { // More info: https://github.com/calvinmetcalf/shapefile-js
          L.geoJSON(geojson).addTo(this.map);  // More info: https://github.com/calvinmetcalf/leaflet.shapefile
        });
      }
    };
    reader.readAsArrayBuffer(file);
  }
  dataURLtoFile(dataurl, filename) {
    var arr = dataurl.split(','),      // arr[0] = "data:application/zip;base64", arr[1] = payload
      mime = arr[0].match(/:(.*?);/)[1],
      bstr = atob(arr[1]),             // decode only the Base64 payload, not the whole data URL
      n = bstr.length,
      u8arr = new Uint8Array(n);
    while (n--) {
      u8arr[n] = bstr.charCodeAt(n);
    }
    return new File([u8arr], filename, { type: mime });
  }
I just get a map, along with the error shown below. I am not sure which Leaflet package to use: shpjs or L.shapefile?
EDIT: As per comments from the GIS platform, I installed buffer and updated polyfills.ts, and now I get the following error.
I am trying to run an image categorization model in Firebase Cloud Functions using TensorFlow.js (specifically tfjs-node), but am running into the following error:
Error: method must be bilinear or nearest, but was undefined
at assert (/workspace/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:698:15)
at cropAndResize_ (/workspace/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:21340:5)
at Object.cropAndResize__op [as cropAndResize] (/workspace/node_modules/@tensorflow/tfjs-core/dist/tf-core.node.js:4287:29)
at prepImage (/workspace/handlers/models.js:58:35)
at /workspace/handlers/models.js:68:44
at processTicksAndRejections (internal/process/task_queues.js:97:5)
at async exports.isFurnished (/workspace/handlers/models.js:10:17)
at async exports.getanalysis (/workspace/handlers/apis.js:103:16)
The error is being thrown by the tf.image.cropAndResize() function. What is strange about this error is that cropAndResize() should be automatically using its default value of "bilinear" as specified in the docs.
Stranger yet, when I run it locally I don't get any errors. My local machine is running node v12.16.0.
Below is my code. Please note that I am only loading signature.json from Firebase Storage and fetching/standardizing an image (I am not loading and running the actual TensorFlow model).
const { admin, db } = require("../util/admin");
const firebase = require("firebase");
const tf = require("@tensorflow/tfjs-node");
const fetch = require("node-fetch");

exports.isFurnished = async (imgUrl) => {
  const sigPath = "models/signature.json";
  const signature = await loadSignature(sigPath);
  const image = await loadImage(imgUrl, signature);
  return "It worked!";
};

//signature---------------------------
const loadSignature = (filePath) => {
  let file = admin.storage().bucket().file(filePath);
  return file
    .download()
    .then((res) => JSON.parse(res[0].toString("utf8")))
    .catch((err) => err.message);
};

//Image-------------------------------
const loadImage = (imgUrl, signature) => {
  return fetchImage(imgUrl).then((image) => prepImage(image, signature));
};

const fetchImage = async (url) => {
  const response = await fetch(url);
  const buffer = await response.buffer();
  return buffer;
};

const prepImage = (rawImage, signature) => {
  const image = tf.node.decodeImage(rawImage, 3);
  const [height, width] = signature.inputs.Image.shape.slice(1, 3);
  const [imgHeight, imgWidth] = image.shape.slice(0, 2);
  const normalizedImage = tf.div(image, tf.scalar(255));
  const reshapedImage = normalizedImage.reshape([1, ...normalizedImage.shape]);

  let top = 0;
  let left = 0;
  let bottom = 1;
  let right = 1;
  if (imgHeight != imgWidth) {
    const size = Math.min(imgHeight, imgWidth);
    left = (imgWidth - size) / 2 / imgWidth;
    top = (imgHeight - size) / 2 / imgHeight;
    right = (imgWidth + size) / 2 / imgWidth;
    bottom = (imgHeight + size) / 2 / imgHeight;
  }

  return tf.image.cropAndResize(
    reshapedImage,
    [[top, left, bottom, right]],
    [0],
    [height, width]
  );
};
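For reference, the same call with the interpolation method passed explicitly (the variant mentioned further down that triggers the napi_status error) would look like this; the variable names are the ones from prepImage() above, and per the docs 'bilinear' is already the default:
return tf.image.cropAndResize(
  reshapedImage,                // batch of shape [1, imgHeight, imgWidth, 3]
  [[top, left, bottom, right]], // one normalized crop box
  [0],                          // index of the box's image in the batch
  [height, width],              // output size taken from signature.json
  "bilinear"                    // interpolation method passed explicitly
);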
Have I made an error that I'm just not seeing, or is this a Node and/or tfjs issue?
Also, adding in the "bilinear" parameter yields this error:
Error: Invalid napi_status: A number was expected
As commented above, TensorFlow.js version 2.8.0 seems to have introduced some breaking changes. Workaround (at the time of writing) is to keep using version 2.7.0.
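If it helps, this is roughly what that pin looks like in package.json (the exact package you pin depends on what you have installed; here it is the tfjs-node package from the question):
{
  "dependencies": {
    "@tensorflow/tfjs-node": "2.7.0"
  }
}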
I am working with BodyPix. It was working fine until this morning; although I haven't changed anything, this exact error came up this afternoon, so it could be a TensorFlow issue. Or it could be Windows: I checked on Windows 8.1, where it works totally fine, while the problem happens on Windows 10.
EDIT: I am quite sure it's from TensorFlow, not Windows. I was using a CDN to get BodyPix, and after updating the CDN addresses the error disappeared.
Previous: https://cdn.jsdelivr.net/npm/@tensorflow-models/body-pix/dist/body-pix.min.js
https://cdn.jsdelivr.net/npm/@tensorflow/tfjs/dist/tf.min.js
Now: https://cdn.jsdelivr.net/npm/@tensorflow-models/body-pix@2.0.5/dist/body-pix.min.js
https://cdn.jsdelivr.net/npm/@tensorflow/tfjs@2.7.0/dist/tf.min.js
I am trying to send a PDFKit-generated PDF file as input to pdf-lib for merging. I am using an async function. My project is developed with Sails.js version "^1.2.3" and Node "^12.16"; my pdfkit version is "^0.11.0" and pdf-lib is "^1.9.0".
This is the code:
const textbytes=fs.readFileSync(textfile);
var bytes1 = new Uint8Array(textbytes);
const textdoc = await PDFDocumentFactory.load(bytes1)
The error I am getting is:
UnhandledPromiseRejectionWarning: Error: Failed to parse PDF document (line:0 col:0 offset=0): No PDF header found
Please help me with this issue.
You really don't need this line.
var bytes1 = new Uint8Array(textbytes);
Just reading the file and passing textbytes as the parameter is more than enough.
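For example, a minimal sketch assuming the pdf-lib 1.x API (where the entry point is PDFDocument; the question's PDFDocumentFactory is the older pre-1.0 entry point), inside an async function:
const fs = require('fs');
const { PDFDocument } = require('pdf-lib');

async function loadPdf(textfile) {
  const textbytes = fs.readFileSync(textfile); // a Node Buffer is accepted directly
  return PDFDocument.load(textbytes);
}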
I use this function to merge an array of pdfBytes to make one big PDF file:
const pdf = require('pdf-lib'); // so pdf.PDFDocument below is defined

async function mergePdfs(pdfsToMerge) {
  const mergedPdf = await pdf.PDFDocument.create();

  for (const pdfCopyDoc of pdfsToMerge) {
    const pdfDoc = await pdf.PDFDocument.load(pdfCopyDoc);
    const copiedPages = await mergedPdf.copyPages(pdfDoc, pdfDoc.getPageIndices());
    copiedPages.forEach((page) => {
      mergedPdf.addPage(page);
    });
  }

  const mergedPdfFile = await mergedPdf.save();
  return mergedPdfFile;
}
So basically, after you add the function mergePdfs(pdfsToMerge), you can just use it like this (it expects an array of PDF byte buffers and does the loading itself):
const textbytes = fs.readFileSync(textfile);
let finalPdf = await mergePdfs([textbytes]);
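If you then want the merged result on disk, a small sketch (the output filename is just an example):
// mergedPdf.save() returns a Uint8Array, which fs can write directly.
const fs = require('fs');
fs.writeFileSync('merged.pdf', finalPdf);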
This is a repeat question (not yet answered), but I have revised and tightened up the code and included a specific example. I am sorry to keep beating this drum, but I need help.
This is a Node API. I need to read and write JSON data. I am using the Node core module 'fs', not the npm package of the same name (or fs-extra). I have extracted the particular area of concern into a standalone module, shown here:
'use strict';
/*==================================================
This service GETs the list of ids to the json data files
to be processed, from a json file with the id 'ids.json'.
It returns and exports idsList (an array holding the ids of the json data files)
It also calls putIdsCleared to clear the 'ids.json' file for the next batch of processing
==================================================*/
// node modules
const fs = require('fs');
const config = require('config');
const scheme = config.get('json.scheme')
const jsonPath = config.get('json.path');
const url = `${scheme}${jsonPath}/`;
const idsID = 'ids.json';
const uri = `${url}${idsID}`;
let idsList = [];
const getList = async (uri) => {
  await fs.readFile(uri, 'utf8', (err, data) => {
    if (err) {
      return console.log(new Error(err.message));
    }
    return jsonData = JSON.parse(data);
  });
};
// The idea is to get the empty array written back to 'ids.json' before returning to 'process.js'
const clearList = async (uri) => {
  let data = JSON.stringify({ 'ids': [] });
  await fs.writeFile(uri, data, (err) => {
    if (err) {
      return console.log(new Error(err.message));
    }
    return;
  });
};
getList(uri);
clearList(uri)
console.log('end of idsList',idsList);
module.exports = idsList;
Here is the console output from the execution of the module:
Error: ENOENT: no such file or directory, open 'File:///Users/doug5solas/sandbox/libertyMutual/server/api/ids.json'
at ReadFileContext.fs.readFile [as callback]
(/Users/doug5solas/sandbox/libertyMutual/server/.playground/ids.js:24:33)
at FSReqWrap.readFileAfterOpen [as oncomplete] (fs.js:235:13)
Error: ENOENT: no such file or directory, open 'File:///Users/doug5solas/sandbox/libertyMutual/server/api/ids.json'
at fs.writeFile
(/Users/doug5solas/sandbox/libertyMutual/server/.playground/ids.js:36:34)
at fs.js:1167:7
at FSReqWrap.oncomplete (fs.js:141:20)
I am being told there is no such file or directory. However, I can copy the URI (as shown in the error message)
File:///Users/doug5solas/sandbox/libertyMutual/server/api/ids.json
into the search bar of my browser and this is what is returned to me:
{
"ids": [
"5sM5YLnnNMN_1540338527220.json",
"5sM5YLnnNMN_1540389571029.json",
"6tN6ZMooONO_1540389269289.json"
]
}
This result is the expected result. I do not "get" why I can get the data manually but I cannot get it programmatically, using the same uri. What am I missing? Help appreciated.
Your File URI is in the wrong format.
It shouldn't contain the File:// protocol (that's a browser-specific thing).
I'd imagine you want /Users/doug5solas/sandbox/libertyMutual/server/api/ids.json (a plain filesystem path) instead.
I solved the problem by going to readFileSync. I don't like it but it works and it is only one read.
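For what it's worth, here is a minimal sketch of the promise-based variant (the path is an assumption based on the error message), which avoids both the File:// scheme and the fire-and-forget callbacks, so the await actually waits for the read:
// Sketch: fs.promises plus a plain filesystem path instead of a File:// URI.
const fs = require('fs').promises;
const path = require('path');

const idsFile = path.join(__dirname, '..', 'api', 'ids.json'); // assumed location of ids.json

const getList = async () => {
  const data = await fs.readFile(idsFile, 'utf8');
  return JSON.parse(data).ids;
};

const clearList = () => fs.writeFile(idsFile, JSON.stringify({ ids: [] }));

getList()
  .then((ids) => console.log('ids', ids))
  .then(clearList)
  .catch((err) => console.error(err.message));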
Here is a prior post that discusses how the pre-loaded Imagemagick is limited for security reasons on AWS Lambda.
"Note: This update contains an updated /etc/ImageMagick/policy.xml
file that disables the EPHEMERAL, HTTPS, HTTP, URL, FTP, MVG, MSL,
TEXT, and LABEL coders"
I need to use the 'label' function (which works successfully on my development machine; example pic further below).
Within the discussion in the linked post, frenchie4111 generously offers a Node module he created that installs ImageMagick into a Lambda app: https://github.com/DoubleDor/imagemagick-prebuilt
I would like to understand how installing a fresh version of ImageMagick works, and how I will then use that version with the gm module, which ties ImageMagick and Node.js together.
If I read correctly, the full version of ImageMagick will be re-downloaded to the path below each time my Lambda app boots up?
/tmp/imagemagick
DoubleDor's readme provides the option below:
var q = require('q'); // the readme snippet relies on q.async, not required in the original excerpt
var imagemagick_prebuilt = require('imagemagick-prebuilt');
var child_process = require('child_process');

exports.handler = function (event, context) {
  return q
    .async(function* () {
      imagemagick_bin_location = yield imagemagick_prebuilt();
      console.log(`ImageMagick installed: ${imagemagick_bin_location}`);

      // ImageMagick logo creation test:
      // convert logo: logo.gif
      var convert_process = child_process
        .spawn(imagemagick_bin_location, ['logo:', 'logo.gif']);

      convert_process
        .on('close', function () {
          context.success();
        });
    })();
};
What would I include/require to define 'gm' so it works within my partial file below (in my Node.js Lambda app)?
Will I need to edit the GM module too?
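To make the question concrete, here is a hedged sketch of how I imagine wiring the two together. It assumes gm's documented subClass({ appPath }) option can be pointed at the directory imagemagick-prebuilt extracts to; I have not verified this on Lambda:
// Sketch only: point gm at the ImageMagick binaries that imagemagick-prebuilt
// installs under /tmp on cold start, then test the 'label:' coder.
var imagemagickPrebuilt = require('imagemagick-prebuilt');
var path = require('path');

exports.handler = function (event, context) {
  imagemagickPrebuilt().then(function (convertBinLocation) {
    // convertBinLocation is the path to the freshly installed `convert` binary
    var gm = require('gm').subClass({
      imageMagick: true,
      appPath: path.dirname(convertBinLocation) + '/' // gm prefixes its commands with this path
    });

    gm(400, 40, '#f0f8ff') // blank canvas
      .out('label: label test on Lambda')
      .write('/tmp/label-test.jpg', function (err) {
        if (err) return context.fail(err);
        context.succeed('/tmp/label-test.jpg written');
      });
  });
};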
// imagemaker.js > gets included and called from another file that uploads the picture to S3
// and/or tweets it after the picture is created in /tmp/filename.jpg. This works presently:
// I can make and upload ImageMagick text-generated images, but I just can't use the 'label'
// tool, which scales text within appended gm images.
'use strict';
var Promise = require('bluebird');
var exec = require('child_process').exec;
var async = require('async');
var request = require('request');
var fs = require('fs');
var dateFormat = require('dateformat');
var gm = require('gm').subClass({imageMagick: true});
var aws = require('aws-sdk');
var performers = [{ name: 'Matt Daemon', score: 99 }, { name: 'Jenifer Lawrence', score: 101 }];
// created in a makeTitle function I omit for brevity's sake.
var url = '/temp/pathtotitlepicture.jpg';
// function below successfully appends a gm title image created with other functions that I haven't included
function makeBody(url) {
  var img = gm(400, 40)
    .append(url)
    .gravity('West')
    .fill('black')
    .quality('100')
    .font('bigcaslon.ttf')
    .background('#f0f8ff');

  for (var i = 0; i < performers.length; i++) {
    var pname = " " + (i + 1) + ") " + performers[i].name;
    img.out('label:' + pname);
  }

  img.borderColor('#c5e4ff')
    .border('5', '5')
    .write(url, function (err) {
      if (err) throw err;
      var stream = fs.createReadStream("/tmp/fooberry.jpg");
      return resolve(stream); // `resolve` comes from a Promise wrapper omitted here
    });
}
Just for fun, the image below shows what I've been able to do with gm (GraphicsMagick for Node) and ImageMagick on my development machine, which I'd now like to get working on AWS Lambda. I really need that 'label' function, and I guess that means learning how to get the whole library uploaded to AWS Lambda each time it boots(?).