I am a new user of MongoDB and Node.js, building a simple product catalogue. I am trying to import the contents of a CSV file into MongoDB, but I get a "SyntaxError: Unexpected identifier" error. Is there any way to resolve this?
I am also curious about using the ya-csv package available on GitHub. Can anyone help?
Try the following:
npm install ya-csv    // install the package

// Put this code in your .js file
var csv = require("ya-csv");
var validRows = [];

var reader = csv.createCsvFileReader("/csv/file.csv", {
    columnsFromHeader: true,
    separator: ',',
    encoding: "utf8"
});

// Rows that parse correctly are pushed into the array
reader.addListener('data', function (data) {
    if (data) {
        validRows.push(data);
    }
});

// Called after all CSV rows have been read
reader.addListener('end', function () {
    validRows.forEach(function (data) {
        // console.log(data.name); // access your data like this
    });
});

// Called if any error occurs
reader.addListener('error', function (err) {
    console.log(err);
});
For further details, please follow this link:
https://github.com/koles/ya-csv
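To actually get those rows into MongoDB, the 'end' listener is a natural place to do the insert. Here is a minimal sketch using the official mongodb driver (2.x-style callback API); the database name catalogue and collection name products are just placeholders:

var MongoClient = require('mongodb').MongoClient;

// Insert the parsed rows once the whole CSV has been read
reader.addListener('end', function () {
    MongoClient.connect('mongodb://localhost:27017/catalogue', function (err, db) {
        if (err) {
            return console.log(err);
        }
        db.collection('products').insertMany(validRows, function (err, result) {
            if (err) {
                console.log(err);
            } else {
                console.log(result.insertedCount + ' rows imported');
            }
            db.close();
        });
    });
});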
Related
I'm new to Node.js and React Native. I followed the send_telemetry.js sample exactly, but when I run my React Native app I get an error: "The development server returned response error code 500".
The error message is:
bundling failed: Error: Unable to resolve module fs from ProjectPath\node_modules\azure-iot-device\lib\module_client.js: Module fs does not exist in the Haste module map
I'm running:
Node.js v10.15.3
NPM 6.4.1
react-native 0.59.2
The first error was the same, but for Unable to resolve module events. I can install events, but for fs the npm registry says: "This package name is not currently in use, but was formerly occupied by another package. To avoid malicious use, npm is hanging on to the package name, but loosely, and we'll probably give it to you if you want it."
var Protocol = require('azure-iot-device-http').Http;
var DeviceClient = require('azure-iot-device').Client;
var Message = require('azure-iot-device').Message;
var connectionString = 'my connection string';
var client = DeviceClient.fromConnectionString(connectionString, Protocol);
function ConnectionTest(err) {
    if (err) {
        console.log('Could not connect: ' + err);
    } else {
        console.log('Client connected');
    }
    client.close(function () {
        process.exit(0);
    });
}

export async function Test() {
    client.open(ConnectionTest);
}
Basically I need to know how to get the Azure IoT Hub client working in my React Native app (not using Expo).
I'm pretty much stumped, so any help would be greatly appreciated.
A dependency module is missing, namely fs.
fs is a Node.js core (file system) module and is incompatible with React Native, because React Native runs in its own, different JavaScript environment that does not provide the Node core modules.
I had "import { symlink } from 'fs';" randomly pop up in one of my scripts. Once I deleted this line same issue you had went away. I would search your whole project for that line.
I am trying to use Neo4j on the backend. First I want to import a CSV file into Neo4j (to start with, I just tried to count how many lines the CSV file has).
But I am having a problem. The code is the following:
var neo4j = require('neo4j-driver').v1;
var driver = neo4j.driver("bolt://localhost", neo4j.auth.basic("neo4j", "neo4j"));
function createGraphDataBase(csvfilepath) {
    var session = driver.session();
    return session
        .run('LOAD CSV FROM {csvfilepath} AS line RETURN count(*)', { csvfilepath })
        .then(result => {
            session.close();
            console.log(' %d lines in csv.file', result);
            return result;
        })
        .catch(error => {
            session.close();
            console.log(error);
            return error;
        });
}
the "csvfilepath" is the path of csv file, it is as follows.
'/Users/.../Documents/Project/.../test/spots.csv';
is there something wrong with giving path like this?
I am calling that function from another module as follows:
var api = require('./neo4j.js');
const csvFile = path.join(__dirname,csvFileName);
api.createGraphDataBase(csvFile);
I am getting this error:
Error: Connection was closed by server
....
I am new to all of this, please help!
The URL that you specify in a LOAD CSV clause must be a legal URL.
As stated in this guide:
Make sure to use the right URLs, especially file URLs. On OSX and Unix use file:///path/to/data.csv; on Windows, please use file:c:/path/to/data.csv
In your case, csvfilepath needs to specify the file:/// protocol (since you seem to be running on OSX) for a local file. Based on your example, the value should be something like this:
'file:///Users/.../Documents/Project/.../test/spots.csv'
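A minimal sketch of building that URL in your calling module, assuming Node 10.12+ (for url.pathToFileURL) and the same csvFileName variable from your code:

var path = require('path');
var url = require('url');
var api = require('./neo4j.js');

// Turn the local path into a proper file:// URL before handing it to LOAD CSV
var csvFile = path.join(__dirname, csvFileName);
var csvfilepath = url.pathToFileURL(csvFile).href; // e.g. 'file:///Users/.../test/spots.csv'

api.createGraphDataBase(csvfilepath);

Note that, depending on your Neo4j configuration, LOAD CSV may only be allowed to read files from the database's import directory, so you may need to place the CSV there (or relax that setting in neo4j.conf).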
I am trying to save a project and its file in GridFS. I want to save the project first, and then save the file using the "_id" of the project as the file's metadata. When I try this I get an "ENOENT, open '/tmp/45e85388793de'" error. Here is my code:
newProject.save(function (err, project) {
    if (err) {
        console.log('save error', err);
    }
    console.log("project added");
    var id = project._id;
    var filepath = req.files.file.path;
    var filename = req.files.file.name;
    var writestream = gfs.createWriteStream({ filename: filename, metadata: id });
    console.log(filepath);
    fs.createReadStream(filepath)
        .on('end', function () {
        })
        .on('error', function (err) {
            console.log("error encountered: " + err); // ENOENT, open error
        })
        .pipe(writestream);
});
Why am I getting this error and how can I resolve it?
ENOENT in this context means "No such file or directory." It means the filepath you are trying to read with createReadStream does not exist.
I think you are getting this error because:
Your file is saved in a temporary location.
By the time you are inside the callback function, the file has been removed from that location, so you get the "No such file" error. The path and other variables still exist as JavaScript values, which is why you can still print them to the console.
Solution:
Before (outside of) the callback function, move your file to some other permanent location, e.g. using:
fs.renameSync(req.files.file.path, "./someKnownPath/filename");
Keep note of that location. In your callback function, use the new location as the path and save the file into GridFS from there. Once the file is saved, you may delete it from that location (/someKnownPath/filename).
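Putting that together, a rough sketch of the whole flow (assuming gfs is an already-configured gridfs-stream instance, the uploads directory exists, and req.files comes from your upload middleware) might look like this:

var fs = require('fs');
var path = require('path');

// Move the upload out of the temp directory before doing anything asynchronous
var savedPath = path.join(__dirname, 'uploads', req.files.file.name);
fs.renameSync(req.files.file.path, savedPath);

newProject.save(function (err, project) {
    if (err) {
        return console.log('save error', err);
    }
    var writestream = gfs.createWriteStream({
        filename: req.files.file.name,
        metadata: project._id
    });
    fs.createReadStream(savedPath)
        .on('error', function (err) {
            console.log('error encountered: ' + err);
        })
        .pipe(writestream)
        .on('close', function () {
            // The file is now in GridFS; the copy on disk can be removed
            fs.unlink(savedPath, function () {});
        });
});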
This error was occurring for me as well, and the reason was that the temp directory was not in place. After I created it manually and tried again, it worked.
Now I have switched to creating the directory on the fly through Node.js itself.
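For reference, creating the directory on the fly only takes a couple of lines (a sketch; '/tmp/uploads' is just an example path):

var fs = require('fs');

// Create the temp directory at startup if it does not already exist
var tmpDir = '/tmp/uploads';
if (!fs.existsSync(tmpDir)) {
    fs.mkdirSync(tmpDir);
}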
I have installed Node.js and CasperJS to do some web scraping and save the info into a DB. But I have a problem: when I try to run the script, I get the following error in the terminal:
Error: Cannot find module './build/Release/mysql_bindings'
I had previously installed mysql-libmysqlclient, which contains mysql_bindings. I tried creating the path from the error message by hand, but it didn't work.
The code is:
var mysql = require('db-mysql');
new mysql.Database({
    hostname: 'localhost',
    user: 'rool',
    password: 'xxxx',
    database: 'xxxBD'
}).connect(function (error) {
    if (error) {
        return console.log('CONNECTION error: ' + error);
    }
    this.query()
        .select('*')
        .from('tablaPruebas')
        .execute(function (error, rows, cols) {
            if (error) {
                console.log('ERROR: ' + error);
                return;
            }
            console.log(rows.length + ' ROWS found');
        });
});
Thanks in advance!
The problem is that you're using the following in your code:
var mysql = require('db-mysql');
However, your question shows that you have installed mysql-libmysqlclient. This means you should be using the following instead:
var mysql = require('mysql-libmysqlclient');
I presume you installed from https://github.com/Sannis/node-mysql-libmysqlclient
The code you're using looks more like db-mysql.
If you install it via npm install db-mysql then you should be good. Have a look at the db-mysql documentation, as there appear to be a few prerequisites (e.g. setting up the MYSQL_CONFIG environment variable).
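If you would rather keep mysql-libmysqlclient, note that its API is quite different from db-mysql. As far as I recall it looks roughly like the sketch below (treat this as an assumption and check the project's README; the credentials are the ones from your snippet):

var mysql = require('mysql-libmysqlclient');

// Synchronous-style API of mysql-libmysqlclient
var conn = mysql.createConnectionSync();
conn.connectSync('localhost', 'rool', 'xxxx', 'xxxBD');

var result = conn.querySync('SELECT * FROM tablaPruebas;');
var rows = result.fetchAllSync();
console.log(rows.length + ' ROWS found');

conn.closeSync();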
I'm trying to get a CSV dump of some data (~500 MB) in MongoDB. I thought streams would be the way to go, to avoid building up an array in memory and then building the CSV all at once.
But it seems the stream that Mongoose creates and the one that csv expects are not the same thing.
var stream = Subscriber.find().stream();

stream.setEncoding = function () {};

csv().from.stream(stream).on('record', function (record, index) {
    console.log(record);
    console.log(index);
});
Without the setEncoding() stub above, I get an error when csv calls setEncoding on the stream. With the stub, it results in:
TypeError: Object #<Object> has no method 'indexOf'
at [object Object].stringify (/home/project/node_modules/csv/lib/stringifier.js:98:35)
So, is this even the right approach? If so, what is the problem with the streams?
As zeMirco said: to get a CSV dump of a collection, I'd use the mongoexport tool that comes with MongoDB. Here's an example of exporting a collection called "users" in a database "mydatabase" to CSV format:
$ mongoexport --csv --host localhost:27017 --db mydatabase --collection users --fields name,email,age -o output.csv
And you'll get something that looks like this:
$ cat output.csv
name,email,age
renold,renold.ronaldson#gmail.com,21
jacob,xXxjacobxXx#hotmail.com,16
Something like this should work. Replace process.stdout with a file stream to write the output to a file.
var csv = require('csv');
var through = require('through');
var Model = require('...');
var _ = require('underscore');

var modelStream = Model.find().stream();

modelStream.pipe(through(write, end)).pipe(csv()).pipe(process.stdout);

function end() { console.log('done'); }

function write(doc) {
    this.queue(_.values(doc.toObject({ getters: true, virtuals: false })));
}
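For example, to send the output to a file instead of stdout, you could swap the final destination (a sketch; output.csv is just an example name):

var fs = require('fs');

modelStream.pipe(through(write, end)).pipe(csv()).pipe(fs.createWriteStream('output.csv'));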
If you want to download the CSV from a web server by accessing a URL and you're using Express, you can do this:
var through = require('through');
var csv = require('csv');
var MyModel = require('./my_model');

app.get('/download_csv/', function (req, res) {
    res.setHeader('Content-disposition', 'attachment; filename=attendances.csv');
    res.contentType('csv');
    res.write('property 1,property 2\n');

    var modelStream = MyModel.find().stream();

    modelStream
        .pipe(through(write, end))
        .pipe(csv.stringify())
        .pipe(res);

    function end() {
        res.end();
        console.log('done outputting file');
    }

    function write(doc) {
        var myObject = doc.toObject({ getters: true, virtuals: false });
        this.queue([
            myObject.property_1,
            myObject.property_2
        ]);
    }
});
NOTE: This is using the latest version of the csv module (v0.4) whereas the previous answers are using an older version of the module.