I'm using the ipp npm module to send a print job from a Google Cloud Function. I believe I have set up the printer correctly, but I don't know how to determine the exact URI for sending the print job.
The printer model is a Brother MFC-L3770CDW.
Here is how the settings look in the printer's web configuration view.
And here is the function code:
var ipp = require('ipp');
var PDFDocument = require('pdfkit');

// Build a one-page PDF in memory.
var doc = new PDFDocument();
doc.text("Hello World");

var buffers = [];
doc.on('data', buffers.push.bind(buffers));
doc.on('end', function () {
    var printer = ipp.Printer("https://10.0.0.55:443");
    var file = {
        "operation-attributes-tag": {
            "requesting-user-name": "User",
            "job-name": "Print Job",
            "document-format": "application/pdf"
        },
        data: Buffer.concat(buffers)
    };
    // Send the buffered PDF to the printer as an IPP Print-Job request.
    printer.execute("Print-Job", file, function (err, res) {
        if (err) {
            console.log(err);
        } else {
            console.log("Printed: " + res.statusCode);
        }
    });
    console.log('executing');
});
doc.end();
console.log('finished executing');
I have tried various URIs, such as:
https://10.0.0.55:631
https://10.0.0.55:443
https://10.0.0.55:631/ipp
https://10.0.0.55:631/ipp/printer
Sometimes I get an error like:
"Error: socket hang up
at TLSSocket.onHangUp (_tls_wrap.js:1148:19)
at Object.onceWrapper (events.js:313:30)
at emitNone (events.js:111:20)
at TLSSocket.emit (events.js:208:7)
at endReadableNT (_stream_readable.js:1064:12)
at _combinedTickCallback (internal/process/next_tick.js:139:11)
at process._tickCallback (internal/process/next_tick.js:181:9)
code: 'ECONNRESET',
path: null,
host: '10.0.0.55',
port: '631',
localAddress: undefined }"
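One thing worth checking first: IPP printers usually speak plain HTTP on port 631, not TLS, and an ECONNRESET like the one above is consistent with sending TLS to a non-TLS port. Below is a minimal probing sketch using the standard Get-Printer-Attributes operation to find a URI the printer accepts (the /ipp/print path is a common Brother default, but that's an assumption for this model):

var ipp = require('ipp');

// Candidate endpoints to probe; /ipp/print is a guess based on common
// Brother defaults, not something confirmed for the MFC-L3770CDW.
var candidates = [
    'http://10.0.0.55:631/ipp/print',
    'http://10.0.0.55:631/ipp',
    'http://10.0.0.55:631/'
];

candidates.forEach(function (uri) {
    var printer = ipp.Printer(uri);
    // Get-Printer-Attributes is a standard IPP operation; any URI that
    // answers it successfully should also accept Print-Job.
    printer.execute('Get-Printer-Attributes', null, function (err, res) {
        if (err) {
            console.log(uri + ' failed: ' + err.message);
        } else {
            console.log(uri + ' OK: ' + res.statusCode);
        }
    });
});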
Related
I need help downloading more than one file from a remote FTP server using Node.js; the code is the following.
const FTPClient = require('ftp');
let ftp_client = new FTPClient();
const fs = require("fs");

let ftpConfig = {
    host: "remoteHost",
    port: 21,
    user: 'username',
    password: 'password',
}

//create a connection to ftp server
ftp_client.connect(ftpConfig);

ftp_client.on('ready', function() {
    ftp_client.get('/file1.csv', function(err, stream) {
        if (err) throw err;
        //stream.once('close', function() { ftp_client.end(); });
        stream.pipe(fs.createWriteStream('file1.csv'));
    });
    ftp_client.get('/dir/file2.dat', function(err, stream) {
        if (err) throw err;
        //stream.once('close', function() { ftp_client.end(); });
        stream.pipe(fs.createWriteStream('file2.dat'));
    });
    ftp_client.get('/dir/file3.dat', function(err, stream) {
        if (err) throw err;
        stream.once('close', function() { ftp_client.end(); });
        stream.pipe(fs.createWriteStream('file3.dat'));
    });
});
And here is the error:
Error: Unable to make data connection
at Socket.<anonymous> (C:\Proyectos\descargar_file\node_modules\ftp\lib\connection.js:935:10)
at Object.onceWrapper (events.js:421:28)
at Socket.emit (events.js:327:22)
at Object.cb (C:\Proyectos\descargar_file\node_modules\ftp\lib\connection.js:575:18)
at Parser.<anonymous> (C:\Proyectos\descargar_file\node_modules\ftp\lib\connection.js:117:20)
at Parser.emit (events.js:315:20)
at Parser._write (C:\Proyectos\descargar_file\node_modules\ftp\lib\parser.js:59:10)
at doWrite (_stream_writable.js:403:12)
at writeOrBuffer (_stream_writable.js:387:5)
at Parser.Writable.write (_stream_writable.js:318:11)
It already downloads the 3 files, but it also shows that error, so how can I correct this and do it in a safer way?
I would also like to add a console log while each file is downloading.
Thanks!
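One hedged way to address both points: the "Unable to make data connection" error typically comes from firing several get calls at once over a single control connection, so fetching the files one after another avoids it and gives a natural place for per-file logging. A sketch along those lines (file names are taken from the question; the download helper is my own):

const FTPClient = require('ftp');
const fs = require('fs');

const ftp_client = new FTPClient();

// Wrap a single download in a promise so files can be fetched one at a time.
function download(remotePath, localPath) {
    return new Promise((resolve, reject) => {
        ftp_client.get(remotePath, (err, stream) => {
            if (err) return reject(err);
            console.log('downloading ' + remotePath + ' ...');
            stream.once('close', () => {
                console.log('finished ' + localPath);
                resolve();
            });
            stream.pipe(fs.createWriteStream(localPath));
        });
    });
}

ftp_client.on('ready', () => {
    download('/file1.csv', 'file1.csv')
        .then(() => download('/dir/file2.dat', 'file2.dat'))
        .then(() => download('/dir/file3.dat', 'file3.dat'))
        .catch((err) => console.error(err))
        .then(() => ftp_client.end()); // close the connection either way
});

ftp_client.connect({ host: 'remoteHost', port: 21, user: 'username', password: 'password' });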
I send a file from one server (e.g. 127.0.0.1:3000) to another server (e.g. 127.0.0.1:3001), and I found that when the file is too large, the transfer fails.
For example:
//127.0.0.1:3000
router.post('/getStream', function(req, res, next) {
    function getResultStream() {
        return new Promise(function(resolve, reject) {
            oboe('http://127.0.0.1:3001/getEnvelopeResult_2')
                .done(function(things) {
                    resolve(things);
                })
                .node(function(e) {
                    console.log(e);
                })
                .fail(function(e) {
                    console.log(e);
                    reject(e);
                });
        });
    }
    getResultStream().then(function(body) {
        res.json({data: body});
    }).catch(function(err) {
        res.json({message: err});
    });
});
For example, the other server, where I read a JSON file (file.json) and convert it into a stream:
//127.0.0.1:3001
app.get('/getStream', function(req, res, next) {
    let path = "json/file.json";
    oboe(fs.createReadStream(path))
        .done(function(things) {
            send(things, res);
        })
        .fail(function(e) {
            res.json({message: e});
        });

    function send(things, res) {
        things.errorArray = JSON.parse(things.errorArray);
        let json = JSON.stringify(things);
        highland([
            json
        ])
        .invoke('split', [''])
        .sequence()
        .pipe(res)
    }
});
The file.json is approximately 3000 KB in size.
Its content looks like this:
{"array":"[{\"No\":1,\"tyep\":\"none\",\"checkflag\":true,\"active\":true,\"time\":\"2017-11-08T07:04:49.024Z\"}...30000 pieces ]
"file":"axzf",
"last":30000,
"start":1
}
This array accounts for the vast majority of the payload, around 30,000 elements.
I tried 10,000, 20,000 and 30,000 elements, and all failed; only arrays of fewer than about 1,100 elements are transmitted successfully.
The relevant part of the error output:
{ statusCode: undefined,
body: undefined,
jsonBody: undefined,
thrown:
Error: Max buffer length exceeded: textNode
Ln: 1
Col: 131072
Chr: undefined
at Error (native)
at emitError
at emitOne (events.js:96:13)
at IncomingMessage.emit (events.js:188:7)
at readableAddChunk (_stream_readable.js:176:18)
at IncomingMessage.Readable.push (_stream_readable.js:134:10)
at HTTPParser.parserOnBody (_http_common.js:123:22)
at Socket.socketOnData (_http_client.js:363:20)
at emitOne (events.js:96:13)
at Socket.emit (events.js:188:7)
at readableAddChunk (_stream_readable.js:176:18)
at Socket.Readable.push (_stream_readable.js:134:10)
at TCP.onread (net.js:547:20) }
maxActual = Math.max(maxActual, textNode.length);
^
TypeError: Cannot read property 'length' of undefined
at checkBufferLength
How can I improve this, or do I have to take another approach entirely if I cannot reduce the amount of data?
Highland: http://highlandjs.org/
Oboe: http://oboejs.com/
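The trace points at the root cause: "Max buffer length exceeded: textNode" at column 131072 means a single JSON string token was longer than oboe's internal sax buffer. Since the sample stores the whole 30,000-element array as one escaped string, the receiver has to buffer it as a single text node. Here is a hedged sketch of the alternative, assuming the file can be rewritten so "array" is a real JSON array and the routes are named as below (both are assumptions):

//127.0.0.1:3001 — stream the file as-is instead of buffering and re-stringifying
app.get('/getStream', function(req, res, next) {
    res.type('application/json');
    fs.createReadStream('json/file.json').pipe(res);
});

//127.0.0.1:3000 — consume the array element by element, so no single
//node ever has to fit inside oboe's text buffer
router.post('/getStream', function(req, res, next) {
    var items = [];
    oboe('http://127.0.0.1:3001/getStream')
        .node('array.*', function(item) {
            items.push(item);
            return oboe.drop; // let oboe discard the element once we've copied it
        })
        .done(function(rest) {
            res.json({data: rest, count: items.length});
        })
        .fail(function(e) {
            res.json({message: e});
        });
});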
Hey, I see this post is a bit old, but I was having similar issues and was able to solve this using only Node.
My directory is set up accordingly for this test:
.
├── s1
│ └── lorem.txt
├── s2
├── server.js
└── transmitter.js
Given that, I was able to solve this using the following code:
transmitter.js
const http = require('http');
const fs = require('fs');

const requestOpts = {
    hostname: 'localhost',
    port: 1337,
    method: 'POST'
};

const readStream = fs.createReadStream('./s1/lorem.txt');
const request = http.request(requestOpts);
readStream.pipe(request);
server.js
const http = require('http');
const fs = require('fs');

http.createServer((request, response) => {
    const writeStream = fs.createWriteStream('./s2/lorem.txt');
    request.on('data', (chunk) => {
        writeStream.write(chunk);
    });
    request.on('end', () => {
        writeStream.close();
        response.end();
    });
}).listen(1337);
You can test this by running node server.js in one terminal window and, in another, running node transmitter.js.
The result should be lorem.txt appearing in the s2 directory exactly as it appears in s1.
Obviously there's a lot more to be done to make this production-worthy, but hopefully it helps the next person who wanders through here as I did.
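As a side note on the design: the manual 'data'/'end' handlers in server.js do the same job as a pipe, which also handles backpressure for you; a minimal equivalent (same paths and port as above):

const http = require('http');
const fs = require('fs');

http.createServer((request, response) => {
    // pipe ends the write stream when the request ends and handles backpressure
    request.pipe(fs.createWriteStream('./s2/lorem.txt'))
        .on('finish', () => response.end());
}).listen(1337);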
I have indexed data from Firebase via Elasticsearch, and it is working properly.
Now I am searching the data via the Firebase cloud function below:
exports.searchIt = functions.database.ref('/search/{key}')
    .onUpdate(event => {
        let key = event.params.key;
        let elasticSearchConfig = functions.config().elasticsearch;
        const esClient = new elastic.Client({
            host: 'http://35.198.221.164',
            log: 'error'
        });
        console.log('client Created');

        let searchBody = {
            size: 20,
            from: 0,
            query: {
                match_all: {}
            }
        };

        esClient.search({index: 'offers', body: searchBody})
            .then(results => {
                console.log('Successfully Entered');
                results.hits.hits.forEach(
                    (hit, index) => console.log(hit)
                )
            })
            .catch(console.error);
    });
But this gives the error below:
textPayload: "{ Error: Not Found
    at respond (/user_code/node_modules/elasticsearch/src/lib/transport.js:307:15)
    at checkRespForFailure (/user_code/node_modules/elasticsearch/src/lib/transport.js:266:7)
    at HttpConnector.<anonymous> (/user_code/node_modules/elasticsearch/src/lib/connectors/http.js:159:7)
    at IncomingMessage.bound (/user_code/node_modules/elasticsearch/node_modules/lodash/dist/lodash.js:729:21)
    at emitNone (events.js:91:20)
    at IncomingMessage.emit (events.js:185:7)
    at endReadableNT (_stream_readable.js:974:12)
    at _combinedTickCallback (internal/process/next_tick.js:80:11)
    at process._tickDomainCallback (internal/process/next_tick.js:128:9)
And on changing the host to any other value, it still creates the client.
Why is this happening?
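A 404 "Not Found" at the transport layer usually means the request reached something that wasn't the expected index: either the offers index doesn't exist, or the URL isn't actually pointing at Elasticsearch at all (note the host string has no port; Elasticsearch normally listens on 9200). A minimal sketch to narrow it down, assuming port 9200 (host and index name are from the question; the port is a guess):

const elastic = require('elasticsearch');

const esClient = new elastic.Client({
    host: 'http://35.198.221.164:9200', // assumed port; adjust to wherever ES listens
    log: 'error'
});

// ping() confirms we are talking to Elasticsearch at all;
// indices.exists() confirms the 'offers' index is really there.
esClient.ping({requestTimeout: 3000})
    .then(() => esClient.indices.exists({index: 'offers'}))
    .then(exists => console.log('offers index exists:', exists))
    .catch(console.error);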
I am using the musicmetadata module of Node.js to read duration and size, but it is not working for all mp3 files. I am getting the error below:
Error: Could not find metadata header
at /var/www/html/Live/node_modules/musicmetadata/lib/id3v1.js:13:19
at Stream.done (/var/www/html/Live/node_modules/musicmetadata/lib/common.js:31:5)
at emitNone (events.js:91:20)
at Stream.emit (events.js:185:7)
at drain
(/var/www/html/Live/node_modules/musicmetadata/node_modules/through/index.js:34:23)
at Stream.stream.queue.stream.push (/var/www/html/Live/node_modules/musicmetadata/node_modules/through/index.js:45:5)
at Stream.end (/var/www/html/Live/node_modules/musicmetadata/node_modules/through/index.js:15:35)
at _end (/var/www/html/Live/node_modules/musicmetadata/node_modules/through/index.js:65:9)
at Stream.stream.end (/var/www/html/Live/node_modules/musicmetadata/node_modules/through/index.js:74:5)
at ReadStream.onend (_stream_readable.js:512:10)
at ReadStream.g (events.js:286:16)
at emitNone (events.js:91:20)
at ReadStream.emit (events.js:185:7)
at endReadableNT (_stream_readable.js:975:12)
at _combinedTickCallback (internal/process/next_tick.js:74:11)
at process._tickDomainCallback (internal/process/next_tick.js:122:9)
+ Error: NODE_ENV is not defined! Using default development environment
Code example (it contains the code required to understand the issue):
var fs = require('fs');
var musicData = require('musicmetadata');

//working for "01 .Brown Rang -Mp3 Songs -[320kbps]-[Exclusive]~~~[CooL GuY] {{a2zRG}}.mp3"
var parser = musicData(fs.createReadStream('03 RANG DE CHUNRIYA.mp3'), { duration: true }, function (err, metadata) {
    var talkItem = {};
    if (err) {
        console.log('err:', err);
    } else {
        if (metadata.title === undefined || metadata.title === "") {
            talkItem.title = '';
        } else {
            talkItem.title = metadata.title;
        }
        talkItem.duration = metadata.duration;
        console.log('talkItem:', talkItem);
    }
});
You can see the mp3 file here.
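"Could not find metadata header" is musicmetadata reporting that it found no ID3 header in the file, so one option is to treat that as "no tags" rather than as fatal and still report the size from the filesystem. A hedged sketch of that fallback (readTrackInfo is my own helper; fs.stat is standard Node):

var fs = require('fs');
var musicData = require('musicmetadata');

function readTrackInfo(file, done) {
    var talkItem = { title: '', duration: undefined, size: undefined };

    // File size doesn't depend on the tags, so read it up front.
    fs.stat(file, function (statErr, stats) {
        if (!statErr) talkItem.size = stats.size;

        musicData(fs.createReadStream(file), { duration: true }, function (err, metadata) {
            if (err) {
                // "Could not find metadata header" => the mp3 simply has no ID3 tag;
                // return what we have instead of treating it as fatal.
                console.log('no metadata for', file, '-', err.message);
                return done(null, talkItem);
            }
            talkItem.title = metadata.title || '';
            talkItem.duration = metadata.duration;
            done(null, talkItem);
        });
    });
}

readTrackInfo('03 RANG DE CHUNRIYA.mp3', function (err, item) {
    console.log('talkItem:', item);
});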
How can I ignore pages with redirect cycles?
I use this code to fetch pages:
var libxml = require("libxmljs"),
    http = require('follow-redirects').http,
    url = require("url");

var request = http.request({ "host": host, "path": URL, "port": 80 }, function(response) {
    var str = '';
    response.on('data', function(chunk) {
        str += chunk;
    });
    response.on('end', function() {
        callback(str, response.statusCode);
    }).on('error', function(err) {
        console.log(err);
    });
}).end();
It never reaches the 'error' handler, and I get an exception:
events.js:85
throw er; // Unhandled 'error' event
^
Error: Max redirects exceeded.
at ClientRequest.cb (/var/parsing/node_modules/follow-redirects/create.js:55:19)
at ClientRequest.g (events.js:199:16)
at ClientRequest.emit (events.js:107:17)
at HTTPParser.parserOnIncomingClient [as onIncoming] (_http_client.js:426:21)
at HTTPParser.parserOnHeadersComplete (_http_common.js:111:23)
at Socket.socketOnData (_http_client.js:317:20)
at Socket.emit (events.js:107:17)
at readableAddChunk (_stream_readable.js:163:16)
at Socket.Readable.push (_stream_readable.js:126:10)
at TCP.onread (net.js:538:20)
The error is being thrown by the request object, not the response object, so you need to add an (additional) error listener to the request:
var request = http.request(...).on('error', function(err) {
...
}).end();
Looking at the docs for the package you are using (https://www.npmjs.com/package/follow-redirects), it looks like it just has a maxRedirects option. Directly from the linked page:
require('follow-redirects').maxRedirects = 10; // Has global affect (be careful!)
https.request({
    host: 'bitly.com',
    path: '/UHfDGO',
    maxRedirects: 3 // per request setting
}, function (res) {/* ... */});
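Putting the two answers together, a hedged sketch that skips redirect-cycling pages instead of crashing (host, URL and callback are placeholders from the question):

var http = require('follow-redirects').http;

var request = http.request({
    host: host,
    path: URL,
    port: 80,
    maxRedirects: 10 // give up after 10 hops instead of following a cycle forever
}, function(response) {
    var str = '';
    response.on('data', function(chunk) { str += chunk; });
    response.on('end', function() { callback(str, response.statusCode); });
});

// "Max redirects exceeded." is emitted on the request, not the response,
// so a cycling page can simply be logged and skipped here.
request.on('error', function(err) {
    console.log('skipping ' + URL + ': ' + err.message);
});

request.end();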