I'm trying to use the Unirest jar feature to make session-authenticated requests, but I can't seem to get my name logged to the console. The following is the sample code I'm using. Any ideas on how I can get my name logged?
// Log in to Twitter using a shared cookie jar, then scrape the profile name.
var cheerio = require('cheerio');
var unirest = require('unirest');
// NOTE(review): per the unirest docs, the jar only persists cookies for
// future requests when created as unirest.jar(true) — confirm this call.
var CookieJar = unirest.jar();
var twitterLogin = 'https://twitter.com/sessions';
var twitterUsername = 'TWITTERUSERNAME';
var twitterPassword = 'TWITTERPASSWORD!'
// Step 1: GET the login page (with the jar attached) to pick up session cookies.
var Request = unirest.get('https://twitter.com/login').jar(CookieJar);
Request.end(function(response) {
var $ = cheerio.load(response.body);
// CSRF token embedded in the login form; Twitter requires it on the POST below.
var authToken = $('input[name="authenticity_token"]').val();
console.log(authToken)
// Step 2: POST the credentials plus the CSRF token to the sessions endpoint.
unirest.post(twitterLogin)
.jar(CookieJar)
.followRedirect(true)
.header('Referer', 'https://twitter.com/login')
.header('content-type', 'application/x-www-form-urlencoded')
.header('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36')
.field('authenticity_token', authToken)
.field('session[username_or_email]', twitterUsername)
.field('session[password]', twitterPassword)
.end(function(response) {
console.log(response.statusCode)
console.log(response.body)
// Step 3: fetch the home timeline with the (hopefully authenticated) jar
// and read the profile card name out of the HTML.
unirest.get('https://twitter.com')
.jar(CookieJar)
.followRedirect(true)
.end(function(response) {
var $ = cheerio.load(response.body)
console.log($('.DashboardProfileCard-name').text());
})
})
});
From the docs:
Request.jar(Boolean) or Request.jar(Jar)
Sets jar, cookie container, on Request.options. When set to true it stores cookies for future usage.
You should call unirest.jar() with true as argument:
// Passing true makes the jar actually store cookies for reuse on later requests.
var CookieJar = unirest.jar(true);
Related
Hello — for some reasons (for example, debugging with Charles Web Proxy, or sending raw text commands) I don't want to use the libs from Lightstreamer.
I get a connection to the server, but i fail after sending any command.
I used this https://demos.lightstreamer.com/HelloWorld/ and tried to make a Node/TS version of it.
I send from the example the first string which is "wsok" and all what i get from server is (1011) Cannot continue due to an unexpected error.
My problem must be related to sending the message, because without sending anything the connection itself works.
http://192.168.185.24:8888 is my Charles Webdebugging.
// Attempt to open a raw TLCP WebSocket session to Lightstreamer, routed
// through a Charles debugging proxy.
import * as WebSocket from 'ws';
var url = require('url');
var HttpsProxyAgent = require('https-proxy-agent');
class sg{
// Underlying ws socket instance.
ws:WebSocket;
constructor(){
// Browser-like headers copied from a captured Firefox handshake.
// NOTE(review): the ws library generates its own Sec-WebSocket-Key /
// -Version / -Extensions headers; hand-setting them here (including the
// placeholder "a key==") may corrupt the handshake — confirm.
var headers = {};
headers["Host"] ="push.lightstreamer.com";
headers["User-Agent"] ="Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:105.0) Gecko/20100101 Firefox/105.0";
headers["Accept"] ="*/*";
headers["Accept-Language"] ="de,en-US;q=0.7,en;q=0.3";
headers["Accept-Encoding"] ="gzip, deflate, br";
headers["Sec-WebSocket-Version"] ="13";
headers["Origin"] ="https://demos.lightstreamer.com";
headers["Sec-WebSocket-Protocol"] ="TLCP-2.2.0.lightstreamer.com";
headers["Sec-WebSocket-Extensions"] ="permessage-deflate";
headers["Sec-WebSocket-Key"] ="a key==";
headers["Connection"] ="keep-alive, Upgrade";
headers["Sec-Fetch-Dest"] ="websocket";
headers["Sec-Fetch-Mode"] ="websocket";
headers["Sec-Fetch-Site"] ="same-site";
headers["Pragma"] ="no-cache";
headers["Cache-Control"] ="no-cache";
headers["Upgrade"] ="websocket";
// Route the TLS websocket through the local Charles proxy.
var options = url.parse('http://192.168.185.24:8888');
var agent = new HttpsProxyAgent(options);
// NOTE(review): the subprotocol list passed here ("js.lightstreamer.com",
// "websocket") conflicts with the hand-set Sec-WebSocket-Protocol header
// ("TLCP-2.2.0.lightstreamer.com"); the server expects the TLCP protocol.
this.ws = new WebSocket("wss://push.lightstreamer.com/lightstreamer", ["js.lightstreamer.com", "websocket"], {
headers, agent: agent, rejectUnauthorized: false
});
this.ws.onopen = (e) => {
console.log("open")
// First TLCP client message, sent shortly after the socket opens.
setTimeout(() => {
this.ws.send(`wsok`);
}, 1500);
};
this.ws.onmessage = (e) => {
console.log(e)
}
this.ws.onerror = error => {
console.log(`WebSocket error: ${error}`)
}
}
}
let xsg = new sg();
You are not using the object WebSocket correctly.
Try this instead:
// Pass the TLCP protocol name as the WebSocket subprotocol argument instead
// of hand-crafting handshake headers; the library fills in the rest.
ws = new WebSocket("wss://push.lightstreamer.com/lightstreamer", "TLCP-2.2.0.lightstreamer.com")
ws.onopen = (e) => {
console.log("open")
ws.send("wsok")
}
For the rest, you must look at the TLCP Specification document, the section Hands on in particular.
I have a SOAP endpoint I need to gather data from, and I have a working prototype using Python 3. However, I would like this to be a Node.js project to diversify my portfolio. When sending headers to the endpoint I noticed an error ("Media Type not supported"), so I looked at the headers and saw something odd: some of the keys are in quotes and others are not, and I believe this may be the source of the issue. Any help would be appreciated.
Headers when request is made
{
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Safari/537.36',
'Content-Type': 'text/xml; charset=utf-8',
SOAPAction: 'http://api.neatoscan.com/RequestOrdersReport',
Host: 'desktopbridge.neatoscan.com',
'Content-Length': '486',
Expect: '100-continue',
Connection: 'Keep-Alive'
}
Request Code (All vars in header are pulled from a config file to keep authentication separate from main file)
// Credentials, endpoint URLs and header values live in config.js so that
// authentication stays separate from this main file.
import {Client_User_Name, Token,start_date_time,end_date_time, SOAP_Header_CONTENT_TYPE,SOAP_Header_Host_Config,USER_AGENT,SOAP_actions,
SOAP_Content_len,SOAP_urls} from './config.js';
// Bridge so CommonJS modules can be require()d from an ES module.
import { createRequire } from 'module';
const require = createRequire(import.meta.url);
const axios = require('axios');
const soapRequest = require('easy-soap-request');
var fs = require('fs'),parseString = require("xml2js").parseString,xml2js = require("xml2js");
// Local template envelopes that get filled in before posting.
var Request_Book_Order_report_XML = '/home/malachi/Desktop/Projects/Daily_Reports_Goodwill/NeatoScan/xml_files/Request_Book_Order_Report.xml'
var Report_Status_Books_XML = '/home/malachi/Desktop/Projects/Daily_Reports_Goodwill/NeatoScan/xml_files/Request_Report_Status.xml'
const url = SOAP_urls['Books_Url'];
/**
 * POST a SOAP envelope to the configured endpoint and log the response.
 *
 * BUG FIX: axios.post(url, data, config) expects a *config object* as its
 * third argument. The original passed the headers map directly, so axios
 * treated it as config and sent no custom headers at all — which is why the
 * server answered "Media Type not supported". Wrapping it as { headers }
 * applies them correctly.
 *
 * @param {string} xml - Serialized SOAP envelope to send as the body.
 * @param {object} headers - HTTP header name/value map from config.
 */
function req(xml, headers) {
    axios.post(url, xml, { headers: headers })
        .then(response => { console.log(response.data); })
        .catch(err => { console.log(err); });
}
// Keys into the config maps for this particular report request.
var SA = 'Book_Report_Request'
var CL = 'Book_Report_Request_Content_Len'
// Header map mirroring the working Python prototype's request headers.
// (The mixed quoting seen when printing this object is just Node's console
// formatting — quotes are only shown for keys that need them.)
var url_Headers = {
"User-Agent": USER_AGENT,'Content-Type': SOAP_Header_CONTENT_TYPE['Books_Content_Type'],'SOAPAction': SOAP_actions[SA] ,
'Host': SOAP_Header_Host_Config['Books_Host'], 'Content-Length':SOAP_Content_len[CL], 'Expect':'100-continue', 'Connection': 'Keep-Alive'};
//
// Read the template envelope, inject credentials and the date range, then
// rebuild the XML string to send.
fs.readFile(Request_Book_Order_report_XML, "utf-8", function(err, data) {
parseString(data, function(err, result) {
var json = result;
// Drill down to the RequestOrdersReport element inside the SOAP body.
var auth_filter = json['soap:Envelope']['soap:Body'][0]['RequestOrdersReport'][0];
var auth_Client_User = auth_filter['username'] = Client_User_Name;
var auth_token = auth_filter['token'] = Token;
var date_start_filter = auth_filter['startDate'] = start_date_time;
var date_end_filter = auth_filter['endDate'] = end_date_time;
var builder = new xml2js.Builder();
var xml = builder.buildObject(json);
console.log(xml);
console.log(url_Headers);
// req(xml, url_Headers)
});
});
I'm trying to make my own API, and one part of it is that I want it to be able to receive text and files from a Discord webhook. The text part works fine, but when I send a file I can't seem to figure out how to receive it. I looked around for a bit and saw that req.files should return the file, but I just get undefined. Here is my API code:
// Module dependencies.
// BUG FIX: the original chained these with commas and left a trailing comma
// before the next `const`, which is a SyntaxError; each declaration now
// stands on its own.
const express = require('express');
const bodyParser = require('body-parser');
const base32 = require('base32');

// Shared secret: incoming Authorization headers are base32-decoded and
// compared against this value.
const pass = "5'g5?cDzy}\\p5zAwvT[DJ/SeD";
const port = 8080;
/**
 * Reply to a request that failed authentication: HTTP 403 with body "no".
 * @param {object} response - Express response object.
 */
function denied(response) {
  response.status(403);
  response.send('no');
}
const api = express();

// body-parser's JSON parser handles application/json bodies only; it does NOT
// parse multipart/form-data, so file uploads never populate req.files unless
// a multipart middleware (multer, express-fileupload, ...) is added.
api.use(bodyParser.json());

api.listen(port, () => { console.log(`api running on port ${port}`) });

// Receives text/file posts from the Discord-webhook-style client.
api.post('/webhook', (req, res) => {
    if (!req.headers) {
        denied(res);
        // BUG FIX: without this return the handler fell through and could
        // attempt to respond a second time below.
        return;
    }
    var auth = req.headers['authorization'];
    if (auth && base32.decode(auth) === pass) {
        console.log(req.files) // undefined until a multipart parser middleware is installed
        res.send('done');
    } else {
        denied(res);
    }
});
and this is the code I use to send the file and text:
# Client script: sends one JSON post and one file upload to the local API.
import requests

# API key; the server base32-decodes the Authorization header and compares it
# against its own secret.
key = '6mkped9zcd27mybxbhr3ayj1exv58pu498qn6ta4'
header = {
"Content-Type": "application/json",
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11",
'Authorization': key
}
info = {
"avatar_url":"https://cdn.discordapp.com/attachments/901070985434918965/904813984753000469/nedladdning_14.jpg",
"name":"test",
"embeds": [...]
}
# JSON body: handled by the server's bodyParser.json() middleware.
r = requests.post('http://localhost:8080/webhook', headers=header, json=info)
# File upload: requests sends this as multipart/form-data (with its own
# boundary), which bodyParser does not handle — hence req.files is undefined.
r2 = requests.post('http://localhost:8080/webhook', headers={'Authorization': key}, files={'file': open('test.zip','rb')})
print(r.text, r.status_code)
print(r2.text, r2.status_code)
Upload files (e.g. how requests does it) are sent as multipart/form-data payloads.
As per body-parser's npm page,
[...] does not handle multipart bodies, due to their complex and typically large nature. For multipart bodies, you may be interested in the following modules...
You'll need one of those modules, and you'll then need to read its documentation to see if it provides eg. the req.files you've seen somewhere. Otherwise it will indeed be undefined.
i am trying to scrape today's matches on betfair and wanna get:
home team
away team
x odd
draw odd
y odd
The problem is that I keep getting multiple empty lines; I have tried a lot and cannot fix it. The problem is not with trim, but with the execution flow, which produces empty lines.
Can somebody tell me what im doing wrong?
My code:
// Scrape today's football matches from Betfair and append home-team names
// to a text file.
const request = require('request');
const cheerio = require('cheerio');
const fs = require('fs');
var url = 'https://www.betfair.com/sport/football';
// Default to a browser-like User-Agent so the site serves the full page.
var customHeaderRequest = request.defaults({
headers: {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36'}
})
customHeaderRequest.get(url, function(err, resp, body){
$ = cheerio.load(body);
links = $('.section-list .section:nth-child(2) .event-list li');
$(links).each(function(i, link){
var home = $(link).find('.event-information div:nth-child(3) div a div span.team-name:nth-child(1)');
var h = home.text();
// NOTE(review): this condition is always true — any value passes at least
// one of the two `||` tests — so empty/whitespace strings are appended too.
if(h != null || h!=''){
// NOTE(review): appendFile is async; firing many in a loop gives no
// ordering guarantee between the appended lines.
fs.appendFile('message.txt', h+'\n', function (err) {});
}
});
});
You shouldn't be calling fs.appendFile() in a loop like this and you may need a better test for an empty line than just what you were using. fs.appendFile() is an asynchronous operation and you're essentially calling a whole bunch of fs.appendFile() operations one after another without waiting for the prior ones to finish.
You can either use a stream or you have to wait until the previous fs.appendFile() is done before calling the next one.
And, if you want to make sure you have no blank-looking results, you need a better filter for results that have only whitespace in them (I added .trim() to my code below).
Here's one way to do that:
// Fixed version: sequential, awaited appends plus a real empty-line filter.
const request = require('request');
const cheerio = require('cheerio');
const fs = require('fs');
const util = require('util');
// Promisified so each append can be awaited before starting the next.
const appendFile = util.promisify(fs.appendFile);
var url = 'https://www.betfair.com/sport/football';
var customHeaderRequest = request.defaults({
headers: {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36'}
})
customHeaderRequest.get(url, async function(err, resp, body){
try {
let $ = cheerio.load(body);
let links = $('.section-list .section:nth-child(2) .event-list li').toArray();
for (let link of links) {
// trim() so whitespace-only matches are filtered out below.
const home = $(link).find('.event-information div:nth-child(3) div a div span.team-name:nth-child(1)').text().trim();
if (home) {
// Await each append so writes happen strictly in order.
await appendFile('message.txt', home +'\n');
}
}
} catch(e) {
// error writing to the file, handle that error here
}
});
Other notes: You should also always declare all local variables you are using so they are never allowed to be implicit globals.
my requirement is to read and store the simple image resources from facebook.
Image URL:https://badge.facebook.com/badge/1.png
The following code snippet (which I previously used to read an image from Google) is not working as required; it just stores an invalid image on my local disk.
var fs = require('fs'),
request = require('request');
// Download `uri` to `filename`, then invoke `callback` when the write closes.
var download = function(uri, filename, callback){
request.get(uri, function(err, res, body){
console.log('content-type:', res.headers['content-type']);
console.log('content-length:', res.headers['content-length']);
// NOTE(review): this issues a SECOND request for the same URI just to pipe
// it to disk — the body already fetched above is discarded.
var r = request(uri).pipe(fs.createWriteStream(filename));
r.on('close', callback);
r.on('error', function(err) {console.log(err)})
});
};
download('https://badge.facebook.com/badge/1.png', 'google.png', function(){
console.log('Done downloading..');
});
please help me on this....
It's not a problem with Node.js or request, but with Facebook.
If you open google.png in a text editor, you'll see it's actually an HTML file.
So perhaps facebook has some anti-crawler firewall.
just set header, for example:
var fs = require('fs'),
request = require('request');
// Download `uri` to `filename`, sending browser-like headers so the server
// returns the actual image instead of an anti-crawler HTML page.
var download = function(uri, filename, callback){
var r = request({
url: uri,
headers: {
'accept': 'image/*',
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
}
}).pipe(fs.createWriteStream(filename));
r.on('error', function(err) {console.log(err)})
// 'close' fires once the file stream finishes writing.
r.on('close', callback);
};
download('https://badge.facebook.com/badge/1.png', 'google.png', function(){
console.log('Done downloading..');
});