So basically I have created my own "Imgur" where I enter an image URL, e.g.:
https://images.hdsydsvenskan.se/980x588/Q2GO5t2lmKtW6WwGuEVQNTPmt4o.jpg
The issue is that it worked before, but now I am getting errors (and I haven't changed the code at all; my guess is that it has something to do with certain URLs?) that say:
{ Error: read ECONNRESET
at TLSWrap.onread (net.js:622:25) errno: 'ECONNRESET', code: 'ECONNRESET', syscall: 'read'}
Honestly, I don't see the issue, because it does work on other pictures, but if I pick one like the URL above, it keeps giving me that error.
const fs = require('fs')
const path = require('path')
const request = require('request')
const express = require('express')
const app = express()
const PORT = process.env.PORT || 8888
// Path to images directory
const images = path.join(__dirname, 'images')
// JSON parsing middleware
app.use(express.json())
app.post('/newimage', (req, res) => {
  const imageUrl = req.body.image_url
  const imageName = req.body.image_name
  request({
    url: imageUrl,
    headers: {
      'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36'
    }
  })
    .on('error', (err) => {
      console.log(err)
      console.log(res.status())
      res.status(500)
      res.send('not ok')
    })
    .on('response', (response) => {
      console.log(`Response for ${imageUrl}`)
      console.log(response.statusCode + '\n')
      res.send('ok')
    })
    .pipe(fs.createWriteStream(path.join(images, imageName + '.png')))
})
app.get('/images/:imagename', (req, res) => {
  const imageName = req.params.imagename
  const filePath = path.join(images, imageName + '.png')
  console.log(imageName)
  console.log(filePath)
  if (fs.existsSync(filePath)) {
    fs.createReadStream(filePath)
      .pipe(res)
  } else {
    res.send('No image found')
  }
})
app.listen(PORT, () => {
  console.log(`Server listening on port ${PORT}`)
  console.log(`http://xx.xxx.xx.xx:${PORT}`)
})
If anyone has any clue how I can solve this, I would appreciate it!
There's something funky going on with the Adidas server that you mention in your comment; it requires a particular list of headers to be set before it works (requests time out for me otherwise):
request({
  url: imageUrl,
  headers: {
    Accept: '*/*',
    'Accept-Encoding': 'gzip, deflate',
    Connection: 'keep-alive',
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36'
  }
})
It looks like the Varnish cache that they're using might be misconfigured (or it's some strange attempt to try and block scrapers).
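If you want to check this yourself, here is a minimal sketch (my own addition, still using the deprecated request package from the question; the URL is just the example from the question) that fetches one problematic image with that header set and logs either the status code or the socket error:

const request = require('request')

// Example URL taken from the question; swap in whichever image is failing for you.
const testUrl = 'https://images.hdsydsvenskan.se/980x588/Q2GO5t2lmKtW6WwGuEVQNTPmt4o.jpg'

request({
  url: testUrl,
  headers: {
    Accept: '*/*',
    'Accept-Encoding': 'gzip, deflate',
    Connection: 'keep-alive',
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36'
  }
})
  .on('response', (response) => console.log('Got response:', response.statusCode))
  .on('error', (err) => console.log('Socket error:', err.code))
  .on('data', () => {}) // drain the body so the connection can finish

If the request succeeds with these headers but fails without them, that points at the server-side filtering described above rather than at your own code.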
Related
I need to print, from the server, the request headers sent by the client.
var qs = require('querystring');
var http = require('http');
const { request } = require('https');

// create server with anonymous callback function
http.createServer(function (request, response) {
  response.writeHead(200, { 'Content-Type': 'text/plain' });

  var options = {
    method: 'HEAD',
    host: '127.0.0.1',
    port: '40012'
  };

  // here is where the server should display the client's request headers
  var clientHeader = request.headers;
  console.log(request.headers);
}).listen(40012, '127.0.0.1');

console.log('Server running http://127.0.0.1:40012');
console.log('Process ID:', process.pid);
The server is printing only: { host: '127.0.0.1:40012', connection: 'close' }
What am I doing wrong?
I've tried executing your code locally, and I got the following output when hitting the URL http://127.0.0.1:40012:
Server running http://127.0.0.1:40012
Process ID: 52240
{
host: 'localhost:40012',
connection: 'keep-alive',
'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="99", "Google Chrome";v="99"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"Linux"',
'upgrade-insecure-requests': '1',
'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36',
accept: 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'sec-fetch-site': 'none',
'sec-fetch-mode': 'navigate',
'sec-fetch-user': '?1',
'sec-fetch-dest': 'document',
'accept-encoding': 'gzip, deflate, br',
'accept-language': 'en-US,en;q=0.9,hi;q=0.8,gu;q=0.7,ar;q=0.6,en-GB;q=0.5',
cookie: 'token=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyIjp7Il9pZCI6IjYxNzEzYzVmOWYzYjAzZWIyMmQ0NmYzNSIsImVtYWlsIjoicmFqZW5kcmEyMDc5MUBnbWFpbC5jb20ifSwiaWF0IjoxNjM0ODQzMzI4fQ.CgQWS5dImbFCNnvBpbtAuWyjpScRSTYxvZvhDmHQ0Lw'
}
However, the code is incomplete, so the request from the browser will hang; we need to send a response back to the client. The complete code can be as follows:
var http = require('http');

// create server with anonymous callback function
http.createServer(function (request, response) {
  response.writeHead(200, { 'Content-Type': 'text/plain' });

  // here is where the server should display the client's request headers
  console.log(request.headers);

  // This is the response body.
  response.write('This is response');

  // This will mark the end of the response.
  response.end();
}).listen(40012, '127.0.0.1');

console.log('Server running http://127.0.0.1:40012');
console.log('Process ID:', process.pid);
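To see why only host and connection showed up originally, here is a small client sketch (my own addition, not part of the original answer) to run against the completed server above: request.headers on the server contains exactly the headers the client chose to send, so a bare Node client produces a very short list, while a browser sends the long list shown earlier. The custom header values below are just illustrations.

var http = require('http');

var req = http.request({
  host: '127.0.0.1',
  port: 40012,
  method: 'GET',
  path: '/',
  headers: {
    'user-agent': 'my-test-client/1.0', // hypothetical values, only to show
    'x-demo-header': 'hello-server'     // that they appear in request.headers
  }
}, function (response) {
  response.resume(); // drain the body
  response.on('end', function () {
    console.log('Server answered with status', response.statusCode);
  });
});

req.end();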
I am using NodeJS and axios.
Within my server, I am simply making a GET request to this API: https://cdn-api.co-vin.in/api/v2/admin/location/states.
I am getting the data properly on localhost, in Postman, and with a plain browser request.
But if I deploy the app to Heroku and then make the request from there, it fails with a 403 error, while the same request still works on localhost.
I tried setting the header "User-Agent". It still doesn't work.
The Procfile and process.env.PORT are correctly set.
This is the actual GET request to be made to my server:
https://vac-finder.herokuapp.com/api/sessions/getStates
Here's my server.js
const express = require('express');
const cors = require('cors');
const app = express();
app.use(cors({origin: '*'}))
app.use(express.json());
app.use('/api/sessions', require('./routes/api/sessions'));
app.listen(process.env.PORT || 2000, () => {
  console.log('Server started on 2000');
});
Here's my actual code (./routes/api/sessions):
const express = require('express');
const axios = require('axios');
const router = express.Router();

router.get('/getStates', (req, res) => {
  const config = {
    method: 'get',
    url: `https://cdn-api.co-vin.in/api/v2/admin/location/states`,
    headers: {
      "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.76 Safari/537.36"
    }
  };

  axios(config)
    .then((response) => {
      res.json({data: response.data.states});
    })
    .catch((error) => {
      res.json({
        errorMessage: 'There has been an error in getting data. Please try after some time.',
        error: error
      });
    });
})

module.exports = router;
Here's the error response (from HEROKU):
"error": {
"message": "Request failed with status code 403",
"name": "Error",
"stack": "Error: Request failed with status code 403\n at createError (/app/node_modules/axios/lib/core/createError.js:16:15)\n at settle (/app/node_modules/axios/lib/core/settle.js:17:12)\n at IncomingMessage.handleStreamEnd (/app/node_modules/axios/lib/adapters/http.js:260:11)\n at IncomingMessage.emit (events.js:412:35)\n at endReadableNT (internal/streams/readable.js:1317:12)\n at processTicksAndRejections (internal/process/task_queues.js:82:21)",
"config": {
"url": "https://cdn-api.co-vin.in/api/v2/admin/location/states",
"method": "get",
"headers": {
"Accept": "application/json, text/plain, */*",
"User-Agent": "axios/0.21.1"
},
"transformRequest": [
null
],
"transformResponse": [
null
],
"timeout": 0,
"xsrfCookieName": "XSRF-TOKEN",
"xsrfHeaderName": "X-XSRF-TOKEN",
"maxContentLength": -1,
"maxBodyLength": -1
}
}
Why is this happening?
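One way to dig into a 403 that only happens on Heroku is to log exactly what the upstream server returned from the dyno. Below is a hedged sketch (my own rewrite of the getStates route, not the asker's code or an accepted answer) that surfaces the upstream status, headers and body via axios's error.response, and forwards an error status instead of an implicit 200:

const express = require('express');
const axios = require('axios');
const router = express.Router();

router.get('/getStates', async (req, res) => {
  try {
    const response = await axios.get('https://cdn-api.co-vin.in/api/v2/admin/location/states', {
      headers: {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.76 Safari/537.36'
      }
    });
    res.json({ data: response.data.states });
  } catch (error) {
    if (error.response) {
      // The upstream server answered (e.g. with 403): log exactly what it said.
      console.log('upstream status:', error.response.status);
      console.log('upstream headers:', error.response.headers);
      console.log('upstream body:', error.response.data);
    } else {
      // No response at all (network error, timeout, ...).
      console.log('request error:', error.message);
    }
    res.status(502).json({ errorMessage: 'Upstream request failed.' });
  }
});

module.exports = router;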
This is my first question on Stack Overflow. Suggestions on framing the question better are very welcome.
I have this React app that is connected to a Node/Express server. All the Node/Express server has is app.get(api) routes.
I connected it to 4 different APIs; it gets the data and simply returns it to the front-end/client.
When I was building it and running it locally, it worked with no problem: I would get the JSON objects, and my front-end cleans the data and displays it. As soon as I deployed my server to Heroku, it stopped working. I'm using axios to make the requests, and I changed the URL from http://localhost/3000 to the one that Heroku gave me. Now, instead of getting my JSON data, I get this error:
If it helps, when I open the URL directly in my browser it works; I do see the data like I'm supposed to. Why is that? Thank you so much to whoever helps me!
As requested, this is part of my Express server; the other two routes are basically the same thing, but of course pointing to different APIs.
const express = require("express");
const axios = require("axios");
const app = express();
const cors = require("cors");
require("dotenv").config();

// This enables our app (server 3000) to make requests
// can also do "*" to allow anyone to make them but not recommended
// for security reasons
app.use(
  cors({
    origin: "*",
  }),
);

app.get("/walmart/:item", (req, res) => {
  let url = `https://www.walmart.com/search/api/preso?prg=desktop&page=1&query=${req.params.item}`;
  var config = {
    method: "get",
    url: url,
    headers: {
      "User-Agent":
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:92.0) Gecko/20100101 Firefox/92.0",
      Accept: "application/json",
      "Accept-Language": "en-US,en;q=0.5",
      "content-type": "application/json",
      wm_client_ip: "",
      Connection: "keep-alive",
      Cookie: process.env.WALMART_COOKIE,
      "Sec-Fetch-Dest": "empty",
      "Sec-Fetch-Mode": "cors",
      "Sec-Fetch-Site": "same-origin",
    },
  };

  axios(config)
    .then((response) => {
      res.send(response.data.items);
    })
    .catch((err) => {
      res.send(err);
    });
});

app.get("/wholefoods/:item", (req, res) => {
  let url = `https://www.wholefoodsmarket.com/api/search?text=${req.params.item}&store=10152&limit=60&offset=0`;
  axios
    .get(url)
    .then((response) => {
      res.send(response.data.results);
    })
    .catch((err) => {
      res.send(err);
    });
});
And this is the data I should be receiving when I call those routes from my React app (and the data I do get when I use my browser at the moment).
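Since the thing that changed between the local and the deployed setup is the URL the client calls, here is a hedged sketch (my own addition; the environment variable name and URLs are placeholders, not from the question) of keeping that base URL configurable on the React side instead of hard-coding it into every axios call:

import axios from 'axios';

// REACT_APP_API_URL is a placeholder env var; with create-react-app it would be
// set at build time, e.g. to the Heroku URL for the deployed server.
const api = axios.create({
  baseURL: process.env.REACT_APP_API_URL || 'http://localhost:3000' // placeholder for the local server address
});

// Mirrors the /walmart/:item route from the server code above.
export async function searchWalmart(item) {
  const response = await api.get(`/walmart/${encodeURIComponent(item)}`);
  return response.data;
}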
I am trying to forward my request from my NodeJS Proxy server to another server. The request I am trying to forward contains FormData()
I created FormData as per MDN docs
const payload = new FormData();
payload.append('addresses', file); // <---- UPLOADED FILE
payload.append('reason', 'reason');
payload.append('type', 'type');
This is how I am essentially sending the request to my NodeJS server
fetch("localhost:3000/v1/addresses", {
method: 'PUT',
body: payload
});
NodeJS Server at localhost:3000
const multer = require('multer');
const upload = multer();

app.put('/v1/addresses', upload.single('addresses'), (req, res) => {
  let options = {
    host: 'localhost',
    method: 'PUT',
    port: 8000,
    path: req.originalUrl,
    headers: req.headers,
    formData: {
      reason: req.body.reason,
      type: req.body.type,
    }
  };

  console.log("reason", req.body.reason) // "reason"
  console.log("type", req.body.type) // "type"
  console.log("addresses", req.file) // FILE OBJECT

  const request = http.request(options, response => {
    res.writeHead(response.statusCode, response.headers);
    response.pipe(res);
  });

  request.end();
})
In the code above, I'm not sure how to send the actual file over to the other service. Also, I am NOT seeing the reason and type that I've passed over to the service.
What's also strange is that I see this in the incoming request on my NON-PROXY server:
PUT /v1/addresses HTTP/1.1
Host: localhost:3000
Connection: keep-alive
Content-Length: 932
Sec-Ch-Ua: "Google Chrome";v="89", "Chromium";v="89", ";Not A Brand";v="99"
Sec-Ch-Ua-Mobile: ?0
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 11_2_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36
Content-Type: multipart/form-data; boundary=----WebKitFormBoundaryt2p0AWOqJCnz95hg
Accept: */*
Origin: http://localhost:3000
Sec-Fetch-Site: same-origin
Sec-Fetch-Mode: cors
Sec-Fetch-Dest: empty
Referer: http://localhost:3000/blocklist
Accept-Encoding: gzip, deflate, br
Accept-Language: en-US,en;q=0.9
[object Object]
So after lots of searching and experimenting, this post actually provided me with the answer
Here is the code from the post.
const express = require("express");
const app = express();
const bodyParser = require('body-parser');
var multer = require('multer')();
const FormData = require('form-data');
const axios = require('axios');
const fs = require('fs');

app.use(bodyParser.json());

app.post('/fileUpload', multer.single('fileFieldName'), (req, res) => {
  const fileRecievedFromClient = req.file; // File Object sent in 'fileFieldName' field in multipart/form-data
  console.log(req.file)

  let form = new FormData();
  form.append('fileFieldName', fileRecievedFromClient.buffer, fileRecievedFromClient.originalname);

  axios.post('http://server2url/fileUploadToServer2', form, {
    headers: {
      'Content-Type': `multipart/form-data; boundary=${form._boundary}`
    }
  }).then((responseFromServer2) => {
    res.send("SUCCESS")
  }).catch((err) => {
    res.send("ERROR")
  })
})

const server = app.listen(3000, function () {
  console.log('Server listening on port 3000');
});
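A small variant of the snippet above (my own sketch, under the same assumptions about the second server's URL): the form-data package exposes getHeaders(), which already contains the correct multipart Content-Type and boundary, so there is no need to reach into the private form._boundary property.

const express = require('express');
const multer = require('multer')(); // no storage options: the uploaded file is kept in memory as a Buffer
const FormData = require('form-data');
const axios = require('axios');

const app = express();

app.post('/fileUpload', multer.single('fileFieldName'), (req, res) => {
  const form = new FormData();
  form.append('fileFieldName', req.file.buffer, req.file.originalname);

  axios.post('http://server2url/fileUploadToServer2', form, {
    headers: form.getHeaders() // includes 'content-type: multipart/form-data; boundary=...'
  })
    .then(() => res.send('SUCCESS'))
    .catch(() => res.send('ERROR'));
});

app.listen(3000, () => console.log('Server listening on port 3000'));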
I am using a package called 'concurrently' to run my client and server at the same time on localhost. The client runs on port 3000 while the server runs on port 5000. I have set the proxy in the server's package.json in the following manner:
"proxy": "https://localhost:5000"
But when I make a request from the client in the following manner:
const config = {
  headers: {
    'Content-Type': 'application/json'
  }
};

const res = await axios.post('/api/users', body, config);
It says: POST http://localhost:3000/api/users 404 (Not Found). I don't understand why, but despite setting the proxy, axios keeps making the request to port 3000 instead of port 5000. What is the issue?
I just want to say that adding cors is not a solution here. You need to include the proxy "proxy": "https://localhost:5000" in the package.json (you may need to restart the dev server for it to take effect), but if you choose to use cors instead, you are allowing anyone to access your API. That means your database is wide open for people to poke around in: passwords, emails, users, etc. It's all compromised.
I got it working correctly. What I did was:
1) change axios.post('/api/users', body, config); to axios.post('http://localhost:5000/api/users', body, config);
2) Then, in the 'users' Express route on the server side, add CORS functionality by installing the 'cors' npm package and adding the following lines:
const router = express.Router();
...
// add these lines
var cors = require('cors');
router.use(cors());
...
router.post('/', async (req, res) => {
...
});
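If you do go the CORS route anyway, a somewhat safer sketch (my own addition; it assumes the React dev server is the only client and still runs on port 3000) is to allow just that origin instead of every origin:

const express = require('express');
const cors = require('cors');

const router = express.Router();

// Only requests whose Origin is the React dev server get CORS headers;
// other origins are blocked by the browser.
router.use(cors({ origin: 'http://localhost:3000' }));

router.post('/', async (req, res) => {
  // ... existing users route logic goes here (placeholder)
  res.json({ msg: 'ok' });
});

module.exports = router;

Note that CORS only restricts browsers; it does not replace authentication on the API itself.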
As far as I understood your question, what you need is to refer to the Axios developer documentation. For the time being, check this:
import axios, { AxiosInstance } from 'axios';
import * as tunnel from 'tunnel';

const agent = tunnel.httpsOverHttp({
  proxy: {
    host: 'proxy.mycorp.com',
    port: 8000,
  },
});

const axiosClient: AxiosInstance = axios.create({
  baseURL: 'https://some.api.com',
  httpsAgent: agent,
});
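For completeness, a usage sketch of the client created above (the path is a placeholder, not from the original answer):

// Every request made through axiosClient is now tunnelled via proxy.mycorp.com:8000.
async function fetchExample() {
  const response = await axiosClient.get('/v1/example'); // placeholder path on some.api.com
  console.log(response.status, response.data);
}

fetchExample().catch((err) => console.error(err.message));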
In my case I hadn't checked carefully; it turned out the call was actually being forwarded to the API server at the proxy address.
Check that the server is running and whether it is receiving your calls or not.
When I send a request to a server through a proxy, my code works successfully. I hope this answer is useful to you or other people.
const axios = require("axios");
const cheerio = require('cheerio');

async function sendRequestWithProxy() {
  let proxyServer = {
    "host": "YOUR_PROXY_HOST",
    "port": "YOUR_PROXY_PORT",
    "username": "YOUR_PROXY_USERNAME",
    "password": "YOUR_PROXY_PASSWORD"
  };

  var ProductUrl = "http://YOUR_REQUEST_URL";

  await axios.get(ProductUrl, {
    proxy: {
      host: proxyServer.host,
      port: proxyServer.port,
      auth: {username: proxyServer.username, password: proxyServer.password}
    },
    headers: {
      'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36'
    }
  }).then((response) => {
    const $ = cheerio.load(response.data);
    let productInfo = $("#HTML_ID_ATTRIBUTE").text();
    // YOUR CODE
  }).catch(function (error) {
    if (error.response) {
      // ERROR AREA
      console.log(error.response.data);
      console.log(error.response.status);
      console.log(error.response.headers);
    }
  });
}

sendRequestWithProxy();