I'm trying to code a Lambda that is triggered by an S3 bucket when a CSV file is uploaded, and then parse that file.
I'm using: Node 14.x
This is the code:
import { S3Event } from 'aws-lambda';
import { S3 } from 'aws-sdk';
import * as csv from 'fast-csv';

const s3 = new S3({ apiVersion: 'latest' });

export async function hello(event: S3Event, context, cb) {
    event.Records.forEach(async (record) => {
        const bucket = record.s3.bucket.name;
        const key = decodeURIComponent(record.s3.object.key.replace(/\+/g, ' '));
        const params: S3.GetObjectRequest = {
            Bucket: bucket,
            Key: key,
        };
        const stream = s3.getObject(params).createReadStream();
        console.log({ stream });
        csv.parseStream(stream, {
            headers: true
        }).on('data', data => { console.log(data); })
            .on('error', error => console.error(error))
            .on('end', (rowCount: number) => console.log(`Parsed ${rowCount} rows`));
        console.log('process 01 finished!');
    });
}
When I execute this Lambda I'm not receiving anything. In console.log({ stream }) I'm receiving a PassThrough object...
stream: PassThrough {
_readableState: ReadableState {
objectMode: false,
highWaterMark: 16384,
buffer: BufferList { head: null, tail: null, length: 0 },
length: 0,
pipes: [],
flowing: null,
ended: false,
endEmitted: false,
reading: false,
sync: false,
needReadable: false,
emittedReadable: false,
readableListening: false,
resumeScheduled: false,
errorEmitted: false,
emitClose: true,
autoDestroy: true,
destroyed: false,
errored: null,
closed: false,
closeEmitted: false,
defaultEncoding: 'utf8',
awaitDrainWriters: null,
multiAwaitDrain: false,
readingMore: false,
dataEmitted: false,
decoder: null,
encoding: null,
[Symbol(kPaused)]: null
},
_events: [Object: null prototype] { prefinish: [Function: prefinish] },
_eventsCount: 1,
_maxListeners: undefined,
_writableState: WritableState {
objectMode: false,
highWaterMark: 16384,
finalCalled: false,
needDrain: false,
ending: false,
ended: false,
finished: false,
destroyed: false,
decodeStrings: true,
defaultEncoding: 'utf8',
length: 0,
writing: false,
corked: 0,
sync: true,
bufferProcessing: false,
onwrite: [Function: bound onwrite],
writecb: null,
writelen: 0,
afterWriteTickInfo: null,
buffered: [],
bufferedIndex: 0,
allBuffers: true,
allNoop: true,
pendingcb: 0,
prefinished: false,
errorEmitted: false,
emitClose: true,
autoDestroy: true,
errored: null,
closed: false
},
allowHalfOpen: true,
[Symbol(kCapture)]: false,
[Symbol(kTransformState)]: {
afterTransform: [Function: bound afterTransform],
needTransform: false,
transforming: false,
writecb: null,
writechunk: null,
writeencoding: null
}
}
}
I also have a screenshot from my CloudWatch logs.
Can anyone help me, and tell me what I'm doing wrong?
The issue with your code is that it's not correctly dealing with the asynchronous nature of JavaScript. Specifically, your code is exiting before any asynchronous activity has completed.
Your Lambda function is async so it should return a promise that is ultimately settled (fulfilled or rejected) when your processing of the S3 object(s) has completed. This allows the AWS Lambda runtime environment to await completion.
For example:
exports.handler = async function(event, context) {
    const promises = event.Records.map((record) => {
        const Bucket = record.s3.bucket.name;
        const Key = decodeURIComponent(record.s3.object.key.replace(/\+/g, ' '));
        const params = { Bucket, Key };
        const stream = s3.getObject(params).createReadStream();

        return new Promise(function(resolve, reject) {
            csv.parseStream(stream, {
                headers: true
            }).on('data', (data) => {
                console.log(data);
            }).on('error', (error) => {
                console.error(error);
                reject(error);
            }).on('end', (rows) => {
                console.log(`Parsed ${rows} rows`);
                resolve(rows);
            });
        });
    });

    return Promise.all(promises);
}
I have the following code listing the files in a folder and then trying to send them to the POST /upload route in the Express backend. But the files are always undefined on the server, even though the size and other info seem to get through.
Client-side sending code:
const axios = require('axios');
var FormData = require('form-data');
// requiring path and fs modules
const path = require('path');
const fs = require('fs');
// joining path of directory
const directoryPath = path.join(__dirname, '');
// passing directoryPath and callback function
fs.readdir(directoryPath, function (err, files) {
    // handling error
    if (err) {
        return console.log('Unable to scan directory: ' + err);
    }
    // listing all files using forEach
    var postInfo = files.forEach(async function (file) {
        const form_data = new FormData();
        form_data.append('file', fs.createReadStream(file));
        const request_config = {
            method: "post",
            url: 'http://localhost:8080/upload',
            port: 8080,
            headers: {
                "Content-Type": "multipart/form-data; boundary=form_data._boundary"
            },
            data: form_data
        };
        try {
            var req = await axios(request_config);
            // Do whatever you want to do with the file
            console.log("Request: ", req);
        } catch (e) {
            console.error(e);
        }
    });
    console.log(postInfo);
});
Receiving code in the backend:
const http = require('http')
const port = 8080
const express = require('express');
const app = express();
const multer = require('multer');

const server = http.createServer(app)

let storage = multer.diskStorage({
    destination: function (req, file, cb) {
        cb(null, './uploads')
    },
    filename: function (req, file, cb) {
        cb(null, file.fieldname + '-' + Date.now())
    }
});

const upload = multer({ storage }).single('file');

app.post('/upload', upload, (req, res) => {
    console.log(req.files) // this does log the uploaded image data.
})

// bind the server on port
server.listen(port, (err) => {
    if (err) {
        return console.log('something bad happened', err)
    }
    console.log(`server is listening on ${port}`)
})
Log:
node file-receive.js
server is listening on 8080
undefined
undefined
undefined
response: undefined,
isAxiosError: true,
toJSON: [Function] }
{ Error: socket hang up
at createHangUpError (_http_client.js:323:15)
at Socket.socketOnEnd (_http_client.js:426:23)
at Socket.emit (events.js:194:15)
at endReadableNT (_stream_readable.js:1103:12)
at process._tickCallback (internal/process/next_tick.js:63:19)
code: 'ECONNRESET',
config:
{ url: 'http://localhost:8080/upload',
method: 'post',
data:
FormData {
_overheadLength: 169,
_valueLength: 0,
_valuesToMeasure: [Array],
writable: false,
readable: true,
dataSize: 0,
maxDataSize: 2097152,
pauseStreams: true,
_released: true,
_streams: [],
_currentStream: null,
_insideLoop: false,
_pendingNext: false,
_boundary: '--------------------------046964089061111513973474',
_events: [Object],
_eventsCount: 1 },
headers:
{ Accept: 'application/json, text/plain, */*',
'Content-Type': 'application/x-www-form-urlencoded',
'User-Agent': 'axios/0.19.2' },
transformRequest: [ [Function: transformRequest] ],
transformResponse: [ [Function: transformResponse] ],
timeout: 0,
adapter: [Function: httpAdapter],
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
validateStatus: [Function: validateStatus],
port: 8080 },
request:
Writable {
_writableState:
WritableState {
objectMode: false,
highWaterMark: 16384,
finalCalled: false,
needDrain: false,
ending: false,
ended: false,
finished: false,
destroyed: false,
decodeStrings: true,
defaultEncoding: 'utf8',
length: 0,
writing: false,
corked: 0,
sync: true,
bufferProcessing: false,
onwrite: [Function: bound onwrite],
writecb: null,
writelen: 0,
bufferedRequest: null,
lastBufferedRequest: null,
pendingcb: 0,
prefinished: false,
errorEmitted: false,
emitClose: true,
bufferedRequestCount: 0,
corkedRequestsFree: [Object] },
writable: true,
_events:
[Object: null prototype] {
response: [Function: handleResponse],
error: [Function: handleRequestError] },
_eventsCount: 2,
_maxListeners: undefined,
_options:
{ protocol: 'http:',
maxRedirects: 21,
maxBodyLength: 10485760,
path: '/upload',
method: 'POST',
headers: [Object],
agent: undefined,
agents: [Object],
auth: undefined,
hostname: 'localhost',
port: '8080',
nativeProtocols: [Object],
pathname: '/upload' },
_redirectCount: 0,
_redirects: [],
_requestBodyLength: 983,
_requestBodyBuffers: [ [Object], [Object], [Object] ],
_onNativeResponse: [Function],
_currentRequest:
ClientRequest {
_events: [Object],
_eventsCount: 6,
_maxListeners: undefined,
output: [],
outputEncodings: [],
outputCallbacks: [],
outputSize: 0,
writable: true,
_last: true,
chunkedEncoding: true,
shouldKeepAlive: false,
useChunkedEncodingByDefault: true,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: null,
_hasBody: true,
_trailer: '',
finished: true,
_headerSent: true,
socket: [Socket],
connection: [Socket],
_header:
'POST /upload HTTP/1.1\r\nAccept: application/json, text/plain, */*\r\nContent-Type: application/x-www-form-urlencoded\r\nUser-Agent: axios/0.19.2\r\nHost: localhost:8080\r\nConnection: close\r\nTransfer-Encoding: chunked\r\n\r\n',
_onPendingData: [Function: noopPendingOutput],
agent: [Agent],
socketPath: undefined,
timeout: undefined,
method: 'POST',
path: '/upload',
_ended: false,
res: null,
aborted: undefined,
timeoutCb: null,
upgradeOrConnect: false,
parser: null,
maxHeadersCount: null,
_redirectable: [Circular],
[Symbol(isCorked)]: false,
[Symbol(outHeadersKey)]: [Object] },
_currentUrl: 'http://localhost:8080/upload' },
response: undefined,
isAxiosError: true,
toJSON: [Function] }
I was able to reproduce the issue and fix the code with the following adjustments (note that I'm uploading a single file for the sake of simplicity).
On the client side you basically just provide the form-data to axios and call getHeaders() to get the correctly prepared headers; you also need to adjust the file path in the form.
// client.js
...
const form_data = new FormData();
form_data.append('file', fs.createReadStream(__dirname + '/file-you-want-to-upload'));

const request_config = {
    method: "post",
    url: 'http://localhost:8080/upload',
    port: 8080,
    data: form_data,
    headers: form_data.getHeaders()
};
...
On the backend side you need to make sure to access req.file, not req.files, since you're using upload.single, and to actually send a response in your handler, as the request would otherwise hang:
// server.js
...
const upload = multer({ storage }).single('file');

app.post('/upload', upload, (req, res) => {
    console.log(req.file) // this does log the uploaded image data.
    res.send("File uploaded");
});
...
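If you later need to accept several files in a single request, a rough sketch on the multer side could use .array() instead of .single() (the /upload-many route name here is just an example, not part of the original code):
// server.js -- hypothetical variant accepting multiple files on the same 'file' field
const uploadMany = multer({ storage }).array('file');

app.post('/upload-many', uploadMany, (req, res) => {
    // with .array(), the uploaded files are available on req.files (an array)
    console.log(req.files.map((f) => f.filename));
    res.send(`Uploaded ${req.files.length} file(s)`);
});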
I'm using Firebase Functions to schedule a Puppeteer task from the Firebase pubsub namespace.
I also want to post the response back to an API endpoint outside Google using axios.
I tried axios directly from the worker but I get a 500 error.
My goals:
Use the Firebase schedule and pubsub namespace to generate Puppeteer jobs - complete
Post results back to an external endpoint using axios - need your help :)
Code:
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const functions = require("firebase-functions");
const admin = require("firebase-admin");
const puppeteer = require('puppeteer');
const url = require('url');
// Request Data From A URL
var axios = require('axios');
var https = require('https');
// var cors = require("cors");
// Initalise App
admin.initializeApp();
const db = admin.firestore();
const workers = {
extract: async ({ uri, post_id, store_id, domain }) => {
let theTitle = null;
let thePrice = null;
let host = url.parse(uri).host;
let postId = post_id;
let storeId = store_id;
let theDomain = domain;
const SELECTORS = {
amazonPrice: '#priceblock_ourprice',
ebayPrice: '#prcIsum',
const browser = await puppeteer.launch({ args: ['--no-sandbox', '--disable-setuid-sandbox'] });
console.log('spawning chrome headless');
console.log(postId);
console.log(storeId);
console.log(domain);
const page = await browser.newPage();
// Goto page and then do stuff
console.log('going to ', uri);
await page.goto(uri, {
waitUntil: ["domcontentloaded", "networkidle0"]
});
console.log('waiting for page to load ');
console.log(host);
// theTitle = await page.title();
try {
theTitle = await page.title();
// find amazon price
if (host === 'www.amazon.co.uk' || 'amazon.co.uk') {
const priceInput = await page.$(SELECTORS.amazonPrice)
thePrice = await page.evaluate(element => element.textContent, priceInput)
}
// find ebay price
if (host === 'www.ebay.co.uk' || 'ebay.co.uk') {
const priceInput = await page.$(SELECTORS.ebayPrice)
thePrice = await page.evaluate(element => element.value, priceInput)
}
else {
console.log('failed scrape at', host);
}
}
catch (error) {
console.error('There was an error processing the page:', error);
}
finally {
// close browser
if (browser !== null) {
await browser.close();
}
}
console.log(theTitle);
console.log(thePrice);
const response = {
title: theTitle,
price: thePrice,
};
console.log('post' + postId + storeId + thePrice + theDomain);
axios.post('endpoint', {
price: thePrice,
store_id: storeId,
post_id: postId,
domain: theDomain,
},
{
headers: {
'Content-Type': 'multipart/form-data',
}
})
.then(function (response) {
console.log(response);
})
.catch(function (error) {
console.log(error);
});
// axios post end
return response;
}
};
exports.taskRunner = functions.runWith({ memory: '2GB' }).pubsub
// export const taskRunner = functions.region('europe-west2').runWith( { memory: '2GB' }).pubsub
.schedule('*/15 * * * *').onRun(async (context) => {
// Consistent timestamp
const now = admin.firestore.Timestamp.now();
// Query all documents ready to perform
const query = db.collection('tasks').where('performAt', '<=', now).where('status', '==', 'scheduled');
const tasks = await query.get();
// Jobs to execute concurrently.
const jobs = [];
// Loop over documents and push job.
tasks.forEach(snapshot => {
const { worker, options } = snapshot.data();
const job = workers[worker](options)
// Update doc with status on success or error
// .then(() => snapshot.ref.update({ status: 'complete' }))
.catch((err) => snapshot.ref.update({ status: 'error' }));
jobs.push(job);
});
// Execute all jobs concurrently
return await Promise.all(jobs);
});
Error
Error: Request failed with status code 500
at createError (/srv/node_modules/axios/lib/core/createError.js:16:15)
at settle (/srv/node_modules/axios/lib/core/settle.js:17:12)
at IncomingMessage.handleStreamEnd (/srv/node_modules/axios/lib/adapters/http.js:236:11)
at emitNone (events.js:111:20)
at IncomingMessage.emit (events.js:208:7)
at endReadableNT (_stream_readable.js:1064:12)
at _combinedTickCallback (internal/process/next_tick.js:139:11)
at process._tickDomainCallback (internal/process/next_tick.js:219:9)
config:
{ url: 'endpoint',
method: 'post',
data: '{"£59.99":null,"store_id":32,"post_id":25,"domain":"amazon.co.uk"}',
headers:
{ Accept: 'application/json, text/plain, */*',
'Content-Type': 'multipart/form-data',
'User-Agent': 'axios/0.19.2',
'Content-Length': 65 },
transformRequest: [ [Function: transformRequest] ],
transformResponse: [ [Function: transformResponse] ],
timeout: 0,
adapter: [Function: httpAdapter],
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
validateStatus: [Function: validateStatus] },
request:
ClientRequest {
domain:
Domain {
domain: null,
_events: [Object],
_eventsCount: 1,
_maxListeners: undefined,
members: [Array] },
_events:
{ socket: [Function],
abort: [Function],
aborted: [Function],
error: [Function],
timeout: [Function],
prefinish: [Function: requestOnPrefinish] },
_eventsCount: 6,
_maxListeners: undefined,
output: [],
outputEncodings: [],
outputCallbacks: [],
outputSize: 0,
writable: true,
_last: true,
upgrading: false,
chunkedEncoding: false,
shouldKeepAlive: false,
useChunkedEncodingByDefault: true,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: null,
_hasBody: true,
_trailer: '',
finished: true,
_headerSent: true,
socket:
TLSSocket {
_tlsOptions: [Object],
_secureEstablished: true,
_securePending: false,
_newSessionPending: false,
_controlReleased: true,
_SNICallback: null,
servername: 'SERVERNAME',
npnProtocol: false,
alpnProtocol: false,
authorized: true,
authorizationError: null,
encrypted: true,
_events: [Object],
_eventsCount: 9,
connecting: false,
_hadError: false,
_handle: [Object],
_parent: null,
_host: 'HOST',
_readableState: [Object],
readable: true,
domain: [Object],
_maxListeners: undefined,
_writableState: [Object],
writable: false,
allowHalfOpen: false,
_bytesDispatched: 259,
_sockname: null,
_pendingData: null,
_pendingEncoding: '',
server: undefined,
_server: null,
ssl: [Object],
_requestCert: true,
_rejectUnauthorized: true,
parser: null,
_httpMessage: [Circular],
[Symbol(asyncId)]: 538,
[Symbol(bytesRead)]: 0 },
connection:
TLSSocket {
_tlsOptions: [Object],
_secureEstablished: true,
_securePending: false,
_newSessionPending: false,
_controlReleased: true,
_SNICallback: null,
servername: 'HOST',
npnProtocol: false,
alpnProtocol: false,
authorized: true,
authorizationError: null,
encrypted: true,
_events: [Object],
_eventsCount: 9,
connecting: false,
_hadError: false,
_handle: [Object],
_parent: null,
_host: 'HOST',
_readableState: [Object],
readable: true,
domain: [Object],
_maxListeners: undefined,
_writableState: [Object],
writable: false,
allowHalfOpen: false,
_bytesDispatched: 259,
_sockname: null,
_pendingData: null,
_pendingEncoding: '',
server: undefined,
_server: null,
ssl: [Object],
_requestCert: true,
_rejectUnauthorized: true,
parser: null,
_httpMessage: [Circular],
[Symbol(asyncId)]: 538,
[Symbol(bytesRead)]: 0 },
_header: 'POST ENDPOINT HTTP/1.1\r\nAccept: application/json, text/plain, */*\r\nContent-Type: multipart/form-data\r\nUser-Agent: axios/0.19.2\r\nContent-Length: 65\r\nHost: HOST\r\nConnection: close\r\n\r\n',
_onPendingData: [Function: noopPendingOutput],
agent:
Agent {
domain: null,
_events: [Object],
_eventsCount: 1,
_maxListeners: undefined,
defaultPort: 443,
protocol: 'https:',
options: [Object],
requests: {},
sockets:
After some work and trial and error, I settled on using the Request package to solve the issue - https://www.npmjs.com/package/request
I haven't had time to investigate why axios was not working, but I will review and, if enlightened, post here.
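For reference, a minimal sketch of the axios.post call in the worker replaced with the request package (assuming the external endpoint accepts a JSON body; 'endpoint' stands in for the real URL, as in the code above):
const request = require('request');

// inside the extract worker, in place of the axios.post(...) call
request.post({
    url: 'endpoint',  // the external API endpoint
    json: true,       // serialise the body as JSON and parse the JSON response
    body: {
        price: thePrice,
        store_id: storeId,
        post_id: postId,
        domain: theDomain,
    },
}, function (error, res, body) {
    if (error) {
        console.error('post failed', error);
        return;
    }
    console.log(res.statusCode, body);
});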
I can't figure out how I can correctly save a file I got from formidable to the file system my server is running on.
I am able to console.log the files, however I do not know what to do with the information provided there.
app.post("/sendImages", (req, res) => {
const files = req.files;
Object.keys(files).forEach((key) => {
console.log(files[key]);
fs.writeFile('images/' + files[key].name, files[key], 'binary', (error) => {
if (error) console.log(error);
else console.log('image created');
});
})
});
This request handler creates files with the correct names, but when I try to open them in VS Code the only thing I see is [object Object].
An example of a console-logged file:
File {
_events: [Object: null prototype] {},
_eventsCount: 0,
_maxListeners: undefined,
size: 3835864,
path:
'C:\\Users\\MY_USER_DIR\\AppData\\Local\\Temp\\upload_b099c61751b3b25772344e20df06a4d9',
name: '20190602_134136.jpg',
type: 'image/jpeg',
hash: null,
lastModifiedDate: 2019-06-30T15:03:22.060Z,
_writeStream:
WriteStream {
_writableState:
WritableState {
objectMode: false,
highWaterMark: 16384,
finalCalled: true,
needDrain: true,
ending: true,
ended: true,
finished: true,
destroyed: true,
decodeStrings: true,
defaultEncoding: 'utf8',
length: 0,
writing: false,
corked: 0,
sync: false,
bufferProcessing: false,
onwrite: [Function: bound onwrite],
writecb: null,
writelen: 0,
bufferedRequest: null,
lastBufferedRequest: null,
pendingcb: 0,
prefinished: true,
errorEmitted: false,
emitClose: false,
autoDestroy: false,
bufferedRequestCount: 0,
corkedRequestsFree: [Object] },
writable: false,
_events: [Object: null prototype] {},
_eventsCount: 0,
_maxListeners: undefined,
path:
'C:\\Users\\MY_USER_DIR\\AppData\\Local\\Temp\\upload_b099c61751b3b25772344e20df06a4d9',
fd: null,
flags: 'w',
mode: 438,
start: undefined,
autoClose: true,
pos: undefined,
bytesWritten: 3835864,
closed: false } }
I hope someone can tell me what I did wrong here; I am new to Node in general and still have some problems here and there :)
You should copy the files from the tmp folder to the images folder, like this (Node.js >= 8.5.0):
const fs = require('fs');
const util = require('util');
const path = require('path');

const copyFile = util.promisify(fs.copyFile);

app.post('/sendImages', async (req, res) => {
    const files = req.files;
    const results = Object.keys(files).map((key) => {
        const file = files[key];
        const dest = path.join('images/', file.name);
        return copyFile(file.path, dest);
    });
    await Promise.all(results);
    // ...
});
And if you don't want files saved to the tmp folder first, you should check the API documentation for changing the uploadDir. For example, with express-formidable:
app.use(formidableMiddleware({
    encoding: 'utf-8',
    uploadDir: 'images/',
    multiples: true
}));
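If you use formidable directly rather than the express-formidable middleware, a rough sketch of the same idea (assuming the formidable v1 API) would be:
const formidable = require('formidable');

app.post('/sendImages', (req, res) => {
    const form = new formidable.IncomingForm();
    // write uploads straight into images/ instead of the OS tmp folder
    form.uploadDir = 'images/';
    form.keepExtensions = true;
    form.multiples = true;

    form.parse(req, (err, fields, files) => {
        if (err) return res.status(500).send(err.message);
        res.send('images saved');
    });
});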
I built an Azure web app which is linked to my work account. Now I want to use my personal account to put it on Visual Studio Team Services (for continuous development). I know I need to create a build for it... but since my personal account isn't linked to the web app, I can't use the Azure web app template. After searching online, I found a way to do it with a publish.js. Since it works for the Azure Bot Service, I gave it a try. However, after the build completed, I noticed the change does not get reflected on the web app... So I wonder: is it possible to deploy an Azure web app from a zip on Visual Studio Team Services?
Here is the sample publish.js code:
var zipFolder = require('zip-folder');
var path = require('path');
var fs = require('fs');
var request = require('request');

var rootFolder = path.resolve('.');
var zipPath = path.resolve(rootFolder, './test.zip');
var kuduApi = '';
var userName = '';
var password = '';

function uploadZip(callback) {
    fs.createReadStream(zipPath).pipe(request.put(kuduApi, {
        auth: {
            username: userName,
            password: password,
            sendImmediately: true
        },
        headers: {
            "Content-Type": "application/zip"
        }
    }))
    .on('response', function (resp) {
        if (resp.statusCode >= 200 && resp.statusCode < 300) {
            fs.unlink(zipPath);
            callback(null);
        } else if (resp.statusCode >= 400) {
            callback(resp);
        }
    })
    .on('error', function (err) {
        callback(err)
    });
}

function publish(callback) {
    zipFolder(rootFolder, zipPath, function (err) {
        if (!err) {
            uploadZip(callback);
        } else {
            callback(err);
        }
    })
}

publish(function (err) {
    if (!err) {
        console.log('testit-89d8 publish');
    } else {
        console.error('failed to publish testit-89d8', err);
    }
});
I found the error log; it is too long, so here is just the head of it:
failed to publish qnalist IncomingMessage {
2018-04-18T19:36:20.1013703Z _readableState:
2018-04-18T19:36:20.1013848Z ReadableState {
2018-04-18T19:36:20.1013952Z objectMode: false,
2018-04-18T19:36:20.1014061Z highWaterMark: 16384,
2018-04-18T19:36:20.1014184Z buffer: BufferList { head: null, tail: null, length: 0 },
2018-04-18T19:36:20.1014367Z length: 0,
2018-04-18T19:36:20.1014485Z pipes: null,
2018-04-18T19:36:20.1014589Z pipesCount: 0,
2018-04-18T19:36:20.1014694Z flowing: null,
2018-04-18T19:36:20.1015019Z ended: false,
2018-04-18T19:36:20.1015112Z endEmitted: false,
2018-04-18T19:36:20.1015203Z reading: false,
2018-04-18T19:36:20.1015292Z sync: true,
2018-04-18T19:36:20.1015427Z needReadable: false,
2018-04-18T19:36:20.1015524Z emittedReadable: false,
2018-04-18T19:36:20.1015681Z readableListening: false,
2018-04-18T19:36:20.1015821Z resumeScheduled: false,
2018-04-18T19:36:20.1015928Z destroyed: false,
2018-04-18T19:36:20.1016022Z defaultEncoding: 'utf8',
2018-04-18T19:36:20.1016113Z awaitDrain: 0,
2018-04-18T19:36:20.1016253Z readingMore: true,
2018-04-18T19:36:20.1016343Z decoder: null,
2018-04-18T19:36:20.1016433Z encoding: null },
2018-04-18T19:36:20.1016522Z readable: true,
2018-04-18T19:36:20.1016653Z domain: null,
2018-04-18T19:36:20.1016739Z _events:
2018-04-18T19:36:20.1016841Z { end: [ [Function: responseOnEnd], [Function], [Object] ],
2018-04-18T19:36:20.1016980Z close: [Function] },
2018-04-18T19:36:20.1017072Z _eventsCount: 2,
2018-04-18T19:36:20.1017164Z _maxListeners: undefined,
2018-04-18T19:36:20.1017252Z socket:
2018-04-18T19:36:20.1017377Z TLSSocket {
2018-04-18T19:36:20.1017467Z _tlsOptions:
2018-04-18T19:36:20.1017557Z { pipe: false,
2018-04-18T19:36:20.1018481Z secureContext: [Object],
2018-04-18T19:36:20.1018628Z isServer: false,
2018-04-18T19:36:20.1019590Z requestCert: true,
2018-04-18T19:36:20.1019690Z rejectUnauthorized: true,
2018-04-18T19:36:20.1019839Z session: undefined,
2018-04-18T19:36:20.1019937Z NPNProtocols: undefined,
2018-04-18T19:36:20.1020036Z ALPNProtocols: undefined,
2018-04-18T19:36:20.1020134Z requestOCSP: undefined },
2018-04-18T19:36:20.1020267Z _secureEstablished: true,
2018-04-18T19:36:20.1020363Z _securePending: false,
2018-04-18T19:36:20.1020459Z _newSessionPending: false,
2018-04-18T19:36:20.1020555Z _controlReleased: true,
2018-04-18T19:36:20.1020690Z _SNICallback: null,
2018-04-18T19:36:20.1020783Z servername: null,
2018-04-18T19:36:20.1020877Z npnProtocol: undefined,
2018-04-18T19:36:20.1021011Z alpnProtocol: false,
2018-04-18T19:36:20.1021224Z authorized: true,
2018-04-18T19:36:20.1021637Z authorizationError: null,
2018-04-18T19:36:20.1021749Z encrypted: true,
2018-04-18T19:36:20.1021910Z _events:
2018-04-18T19:36:20.1022014Z { close: [Array],
2018-04-18T19:36:20.1022119Z end: [Array],
2018-04-18T19:36:20.1022232Z finish: [Function: onSocketFinish],
2018-04-18T19:36:20.1022399Z _socketEnd: [Function: onSocketEnd],
2018-04-18T19:36:20.1022514Z secure: [Function],
2018-04-18T19:36:20.1022626Z free: [Function: onFree],
2018-04-18T19:36:20.1022784Z agentRemove: [Function: onRemove],
2018-04-18T19:36:20.1022901Z drain: [Function: ondrain],
2018-04-18T19:36:20.1023018Z error: [Function: socketErrorListener],
2018-04-18T19:36:20.1023137Z data: [Function: socketOnData] },
2018-04-18T19:36:20.1023285Z _eventsCount: 10,
2018-04-18T19:36:20.1023392Z connecting: false,
2018-04-18T19:36:20.1023497Z _hadError: false,
2018-04-18T19:36:20.1023598Z _handle:
2018-04-18T19:36:20.1023750Z TLSWrap {
2018-04-18T19:36:20.1023856Z _parent: [Object],
2018-04-18T19:36:20.1023965Z _parentWrap: undefined,
2018-04-18T19:36:20.1024126Z _secureContext: [Object],
2018-04-18T19:36:20.1024235Z reading: true,
2018-04-18T19:36:20.1024340Z owner: [Circular],
2018-04-18T19:36:20.1024451Z onread: [Function: onread],
2018-04-18T19:36:20.1024720Z writeQueueSize: 0,
You need to use the zip deployment Kudu API (/api/zipdeploy); see "Deploying from a zip file" in the Kudu documentation.
On the other hand, the Azure Web App deployment task (Azure App Service Deploy) uses the Azure subscription endpoint for authorization (chosen in the task's Azure subscription input box).
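For illustration, a minimal sketch of how uploadZip() in the publish.js above could be pointed at the zip deployment API (the hostname below is an assumed placeholder: replace <your-app> with your web app's name and use its deployment credentials):
// publish.js -- upload to the Kudu zip deployment API instead
// NOTE: the kuduApi URL is an assumed placeholder; fill in your app name and credentials.
var kuduApi = 'https://<your-app>.scm.azurewebsites.net/api/zipdeploy';

function uploadZip(callback) {
    // /api/zipdeploy takes the zip file as the raw body of a POST request
    fs.createReadStream(zipPath).pipe(request.post(kuduApi, {
        auth: {
            username: userName,
            password: password,
            sendImmediately: true
        },
        headers: {
            "Content-Type": "application/zip"
        }
    }))
    .on('response', function (resp) {
        if (resp.statusCode >= 200 && resp.statusCode < 300) {
            fs.unlink(zipPath, function () {});
            callback(null);
        } else if (resp.statusCode >= 400) {
            callback(resp);
        }
    })
    .on('error', function (err) {
        callback(err);
    });
}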