CLIENT_ERROR when uploading video to LinkedIn API - Node.js

I followed the LinkedIn docs here to upload a video to their Vector Assets API.
The video uploaded, but when I checked the status of the video I keep getting CLIENT_ERROR under recipes.
This is the code I used:
let fileName = 'redacted.mp4'; // This sample file is about 24MB
let fileStream = fs.createReadStream(fileName);
let organisationId = 'redacted';
let isVideo = module.exports.isVideo(fileName);
let fileStats = fs.statSync(fileName);
let fileSizeBytes = fileStats.size;
let mediaUploadInitAction = `https://api.linkedin.com/v2/assets?action=registerUpload`;
let registerUploadRequest = {
    "registerUploadRequest": {
        "owner": `urn:li:organization:${organisationId}`,
        "recipes": [
            `urn:li:digitalmediaRecipe:feedshare-${isVideo ? 'video' : 'image'}`
        ],
        "serviceRelationships": [
            {
                "identifier": "urn:li:userGeneratedContent",
                "relationshipType": "OWNER"
            }
        ]
    }
};
if (!isVideo) {
    registerUploadRequest['registerUploadRequest']["supportedUploadMechanism"] = [
        "SYNCHRONOUS_UPLOAD"
    ];
} else {
    if (fileSizeBytes > 52428800) { // I was told to use the multipart upload mechanism if the file is larger than 52428800 bytes; my current file is smaller than that, though.
        registerUploadRequest['registerUploadRequest']["supportedUploadMechanism"] = [
            "MULTIPART_UPLOAD"
        ];
        registerUploadRequest['registerUploadRequest']['fileSize'] = fileSizeBytes;
    }
}
let { data: mediaUploadRegisterResponse } = await axios.post(mediaUploadInitAction, registerUploadRequest, { headers: headers });
let uploadRegisterData = mediaUploadRegisterResponse.value;
let uploadMechanism = uploadRegisterData['uploadMechanism'];
let singleUploadRequest = uploadMechanism['com.linkedin.digitalmedia.uploading.MediaUploadHttpRequest'];
let uploadUrl = singleUploadRequest ? singleUploadRequest['uploadUrl'] : '';
let uploadHeaders = singleUploadRequest ? singleUploadRequest['headers'] : '';
let multipartUpload = uploadMechanism['com.linkedin.digitalmedia.uploading.MultipartUpload'];
if (singleUploadRequest) { // This always works flawlessly for image uploads, but not for video uploads
    await axios.put(uploadUrl, fileStream, {
        headers: {
            'Accept': '*/*',
            ...uploadHeaders,
            ...headers,
            'Content-Type': 'application/octet-stream'
        },
        maxContentLength: Infinity,
        maxBodyLength: Infinity,
    });
    fs.unlink(fileName, () => { });
}
The above code works flawlessly for image uploads, but for video uploads I keep getting back CLIENT_ERROR.
This is the full status message I keep getting back:
{
    "recipes": [
        {
            "recipe": "urn:li:digitalmediaRecipe:feedshare-video",
            "status": "CLIENT_ERROR"
        }
    ],
    "serviceRelationships": [
        {
            "relationshipType": "OWNER",
            "identifier": "urn:li:userGeneratedContent"
        }
    ],
    "mediaTypeFamily": "VIDEO",
    "created": 1641646213127,
    "id": "C4adhdhahahhdKJZw",
    "lastModified": 1641646215307,
    "status": "ALLOWED"
}
Please, what can I do to resolve this?
Thank you

For anyone who might experience a similar issue in the future: after brute-forcing all possible options, here is what worked for me.
If the video file is less than 52MB, don't use fs.createReadStream. Use the code below instead:
fs.readFile(fileName, async (err, data) => {
    if (err) {
        console.log(`Error Reading LinkedIn Video File ${fileName}`, err);
        return;
    }
    await axios.put(uploadUrl, data, {
        headers: singleFileUploadHeaders,
        maxContentLength: Infinity,
        maxBodyLength: Infinity,
    });
});
And the video finally uploaded without any CLIENT_ERROR again!!!
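To verify the fix programmatically, you can poll the asset's status the same way you checked it before (a minimal sketch, assuming `headers` carries your OAuth token and `assetId` is the ID portion of the asset URN returned by registerUpload):
// Hypothetical status check; assetId is assumed to be the last segment of the
// asset URN, e.g. "urn:li:digitalmediaAsset:C4adhdhahahhdKJZw" -> "C4adhdhahahhdKJZw".
async function checkAssetStatus(assetId) {
    let { data } = await axios.get(`https://api.linkedin.com/v2/assets/${assetId}`, { headers });
    // Each entry under recipes should eventually report AVAILABLE instead of CLIENT_ERROR.
    return data.recipes.map(r => `${r.recipe}: ${r.status}`);
}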

Related

Elasticsearch node js point in time search_phase_execution_exception

const body = {
    query: {
        geo_shape: {
            geometry: {
                relation: 'within',
                shape: {
                    type: 'polygon',
                    coordinates: [$polygon],
                },
            },
        },
    },
    pit: {
        id: "t_yxAwEPZXNyaS1wYzYtMjAxN3IxFjZxU2RBTzNyUXhTUV9XbzhHSk9IZ3cAFjhlclRmRGFLUU5TVHZKNXZReUc3SWcAAAAAAAALmpMWQkNwYmVSeGVRaHU2aDFZZExFRjZXZwEWNnFTZEFPM3JReFNRX1dvOEdKT0hndwAA",
        keep_alive: "1m",
    },
};
The query fails with search_phase_execution_exception at onBody.
Without pit the query works fine, but it's needed to retrieve more than 10,000 hits.
Well, using PIT in the Node.js Elasticsearch client is not clear, or at least not well documented. You can create a PIT using the client like this:
const pitRes = await elastic.openPointInTime({
    index: index,
    keep_alive: "1m"
});
pit_id = pitRes.body.id;
But there is no way to use that pit_id in the search method, and it's not documented properly :S
BUT, you can use the scroll API as follows:
const scrollSearch = await elastic.helpers.scrollSearch({
    index: index,
    body: {
        "size": 10000,
        "query": {
            "query_string": {
                "fields": [ "vm_ref", "org", "vm" ],
                "query": organization + moreQuery
            }
        },
        "sort": [
            { "utc_date": "desc" }
        ]
    }
});
And then read the results as follows:
let res = [];
try {
    for await (const result of scrollSearch) {
        res.push(...result.body.hits.hits);
    }
} catch (e) {
    console.log(e);
}
I know that's not the exact answer to your question, but I hope it helps ;)
The usage of point-in-time for pagination of search results is now documented in Elasticsearch. You can find more or less detailed explanations here: Paginate search results
I prepared an example that may give an idea of how to implement the workflow described in the documentation:
const { Client } = require("@elastic/elasticsearch");

async function searchWithPointInTime(cluster, index, chunkSize, keepAlive) {
    if (!chunkSize) {
        chunkSize = 5000;
    }
    if (!keepAlive) {
        keepAlive = "1m";
    }
    const client = new Client({ node: cluster });
    let pointInTimeId = null;
    let searchAfter = null;
    try {
        // Open point in time
        pointInTimeId = (await client.openPointInTime({ index, keep_alive: keepAlive })).body.id;
        // Query the next chunk of data
        while (true) {
            const response = await client.search({
                // Pay attention: no index here (because it will come from the point-in-time)
                body: {
                    size: chunkSize,
                    track_total_hits: false, // This will make the query faster
                    query: {
                        // (1) TODO: put any filter you need here (instead of match_all)
                        match_all: {},
                    },
                    pit: {
                        id: pointInTimeId,
                        keep_alive: keepAlive,
                    },
                    // Sorting should be by _shard_doc or at least include _shard_doc
                    sort: [{ _shard_doc: "desc" }],
                    // The next parameter is very important - it tells Elastic to bring us the next portion
                    ...(searchAfter !== null && { search_after: [searchAfter] }),
                },
            });
            const { hits } = response.body.hits;
            if (!hits || !hits.length) {
                break; // No more data
            }
            for (const hit of hits) {
                // (2) TODO: Do whatever you need with the results
            }
            // Check if we are done reading the data
            if (hits.length < chunkSize) {
                break; // We finished reading all the data
            }
            // Get the next value for the 'search after' position
            // by extracting the _shard_doc from the sort key of the last hit
            searchAfter = hits[hits.length - 1].sort[0];
        }
    } catch (ex) {
        console.error(ex);
    } finally {
        // Close the point in time
        if (pointInTimeId) {
            await client.closePointInTime({ body: { id: pointInTimeId } });
        }
    }
}
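A minimal usage sketch (the cluster URL and index name are placeholders, not from the question):
// Hypothetical call: page through "my-index" in chunks of 1000 hits,
// keeping the point in time alive for 1 minute between requests.
searchWithPointInTime("http://localhost:9200", "my-index", 1000, "1m")
    .catch(err => console.error(err));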

How to adjust my code to send data in JSON format in Angular

I hope you can help me. I need to send some parameters in JSON format like this:
{
    "InformationA": {
        "str_id": 1,
        "str_description": "message",
        "str_email": "abcd@abcd.com.co"
    },
    "AddConfiguration": [
        {
            "int_code": 1,
            "str_valor": "32201"
        },
        {
            "int_code": 104,
            "str_valor": "https://www.google.com.co/"
        },
        {
            "int_code": 108,
            "str_valor": "1"
        }
    ]
}
I am trying to send the JSON through the Angular service in this way, but I don't know if it is correct:
sendData(InformationA, AddConfiguration) {
    const params = 'InformationA=' + JSON.stringify(InformationA) + 'AddConfiguration=' +
        JSON.stringify(AddConfiguration);
    return this.http.post<any>(`${this.route}/send-data`, params, { headers: this.headers });
}
I also created a function in the Node.js backend to see how it would arrive:
@Post('send-data')
async receibeData(@Req() req, @Res() res) {
    try {
        const data = req.body;
        res.status(HttpStatus.OK).json(data);
    } catch (err) {
        throw err;
    }
}
and in the console it is printed this way:
{,…}
InformationA:"
[{"str_id":"1","str_description":"message","str_email":"abcd#abcd.com.co"}]Addconfiguration=
[{"int_code":1,"str_valor":"32201 "},{"int_code":104,"str_valor":"https://www.google.com.co
"},{"int_code":108,"str_valor":"1 "}]"
I am really very new to this and I would like to know how to adapt my data so that it can be sent as requested.
I think you should build the JSON object corresponding to your requirement. You should not use JSON.stringify for this purpose; Angular's HttpClient serializes a plain object to JSON for you. I hope this will help you out.
sendData(InformationA, AddConfiguration) {
    const params = {
        InformationA: InformationA,
        AddConfiguration: AddConfiguration
    };
    return this.http.post<any>(`${this.route}/send-data`, params, { headers: this.headers });
}
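With that change, the body arrives at the backend already parsed, so the handler from the question should log a plain object rather than a concatenated string (a small sketch reusing the question's handler; the field names come from the sample JSON above):
@Post('send-data')
async receibeData(@Req() req, @Res() res) {
    // req.body is now a parsed object, not a URL-encoded string:
    console.log(req.body.InformationA.str_email);       // "abcd@abcd.com.co"
    console.log(req.body.AddConfiguration[0].int_code); // 1
    res.status(HttpStatus.OK).json(req.body);
}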

Unable to write item(s) to DynamoDB table utilizing DocumentClient - Nodejs

I'm absolutely brand new to DynamoDB and I'm trying to simply write an object from a NodeJS Lambda. Based on what I've read and researched, I should probably be using DocumentClient from the aws-sdk. I also found the following question here regarding issues with DocumentClient, but it doesn't seem to address my specific issue... which I can't really pinpoint, unfortunately. I've set up a debugger to help with SAM local development, but it appears to be providing only some of the errors.
The code's implementation is shown here.
var params = {
    TableName: "March-Madness-Teams",
    Item: {
        "Id": {"S": randstring.generate(9)},
        "School": {"S": team_name},
        "Seed": {"S": seed},
        "ESPN_Id": {"S": espn_id}
    }
}
console.log(JSON.stringify(params))
dynamodb.put(params, (error, data) => {
    if (error) {
        console.log("Error ", error)
    } else {
        console.log("Success! ", data)
    }
})
Basically I'm scraping a website using the cheerio library, cherry-picking values from the DOM, and saving them into the JSON object shown below.
{
    "TableName": "March-Madness-Teams",
    "Item": {
        "Id": {
            "S": "ED311Oi3N"
        },
        "School": {
            "S": "BAYLOR"
        },
        "Seed": {
            "S": "1"
        },
        "ESPN_Id": {
            "S": "239"
        }
    }
}
When I attempt to push this JSON object to Dynamo, I get an error saying:
Error MultipleValidationErrors: There were 2 validation errors:
* MissingRequiredParameter: Missing required key 'TableName' in params
* MissingRequiredParameter: Missing required key 'Item' in params
The above error is all well and good... I assume it didn't like the fact that I had wrapped those two keys in strings, so I removed the quotes and sent the following:
{
    TableName: "March-Madness-Teams",
    Item: {
        "Id": {
            "S": "ED311Oi3N"
        },
        "School": {
            "S": "BAYLOR"
        },
        "Seed": {
            "S": "1"
        },
        "ESPN_Id": {
            "S": "239"
        }
    }
}
However, when I do that...I kind of get nothing.
Here is a larger code snippet.
return new Promise((resolve, reject) => {
    axios.get('http://www.espn.com/mens-college-basketball/bracketology')
        .then(html => {
            const dynamodb = new aws.DynamoDB.DocumentClient()
            let $ = cheerio.load(html.data)
            $('.region').each(async function(index, element) {
                var preregion = $(element).children('h3,b').text()
                var region = preregion.substr(0, preregion.indexOf('(') - 1)
                $(element).find('a').each(async function(index2, element2) {
                    var seed = $(element2).siblings('span.rank').text()
                    if (seed.length > 2) {
                        seed = $(element2).siblings('span.rank').text().substring(0, 2)
                    }
                    var espn_id = $(element2).attr('href').split('/').slice(-2)[0]
                    var team_name = $(element2).text()
                    var params = {
                        TableName: "March-Madness-Teams",
                        Item: {
                            "Id": randstring.generate(9),
                            "School": team_name,
                            "Seed": seed,
                            "ESPN_Id": espn_id
                        }
                    }
                    console.log(JSON.stringify(params))
                    // dynamodb.put(params)
                    //     .then(function(data) {
                    //         console.log(`Success`, data)
                    //     })
                })
            })
        })
})
Can you try without the type?
Instead of
"School":{"S": team_name},
for example, use
"School": team_name,
From your code, I can see a missing promise on the dynamodb request. Try to change your lines:
dynamodb.put(params).then(function(data) {
    console.log(`Success`, data)
})
to be:
dynamodb.put(params).promise().then(function(data) {
    console.log(`Success`, data)
})
You can combine it with await too:
await dynamodb.put(params).promise().then(function(data) {
    console.log(`Success`, data)
})
exports.lambdaHandler = async (event, context) => {
    const html = await axios.get('http://www.espn.com/mens-college-basketball/bracketology')
    let $ = cheerio.load(html.data)
    const schools = buildCompleteSchoolObject(html, $)
    try {
        await writeSchoolsToDynamo(schools)
        return { statusCode: 200 }
    } catch (error) {
        return { statusCode: 400, message: error.message }
    }
}

const writeSchoolsToDynamo = async (schools) => {
    const promises = schools.map(async school => {
        await dynamodb.put(school).promise()
    })
    await Promise.all(promises)
}

const buildCompleteSchoolObject = (html, $) => {
    const schools = []
    $('.region').each(loopThroughSubRegions(schools, $))
    return schools
}

const loopThroughSubRegions = (schools, $) => {
    return (index, element) => {
        var preregion = $(element).children('h3,b').text()
        var region = preregion.substr(0, preregion.indexOf('(') - 1)
        $(element).find('a').each(populateSchoolObjects(schools, $))
    }
}

const populateSchoolObjects = (schools, $) => {
    return (index, element) => {
        var seed = $(element).siblings('span.rank').text()
        if (seed.length > 2) {
            seed = $(element).siblings('span.rank').text().substring(0, 2)
        }
        var espn_id = $(element).attr('href').split('/').slice(-2)[0]
        var team_name = $(element).text()
        schools.push({
            TableName: "March-Madness-Teams",
            Item: {
                "Id": randstring.generate(9),
                "School": team_name,
                "Seed": seed,
                "ESPN_Id": espn_id
            }
        })
    }
}
I know this is drastically different from what I started with, but I did some more digging and kind of worked my way to this... I'm not sure if this is the best way, but I seemed to get it to work... Let me know if something should change!
Oh, I understand what you want.
The code above works, but there is one concept you have to improve here about async/await and promises, especially in a Lambda function.
I have some notes on your code above that may be worth considering to improve your Lambda:
Using await for every promise in a Lambda is not the best approach, because of the Lambda time limitation. But sometimes we can do that for other cases.
Maybe you can change the dynamodb.put method to dynamodb.batchWrite (the DocumentClient wrapper around the BatchWriteItem operation):
The BatchWriteItem operation puts or deletes multiple items in one or more tables.
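A minimal sketch of that with DocumentClient (note that BatchWriteItem accepts at most 25 items per call, so larger arrays must be chunked; the school objects are the same shape as the ones pushed above):
// Hypothetical batch variant: write the items in chunks of 25,
// the BatchWriteItem per-call limit.
const writeSchoolsInBatches = async (schools) => {
    for (let i = 0; i < schools.length; i += 25) {
        const chunk = schools.slice(i, i + 25)
        await dynamodb.batchWrite({
            RequestItems: {
                "March-Madness-Teams": chunk.map(school => ({
                    PutRequest: { Item: school.Item }
                }))
            }
        }).promise()
    }
}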
Or, if you have to use dynamodb.put instead, try to improve the code like so:
const writeSchoolsToDynamo = async (schools) => {
    const promises = schools.map(school => dynamodb.put(school).promise())
    return Promise.all(promises)
}

Request to Cloudflare DNS from Cloudflare worker not returning the DNS result

I have a Cloudflare (CF) worker that I want to have make a few DNS requests using the CF DNS (https://developers.cloudflare.com/1.1.1.1/dns-over-https/json-format/).
So a pretty basic worker:
/**
 * readRequestBody reads in the incoming request body.
 * Use await readRequestBody(..) in an async function to get the string.
 * @param {Request} request the incoming request to read from
 */
async function readRequestBody(request) {
    const { headers } = request
    const contentType = headers.get('content-type')
    if (contentType.includes('application/json')) {
        const body = await request.json()
        return JSON.stringify(body)
    }
    return ''
}

/**
 * Respond to the request
 * @param {Request} request
 */
async function handleRequest(request) {
    let reqBody = await readRequestBody(request)
    var jsonTlds = JSON.parse(reqBody);
    const fetchInit = {
        method: 'GET',
    }
    let promises = []
    for (const tld of jsonTlds.tlds) {
        // Dummy request until I can work out why I am not getting the response of the DNS query
        var requestStr = 'https://cloudflare-dns.com/dns-query?ct=application/dns-json&name=example.com&type=A'
        let promise = fetch(requestStr, fetchInit)
        promises.push(promise)
    }
    try {
        let results = await Promise.all(promises)
        return new Response(JSON.stringify(results), { status: 200 })
    } catch (err) {
        return new Response(JSON.stringify(err), { status: 500 })
    }
}

addEventListener('fetch', event => {
    event.respondWith(handleRequest(event.request))
})
I have just hardcoded the DNS query at the moment to:
https://cloudflare-dns.com/dns-query?ct=application/dns-json&name=example.com&type=A
and I would expect that the JSON result I would get is:
{
    "Status": 0,
    "TC": false,
    "RD": true,
    "RA": true,
    "AD": true,
    "CD": false,
    "Question": [
        {
            "name": "example.com.",
            "type": 1
        }
    ],
    "Answer": [
        {
            "name": "example.com.",
            "type": 1,
            "TTL": 9540,
            "data": "93.184.216.34"
        }
    ]
}
However, instead, in results I get what appears to be the outcome of the websocket established as part of the fetch() (assuming I go around the loop once):
[
    {
        "webSocket": null,
        "url": "https://cloudflare-dns.com/dns-query?ct=application/dns-json&name=example.com&type=A",
        "redirected": false,
        "ok": true,
        "headers": {},
        "statusText": "OK",
        "status": 200,
        "bodyUsed": false,
        "body": {
            "locked": false
        }
    }
]
So my question is, what am I doing wrong here such that I am not getting the DNS JSON response from the 1.1.1.1 API?
fetch() returns a promise for a Response object, which contains the response status, headers, and the body stream. This object is what you're seeing in your "results". In order to read the response body, you must make further calls.
Try defining a function like this:
async function fetchJsonBody(req, init) {
    let response = await fetch(req, init);
    if (!response.ok) {
        // Did not return a 2xx status; throw an error.
        throw new Error(response.status + " " + response.statusText);
    }
    // OK, now we can read the body and parse it as JSON.
    return await response.json();
}
Now you can change:
let promise = fetch(requestStr, fetchInit)
to:
let promise = fetchJsonBody(requestStr, fetchInit)
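With that change, Promise.all resolves to the parsed DNS answers, so the final response can return them directly (a small sketch reusing the variables from the question):
// Each element of `results` is now the parsed DNS JSON
// (Status, Question, Answer, ...) rather than a raw Response object.
let results = await Promise.all(promises)
return new Response(JSON.stringify(results), {
    status: 200,
    headers: { 'Content-Type': 'application/json' }
})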

Write a file keeping tabs and EOL

I am trying to write the content of a string to a file in Node.js.
I have some RAML files and I am able to join them. If I print the variable to the console, I see it well parsed, but as soon as I save it to a file, the file just contains one single line:
var raml = require('raml-parser');
var fs = require('fs');
var path = require('path');
var os = require('os')

path.join(__dirname, './')
raml.loadFile('schema.raml').then(function(data) {
    console.log(data);
    var filePath = "schema.raml";
    fs.unlinkSync(filePath);
    fs.writeFile("./new.raml", JSON.stringify(data).replace('/\n', os.EOL), function(err) {
        if (err) {
            return console.log(err);
        }
        console.log("The file was saved!");
    });
}, function(error) {
    console.log('Error parsing: ' + error);
});
I added a replace to change all "\n" to the OS EOL. If I delete that, the file will contain "\n" at the end of each line.
On the console, this is the output:
{ title: 'RAML Flattener',
  baseUri: 'http://github.com/joeledwards/node-flat-raml',
  version: '1',
  mediaType: 'application/json',
  protocols: [ 'HTTP' ],
  resources:
   [ { relativeUri: '/base',
       methods: [Object],
       resources: [Object],
       relativeUriPathSegments: [Object] } ] }
data is a JavaScript object; how it is displayed when you console.log() it doesn't have much to do with how it will end up in the file you are writing.
The problem is that you are using JSON.stringify(), which, by default, will not pretty-print the output string.
Instead, try this:
JSON.stringify(data, null, 2)
This will make your output look like this:
{
  "title": "RAML Flattener",
  "baseUri": "http://github.com/joeledwards/node-flat-raml",
  "version": "1",
  "mediaType": "application/json",
  "protocols": [
    "HTTP"
  ],
  "resources": [
    {
      "relativeUri": "/base",
      "methods": { ... },
      "resources": { ... },
      "relativeUriPathSegments": { ... }
    }
  ]
}
You may or may not need to call .replace() on its output. If you do, use a regular expression with the global flag (the one you're using isn't valid because '/\n' is a plain string, and without /g only the first newline would be replaced):
.replace(/\n/g, os.EOL)
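Putting both together, the write call could look like this (a small sketch based on the code above):
// Pretty-print with 2-space indentation, then normalize newlines to the OS EOL.
var output = JSON.stringify(data, null, 2).replace(/\n/g, os.EOL);
fs.writeFile("./new.raml", output, function(err) {
    if (err) {
        return console.log(err);
    }
    console.log("The file was saved!");
});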
