I'm getting started writing a Lambda function with Node and Puppeteer, using the Serverless Framework.
I've been trying to follow the directions at https://github.com/alixaxel/chrome-aws-lambda. My function works as expected locally when I run:
$ sls invoke local -f hello
However, when I run:
$ sls invoke -f hello
I get:
{
"errorType": "Error",
"errorMessage": "Cannot find module 'chr'\nRequire stack:\n- /var/task/index.js\n- /var/task/handler.js\n- /var/task/s_hello.js\n- /var/runtime/UserFunction.js\n- /var/runtime/index.js",
"trace": [
"Error: Cannot find module 'chr'",
"Require stack:",
"- /var/task/index.js",
"- /var/task/handler.js",
"- /var/task/s_hello.js",
"- /var/runtime/UserFunction.js",
"- /var/runtime/index.js",
" at Function.Module._resolveFilename (internal/modules/cjs/loader.js:957:15)",
" at Module._require.o.require (/var/task/serverless_sdk/index.js:9:72748)",
" at require (internal/modules/cjs/helpers.js:77:18)",
" at Object.main (/var/task/index.js:47:26)",
" at module.exports.hello (/var/task/handler.js:6:25)",
" at Runtime.handler (/var/task/serverless_sdk/index.js:9:137040)",
" at Runtime.handleOnce (/var/runtime/Runtime.js:66:25)"
]
}
How can I get this working?
My handler.js contains:
'use strict';
var index = require('./index.js');
module.exports.hello = async event => {
// var t = async event => {
var res = await index.main();
console.log('hello');
console.log(res);
console.log('IN HANDLER');
return {
statusCode: 200,
body: JSON.stringify(
{
message: 'main function executed!',
input: event,
......
My index.js contains:
async function main(event, context, callback) {
const os = require('os');
let result = null;
let browser = null;
if (os.platform() === 'win32') {
const puppeteer= require('puppeteer-core');
browser = await puppeteer.launch({
executablePath: 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe',
headless: false,
ignoreHTTPSErrors:true
})
} else {
// launch a headless browser
const chromeLambda = require('chrome-aws-lambda');
console.log(os.platform());
console.log('lambda');
browser = await chromeLambda.puppeteer.launch({
args: chromeLambda.args,
executablePath: await chromeLambda.executablePath,
defaultViewport: chromeLambda.defaultViewport,
headless:true
});
var page = await browser.newPage();
........
};
module.exports.main = main;
package.json:
"license": "ISC",
"dependencies": {
"chrome-aws-lambda": "^3.1.1",
"puppeteer-core": "^3.1.0"
}
serverless.yml:
# Welcome to Serverless!
#
.......
# Happy Coding!
plugins:
- serverless-offline
service: xxxxx
# app and org for use with dashboard.serverless.com
app: yyyyy
org: xxxx
# You can pin your service to only deploy with a specific Serverless version
# Check out our docs for more details
# frameworkVersion: "=X.X.X"
provider:
name: aws
runtime: nodejs12.x
region: us-east-1
# here we put the layers we want to use
layers:
# Google Chrome for AWS Lambda as a layer
# Make sure you use the latest version depending on the region
# https://github.com/shelfio/chrome-aws-lambda-layer
- arn:aws:lambda:us-east-1:764866452798:layer:chrome-aws-lambda:10
- arn:aws:lambda:us-east-1:155754363046:layer:chr:1
.....
functions:
hello:
handler: handler.hello
# main:
# handler: handler.main
# The following are a few example events you can configure
# NOTE: Please make sure to change your handler code to work with those events
# Check the event documentation for details
events:
- http:
path: hello/get
method: get
.....
I am trying to run a simple query locally in Node.js using Serverless, with the eventual goal of deploying an Apollo Server API to AWS Lambda.
However, I cannot get anywhere near the deployment step, because Node appears unable to run a single instance of Apollo Server/Serverless locally in the first place, due to the errors explained below:
Steps I have taken:
git clone the example API and follow all instructions here: https://github.com/fullstack-hy2020/rate-repository-api (I ensured everything works perfectly)
Follow all instructions in the Apollo GraphQL docs up to "Running Server Locally": https://www.apollographql.com/docs/apollo-server/deployment/lambda/ - then run the following command: serverless invoke local -f graphql -p query.json
ERROR - Cannot use import statement outside a module. Solution - add "type": "module" to package.json, then run: serverless invoke local -f graphql -p query.json
ERROR - Cannot find module 'C:\Users\Julius\Documents\Web Development\rate-repository-api\src\utils\authService' imported from C:\Users\Julius\Documents\Web Development\rate-repository-api\src\apolloServer.js. Solution - install webpack as per the solution here: Serverless does not recognise subdirectories in Node, then run serverless invoke local -f graphql -p query.json
ERROR - Error [ERR_MODULE_NOT_FOUND]: Cannot find module 'C:\Users\Julius\Documents\Web Development\rate-repository-api\src\utils\authService' imported from C:\Users\Julius\Documents\Web Development\rate-repository-api\src\apolloServer.js
I do not know how to proceed from here, I am hoping that someone can point me in the right direction.
File Structure:
apolloServer.js:
import { ApolloServer, toApolloError, ApolloError } from '@apollo/server';
import { ValidationError } from 'yup';
import { startServerAndCreateLambdaHandler } from '@as-integrations/aws-lambda';
import AuthService from './utils/authService';
import createDataLoaders from './utils/createDataLoaders';
import logger from './utils/logger';
import { resolvers, typeDefs } from './graphql/schema';
const apolloErrorFormatter = (error) => {
logger.error(error);
const { originalError } = error;
const isGraphQLError = !(originalError instanceof Error);
let normalizedError = new ApolloError(
'Something went wrong',
'INTERNAL_SERVER_ERROR',
);
if (originalError instanceof ValidationError) {
normalizedError = toApolloError(error, 'BAD_USER_INPUT');
} else if (error.originalError instanceof ApolloError || isGraphQLError) {
normalizedError = error;
}
return normalizedError;
};
const createApolloServer = () => {
return new ApolloServer({
resolvers,
typeDefs,
formatError: apolloErrorFormatter,
context: ({ req }) => {
const authorization = req.headers.authorization;
const accessToken = authorization
? authorization.split(' ')[1]
: undefined;
const dataLoaders = createDataLoaders();
return {
authService: new AuthService({
accessToken,
dataLoaders,
}),
dataLoaders,
};
},
});
};
export const graphqlHandler = startServerAndCreateLambdaHandler(createApolloServer());
export default createApolloServer;
Serverless.yml:
service: apollo-lambda
provider:
name: aws
runtime: nodejs16.x
httpApi:
cors: true
functions:
graphql:
# Make sure your file path is correct!
# (e.g., if your file is in the root folder use server.graphqlHandler )
# The format is: <FILENAME>.<HANDLER>
handler: src/apolloServer.graphqlHandler
events:
- httpApi:
path: /
method: POST
- httpApi:
path: /
method: GET
custom:
webpack:
packager: 'npm'
webpackConfig: 'webpack.config.js' # Name of webpack configuration file
includeModules:
forceInclude:
- pg
Webpack.config.js
const path = require('path');
module.exports = {
mode: 'development',
entry: './src/index.js',
output: {
path: path.resolve(__dirname, 'build'),
filename: 'foo.bundle.js',
},
};
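A note on the ERR_MODULE_NOT_FOUND above: once "type": "module" is set in package.json, Node's ESM resolver no longer infers file extensions, so a relative import like './utils/authService' fails even though bundlers and CommonJS require would resolve it. A minimal sketch of the explicit form (whether this alone clears the error is an assumption, since the webpack step may change resolution again):
import AuthService from './utils/authService.js';
import createDataLoaders from './utils/createDataLoaders.js';
import logger from './utils/logger.js';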
Right now, what I'm trying to do is make a query to the Redis service every time a request comes in. The problem is that with a basic configuration it does not work. The error is the following:
INFO Redis Client Error Error: connect ECONNREFUSED 127.0.0.1:6379
    at TCPConnectWrap.afterConnect [as oncomplete] (node:net) { port: 6379, address: '127.0.0.1' }
As usual, I have redis-server running with its corresponding credentials, listening on 127.0.0.1:6379. I know that AWS SAM runs in a container, and the issue is probably a network-configuration problem, but the only relevant option the AWS SAM CLI gives me is --host. How could I fix this?
My code is the following, although it is not very relevant:
import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
import { createClient } from 'redis';
import processData from './src/lambda-data-dictionary-read/core/service/controllers/processData';
export async function lambdaHandler(event: APIGatewayProxyEvent): Promise<APIGatewayProxyResult> {
const body: any = await processData(event.queryStringParameters);
const url = process.env.REDIS_URL || 'redis://127.0.0.1:6379';
const client = createClient({
url,
});
client.on('error', (err) => console.log('Redis Client Error', err));
await client.connect();
await client.set('key', 'value');
const value = await client.get('key');
console.log('----', value, '----');
const response: APIGatewayProxyResult = {
statusCode: 200,
body,
};
if (body.error) {
return {
statusCode: 404,
body,
};
}
return response;
}
My template.yaml:
Transform: AWS::Serverless-2016-10-31
Description: >
lambda-data-dictionary-read
Sample SAM Template for lambda-data-dictionary-read
Globals:
Function:
Timeout: 0
Resources:
IndexFunction:
Type: AWS::Serverless::Function
Properties:
CodeUri: app/
Handler: index.lambdaHandler
Runtime: nodejs16.x
Timeout: 10
Architectures:
- x86_64
Environment:
Variables:
ENV: !Ref develope
REDIS_URL: !Ref redis://127.0.0.1:6379
Events:
Index:
Type: Api
Properties:
Path: /api/lambda-data-dictionary-read
Method: get
Metadata:
BuildMethod: esbuild
BuildProperties:
Minify: true
Target: 'es2020'
Sourcemap: true
UseNpmCi: true
I'm using:
"scripts": {
"dev": "sam build --cached --beta-features && sam local start-api --port 8080 --host 127.0.0.1"
}
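A note on the connection error above: inside the container that sam local start-api launches, 127.0.0.1 refers to the container itself, not to the machine where redis-server is listening, so the client has nothing to connect to. A sketch of one workaround, assuming Docker Desktop (which exposes the host to containers as host.docker.internal) and an env.json file next to template.yaml; the file name and the override value are assumptions, not part of the original setup:
{
  "IndexFunction": {
    "REDIS_URL": "redis://host.docker.internal:6379"
  }
}
The file is then passed to SAM with the --env-vars flag, for example:
sam build --cached --beta-features && sam local start-api --port 8080 --host 127.0.0.1 --env-vars env.json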
I'm getting started writing a Lambda function with Node and Puppeteer, using the Serverless Framework.
I've been trying to follow the directions at https://github.com/alixaxel/chrome-aws-lambda. My function works as expected locally with:
$ sls invoke local -f hello
However, when I run:
$ sls invoke -f hello
I get:
{
"errorType": "Error",
"errorMessage": "spawn ETXTBSY",
"trace": [
"Error: spawn ETXTBSY",
" at ChildProcess.spawn (internal/child_process.js:407:11)",
" at Object.spawn (child_process.js:548:9)",
" at Launcher.launch (/opt/nodejs/node_modules/puppeteer-core/lib/Launcher.js:132:40)",
" at async Object.main (/var/task/index.js:50:15)",
" at async module.exports.hello (/var/task/handler.js:6:13)"
]
}
How can I get this working?
My handler.js contains:
'use strict';
var index = require('./index.js');
module.exports.hello = async event => {
// var t = async event => {
var res = await index.main();
console.log('hello');
console.log(res);
console.log('IN HANDLER');
return {
statusCode: 200,
body: JSON.stringify(
{
message: 'main function executed!',
input: event,
......
My index.js contains:
async function main(event, context, callback) {
const os = require('os');
let result = null;
let browser = null;
if (os.platform() === 'win32') {
const puppeteer= require('puppeteer-core');
browser = await puppeteer.launch({
executablePath: 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe',
headless: false,
ignoreHTTPSErrors:true
})
} else {
// launch a headless browser
const chromeLambda = require('chrome-aws-lambda');
console.log(os.platform());
console.log('lambda');
browser = await chromeLambda.puppeteer.launch({
args: chromeLambda.args,
executablePath: await chromeLambda.executablePath,
defaultViewport: chromeLambda.defaultViewport,
headless:true
});
var page = await browser.newPage();
........
};
module.exports.main = main;
package.json:
"license": "ISC",
"dependencies": {
"chrome-aws-lambda": "^3.1.1",
"puppeteer-core": "^3.1.0"
}
serverless.yml:
# Welcome to Serverless!
#
.......
# Happy Coding!
plugins:
- serverless-offline
service: xxxxx
# app and org for use with dashboard.serverless.com
app: yyyyy
org: xxxx
# You can pin your service to only deploy with a specific Serverless version
# Check out our docs for more details
# frameworkVersion: "=X.X.X"
provider:
name: aws
runtime: nodejs12.x
region: us-east-1
# here we put the layers we want to use
layers:
# Google Chrome for AWS Lambda as a layer
# Make sure you use the latest version depending on the region
# https://github.com/shelfio/chrome-aws-lambda-layer
- arn:aws:lambda:us-east-1:764866452798:layer:chrome-aws-lambda:10
# function parameters
# you can overwrite defaults here
# stage: dev
# region: us-east-1
.....
functions:
hello:
handler: handler.hello
# main:
# handler: handler.main
# The following are a few example events you can configure
# NOTE: Please make sure to change your handler code to work with those events
# Check the event documentation for details
events:
- http:
path: hello/get
method: get
.....
You can remove this error by using the command below:
$ npm install --no-bin-links
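For context, --no-bin-links tells npm to skip creating the symlinks it normally places in node_modules/.bin, which is the workaround this answer relies on for the ETXTBSY raised when Chromium is spawned. One way to apply it, as a sketch (the exact sequence is an assumption):
$ rm -rf node_modules
$ npm install --no-bin-links
$ sls deploy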
I have created a simple application that accepts a URL and converts it to a PDF. It stores the resulting PDF in an S3 bucket and returns the URL of the PDF. It uses Chrome (running headless) to convert the URL to a PDF. I used the Serverless Framework, AWS Lambda, and the chrome-aws-lambda npm package. When I execute this setup locally using serverless, it all works great: I can use Postman to make a request with a URL, and it returns the URL of the resulting PDF. When I deploy this setup to AWS Lambda, it returns a 502 internal server error response. When I look at the AWS logs for my application, I see the following:
{
"errorType": "Error",
"errorMessage": "ENOENT: no such file or directory, open '//../bin/chromium.br'",
"code": "ENOENT",
"errno": -2,
"syscall": "open",
"path": "//../bin/chromium.br",
"stack": [
"Error: ENOENT: no such file or directory, open '//../bin/chromium.br'"
]
}
Here is the main handler for the application:
import AWS from 'aws-sdk'
import middy from 'middy'
import chromium from 'chrome-aws-lambda'
import {
cors,
doNotWaitForEmptyEventLoop,
httpHeaderNormalizer,
httpErrorHandler
} from 'middy/middlewares'
const handler = async (event) => {
// Request body is passed in as a JSON encoded string in 'event.body'
const data = JSON.parse(event.body)
const executablePath = event.isOffline
? './node_modules/puppeteer/.local-chromium/linux-706915/chrome-linux/chrome'
: await chromium.executablePath
const browser = await chromium.puppeteer.launch({
args: chromium.args,
defaultViewport: chromium.defaultViewport,
executablePath: executablePath,
headless: true
})
const page = await browser.newPage()
await page.goto(data.url, {
waitUntil: ['networkidle0', 'load', 'domcontentloaded']
})
const pdfStream = await page.pdf()
var upload = new AWS.S3.ManagedUpload({
params: {
Bucket: 'bucketname',
Body: pdfStream,
Key: `${Date.now()}-result.pdf`,
ACL: 'public-read'
}
})
var promise = upload.promise()
return promise.then(
function (data) {
console.log(data.Location)
return {
statusCode: 200,
body: data.Location
}
},
function (err) {
console.log('Error', err)
return {
statusCode: 500,
body: err
}
}
)
}
export const generate = middy(handler)
.use(httpHeaderNormalizer())
.use(cors())
.use(doNotWaitForEmptyEventLoop())
.use(httpErrorHandler())
Here is the serverless framework configuration file:
service: print-pdf
package:
individually: true
provider:
name: aws
runtime: nodejs12.x
region: us-east-2
stage: prod
plugins:
- serverless-bundle # Package our functions with Webpack
- serverless-offline
# Create our resources with separate CloudFormation templates
resources:
# API Gateway Errors
- ${file(resources/api-gateway-errors.yml)}
# S3
- ${file(resources/s3-bucket.yml)}
# 'iamRoleStatements' defines the permission policy for the Lambda function.
# In this case Lambda functions are granted with permissions to access S3.
iamRoleStatements:
- Effect: Allow
Action:
- s3:GetObject
- s3:PutObject
Resource: "arn:aws:s3:us-east-2:*:*"
functions:
give-me-the-pdf:
handler: handler.generate
events:
- http:
path: pdf
method: post
cors: true
authorizer: aws_iam
Here is the package.json:
{
"name": "print-pdf",
"version": "1.0.0",
"main": "handler.js",
"author": "Dean Andreakis <dean#deanware.com>",
"license": "MIT",
"private": true,
"scripts": {
"test": "serverless-bundle test"
},
"dependencies": {
"chrome-aws-lambda": "^1.20.4",
"middy": "^0.28.4",
"puppeteer-core": "^1.20.0"
},
"devDependencies": {
"aws-sdk": "^2.597.0",
"jest": "^24.9.0",
"puppeteer": "^2.0.0",
"serverless": ">=1.48.1",
"serverless-bundle": "^1.2.5",
"serverless-dotenv-plugin": "^2.1.1",
"serverless-offline": "^5.3.3"
}
}
Why is Chrome not found when deployed to AWS versus running locally?
You could use serverless-webpack and configure chrome-aws-lambda as an external.
There's a similar issue here.
Add this to your webpack config:
externals: ['aws-sdk', 'chrome-aws-lambda']
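For reference, a minimal webpack.config.js sketch showing where that externals entry sits; the mode, entry point, and output settings are assumptions, not taken from the question:
const path = require('path');

module.exports = {
  mode: 'production',
  target: 'node',
  entry: './handler.js',
  // kept out of the bundle so they resolve from node_modules (or the Lambda
  // layer) at runtime instead of being inlined by webpack
  externals: ['aws-sdk', 'chrome-aws-lambda'],
  output: {
    libraryTarget: 'commonjs2',
    path: path.resolve(__dirname, '.webpack'),
    filename: 'handler.js',
  },
};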
serverless-bundle only includes the JS code that you use in your handler and strips everything else to minimize your bundle. That means the chrome binaries are excluded.
To include those binaries, add the following to your serverless.yml:
custom:
bundle:
copyFiles:
- from: 'node_modules/chrome-aws-lambda/bin/*'
to: './'
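With those files copied, the Brotli-compressed Chromium binary (bin/chromium.br) that the ENOENT above complains about ships with the deployment package, so chrome-aws-lambda can find and extract it at runtime instead of failing to open it.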
Not sure what is happening with my code.
It never executes my updated code locally. If I update my code and run sls invoke local, it still runs the old code. Also, it does not send out the SES email.
For some reason, it always executes the code that was already deployed to AWS rather than my local code. This is confusing.
Below is my serverless.yml:
service: lambda-test
provider:
name: aws
runtime: nodejs8.10
stage: dev
region: ap-southeast-1
iamRoleStatements:
- Effect: Allow
Action:
- lambda:InvokeFunction
- lambda:InvokeAsync
Resource: "*"
functions:
hello:
handler: handler.hello
environment:
events:
- http:
path: /
method: get
trulyYours:
handler: handler.trulyYours
environment:
events:
- http:
path: /trulyYours
method: get
sendRegistrationEmail:
handler: handler.sendRegistrationEmail
environment:
events:
- http:
path: /sendRegistrationEmail
method: get
plugins:
- serverless-offline
I am not sure whether I should continue to edit code in the AWS web console itself or try setting up a local dev environment. I have been trying for the last two days, but it is turning out to be a waste of time.
'use strict';
var aws = require("aws-sdk");
var nodeMailer = require("nodemailer");
//aws.config.loadFromPath('aws_config.json');
var ses = new aws.SES();
var s3 = new aws.S3();
module.exports.hello = async (event, context) => {
console.log("executing lambda function 'hello' XX...");
// return {
// statusCode: 200,
// body: JSON.stringify({
// message: 'v1.0',
// }),
// };
// Use this code if you don't use the http event with the LAMBDA-PROXY integration
// return { message: 'Go Serverless v1.0! Your function executed successfully!', event };
};
//
exports.trulyYours = async (event, context, callback) => {
console.log('Lambda trulyYours Received event:', JSON.stringify(event, null, 2));
//context.succeed('Hello from' + event.name);
return {
statusCode: 200,
body: JSON.stringify({
message: 'hello from trulyYours' + event.name,
}),
};
}
/*function trulyYours (foo, bar) {
// MyLambdaFunction logic here
}*/
module.exports.sendRegistrationEmail = (event, context) => {
console.log("Executing sendRegistrationEmail...");
var lambda = new aws.Lambda({
region: 'ap-southeast-1' //change to your region
});
var params = {
FunctionName: 'lambda-test-dev-trulyYours', // the lambda function we are going to invoke
InvocationType: 'RequestResponse',
LogType: 'Tail',
Payload: '{ "name" : "Alexa" }'
};
lambda.invoke(params, function (err, data) {
if (err) {
context.fail(err);
} else if (data.Payload) {
context.succeed('Lambda trulyYours ' + data.Payload);
}
});
//
// return {
// statusCode: 200,
// body: JSON.stringify({
// message: 'sent email successfully',
// }),
// };
// Use this code if you don't use the http event with the LAMBDA-PROXY integration
// return { message: 'Go Serverless v1.0! Your function executed successfully!', event };
};
Try creating a serverless-local.yml, for example:
config:
region: eu-west-1
environment:
VAR: local
database:
hostname: localhost
port: 8000
username: root
password: toor
database: db
url: http://localhost:3000
and in your serverless.yml, under provider, add this line:
stage: ${opt:stage, 'dev'}
then in your terminal run this CLI command:
sls invoke local -f functionName -s local
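The answer above leaves out how serverless.yml actually reads the per-stage file; one common way to wire it in, as a sketch (it assumes the file sits next to serverless.yml and that each stage has a matching serverless-<stage>.yml):
custom: ${file(./serverless-${opt:stage, 'dev'}.yml)}

provider:
  stage: ${opt:stage, 'dev'}
  environment:
    VAR: ${self:custom.config.environment.VAR}
With that in place, sls invoke local -f functionName -s local picks up the values from serverless-local.yml, while omitting -s falls back to the dev defaults.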