Transforming images in an Object Storage bucket using Serverless Functions and Triggers - Deployment
- serverless
- triggers
- sqs
- queue
Introduction
In the previous tutorial, you learned how to set up your environment to use Serverless Functions with triggers to resize images from a bucket and push them to another one.
Before you start
To complete the actions presented below, you must have:
- Owner status or IAM permissions allowing you to perform actions in the intended Organization
- A Scaleway Project dedicated to this tutorial
- A source bucket containing images
- An empty destination bucket
- A Scaleway queue with credentials
You will now learn how to deploy Serverless Functions and connect them using triggers.
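If you want to double-check the prerequisites first, you can list the contents of your source bucket from the command line. The sketch below is an assumption-based example using the AWS CLI pointed at Scaleway's S3-compatible endpoint for nl-ams; the bucket name is the placeholder used throughout this tutorial.

```sh
# Hypothetical check: list the images in the source bucket
# (requires the AWS CLI configured with your Scaleway access key and secret key)
aws s3 ls s3://source-images-<YOUR_NAME> --endpoint-url https://s3.nl-ams.scw.cloud
```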
Creating the BucketScan function
- Create a folder named FunctionsTutorial on your desktop and access it.
- Create a folder named BucketScan in this folder and access it.
- Create and open the file BucketScan.js in a code editor of your choice.
- Copy and paste the following code in the editor:
console.log("init");const { S3Client, ListObjectsV2Command } = require("@aws-sdk/client-s3");const { SQSClient, SendMessageCommand } = require("@aws-sdk/client-sqs");// Get info from env variablesconst S3_ACCESS_KEY = process.env.S3_ACCESS_KEY;const S3_ACCESS_KEY_ID = process.env.S3_ACCESS_KEY_ID;const SOURCE_BUCKET = process.env.SOURCE_BUCKET;const S3_REGION = process.env.S3_REGION;const SQS_ACCESS_KEY = process.env.SQS_ACCESS_KEY;const SQS_ACCESS_KEY_ID = process.env.SQS_ACCESS_KEY_ID;const QUEUE_URL = process.env.QUEUE_URL;const SQS_ENDPOINT = process.env.SQS_ENDPOINT;const S3_ENDPOINT = `https://s3.${S3_REGION}.scw.cloud`;// Create Object Storage service objectconst s3Client = new S3Client({credentials: {accessKeyId: S3_ACCESS_KEY_ID,secretAccessKey: S3_ACCESS_KEY,},endpoint: S3_ENDPOINT,region: S3_REGION});// Configure parameters for the listObjectsV2 Commandconst input = {"Bucket": SOURCE_BUCKET};// Create SQS servicevar sqsClient = new SQSClient({credentials: {accessKeyId: SQS_ACCESS_KEY_ID,secretAccessKey: SQS_ACCESS_KEY},region: "par",endpoint: SQS_ENDPOINT,})console.log("init Ok")exports.handle = async (event, context, callback) => {const s3ListCommand = new ListObjectsV2Command(input);const s3List = await s3Client.send(s3ListCommand);const contents = s3List.Contents;const total = contents.length;contents.forEach(async function (content) {// Infer the image type from the file suffix.const srcKey = content.Key;const typeMatch = srcKey.match(/\.([^.]*)$/);if (!typeMatch) {console.error(`${srcKey}: Could not determine the image type.`)}const imageType = typeMatch[1].toLowerCase();// Check that the image type is supportedif (["jpeg", "jpg", "png"].includes(imageType) !== true) {console.error(`${srcKey}: Unsupported image type: ${imageType}`)}else {try {var sendMessageCommand = new SendMessageCommand({QueueUrl: QUEUE_URL,MessageBody: srcKey,});var sendMessage = await sqsClient.send(sendMessageCommand);console.log(sendMessage.MessageId);} catch (error) {console.error(error);}}});return {statusCode: 200,body: JSON.stringify({status: "Done",message: `All images from the bucket have been processed over ${total} files in the bucket`,}),headers: {"Content-Type": "application/json",},};};/* This is used to test locally and will not be executed on Scaleway Functions */if (process.env.NODE_ENV === 'test') {import("@scaleway/serverless-functions").then(scw_fnc_node => {scw_fnc_node.serveHandler(exports.handle, 8080);});} -
- Save the file and exit the editor.
- Open a terminal in the BucketScan folder and run the following command to initialize a new NPM project and create a default package.json file:
```sh
npm init --yes
```
- Run the following command to download the required dependencies and packages (the @scaleway/serverless-functions package is only used for the local test sketch after this list):
```sh
npm i @aws-sdk/client-s3 @aws-sdk/client-sqs @scaleway/serverless-functions
```
- Run the following command in the same terminal to zip the content of the folder:
```sh
zip -r BucketScan.zip .
```
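Optionally, you can run the function locally before deploying it: the last block of BucketScan.js starts a local server on port 8080 when NODE_ENV is set to test. A minimal sketch, assuming you export the same environment variables and secrets that the function expects on Scaleway (all values below are placeholders to replace with your own):

```sh
# Placeholder values: replace them with your own credentials and resources
export S3_ACCESS_KEY_ID=<ACCESS_KEY_ID> S3_ACCESS_KEY=<SECRET_KEY>
export SOURCE_BUCKET=source-images-<YOUR_NAME> S3_REGION=nl-ams
export SQS_ACCESS_KEY_ID=<QUEUES_ACCESS_KEY_ID> SQS_ACCESS_KEY=<QUEUES_SECRET_KEY>
export QUEUE_URL=<QUEUE_URL> SQS_ENDPOINT=<SQS_ENDPOINT>

# Start the handler locally on port 8080, then call it once
NODE_ENV=test node BucketScan.js &
curl http://localhost:8080
```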
Creating the ImageTransform function
- Create a folder named ImageTransform in the FunctionsTutorial folder and access it.
- Create an ImageTransform.js file and open it in a code editor of your choice.
- Copy and paste the following code in the editor:
```js
// Add dependencies
console.log("init");
const { S3Client, PutObjectCommand, GetObjectCommand } = require("@aws-sdk/client-s3");
const sharp = require("sharp");

// Get connection information from secret environment variables
const S3_ACCESS_KEY = process.env.S3_ACCESS_KEY;
const S3_ACCESS_KEY_ID = process.env.S3_ACCESS_KEY_ID;
const SOURCE_BUCKET = process.env.SOURCE_BUCKET;
const DESTINATION_BUCKET = process.env.DESTINATION_BUCKET;
const S3_REGION = process.env.S3_REGION;
const RESIZED_WIDTH = process.env.RESIZED_WIDTH;
const S3_ENDPOINT = `https://s3.${S3_REGION}.scw.cloud`;

// Fall back to a default width if RESIZED_WIDTH is missing or out of range
let width = parseInt(RESIZED_WIDTH, 10);
if (isNaN(width) || width < 1 || width > 1000) {
  width = 200;
}

// Create Object Storage service object
const s3Client = new S3Client({
  credentials: {
    accessKeyId: S3_ACCESS_KEY_ID,
    secretAccessKey: S3_ACCESS_KEY,
  },
  endpoint: S3_ENDPOINT,
  region: S3_REGION,
});

// Handler
exports.handle = async (event, context, callback) => {
  // Object key may have spaces or unicode non-ASCII characters.
  const srcKey = event.body;
  console.log(srcKey);
  const dstKey = "resized-" + srcKey;

  // Infer the image type from the file suffix.
  const typeMatch = srcKey.match(/\.([^.]*)$/);
  if (!typeMatch) {
    console.error(`${srcKey}: Could not determine the image type.`);
    return {
      statusCode: 500,
      body: JSON.stringify({
        status: "Error",
        message: `${srcKey}: Could not determine the image type.`,
      }),
      headers: {
        "Content-Type": "application/json",
      },
    };
  }

  // Check that the image type is supported
  const imageType = typeMatch[1].toLowerCase();
  if (["jpeg", "jpg", "png"].includes(imageType) !== true) {
    console.error(`Unsupported image type: ${imageType}`);
    return {
      statusCode: 500,
      body: JSON.stringify({
        status: "Error",
        message: `${srcKey}: Unsupported image type: ${imageType}`,
      }),
      headers: {
        "Content-Type": "application/json",
      },
    };
  }

  // Download the image from the Object Storage source bucket.
  let getObjectResult;
  try {
    const input = {
      Bucket: SOURCE_BUCKET,
      Key: srcKey,
    };
    const getObjectCommand = new GetObjectCommand(input);
    getObjectResult = await s3Client.send(getObjectCommand);
  } catch (error) {
    console.error(error);
    return error;
  }

  // Use the sharp module to resize the image.
  let sharpImg;
  try {
    sharpImg = sharp().resize({ width, withoutEnlargement: true });
    getObjectResult.Body.pipe(sharpImg);
  } catch (error) {
    console.error(error);
    return error;
  }

  // Upload the resized image as a Buffer to the destination bucket
  try {
    const destinput = {
      Bucket: DESTINATION_BUCKET,
      Key: dstKey,
      Body: await sharpImg.toBuffer(),
      ContentType: "image",
    };
    const putObjectCommand = new PutObjectCommand(destinput);
    const putimage = await s3Client.send(putObjectCommand);
    console.log(putimage.VersionId);
  } catch (error) {
    console.log(error);
    return error;
  }

  console.log(`Successfully resized ${SOURCE_BUCKET}/${srcKey} and uploaded it to ${DESTINATION_BUCKET}/${dstKey}`);

  return {
    statusCode: 201,
    body: JSON.stringify({
      status: "ok",
      message: `Image: ${srcKey} has successfully been resized and pushed to the bucket ${DESTINATION_BUCKET}`,
    }),
    headers: {
      "Content-Type": "application/json",
    },
  };
};

/* This is used to test locally and will not be executed on Scaleway Functions */
if (process.env.NODE_ENV === "test") {
  import("@scaleway/serverless-functions").then((scw_fnc_node) => {
    scw_fnc_node.serveHandler(exports.handle, 8081);
  });
}
```
- Save the file and exit the code editor.
- Open a terminal in the ImageTransform folder and run the following command to initialize a new NPM project and create a default package.json file:
```sh
npm init --yes
```
- Run the following commands to download the required dependencies and packages, forcing a Linux-compatible build of sharp (a local test sketch follows this list):
```sh
npm i @aws-sdk/client-s3 @aws-sdk/client-sqs @scaleway/serverless-functions
npm install sharp --platform=linuxmusl --arch=x64 --ignore-script=false
```
- Run the following command in the same terminal to zip the content of the folder:
```sh
zip -r ImageTransform.zip .
```
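As with BucketScan, ImageTransform.js can be exercised locally: with NODE_ENV set to test it serves the handler on port 8081 and expects the key of an existing image as the raw request body. A minimal sketch, assuming placeholder credentials and an image named <IMAGE_NAME>.jpg in your source bucket:

```sh
# Placeholder values: replace them with your own credentials and resources
export S3_ACCESS_KEY_ID=<ACCESS_KEY_ID> S3_ACCESS_KEY=<SECRET_KEY>
export SOURCE_BUCKET=source-images-<YOUR_NAME> DESTINATION_BUCKET=dest-images-<YOUR_NAME>
export S3_REGION=nl-ams RESIZED_WIDTH=200

# Start the handler locally on port 8081 and send it one object key to resize
NODE_ENV=test node ImageTransform.js &
curl -X POST --data '<IMAGE_NAME>.jpg' http://localhost:8081
```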
Deploying functions
Creating a namespace
- Click Functions in the Serverless section of the side menu.
- Click + Create namespace.
- Enter a name for your namespace.
- Choose the AMSTERDAM region.
- Enter the following environment variables:
  - SOURCE_BUCKET: the name of your source bucket (e.g. source-images-<YOUR_NAME>)
  - S3_REGION: nl-ams
- Enter the following secrets:
  - S3_ACCESS_KEY_ID: the IAM access key ID previously created
  - S3_ACCESS_KEY: the IAM secret key previously created
- Click Create namespace to finish. The Functions tab of your namespace displays.
Deploying the BucketScan function
- Click + Create function.
- Enter BucketScan.handle as a handler.
- Select NodeJS 20.
- Select Upload a ZIP and upload the BucketScan.zip archive previously created.
- Name the function bucketscan.
- Set resources to 1024 MB - 560 mvCPU.
- Enter the following environment variables:
  - SQS_ENDPOINT: the endpoint of Scaleway Queues
  - QUEUE_URL: the URL of the queue previously created
- Enter the following secrets:
  - SQS_ACCESS_KEY_ID: the Scaleway Queues access key ID previously created
  - SQS_ACCESS_KEY: the Scaleway Queues secret key previously created
- Click Create function. Deploying a function takes a few minutes to complete.
Securing the BucketScan function
- Click the name of your function and access the Security tab.
- Click Generate Token.
- Add a description such as Functions tutorial.
- Leave the default expiration date.
- Click Generate token.
- Store the token securely, as you will not be able to access it later.
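Since the token is only displayed once, you may want to keep it in your shell session for the test request at the end of this tutorial, for example (placeholder value):

```sh
export BUCKETSCAN_TOKEN=<GENERATED_TOKEN>
```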
Adding a CRON trigger to the BucketScan function
- Open the Triggers tab of your function.
- Click + Create trigger.
- Select CRON as a type.
- Enter 0 * * * * as a UNIX Schedule.
  Note: 0 * * * * will trigger your function every hour at minute 0. Refer to Crontab Guru for more information on CRON schedules.
- Leave the default JSON arguments.
- Click Create trigger.
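The hourly schedule is only an example; any standard five-field CRON expression can be used instead. A few common alternatives:

```sh
# Other example schedules (minute hour day-of-month month day-of-week)
*/15 * * * *   # every 15 minutes
0 9 * * 1      # every Monday at 09:00
0 0 1 * *      # on the first day of every month at midnight
```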
Deploying the ImageTransform function
- Click + Create function.
- Enter ImageTransform.handle as a handler.
- Select NodeJS 20.
- Select Upload a ZIP and upload the ImageTransform.zip archive previously created.
- Name the function imagetransform.
- Set resources to 1024 MB - 560 mvCPU.
- Enter the following environment variables:
  - RESIZED_WIDTH: the width (in pixels) you want to resize your images to
  - DESTINATION_BUCKET: the name of your destination bucket (e.g. dest-images-<YOUR_NAME>)
- Click Create function. Deploying a function takes a few minutes to complete.
Adding a queue trigger to the ImageTransform function
- Open the Triggers tab of your function.
- Click + Create trigger.
- Select Queues (Scaleway) as a type.
- Select the queue previously created.
- Click Create trigger.
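To test the ImageTransform function in isolation, you can push a single message to the queue yourself instead of waiting for BucketScan to run. The sketch below is an assumption-based example using the AWS CLI with your Scaleway Queues credentials; <QUEUE_URL> and <SQS_ENDPOINT> are the same values used earlier, and <IMAGE_NAME>.jpg is a placeholder for an image present in your source bucket.

```sh
aws sqs send-message \
  --queue-url <QUEUE_URL> \
  --message-body '<IMAGE_NAME>.jpg' \
  --endpoint-url <SQS_ENDPOINT>
```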
Testing the functions
- Open a new terminal.
- Run the following command:
```sh
curl --location --request GET '<BUCKETSCAN_ENDPOINT_URL>' \
  --header 'X-Auth-Token: <BUCKETSCAN_TOKEN>'
```
The following response displays:
```json
{"status": "Done", "message": "All images from the bucket have been processed over X files in the bucket"}
```
- Open the Cockpit tab of the BucketScan function and click Open dashboards.
- Click Functions Logs and select the function that ends with bucketscan from the drop-down menu. Similar logs appear:
```
2023-06-05 10:40:07.977 DEBUG - e7400516-7546-4f30-96cd-459632c2ac37 file=BucketScan.js line=77 source=user
2023-06-05 10:40:07.973 DEBUG - 2d61d7c4-088a-472d-b82f-2b5c064f5d7b file=BucketScan.js line=77 source=user
2023-06-05 10:40:07.954 DEBUG - 3dd90c82-eb8e-4838-b708-26982ba094a1 file=BucketScan.js line=77 source=user
2023-06-05 10:40:07.951 DEBUG - 9e3087c2-b8db-4233-9643-9a4eab5cc722 file=BucketScan.js line=77 source=user
2023-06-05 10:40:07.947 DEBUG - 2e69ab78-571f-46e5-bb2b-e5b24fbb0a7f file=BucketScan.js line=77 source=user
2023-06-05 10:40:07.946 DEBUG - c2c0b81a-e8e6-49df-996d-ea06022be2d7 file=BucketScan.js line=77 source=user
2023-06-05 10:40:07.944 DEBUG - 59bb6dcd-bdf4-4917-ba8d-6b7964952a25 file=BucketScan.js line=77 source=user
2023-06-05 10:40:07.926 DEBUG - b931f887-5351-49b6-ba62-dd456c0ba01b file=BucketScan.js line=77 source=user
2023-06-05 10:40:07.894 DEBUG - 80073154-ff74-4785-959a-6de9b177c4b4 file=BucketScan.js line=77 source=user
2023-06-05 10:40:07.064 DEBUG - init Ok file=BucketScan.js line=42 source=user
```
- Open your destination bucket. It now contains all the resized images (you can also retrieve them from the command line, as sketched below).
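For example, the following assumption-based AWS CLI command, pointed at the nl-ams endpoint, downloads one of the resized images (bucket and file names are placeholders):

```sh
aws s3 cp s3://dest-images-<YOUR_NAME>/resized-<IMAGE_NAME>.jpg . \
  --endpoint-url https://s3.nl-ams.scw.cloud
```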
Check out our serverless-examples repository for more examples and real-life use cases using Scaleway’s Serverless products.