Hi @henna.s
Thanks for the response. I've now given this a go, following the documentation for importing and using the S3 Node.js client.
However, I'm seeing significant overhead when using the Node.js AWS package.
For example, compare the two functions below: there is roughly a 40x increase in the time it takes to get an object from S3!
2326ms vs 57ms
Node.js package - 2326ms
exports = async function(){
  // Load the AWS SDK for Node.js
  const S3 = require('aws-sdk/clients/s3');
  const s3 = new S3({
    accessKeyId: context.values.get("AWS_ACCESS_KEY"),
    secretAccessKey: context.values.get("AWS_ACCESS_SECRET"),
    region: "ap-southeast-2",
  });

  // Call S3 to get object
  const beforeNodeSDK = new Date();
  const getResult = await s3.getObject({
    Bucket: "myBucket",
    Key: "myKey"
  }).promise();
  const afterNodeSDK = new Date();

  const timeTakenNodeSDK = afterNodeSDK - beforeNodeSDK;
  return timeTakenNodeSDK; // (result = 2326)
};
3rd Party Services (Go SDK) - 57ms
exports = async function() {
  // Load the built-in AWS service
  const s3 = context.services.get("AWS").s3("ap-southeast-2");

  // Call S3 to get object
  const beforeGoSDK = new Date();
  const result = await s3.GetObject({
    Bucket: "myBucket",
    Key: "myKey"
  });
  const afterGoSDK = new Date();

  const timeTakenGoSDK = afterGoSDK - beforeGoSDK;
  return timeTakenGoSDK; // (result = 57)
};
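I haven't yet pinned down where the extra ~2 seconds goes. A variation along these lines (a rough sketch only, reusing the same "myBucket"/"myKey" and credential values as above) should split the timing between loading/constructing the Node.js SDK client and the GetObject request itself, which might show whether the overhead is in SDK initialisation or in the call:

exports = async function() {
  // Time the SDK load and client construction separately from the request
  const beforeInit = new Date();
  const S3 = require('aws-sdk/clients/s3');
  const s3 = new S3({
    accessKeyId: context.values.get("AWS_ACCESS_KEY"),
    secretAccessKey: context.values.get("AWS_ACCESS_SECRET"),
    region: "ap-southeast-2",
  });
  const afterInit = new Date();

  // Time only the GetObject call
  const beforeGet = new Date();
  await s3.getObject({
    Bucket: "myBucket",
    Key: "myKey"
  }).promise();
  const afterGet = new Date();

  return {
    initMs: afterInit - beforeInit,    // SDK load + client construction
    getObjectMs: afterGet - beforeGet  // S3 request only
  };
};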
Thanks!