The function that makes the SQS call is a database trigger function, Atlas_Triggers_ETL_Function. It actually writes to both S3 and SQS; the S3 put succeeds, but the SQS send does not:
exports = async function(changeEvent) {
  const AWS_CONFIG = await context.functions.execute('aws_getConfig')
  const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3')
  const { SQSClient, SendMessageCommand } = require('@aws-sdk/client-sqs')
  const { S3_BUCKET_ETL_ENV, SQS_QUEUE_ETL_URLS } = context.environment.values
  const S3Bucket = context.values.get("S3_ETL_BUCKET")

  if (changeEvent && (changeEvent.operationType == 'insert' || changeEvent.operationType == 'delete' || changeEvent.operationType == 'update')) {
    const s3 = new S3Client(AWS_CONFIG)
    const sqs = new SQSClient(AWS_CONFIG)
    const collection = changeEvent.ns.coll
    const SQSQueueUrl = SQS_QUEUE_ETL_URLS[collection.toLowerCase()]

    let objectsToPutInS3 = [], objectsToPutInS3ForSQS = []
    const fullStringBody = changeEvent.fullDocument ? JSON.stringify(changeEvent.fullDocument) : JSON.stringify(changeEvent.documentKey)
    const baseKeyName = S3_BUCKET_ETL_ENV + "/" + collection + "/" + changeEvent.documentKey._id + "-" + Date.now()

    if (fullStringBody) {
      const anObj = {
        Bucket: S3Bucket,
        Key: baseKeyName,
        Body: fullStringBody
      }
      objectsToPutInS3ForSQS.push(anObj)
      objectsToPutInS3.push(new PutObjectCommand(anObj))
    }

    const s3Promises = objectsToPutInS3.map(object => s3.send(object).then(data => {
      console.log('S3 put object result: ' + JSON.stringify(data))
      return data
    }))
    await Promise.all(s3Promises)

    const sqsMsgBody = JSON.stringify({
      operation: changeEvent.operationType,
      S3FilePartsOfJSONDocument: objectsToPutInS3ForSQS.map(object => { return { Bucket: object.Bucket, Key: object.Key } })
    })

    console.log(`SQSQueueUrl: ${SQSQueueUrl} and is of type ${typeof SQSQueueUrl}`)
    // "https://sqs.us-east-1.amazonaws.com/1234567890/MongoAtlasETLUsersDev.fifo" and is of type string
    console.log(`collection: ${collection} and is of type ${typeof collection}`)
    // "users" and is of type string
    console.log(`sqsMsgBody: ${sqsMsgBody} and is of type ${typeof sqsMsgBody}`)
    // something like: "{\"operation\":\"update\",\"S3FilePartsOfJSONDocument\":[{\"Bucket\":\"bucket-name\",\"Key\":\"dev/users/5bb2377885432223fg-1690592434481\"}]}" and is of type string
    console.log(`baseKeyName: ${baseKeyName} and is of type ${typeof baseKeyName}`)
    // something like: "dev/users/5bb2377885432223fg-1690592434481" and is of type string
    // Everything is fine up to here

    try {
      const sqsResult = await sqs.send(new SendMessageCommand({
        QueueUrl: SQSQueueUrl,
        MessageGroupId: collection,
        MessageBody: sqsMsgBody,
        MessageDeduplicationId: baseKeyName
      }))
    } catch (e) {
      console.log('Error writing to SQS: ' + e)
      // error is: "TypeError: Data must be a string or a buffer"
    }
  }
}
So, the full error message is "TypeError: Data must be a string or a buffer".
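For comparison, here is a minimal sketch of just the SQS call as a standalone Node.js script, outside the Atlas Functions runtime. The queue URL, region, and message values are placeholders taken from the logs above, and credentials are assumed to come from the default provider chain rather than aws_getConfig; I have not confirmed whether the error reproduces this way, but it isolates the exact SendMessageCommand parameters being passed.

const { SQSClient, SendMessageCommand } = require('@aws-sdk/client-sqs')

// Placeholder region; credentials resolved from the default provider chain (env vars, profile, etc.)
const sqs = new SQSClient({ region: 'us-east-1' })

async function main() {
  // Same parameter shapes as in the trigger function: all four values are plain strings
  const result = await sqs.send(new SendMessageCommand({
    QueueUrl: 'https://sqs.us-east-1.amazonaws.com/1234567890/MongoAtlasETLUsersDev.fifo', // placeholder
    MessageGroupId: 'users',
    MessageBody: JSON.stringify({ operation: 'update', S3FilePartsOfJSONDocument: [] }),
    MessageDeduplicationId: 'dev/users/test-' + Date.now()
  }))
  console.log('SQS send result: ' + JSON.stringify(result))
}

main().catch(err => console.error(err))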