If an object is passed to $out it must have exactly 2 fields: 'db' and 'coll'

I am trying to copy data from MongoDB to an S3 bucket. I followed this tutorial: How to Automate Continuous Data Copying from MongoDB to S3 | MongoDB

Steps:

Created an S3 bucket and an IAM role with all the required permissions (including the access policy)
Created a data lake in MongoDB Atlas
Connected the data lake with S3 (rough sketch of the storage configuration below)
While creating the trigger I am facing this issue.
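
For reference, this is roughly how I understood the federated database (data lake) storage configuration that maps my cluster and the S3 bucket. I am writing it from memory of the tutorial, so the field names may not be exact:

// Rough sketch of the data lake storage configuration, from memory of the tutorial;
// field names may differ. It connects the Atlas cluster store and the S3 store.
const storageConfig = {
   stores: [
      {
         name: "atlasClusterStore",          // the Atlas cluster holding the source data
         provider: "atlas",
         clusterName: "v3ProdCluster-us-east-1",
         projectId: "<project-id>"           // placeholder
      },
      {
         name: "s3Store",                    // the S3 bucket the data is copied to
         provider: "s3",
         bucket: "mongodb-s3-staging",
         region: "us-east-1",
         delimiter: "/"
      }
   ],
   databases: [
      {
         name: "v3StagingDB",
         collections: [
            {
               name: "work_sessions",
               dataSources: [
                  {
                     storeName: "atlasClusterStore",
                     database: "v3StagingDB",
                     collection: "work_sessions"
                  }
               ]
            }
         ]
      }
   ]
};

And this is the trigger function where the error appears: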

exports = function () {
   const datalake = context.services.get("v3ProdCluster-us-east-1");
   const db = datalake.db("v3StagingDB");
   const events = db.collection("work_sessions");

   const pipeline = [
      {
         $match: {
            "time": {
               $gte: new Date(Date.now() - 60 * 60 * 10000000000000000),
               $lt: new Date(Date.now())
            }
         }
      },
      {
         "$out": {
            "s3": {
               "bucket": "mongodb-s3-staging",
               "region": "us-east-1",
               "filename": { "$concat": ["work_sessions/", "$_id"] },
               "format": {
                  "name": "json",
                  "maxFileSize": "10GB"
               }
            }
         }
      }
   ];

   return events.aggregate(pipeline);
};
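
For comparison, this is my understanding of the two shapes $out accepts; the database, collection, and bucket names here are just placeholders:

// Shape 1: $out to a collection on the cluster itself. The object form must have
// exactly the two fields 'db' and 'coll' (which is what the error in the title is about).
const outToCollection = [
   { "$out": { "db": "someDatabase", "coll": "someCollection" } }   // placeholder names
];

// Shape 2: $out to S3. As far as I understand, this form is only accepted when the
// aggregation runs against a Federated Database Instance / Data Lake service,
// not against the cluster service directly.
const outToS3 = [
   {
      "$out": {
         "s3": {
            "bucket": "example-bucket",        // placeholder bucket
            "region": "us-east-1",
            "filename": "example-prefix/",     // placeholder key prefix
            "format": { "name": "json" }
         }
      }
   }
];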

I am having the same problem.

Welcome to the MongoDB Community @Marina_Stolet!

Can you confirm the MongoDB Atlas cluster tier you are using (M_)? Are you following the same tutorial as the original poster?

Regards,
Stennie

I am using M10 and yes, the same tutorial, although I have now adapted it. What I did was create a federated database using my cluster, with an “analytics” database and an “assessments” collection. I am no longer getting that error, but no data arrives in my S3 bucket. This is the code:

exports = function () {

   const datalake = context.services.get("FederatedDatabaseInstance-analytics");
   const db = datalake.db("analytics");
   const coll = db.collection("assessments");

   const pipeline = [
      {
        "$out": {
          "s3": {
            "bucket": "322104163088-mongodb-data-ingestion",
            "region": "eu-west-2",
            "filename": "analytics/",
            "format": {
              "name": "json",
              "maxFileSize": "100GB"
            }
          }
        }
      }
   ];

   return coll.aggregate(pipeline);
};
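
In case an error is being swallowed silently, here is a rough sketch of the same function with the result awaited and logged, so failures would show up in the trigger logs (I am not sure whether the cursor actually needs to be drained with .toArray() for $out to run, so treat this as a guess):

exports = async function () {
   const datalake = context.services.get("FederatedDatabaseInstance-analytics");
   const coll = datalake.db("analytics").collection("assessments");

   const pipeline = [
      {
         "$out": {
            "s3": {
               "bucket": "322104163088-mongodb-data-ingestion",
               "region": "eu-west-2",
               "filename": "analytics/",
               "format": { "name": "json", "maxFileSize": "100GB" }
            }
         }
      }
   ];

   try {
      // Await and drain the cursor so any server-side error surfaces here.
      const result = await coll.aggregate(pipeline).toArray();
      console.log("aggregation completed:", JSON.stringify(result));
      return result;
   } catch (err) {
      console.log("aggregation failed:", err.message);
      throw err;
   }
};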