Oplog capacity for connector

I use the connector to collect data from the oplog. When a large amount of data is updated, I observe a lag in delivery from MongoDB. Is it possible to increase the size of the batches the connector reads from the oplog?

Config:

    "schema.compatibility": "BACKWARD",
    "auto.offset.reset": "latest",
    "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
    "tasks.max": "4",
    "key.converter": "org.apache.kafka.connect.storage.StringConverter",
    "value.converter": "org.apache.kafka.connect.storage.StringConverter",
    "errors.log.enable": "true",
    "errors.log.include.messages": "true",
    "database": "db",
    "collection": "tran",
    "pipeline": "[{\"$match\":{\"operationType\":{\"$in\":[\"insert\",\"update\"]}}}]",
    "batch.size": "0",
    "publish.full.document.only": "true",
    "change.stream.full.document": "updateLookup",
    "poll.max.batch.size": "10000",
    "poll.await.time.ms": "100",
    "output.json.formatter": "com.mongodb.kafka.connect.source.json.formatter.SimplifiedJson",
    "copy.existing.max.threads": "4",
    "copy.existing.queue.size": "32000",
    "mongo.errors.log.enable": "true",
    "heartbeat.interval.ms": "60000",
    "heartbeat.topic.name": "heartbeats-mongodb"