Installing Kafka with KRaft

Can you please suggest a MongoDB connector that works with a KRaft-mode cluster? We are unable to connect to MongoDB.

We are using the docker-compose.yml below; connector registration sketches follow after the file. Can you please help with this issue?
version: '2'
services:
  broker:
    image: confluentinc/cp-kafka:7.6.1
    hostname: broker
    container_name: kafka-0
    restart: unless-stopped
    ports:
      - "9092:9092"
      - "9101:9101"
    environment:
      KAFKA_NODE_ID: 1
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
      # Advertised listeners must be addresses clients can actually reach;
      # 0.0.0.0 is only a bind address, so advertise localhost for host clients.
      KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092'
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_JMX_PORT: 9101
      KAFKA_JMX_HOSTNAME: 0.0.0.0
      KAFKA_PROCESS_ROLES: 'broker,controller'
      KAFKA_CONTROLLER_QUORUM_VOTERS: '1@broker:29093'
      KAFKA_LISTENERS: 'PLAINTEXT://broker:29092,CONTROLLER://broker:29093,PLAINTEXT_HOST://0.0.0.0:9092'
      KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
      KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
      KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
      CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk'
    volumes:
      # Mount target must match KAFKA_LOG_DIRS, otherwise the KRaft logs land
      # inside the container and are lost on restart.
      - ./kraft-combined-logs:/tmp/kraft-combined-logs
      - kafka-data:/var/lib/kafka/data
    user: "1000:1000"
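  # Once the broker reports healthy, the KRaft quorum can be checked with the
  # stock CLI that ships in the image, e.g.:
  #   docker exec kafka-0 kafka-metadata-quorum --bootstrap-server broker:29092 describe --status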

  registry:
    image: confluentinc/cp-schema-registry:7.6.1
    hostname: registry
    environment:
      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: PLAINTEXT://broker:29092
      SCHEMA_REGISTRY_HOST_NAME: registry
      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
      SCHEMA_REGISTRY_DEBUG: "true"
      SCHEMA_REGISTRY_ACCESS_CONTROL_ALLOW_ORIGIN: '*' # for CORS in UI, remove in production
      SCHEMA_REGISTRY_ACCESS_CONTROL_ALLOW_METHODS: 'GET,POST,PUT,OPTIONS' # for CORS in UI, remove in production
    ports:
      - "8081:8081"
    depends_on:
      - broker

  connect:
    image: confluentinc/cp-server-connect:7.5.2
    container_name: connect
    depends_on:
      - broker
      - registry
    ports:
      - "8083:8083"
    environment:
      # The worker must bootstrap against the broker's in-network listener;
      # 0.0.0.0 is a bind address, not a reachable host.
      CONNECT_BOOTSTRAP_SERVERS: broker:29092
      CONNECT_GROUP_ID: "debezium-example"
      CONNECT_REST_ADVERTISED_HOST_NAME: connect
      # topic for storing configurations
      CONNECT_CONFIG_STORAGE_TOPIC: connect-demo-configs
      # topic for storing offsets
      CONNECT_OFFSET_STORAGE_TOPIC: connect-demo-offsets
      # topic for storing connector statuses
      CONNECT_STATUS_STORAGE_TOPIC: connect-demo-statuses
      CONNECT_REPLICATION_FACTOR: 1
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
      # specify the converter for keys
      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
      # specify the converter for values
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      # plugin path
      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
      # since we are using Avro + Schema Registry, point at the registry service
      # defined above -- its hostname is "registry", not "schema-registry"
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://registry:8081
      # Note: MONGO_URI / MONGO_DB are not read by the Connect worker. The
      # MongoDB connection details belong in the connector config submitted to
      # the REST API (see the sketches after this file).
      MONGO_URI: "mongodb://........"
      MONGO_DB: "dev-Mdh"
    volumes:
      - database:/opt/docker/db/data
      - $PWD/stack-configs:/opt/docker/stack-configs
    # Install the Debezium MongoDB connector from Confluent Hub, then start the worker
    command:
      - bash
      - -c
      - |
        echo "Installing connector plugins"
        confluent-hub install --no-prompt debezium/debezium-connector-mongodb:2.2.1
        echo "Launching Kafka Connect worker"
        /etc/confluent/docker/run

  mongo-sink-connector:
    image: debezium/connect:1.7
    #image: confluentinc/cp-kafka-connect-mongodb:1.2.0
    container_name: mongo-sink-connector
    depends_on:
      - broker
      - connect
    restart: unless-stopped
    environment:
      # Bootstrap against the broker's in-network listener, not 0.0.0.0.
      BOOTSTRAP_SERVERS: broker:29092
      GROUP_ID: connect-mongo-sink
      CONFIG_STORAGE_TOPIC: _connect-configs
      OFFSET_STORAGE_TOPIC: _connect-offsets
      STATUS_STORAGE_TOPIC: _connect-status
      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
      CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://registry:8081
      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_KEY_CONVERTER_SCHEMAS_ENABLE: "false"
      CONNECT_INTERNAL_VALUE_CONVERTER_SCHEMAS_ENABLE: "false"
      CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
      CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
      # The settings below are connector-level properties, not worker settings;
      # the worker ignores them as environment variables. Submit them to the
      # worker's REST API instead (see the sketches after this file).
      #CONNECTOR_CLASS: "com.mongodb.kafka.connect.MongoSinkConnector"
      CONNECTOR_CLASS: "io.debezium.connector.mongodb.MongoDbConnector"
      CONNECTION_URI: "mongodb://…"
      TOPICS: "topic name"
      DATABASE: "xyz"
      COLLECTION: "kafka"

  connect-file-pulse:
    image: streamthoughts/kafka-connect-file-pulse:2.14.1
    container_name: connect-file-pulse
    depends_on:
      - broker
    ports:
      - "8084:8083"
      - "8000:8000"
    restart: unless-stopped
    environment:
      CONNECT_BOOTSTRAP_SERVERS: 'broker:29092'
      # Advertise this worker's own hostname, not the other Connect container's,
      # so REST requests forwarded between workers do not go astray.
      CONNECT_REST_ADVERTISED_HOST_NAME: connect-file-pulse
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://registry:8081
      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components/"
    volumes:
      - /tmp/kafka-connect:/tmp/kafka-connect

volumes:
  kafka-data:
  database:
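The connector itself is not configured through environment variables. Once the connect worker above is running (and the confluent-hub step has installed the Debezium MongoDB plugin), the connector is created by POSTing a JSON config to the worker's REST API. A minimal sketch, assuming the worker is reachable on localhost:8083; the connector name mongo-source and the topic prefix are illustrative, and the connection string stays a placeholder:

curl -X POST http://localhost:8083/connectors \
  -H "Content-Type: application/json" \
  -d '{
    "name": "mongo-source",
    "config": {
      "connector.class": "io.debezium.connector.mongodb.MongoDbConnector",
      "mongodb.connection.string": "mongodb://...",
      "topic.prefix": "dev-mdh",
      "database.include.list": "dev-Mdh"
    }
  }'

Note that the Debezium MongoDB connector is a source (it streams MongoDB changes into Kafka) and requires MongoDB to run as a replica set or sharded cluster; a standalone mongod is a common cause of "unable to connect" failures here.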
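For the opposite direction (Kafka topics into MongoDB, i.e. the commented-out com.mongodb.kafka.connect.MongoSinkConnector in the mongo-sink-connector service), the MongoDB Kafka sink connector is registered the same way; io.debezium.connector.mongodb.MongoDbConnector cannot act as a sink. A sketch reusing the placeholder database/collection/topic values from the compose file, and assuming the plugin has been installed on the worker (e.g. via confluent-hub install --no-prompt mongodb/kafka-connect-mongodb:latest):

curl -X POST http://localhost:8083/connectors \
  -H "Content-Type: application/json" \
  -d '{
    "name": "mongo-sink",
    "config": {
      "connector.class": "com.mongodb.kafka.connect.MongoSinkConnector",
      "connection.uri": "mongodb://...",
      "database": "xyz",
      "collection": "kafka",
      "topics": "topic name"
    }
  }'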