Failed running a Spark dataset read from MongoDB as described in the documentation

I created this Java file:

package com.mongo;

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;

import org.bson.Document;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;

public final class MongoConnectRead {
      public static void main(final String[] args) throws InterruptedException {
            SparkSession spark = SparkSession.builder()
                        .appName("MongoSparkConnectorIntro")
                        .config("spark.mongodb.input.uri", "mongodb://127.0.0.1:27117,127.0.0.1:27118/test.user")
                        .config("spark.mongodb.output.uri", "mongodb://127.0.0.1:27117,127.0.0.1:27118/test.user")

                        // .config("spark.mongodb.input.partitioner", "MongoPaginateBySizePartitioner")

                        .getOrCreate();
            // Create a JavaSparkContext using the SparkSession's SparkContext object
            System.out.println("1hummmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm-----------------------------------");
            JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
            /* Start Example: Read data from MongoDB ************************/
            System.out.println("hummmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm-----------------------------------");
            JavaMongoRDD<Document> rdd = MongoSpark.load(jsc);
            System.out.println("23hummmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm-----------------------------------");
            /* End Example **************************************************/
            // Analyze data from MongoDB
            System.out.println(rdd.count());
            System.out.println("hummmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm-----------------------------------");
            System.out.println(rdd.first().toJson());
            System.out.println("hummmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm-----------------------------------");
            jsc.close();
      }
}

and I run it with:

bin/spark-submit   --class "com.mongo.MongoConnectRead"   --master local[4] "F:\spark\spark-sql\target\spark-sql-1.0-SNAPSHOT.jar" 
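
For comparison, the same read can also be expressed through the connector's DataFrame API. This is only a sketch added for illustration, assuming connector 3.0.x (which registers the data source under the short name "mongo") and the same URIs as above:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public final class MongoConnectReadDF {
      public static void main(final String[] args) {
            SparkSession spark = SparkSession.builder()
                        .appName("MongoSparkConnectorDFIntro")
                        .config("spark.mongodb.input.uri", "mongodb://127.0.0.1:27117,127.0.0.1:27118/test.user")
                        .getOrCreate();
            // The uri option could be omitted here because spark.mongodb.input.uri
            // is already set on the session.
            Dataset<Row> users = spark.read()
                        .format("mongo")
                        .option("uri", "mongodb://127.0.0.1:27117,127.0.0.1:27118/test.user")
                        .load();
            users.printSchema();
            System.out.println(users.count());
            spark.stop();
      }
}

Both APIs go through the same connector internals, so a driver-version mismatch would affect them equally.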

I also put these jars in the Spark jars folder; maybe these jars are what causes the error (see the diagnostic sketch after the jar list):

mongo-java-driver-3.10.2.jar
mongo-spark-connector_2.12-3.0.2.jar
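
In case it helps to narrow this down, here is a small diagnostic sketch (not part of the original post) that can be dropped into the same project and run against the same classpath. It checks whether the MongoClient class actually exposes the getClusterDescription() method named in the NoSuchMethodError below; the old 3.x mongo-java-driver does not have it, while sufficiently new 4.x mongodb-driver-sync releases do:

import com.mongodb.client.MongoClient;

public final class CheckMongoDriverApi {
      public static void main(final String[] args) {
            // com.mongodb.client.MongoClient exists in both the 3.x and 4.x drivers, but
            // getClusterDescription() is only present in newer 4.x sync driver releases.
            try {
                  MongoClient.class.getMethod("getClusterDescription");
                  System.out.println("getClusterDescription() found - a 4.x sync driver is on the classpath");
            } catch (NoSuchMethodException e) {
                  System.out.println("getClusterDescription() missing - an old mongo-java-driver is probably being loaded");
            }
      }
}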

but when I run the job I see this error:


WARNING: Partitioning failed.

Partitioning using the 'DefaultMongoPartitioner$' failed.

Please check the stacktrace to determine the cause of the failure or check the Partitioner API documentation.
Note: Not all partitioners are suitable for all toplogies and not all partitioners support views.%n


Exception in thread "main" java.lang.NoSuchMethodError: 'com.mongodb.connection.ClusterDescription com.mongodb.client.MongoClient.getClusterDescription()'
at com.mongodb.spark.connection.MongoClientCache.$anonfun$logClient$1(MongoClientCache.scala:161)
at com.mongodb.spark.LoggingTrait.logInfo(LoggingTrait.scala:48)
at com.mongodb.spark.LoggingTrait.logInfo$(LoggingTrait.scala:47)
at com.mongodb.spark.Logging.logInfo(Logging.scala:24)
at com.mongodb.spark.connection.MongoClientCache.logClient(MongoClientCache.scala:161)
at com.mongodb.spark.connection.MongoClientCache.acquire(MongoClientCache.scala:56)
at com.mongodb.spark.MongoConnector.acquireClient(MongoConnector.scala:239)
at com.mongodb.spark.MongoConnector.withMongoClientDo(MongoConnector.scala:152)
at com.mongodb.spark.MongoConnector.withDatabaseDo(MongoConnector.scala:171)
at com.mongodb.spark.MongoConnector.hasSampleAggregateOperator(MongoConnector.scala:234)
at com.mongodb.spark.rdd.partitioner.DefaultMongoPartitioner.partitions(DefaultMongoPartitioner.scala:33)
at com.mongodb.spark.rdd.MongoRDD.getPartitions(MongoRDD.scala:135)
at org.apache.spark.rdd.RDD.$anonfun$partitions$2(RDD.scala:292)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:288)
at org.apache.spark.SparkContext.runJob(SparkContext.scala:2303)
at org.apache.spark.rdd.RDD.count(RDD.scala:1274)
at org.apache.spark.api.java.JavaRDDLike.count(JavaRDDLike.scala:469)
at org.apache.spark.api.java.JavaRDDLike.count$(JavaRDDLike.scala:469)
at org.apache.spark.api.java.AbstractJavaRDDLike.count(JavaRDDLike.scala:45)
at com.mongo.MongoConnectRead.main(MongoConnectRead.java:30)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:958)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1046)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1055)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

Hi Kube_ctl,

Can you provide the MongoDB server version that you are using? There seems to be a version mismatch between the MongoDB Spark Connector and the other drivers.

Also, have you looked at the samples provided here? https://www.mongodb.com/docs/spark-connector/v3.0/java/write-to-mongodb/
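
For reference, the Java write sample on that page boils down to roughly the following; this is a paraphrase from memory, assuming connector 3.0.x and a spark.mongodb.output.uri pointing at the target collection:

import static java.util.Arrays.asList;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;

import com.mongodb.spark.MongoSpark;

public final class MongoConnectWrite {
      public static void main(final String[] args) {
            SparkSession spark = SparkSession.builder()
                        .appName("MongoSparkConnectorWriteIntro")
                        .config("spark.mongodb.output.uri", "mongodb://127.0.0.1:27117,127.0.0.1:27118/test.user")
                        .getOrCreate();
            JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
            // Build a few simple documents and save them to the collection named in
            // spark.mongodb.output.uri.
            JavaRDD<Document> documents = jsc.parallelize(asList(1, 2, 3, 4, 5))
                        .map(i -> Document.parse("{test: " + i + "}"));
            MongoSpark.save(documents);
            jsc.close();
      }
}

Note that MongoSpark.save() also acquires a MongoClient through the same connector internals, so if the driver jars are indeed mismatched it would hit the same NoSuchMethodError.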