From c1dd024378ff2024d9e1141bcf86a6ce63372e0f Mon Sep 17 00:00:00 2001
From: stratiocommit
Date: Tue, 27 Oct 2015 10:56:11 +0000
Subject: [PATCH 01/17] [RELEASE] Prepare for next development iteration

---
 doc/pom.xml                | 2 +-
 pom.xml                    | 2 +-
 spark-mongodb_2.10/pom.xml | 2 +-
 spark-mongodb_2.11/pom.xml | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/doc/pom.xml b/doc/pom.xml
index 2055047..fb7d9b2 100644
--- a/doc/pom.xml
+++ b/doc/pom.xml
@@ -26,7 +26,7 @@
         <groupId>com.stratio.datasource</groupId>
         <artifactId>spark-mongodb-parent</artifactId>
-        <version>0.10.0-SNAPSHOT</version>
+        <version>0.11.0-SNAPSHOT</version>
         true
diff --git a/pom.xml b/pom.xml
index 111222d..c97f91b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -21,7 +21,7 @@
     <modelVersion>4.0.0</modelVersion>
     <groupId>com.stratio.datasource</groupId>
     <artifactId>spark-mongodb-parent</artifactId>
-    <version>0.10.0-SNAPSHOT</version>
+    <version>0.11.0-SNAPSHOT</version>
     <packaging>pom</packaging>
     <name>Stratio Spark Mongodb Datasource</name>
     <description>A Spark SQL library for MongoDB</description>
diff --git a/spark-mongodb_2.10/pom.xml b/spark-mongodb_2.10/pom.xml
index 6caa94e..a6964d7 100644
--- a/spark-mongodb_2.10/pom.xml
+++ b/spark-mongodb_2.10/pom.xml
@@ -25,7 +25,7 @@
         <artifactId>spark-mongodb-parent</artifactId>
         <groupId>com.stratio.datasource</groupId>
-        <version>0.10.0-SNAPSHOT</version>
+        <version>0.11.0-SNAPSHOT</version>
         2.10.4
diff --git a/spark-mongodb_2.11/pom.xml b/spark-mongodb_2.11/pom.xml
index 679568f..47e2758 100644
--- a/spark-mongodb_2.11/pom.xml
+++ b/spark-mongodb_2.11/pom.xml
@@ -25,7 +25,7 @@
         <artifactId>spark-mongodb-parent</artifactId>
         <groupId>com.stratio.datasource</groupId>
-        <version>0.10.0-SNAPSHOT</version>
+        <version>0.11.0-SNAPSHOT</version>
         2.11.6

From b03a583471c90e64903e6dd519f04a26618e7497 Mon Sep 17 00:00:00 2001
From: Pedro Madrigal Marina
Date: Thu, 19 Nov 2015 10:59:09 +0100
Subject: [PATCH 02/17] Update README.md

---
 README.md | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/README.md b/README.md
index 5f6a53d..89679ec 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,15 @@ from/into MongoDB collections.
 If you are using this Data Source, feel free to briefly share your experience by Pull Request this [file](https://github.com/Stratio/spark-mongodb/blob/master/doc/src/site/sphinx/PoweredBy.rst).
 
+## Latest compatible versions##
+
+| spark-MongoDB | Apache Spark | MongoDB |
+| ------------- | ------------- | -------- |
+| 0.10.x | 1.5.x | 3.0.x |
+| 0.8.2-0.9.2 | 1.4.0 | 3.0.x |
+| 0.8.1 | 1.3.0 | 3.0.x |
+| 0.8.0 | 1.2.1 | 3.0.x |
+
 ## How to use Spark-MongoDB##

From 11ef6470e7dfde5a81e56e4cf4d2a7dfa7ae9710 Mon Sep 17 00:00:00 2001
From: Pedro Madrigal Marina
Date: Thu, 19 Nov 2015 11:00:31 +0100
Subject: [PATCH 03/17] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 89679ec..9484bb2 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@ If you are using this Data Source, feel free to briefly share your experience by
 | spark-MongoDB | Apache Spark | MongoDB |
 | ------------- | ------------- | -------- |
 | 0.10.x | 1.5.x | 3.0.x |
-| 0.8.2-0.9.2 | 1.4.0 | 3.0.x |
+| 0.8.2 - 0.9.2 | 1.4.0 | 3.0.x |
 | 0.8.1 | 1.3.0 | 3.0.x |
 | 0.8.0 | 1.2.1 | 3.0.x |

From a001ae6144e069ab27e110aea64d043838959813 Mon Sep 17 00:00:00 2001
From: Pedro Madrigal Marina
Date: Thu, 19 Nov 2015 11:02:32 +0100
Subject: [PATCH 04/17] Update pom.xml

---
 spark-mongodb-examples/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/spark-mongodb-examples/pom.xml b/spark-mongodb-examples/pom.xml
index 5ea3327..e69693c 100644
--- a/spark-mongodb-examples/pom.xml
+++ b/spark-mongodb-examples/pom.xml
@@ -40,7 +40,7 @@
         2.10
-        0.10.0-SNAPSHOT
+        0.10.1
         1.5.1

From 150661381988139703bb75b6f20e860eb87f0814 Mon Sep 17 00:00:00 2001
From: Pedro Madrigal Marina
Date: Thu, 26 Nov 2015 15:41:40 +0100
Subject: [PATCH 05/17] Update PoweredBy.rst

---
 doc/src/site/sphinx/PoweredBy.rst | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/doc/src/site/sphinx/PoweredBy.rst b/doc/src/site/sphinx/PoweredBy.rst
index 0c77fbe..fdd95f6 100644
--- a/doc/src/site/sphinx/PoweredBy.rst
+++ b/doc/src/site/sphinx/PoweredBy.rst
@@ -1,5 +1,5 @@
 ============
-Powered By
+Powered by
 ============
 
 If you are using this Data Source, feel free to briefly share your experience by Pull Request.
@@ -9,8 +9,8 @@ If you are using this Data Source, feel free to briefly share your experience by
 Companies
 ***************
 
-* `Stratio Connectors `_ : Description of use case.
-
+- `Stratio platform `_ :
+  - `Crossdata < https://github.com/Stratio/crossdata>`_: Crossdata uses spark-mongodb datasource as a piece of the MongoDB connector.

From 2ff103c362803b9a43727268356480f25d667662 Mon Sep 17 00:00:00 2001
From: David Arroyo Cazorla
Date: Thu, 26 Nov 2015 15:47:01 +0100
Subject: [PATCH 06/17] Update about.rst

---
 doc/src/site/sphinx/about.rst | 20 --------------------
 1 file changed, 20 deletions(-)

diff --git a/doc/src/site/sphinx/about.rst b/doc/src/site/sphinx/about.rst
index ef22c8a..86f953e 100644
--- a/doc/src/site/sphinx/about.rst
+++ b/doc/src/site/sphinx/about.rst
@@ -10,23 +10,3 @@ fast and general-purpose cluster computing system that can run applications up t
 Integrating MongoDB and Spark gives us a system that combines the best of both worlds opening to MongoDB the possibility of solving a wide range of new use cases.
-
-
-Latest compatible versions
-==========================
-+-----------------+----------------+----------+
-| Spark-MongoDB   | Apache Spark   | MongoDB  |
-+=================+================+==========+
-| 0.8.7           | 1.4.0          | 3.0.+    |
-+-----------------+----------------+----------+
-| 0.8.2 - 0.8.6   | 1.4.0          | 3.0.+    |
-+-----------------+----------------+----------+
-| 0.8.1           | 1.3.0          | 3.0.+    |
-+-----------------+----------------+----------+
-| 0.8.0           | 1.2.1          | 3.0.+    |
-+-----------------+----------------+----------+
-
-
-Requirements
-============
-This library requires Apache Spark 1.5.X, Scala 2.10 or Scala 2.11, Casbah 2.8.X
\ No newline at end of file

From 981b8d18cf0908a0db7628dc1135f11cdc9b552b Mon Sep 17 00:00:00 2001
From: David Arroyo Cazorla
Date: Thu, 26 Nov 2015 15:48:18 +0100
Subject: [PATCH 07/17] Update PoweredBy.rst

---
 doc/src/site/sphinx/PoweredBy.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/src/site/sphinx/PoweredBy.rst b/doc/src/site/sphinx/PoweredBy.rst
index fdd95f6..0781a70 100644
--- a/doc/src/site/sphinx/PoweredBy.rst
+++ b/doc/src/site/sphinx/PoweredBy.rst
@@ -9,8 +9,8 @@ If you are using this Data Source, feel free to briefly share your experience by
 Companies
 ***************
 
-- `Stratio platform `_ :
-  - `Crossdata < https://github.com/Stratio/crossdata>`_: Crossdata uses spark-mongodb datasource as a piece of the MongoDB connector.
+- `Stratio platform `_:
+  - `Crossdata <https://github.com/Stratio/crossdata>`_: Crossdata uses spark-mongodb datasource as a piece of the MongoDB connector.

From cfbe81d4e417f3f02e4a6f90cab3eee896fb5420 Mon Sep 17 00:00:00 2001
From: David Arroyo Cazorla
Date: Thu, 26 Nov 2015 15:58:43 +0100
Subject: [PATCH 08/17] Update README.md

---
 README.md | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index 9484bb2..ddefafb 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,12 @@ from/into MongoDB collections.
 If you are using this Data Source, feel free to briefly share your experience by Pull Request this [file](https://github.com/Stratio/spark-mongodb/blob/master/doc/src/site/sphinx/PoweredBy.rst).
 
-## Latest compatible versions##
+
+## Requirements##
+
+This library requires Apache Spark, Scala 2.10 or Scala 2.11, Casbah 2.8.X
+
+#### Latest compatible versions####
 
 | spark-MongoDB | Apache Spark | MongoDB |
 | ------------- | ------------- | -------- |
 | 0.10.x | 1.5.x | 3.0.x |
 | 0.8.2 - 0.9.2 | 1.4.0 | 3.0.x |
 | 0.8.1 | 1.3.0 | 3.0.x |
 | 0.8.0 | 1.2.1 | 3.0.x |
 
 ## How to use Spark-MongoDB##
 
-Requirements of this project can be found in [about] ().
-
 There also exists a [First Steps] () document where we show some simple examples.

From fe13fbd1b2952b91b925e00cad43e1773436fc97 Mon Sep 17 00:00:00 2001
From: Pedro Madrigal Marina
Date: Thu, 26 Nov 2015 16:35:28 +0100
Subject: [PATCH 09/17] Update First_Steps.rst

---
 doc/src/site/sphinx/First_Steps.rst | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/doc/src/site/sphinx/First_Steps.rst b/doc/src/site/sphinx/First_Steps.rst
index 1853e4f..f0e5cfc 100644
--- a/doc/src/site/sphinx/First_Steps.rst
+++ b/doc/src/site/sphinx/First_Steps.rst
@@ -3,6 +3,21 @@ First steps
 
 We are going to introduce how to use our MongoDB datasource for Apache Spark.
 
+Table of Contents
+*****************
+
+- `Using the library <#using-the-library>`__
+
+- `Configuration parameters <#configuration-parameters>`__
+
+- `Examples <#examples>`__
+
+  - `Scala API <#scala-api>`__
+  - `Python API <#python-api>`__
+  - `R API <#r-api>`__
+
+
+
 Using the library
 =================
@@ -125,7 +140,7 @@ To save a DataFrame in MongoDB you should use the saveToMongodb() function as fo
 
     import com.mongodb.casbah.{WriteConcern => MongodbWriteConcern}
     import com.stratio.datasource.mongodb._
     import MongodbConfig._
-    val saveConfig = MongodbConfigBuilder(Map(Host -> List("localhost:27017"), Database -> "highschool", Collection -> "students", SamplingRatio -> 1.0, WriteConcern -> MongodbWriteConcern.Normal, SplitKey -> "_id", SplitSize -> 8, SplitKey -> "_id"))
+    val saveConfig = MongodbConfigBuilder(Map(Host -> List("localhost:27017"), Database -> "highschool", Collection -> "students", SamplingRatio -> 1.0, WriteConcern -> MongodbWriteConcern.Normal, SplitSize -> 8, SplitKey -> "_id"))
     dataFrame.saveToMongodb(saveConfig.build)

From f9abddb24c83b31686445710857376a9eebc1073 Mon Sep 17 00:00:00 2001
From: pmadrigal
Date: Thu, 26 Nov 2015 17:05:30 +0100
Subject: [PATCH 10/17] doc updated

---
 README.md | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/README.md b/README.md
index ddefafb..a54143b 100644
--- a/README.md
+++ b/README.md
@@ -24,6 +24,13 @@ There also exists a [First Steps] () document where we show some simple examples.
 
+- [Using the library](https://github.com/Stratio/spark-mongodb/blob/master/doc/src/site/sphinx/First_Steps.rst#using-the-library)
+- [Configuration parameters](https://github.com/Stratio/spark-mongodb/blob/master/doc/src/site/sphinx/First_Steps.rst#configuration-parameters)
+- [Examples](https://github.com/Stratio/spark-mongodb/blob/master/doc/src/site/sphinx/First_Steps.rst#examples)
+  - [Scala API](https://github.com/Stratio/spark-mongodb/blob/master/doc/src/site/sphinx/First_Steps.rst#scala-api)
+  - [Python API](https://github.com/Stratio/spark-mongodb/blob/master/doc/src/site/sphinx/First_Steps.rst#python-api)
+  - [R API](https://github.com/Stratio/spark-mongodb/blob/master/doc/src/site/sphinx/First_Steps.rst#r-api)
+
 # License #

From 6f84f5905b94b170aecfb39ecab8118e155368c8 Mon Sep 17 00:00:00 2001
From: pmadrigal
Date: Thu, 26 Nov 2015 17:05:57 +0100
Subject: [PATCH 11/17] support for double in bigint type

---
 .../com/stratio/datasource/mongodb/schema/JsonSupport.scala | 1 +
 1 file changed, 1 insertion(+)

diff --git a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/JsonSupport.scala b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/JsonSupport.scala
index 533392f..aa8e3c8 100644
--- a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/JsonSupport.scala
+++ b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/JsonSupport.scala
@@ -67,6 +67,7 @@ trait JsonSupport {
     value match {
       case value: java.lang.Integer => value.asInstanceOf[Int].toLong
       case value: java.lang.Long => value.asInstanceOf[Long]
+      case value: java.lang.Double => value.asInstanceOf[Double].toLong
     }
   }

From c0896ab35033bbc531e8a811c48ede6615eb0d2c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Fco=2E=20P=C3=A9rez=20Hidalgo?=
Date: Tue, 29 Dec 2015 12:03:08 +0100
Subject: [PATCH 12/17] Changed the way the returned rows are built to make
 them include the schema. This should enable new functionality on the Spark
 side.
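
For illustration, a minimal sketch of what schema-carrying rows allow
(this snippet is not part of the diff below; the "name" column is
hypothetical):

    import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
    import org.apache.spark.sql.types.{StringType, StructField, StructType}

    val schema = StructType(Seq(StructField("name", StringType, nullable = true)))
    val row    = new GenericRowWithSchema(Array[Any]("Alice"), schema)

    row.schema                 // the attached StructType is now reachable
    row.getAs[String]("name")  // "Alice": access by field name works

A plain GenericRow carries no schema, so row.fieldIndex("name") -- and with
it name-based getAs -- throws; rows built as GenericRowWithSchema keep the
column metadata alongside the values.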
---
 .../datasource/mongodb/schema/MongodbRowConverter.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala
index c47cb3b..8af8708 100644
--- a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala
+++ b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala
@@ -19,7 +19,7 @@ import com.mongodb.casbah.Imports._
 import com.stratio.datasource.schema.RowConverter
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.catalyst.expressions.GenericRow
+import org.apache.spark.sql.catalyst.expressions.{GenericRow, GenericRowWithSchema}
 import org.apache.spark.sql.types.{ArrayType, DataType, StructField, StructType}
 
 import scala.collection.mutable.ArrayBuffer
@@ -70,7 +70,7 @@
       json.get(name).flatMap(v => Option(v)).map(
         toSQL(_, dataType)).orNull
     }
-    Row.fromSeq(values)
+    new GenericRowWithSchema(values.toArray, schema)
   }
 
 /**

From ddcbf41dcd6f248efd41b075d388bad0fdd33083 Mon Sep 17 00:00:00 2001
From: WOO-CHEOL CHOI
Date: Thu, 7 Jan 2016 15:32:14 +0900
Subject: [PATCH 13/17] Null-safe equality comparison

---
 .../com/stratio/datasource/mongodb/reader/MongodbReader.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/reader/MongodbReader.scala b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/reader/MongodbReader.scala
index bc024a3..92ef03a 100644
--- a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/reader/MongodbReader.scala
+++ b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/reader/MongodbReader.scala
@@ -107,6 +107,8 @@ class MongodbReader(
       sFilters.foreach {
         case EqualTo(attribute, value) =>
           queryBuilder.put(attribute).is(value)
+        case EqualNullSafe(attribute, value) =>
+          queryBuilder.put(attribute).is(value)
         case GreaterThan(attribute, value) =>
           queryBuilder.put(attribute).greaterThan(value)
         case GreaterThanOrEqual(attribute, value) =>

From 7566e8608208cf7221fffb5fffc4e1b0f6255552 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Fco=2E=20P=C3=A9rez=20Hidalgo?=
Date: Thu, 7 Jan 2016 09:50:54 +0100
Subject: [PATCH 14/17] `pruneSchema` amortized time performance improvement:
 (N: number of columns) from O(N^2) to O(N)

---
 .../stratio/datasource/mongodb/MongodbRelation.scala | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/MongodbRelation.scala b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/MongodbRelation.scala
index cdcdad1..646021d 100644
--- a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/MongodbRelation.scala
+++ b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/MongodbRelation.scala
@@ -122,9 +122,13 @@ object MongodbRelation {
    */
   def pruneSchema(
     schema: StructType,
-    requiredColumns: Array[String]): StructType =
+    requiredColumns: Array[String]): StructType = {
+
+    val name2sfield: Map[String, StructField] = schema.fields.map(f => f.name -> f).toMap
     StructType(
-      requiredColumns.flatMap(column =>
-        schema.fields.find(_.name == column)))
+      requiredColumns.flatMap(name2sfield.get(_))
+    )
+
+  }
 
 }
\ No newline at end of file

From 0b50c66387bdaad9cb7d1c7fb6206d2b126ae851 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Fco=2E=20P=C3=A9rez=20Hidalgo?=
Date: Thu, 7 Jan 2016 15:08:08 +0100
Subject: [PATCH 15/17] Adds support for random access to array elements.
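
A minimal sketch of the intended behaviour (not part of the diff below; the
"scores" column, its element type and the index are hypothetical):

    import org.apache.spark.sql.types._

    val schema = StructType(Seq(StructField("scores", ArrayType(IntegerType))))

    // Pruning the required column ("scores", Some(1)) yields a field named
    // "scores[1]" of the element type, whose metadata records the source
    // column and index: {"colname": "scores", "idx": 1}.
    // During row conversion, recordAsRow reads json("scores"), converts it
    // with toSQL(_, ArrayType(IntegerType, true)) and, when index 1 is in
    // bounds, emits that element; out-of-bounds accesses yield null.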
---
 .../datasource/mongodb/MongodbRelation.scala  | 30 +++++++++++++++++--
 .../mongodb/schema/MongodbRowConverter.scala  | 13 ++++++--
 2 files changed, 39 insertions(+), 4 deletions(-)

diff --git a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/MongodbRelation.scala b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/MongodbRelation.scala
index cdcdad1..89f3474 100644
--- a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/MongodbRelation.scala
+++ b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/MongodbRelation.scala
@@ -123,8 +123,34 @@ object MongodbRelation {
   def pruneSchema(
     schema: StructType,
     requiredColumns: Array[String]): StructType =
+    pruneSchema(schema, requiredColumns.map(_ -> None): Array[(String, Option[Int])])
+
+
+  /**
+   * Prune whole schema in order to fit with
+   * required columns taking in consideration nested columns (array elements) in Spark SQL statement.
+   * @param schema Whole field projection schema.
+   * @param requiredColumnsWithIndex Required fields in statement including index within field for random accesses.
+   * @return A new pruned schema
+   */
+  private[this] def pruneSchema(
+    schema: StructType,
+    requiredColumnsWithIndex: Array[(String, Option[Int])]): StructType = {
+    val name2sfield: Map[String, StructField] = schema.fields.map(f => f.name -> f).toMap
     StructType(
-      requiredColumns.flatMap(column =>
-        schema.fields.find(_.name == column)))
+      requiredColumnsWithIndex.flatMap {
+        case (colname, None) => name2sfield.get(colname)
+        case (colname, Some(idx)) => name2sfield.get(colname) collect {
+          case field @ StructField(name, ArrayType(et,_), nullable, _) =>
+            val mdataBuilder = new MetadataBuilder
+            //Non-functional area
+            mdataBuilder.putLong("idx", idx.toLong)
+            mdataBuilder.putString("colname", name)
+            //End of non-functional area
+            StructField(s"$name[$idx]", et, true, mdataBuilder.build())
+        }
+      }
+    )
+  }
 
 }
\ No newline at end of file
diff --git a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala
index 8af8708..b4285c2 100644
--- a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala
+++ b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala
@@ -19,8 +19,8 @@ import com.mongodb.casbah.Imports._
 import com.stratio.datasource.schema.RowConverter
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.catalyst.expressions.{GenericRow, GenericRowWithSchema}
+import org.apache.spark.sql.catalyst.expressions.{GenericRowWithSchema, GenericRow}
-import org.apache.spark.sql.types.{ArrayType, DataType, StructField, StructType}
+import org.apache.spark.sql.types._
 
 import scala.collection.mutable.ArrayBuffer
@@ -65,7 +65,15 @@ object MongodbRowConverter extends RowConverter[DBObject]
   def recordAsRow(
     json: Map[String, AnyRef],
     schema: StructType): Row = {
+
     val values: Seq[Any] = schema.fields.map {
+      case StructField(name, et, _, mdata)
+        if(mdata.contains("idx") && mdata.contains("colname")) =>
+        val colName = mdata.getString("colname")
+        val idx = mdata.getLong("idx").toInt
+        json.get(colName).flatMap(v => Option(v)).map(toSQL(_, ArrayType(et, true))).collect {
+          case elemsList: ArrayBuffer[_] if((0 until elemsList.size) contains idx) => elemsList(idx)
+        } orNull
       case StructField(name, dataType, _, _) =>
         json.get(name).flatMap(v => Option(v)).map(
           toSQL(_, dataType)).orNull
@@ -73,6 +81,7 @@
     new GenericRowWithSchema(values.toArray, schema)
   }
 
+
   /**
    * Given a schema, it converts a Row into a DBObject
    * @param row Row to be converted

From 374263ae80be590e04086c9dccd61016c3d0f789 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Fco=2E=20P=C3=A9rez=20Hidalgo?=
Date: Thu, 7 Jan 2016 17:47:26 +0100
Subject: [PATCH 16/17] Changed matching with ArrayBuffer to matching with Seq
 in order to avoid hazards derived from changes in the mongodb API
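
For illustration, a minimal sketch of why matching on the Seq trait is more
robust (not part of the diff below; the helper and sample values are
hypothetical):

    import scala.collection.mutable.ArrayBuffer

    def secondOf(xs: Any): Any = xs match {
      case elems: Seq[_] if (0 until elems.size) contains 1 => elems(1)
      case _ => null
    }

    secondOf(ArrayBuffer(1, 2, 3)) // 2
    secondOf(List(1, 2, 3))        // 2; a `case _: ArrayBuffer[_]` pattern
                                   // would silently fail to match here

Any concrete sequence the driver happens to return satisfies the same
pattern, so a change of collection type in the mongodb API no longer breaks
the conversion.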
---
 .../stratio/datasource/mongodb/schema/MongodbRowConverter.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala
index b4285c2..f6ef5d1 100644
--- a/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala
+++ b/spark-mongodb/src/main/scala/com/stratio/datasource/mongodb/schema/MongodbRowConverter.scala
@@ -72,7 +72,7 @@ object MongodbRowConverter extends RowConverter[DBObject]
         val colName = mdata.getString("colname")
         val idx = mdata.getLong("idx").toInt
         json.get(colName).flatMap(v => Option(v)).map(toSQL(_, ArrayType(et, true))).collect {
-          case elemsList: ArrayBuffer[_] if((0 until elemsList.size) contains idx) => elemsList(idx)
+          case elemsList: Seq[_] if((0 until elemsList.size) contains idx) => elemsList(idx)
         } orNull
       case StructField(name, dataType, _, _) =>
         json.get(name).flatMap(v => Option(v)).map(

From 7a0440449b677124eb799abc032a4ace2e32e450 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Pablo=20Fco=2E=20P=C3=A9rez=20Hidalgo?=
Date: Fri, 8 Jan 2016 10:59:02 +0100
Subject: [PATCH 17/17] Spark version update to 1.5.2

---
 spark-mongodb-examples/pom.xml | 2 +-
 spark-mongodb_2.10/pom.xml     | 2 +-
 spark-mongodb_2.11/pom.xml     | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/spark-mongodb-examples/pom.xml b/spark-mongodb-examples/pom.xml
index e69693c..0b88d40 100644
--- a/spark-mongodb-examples/pom.xml
+++ b/spark-mongodb-examples/pom.xml
@@ -41,7 +41,7 @@
         2.10
         0.10.1
-        1.5.1
+        1.5.2

diff --git a/spark-mongodb_2.10/pom.xml b/spark-mongodb_2.10/pom.xml
index 3dc51a7..91fcd04 100644
--- a/spark-mongodb_2.10/pom.xml
+++ b/spark-mongodb_2.10/pom.xml
@@ -33,7 +33,7 @@
         spark-mongodb
         2.2.5
         3.2.1
-        1.5.1
+        1.5.2
         2.8.0

diff --git a/spark-mongodb_2.11/pom.xml b/spark-mongodb_2.11/pom.xml
index a740410..a2fb122 100644
--- a/spark-mongodb_2.11/pom.xml
+++ b/spark-mongodb_2.11/pom.xml
@@ -33,7 +33,7 @@
         spark-mongodb
         2.2.5
         3.2.1
-        1.5.1
+        1.5.2
         2.8.0