From f64b6e06b4b085253bc51a590a85bcf7afc59074 Mon Sep 17 00:00:00 2001 From: Pasha Finkelshteyn Date: Wed, 19 Jan 2022 21:34:50 +0300 Subject: [PATCH 001/213] [maven-release-plugin] prepare for next development iteration --- core/3.2/pom_2.12.xml | 2 +- dummy/pom.xml | 2 +- examples/pom-3.2_2.12.xml | 2 +- kotlin-spark-api/3.2/pom_2.12.xml | 2 +- kotlin-spark-api/common/pom.xml | 2 +- pom.xml | 4 ++-- pom_2.12.xml | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/core/3.2/pom_2.12.xml b/core/3.2/pom_2.12.xml index 8cbd069b..8ed1eb4f 100644 --- a/core/3.2/pom_2.12.xml +++ b/core/3.2/pom_2.12.xml @@ -8,7 +8,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent_2.12 - 1.0.3 + 1.0.4-SNAPSHOT ../../pom_2.12.xml diff --git a/dummy/pom.xml b/dummy/pom.xml index 041d9c4d..1fd6ab08 100644 --- a/dummy/pom.xml +++ b/dummy/pom.xml @@ -3,7 +3,7 @@ kotlin-spark-api-parent org.jetbrains.kotlinx.spark - 1.0.3 + 1.0.4-SNAPSHOT pom 4.0.0 diff --git a/examples/pom-3.2_2.12.xml b/examples/pom-3.2_2.12.xml index 5a4510d0..b5267352 100644 --- a/examples/pom-3.2_2.12.xml +++ b/examples/pom-3.2_2.12.xml @@ -9,7 +9,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent_2.12 - 1.0.3 + 1.0.4-SNAPSHOT ../pom_2.12.xml diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index c0eabd06..7195f912 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -9,7 +9,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent_2.12 - 1.0.3 + 1.0.4-SNAPSHOT ../../pom_2.12.xml jar diff --git a/kotlin-spark-api/common/pom.xml b/kotlin-spark-api/common/pom.xml index bdbb3cca..19959fdb 100644 --- a/kotlin-spark-api/common/pom.xml +++ b/kotlin-spark-api/common/pom.xml @@ -8,7 +8,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent - 1.0.3 + 1.0.4-SNAPSHOT ../.. diff --git a/pom.xml b/pom.xml index c0cd569e..4f0974c5 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ Parent project for Kotlin for Apache Spark org.jetbrains.kotlinx.spark kotlin-spark-api-parent - 1.0.3 + 1.0.4-SNAPSHOT pom @@ -279,7 +279,7 @@ scm:git:https://github.com/JetBrains/kotlin-spark-api.git https://github.com/JetBrains/kotlin-spark-api - kotlin-spark-3.2-1.0.3 + HEAD diff --git a/pom_2.12.xml b/pom_2.12.xml index 1fc53d70..87e64994 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -8,7 +8,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent - 1.0.3 + 1.0.4-SNAPSHOT pom.xml pom From 3c638538d7fda1a84a38341c49c2c44d0da19579 Mon Sep 17 00:00:00 2001 From: Pasha Finkelshteyn Date: Wed, 19 Jan 2022 21:43:48 +0300 Subject: [PATCH 002/213] Update README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 3f518316..f090f0cf 100644 --- a/README.md +++ b/README.md @@ -34,6 +34,7 @@ We have opened a Spark Project Improvement Proposal: [Kotlin support for Apache | 3.0.0+ | 2.12 | kotlin-spark-api-3.0:1.0.2 | | 2.4.1+ | 2.12 | kotlin-spark-api-2.4_2.12:1.0.2 | | 2.4.1+ | 2.11 | kotlin-spark-api-2.4_2.11:1.0.2 | +| 3.2.0+ | 2.12 | kotlin-spark-api-2.4_2.12:1.0.3 | ## Releases From bfbc6ae4fab6b6252a00ec27ffbf810ea475173b Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 11:53:09 +0100 Subject: [PATCH 003/213] adding kdocs --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 32935f40..075d908c 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -21,6 +21,7 @@ package org.jetbrains.kotlinx.spark.api +import org.apache.hadoop.shaded.org.apache.commons.math3.exception.util.ArgUtils import org.apache.spark.SparkContext import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.api.java.function.* @@ -192,15 +193,33 @@ private fun kotlinClassEncoder(schema: DataType, kClass: KClass<*>): Encoder ) } +/** + * (Kotlin-specific) + * Returns a new Dataset that contains the result of applying [func] to each element. + */ inline fun Dataset.map(noinline func: (T) -> R): Dataset = map(MapFunction(func), encoder()) +/** + * (Kotlin-specific) + * Returns a new Dataset by first applying a function to all elements of this Dataset, + * and then flattening the results. + */ inline fun Dataset.flatMap(noinline func: (T) -> Iterator): Dataset = flatMap(func, encoder()) +/** + * (Kotlin-specific) + * Returns a new Dataset by flattening. This means that a Dataset of an iterable such as + * `listOf(listOf(1, 2, 3), listOf(4, 5, 6))` will be flattened to a Dataset of `listOf(1, 2, 3, 4, 5, 6).` + */ inline fun > Dataset.flatten(): Dataset = flatMap(FlatMapFunction { it.iterator() }, encoder()) +/** + * (Kotlin-specific) + * Returns a [KeyValueGroupedDataset] where the data is grouped by the given key [func]. + */ inline fun Dataset.groupByKey(noinline func: (T) -> R): KeyValueGroupedDataset = groupByKey(MapFunction(func), encoder()) From ced2ef87e916dc33e6594a839cb22c64b34768d2 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 12:05:23 +0100 Subject: [PATCH 004/213] more docs --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 43 ++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 075d908c..134f17a5 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -211,7 +211,7 @@ inline fun Dataset.flatMap(noinline func: (T) -> Iterator): /** * (Kotlin-specific) * Returns a new Dataset by flattening. This means that a Dataset of an iterable such as - * `listOf(listOf(1, 2, 3), listOf(4, 5, 6))` will be flattened to a Dataset of `listOf(1, 2, 3, 4, 5, 6).` + * `listOf(listOf(1, 2, 3), listOf(4, 5, 6))` will be flattened to a Dataset of `listOf(1, 2, 3, 4, 5, 6)`. */ inline fun > Dataset.flatten(): Dataset = flatMap(FlatMapFunction { it.iterator() }, encoder()) @@ -223,18 +223,59 @@ inline fun > Dataset.flatten(): Dataset = inline fun Dataset.groupByKey(noinline func: (T) -> R): KeyValueGroupedDataset = groupByKey(MapFunction(func), encoder()) +/** + * (Kotlin-specific) + * Returns a new Dataset that contains the result of applying [func] to each partition. + */ inline fun Dataset.mapPartitions(noinline func: (Iterator) -> Iterator): Dataset = mapPartitions(func, encoder()) +/** + * (Kotlin-specific) + * Filters rows to eliminate [null] values. + */ @Suppress("UNCHECKED_CAST") fun Dataset.filterNotNull(): Dataset = filter { it != null } as Dataset +/** + * Returns a new [KeyValueGroupedDataset] where the given function [func] has been applied + * to the data. The grouping key is unchanged by this. + * + * ```kotlin + * // Create values grouped by key from a Dataset> + * ds.groupByKey { it._1 }.mapValues { it._2 } + * ``` + */ inline fun KeyValueGroupedDataset.mapValues(noinline func: (VALUE) -> R): KeyValueGroupedDataset = mapValues(MapFunction(func), encoder()) +/** + * (Kotlin-specific) + * Applies the given function to each group of data. For each unique group, the function will + * be passed the group key and an iterator that contains all the elements in the group. The + * function can return an element of arbitrary type which will be returned as a new [Dataset]. + * + * This function does not support partial aggregation, and as a result requires shuffling all + * the data in the [Dataset]. If an application intends to perform an aggregation over each + * key, it is best to use the reduce function or an + * [org.apache.spark.sql.expressions.Aggregator]. + * + * Internally, the implementation will spill to disk if any given group is too large to fit into + * memory. However, users must take care to avoid materializing the whole iterator for a group + * (for example, by calling [toList]) unless they are sure that this is possible given the memory + * constraints of their cluster. + */ inline fun KeyValueGroupedDataset.mapGroups(noinline func: (KEY, Iterator) -> R): Dataset = mapGroups(MapGroupsFunction(func), encoder()) +/** + * (Kotlin-specific) + * Reduces the elements of each group of data using the specified binary function. + * The given function must be commutative and associative or the result may be non-deterministic. + * + * Note that you need to use [reduceGroupsK] always instead of the Java- or Scala-specific + * [KeyValueGroupedDataset.reduceGroups] to make the compiler work. + */ inline fun KeyValueGroupedDataset.reduceGroupsK(noinline func: (VALUE, VALUE) -> VALUE): Dataset> = reduceGroups(ReduceFunction(func)) .map { t -> t._1 to t._2 } From 9de5ae7a1452fb80f8ff1c09bfa6a640232a2d59 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 12:45:14 +0100 Subject: [PATCH 005/213] more docs --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 129 +++++++++++++++++- 1 file changed, 127 insertions(+), 2 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 134f17a5..59d12e6b 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -288,23 +288,63 @@ inline fun KeyValueGroupedDataset.reduc inline fun Dataset.reduceK(noinline func: (T, T) -> T): T = reduce(ReduceFunction(func)) +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "keys" or [Tuple2._1] values. + */ @JvmName("takeKeysTuple2") inline fun Dataset>.takeKeys(): Dataset = map { it._1() } +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "keys" or [Pair.first] values. + */ inline fun Dataset>.takeKeys(): Dataset = map { it.first } +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "keys" or [Arity2._1] values. + */ @JvmName("takeKeysArity2") inline fun Dataset>.takeKeys(): Dataset = map { it._1 } +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "values" or [Tuple2._2] values. + */ @JvmName("takeValuesTuple2") inline fun Dataset>.takeValues(): Dataset = map { it._2() } +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "values" or [Pair.second] values. + */ inline fun Dataset>.takeValues(): Dataset = map { it.second } +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "values" or [Arity2._2] values. + */ @JvmName("takeValuesArity2") inline fun Dataset>.takeValues(): Dataset = map { it._2 } - +/** + * (Kotlin-specific) + * Applies the given function to each group of data. For each unique group, the function will + * be passed the group key and an iterator that contains all the elements in the group. The + * function can return an iterator containing elements of an arbitrary type which will be returned + * as a new [Dataset]. + * + * This function does not support partial aggregation, and as a result requires shuffling all + * the data in the [Dataset]. If an application intends to perform an aggregation over each + * key, it is best to use the reduce function or an + * [org.apache.spark.sql.expressions.Aggregator]. + * + * Internally, the implementation will spill to disk if any given group is too large to fit into + * memory. However, users must take care to avoid materializing the whole iterator for a group + * (for example, by calling [toList]) unless they are sure that this is possible given the memory + * constraints of their cluster. + */ inline fun KeyValueGroupedDataset.flatMapGroups( noinline func: (key: K, values: Iterator) -> Iterator, ): Dataset = flatMapGroups( @@ -312,12 +352,57 @@ inline fun KeyValueGroupedDataset.flatMapGroups( encoder() ) +/** + * (Kotlin-specific) + * Returns the group state value if it exists, else [null]. + * This is comparable to [GroupState.getOption], but instead utilises Kotlin's nullability features + * to get the same result. + */ fun GroupState.getOrNull(): S? = if (exists()) get() else null +/** + * (Kotlin-specific) + * Allows the group state object to be used as a delegate. Will be [null] if it does not exist. + * + * For example: + * ```kotlin + * groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> + * var s by state + * ... + * } + * ``` + */ operator fun GroupState.getValue(thisRef: Any?, property: KProperty<*>): S? = getOrNull() -operator fun GroupState.setValue(thisRef: Any?, property: KProperty<*>, value: S?): Unit = update(value) +/** + * (Kotlin-specific) + * Allows the group state object to be used as a delegate. Will be [null] if it does not exist. + * + * For example: + * ```kotlin + * groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> + * var s by state + * ... + * } + * ``` + */ +operator fun GroupState.setValue(thisRef: Any?, property: KProperty<*>, value: S?): Unit = update(value) +/** + * (Kotlin-specific) + * Applies the given function to each group of data, while maintaining a user-defined per-group + * state. The result Dataset will represent the objects returned by the function. + * For a static batch Dataset, the function will be invoked once per group. For a streaming + * Dataset, the function will be invoked for each group repeatedly in every trigger, and + * updates to each group's state will be saved across invocations. + * See [org.apache.spark.sql.streaming.GroupState] for more details. + * + * @param S The type of the user-defined state. Must be encodable to Spark SQL types. + * @param U The type of the output objects. Must be encodable to Spark SQL types. + * @param func Function to be called on every group. + * + * See [Encoder] for more details on what types are encodable to Spark SQL. + */ inline fun KeyValueGroupedDataset.mapGroupsWithState( noinline func: (key: K, values: Iterator, state: GroupState) -> U, ): Dataset = mapGroupsWithState( @@ -326,6 +411,22 @@ inline fun KeyValueGroupedDataset.mapGroupsWi encoder() ) +/** + * (Kotlin-specific) + * Applies the given function to each group of data, while maintaining a user-defined per-group + * state. The result Dataset will represent the objects returned by the function. + * For a static batch Dataset, the function will be invoked once per group. For a streaming + * Dataset, the function will be invoked for each group repeatedly in every trigger, and + * updates to each group's state will be saved across invocations. + * See [org.apache.spark.sql.streaming.GroupState] for more details. + * + * @param S The type of the user-defined state. Must be encodable to Spark SQL types. + * @param U The type of the output objects. Must be encodable to Spark SQL types. + * @param func Function to be called on every group. + * @param timeoutConf Timeout configuration for groups that do not receive data for a while. + * + * See [Encoder] for more details on what types are encodable to Spark SQL. + */ inline fun KeyValueGroupedDataset.mapGroupsWithState( timeoutConf: GroupStateTimeout, noinline func: (key: K, values: Iterator, state: GroupState) -> U, @@ -336,6 +437,23 @@ inline fun KeyValueGroupedDataset.mapGroupsWi timeoutConf ) +/** + * (Kotlin-specific) + * Applies the given function to each group of data, while maintaining a user-defined per-group + * state. The result Dataset will represent the objects returned by the function. + * For a static batch Dataset, the function will be invoked once per group. For a streaming + * Dataset, the function will be invoked for each group repeatedly in every trigger, and + * updates to each group's state will be saved across invocations. + * See [GroupState] for more details. + * + * @param S The type of the user-defined state. Must be encodable to Spark SQL types. + * @param U The type of the output objects. Must be encodable to Spark SQL types. + * @param func Function to be called on every group. + * @param outputMode The output mode of the function. + * @param timeoutConf Timeout configuration for groups that do not receive data for a while. + * + * See [Encoder] for more details on what types are encodable to Spark SQL. + */ inline fun KeyValueGroupedDataset.flatMapGroupsWithState( outputMode: OutputMode, timeoutConf: GroupStateTimeout, @@ -348,6 +466,13 @@ inline fun KeyValueGroupedDataset.flatMapGrou timeoutConf ) +/** + * (Kotlin-specific) + * Applies the given function to each cogrouped data. For each unique group, the function will + * be passed the grouping key and 2 iterators containing all elements in the group from + * [Dataset] [this] and [other]. The function can return an iterator containing elements of an + * arbitrary type which will be returned as a new [Dataset]. + */ inline fun KeyValueGroupedDataset.cogroup( other: KeyValueGroupedDataset, noinline func: (key: K, left: Iterator, right: Iterator) -> Iterator, From 15d41b0f5db720b9d407335d5ea69e4f789a9e22 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 15:38:43 +0100 Subject: [PATCH 006/213] more docs and deprecated downcast() --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 60 ++++++++++++++++++- 1 file changed, 58 insertions(+), 2 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 59d12e6b..6c656046 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -482,13 +482,69 @@ inline fun KeyValueGroupedDataset.cogroup( encoder() ) +/** DEPRECATED: Use [as] or [to] for this. */ +@Deprecated( + message = "Deprecated, since we already have `as`() and to().", + replaceWith = ReplaceWith("this.to()"), + level = DeprecationLevel.ERROR, +) inline fun Dataset.downcast(): Dataset = `as`(encoder()) + +/** + * (Kotlin-specific) + * Returns a new Dataset where each record has been mapped on to the specified type. The + * method used to map columns depend on the type of [R]: + * - When [R] is a class, fields for the class will be mapped to columns of the same name + * (case sensitivity is determined by [spark.sql.caseSensitive]). + * - When [R] is a tuple, the columns will be mapped by ordinal (i.e. the first column will + * be assigned to `_1`). + * - When [R] is a primitive type (i.e. [String], [Int], etc.), then the first column of the + * `DataFrame` will be used. + * + * If the schema of the Dataset does not match the desired [R] type, you can use [Dataset.select]/[selectTyped] + * along with [Dataset.alias] or [as]/[to] to rearrange or rename as required. + * + * Note that [as]/[to] only changes the view of the data that is passed into typed operations, + * such as [map], and does not eagerly project away any columns that are not present in + * the specified class. + * + * @see to as alias for [as] + */ inline fun Dataset<*>.`as`(): Dataset = `as`(encoder()) + +/** + * (Kotlin-specific) + * Returns a new Dataset where each record has been mapped on to the specified type. The + * method used to map columns depend on the type of [R]: + * - When [R] is a class, fields for the class will be mapped to columns of the same name + * (case sensitivity is determined by [spark.sql.caseSensitive]). + * - When [R] is a tuple, the columns will be mapped by ordinal (i.e. the first column will + * be assigned to `_1`). + * - When [R] is a primitive type (i.e. [String], [Int], etc.), then the first column of the + * `DataFrame` will be used. + * + * If the schema of the Dataset does not match the desired [R] type, you can use [Dataset.select]/[selectTyped] + * along with [Dataset.alias] or [as]/[to] to rearrange or rename as required. + * + * Note that [as]/[to] only changes the view of the data that is passed into typed operations, + * such as [map], and does not eagerly project away any columns that are not present in + * the specified class. + * + * @see as as alias for [to] + */ inline fun Dataset<*>.to(): Dataset = `as`(encoder()) -inline fun Dataset.forEach(noinline func: (T) -> Unit) = foreach(ForeachFunction(func)) +/** + * (Kotlin-specific) + * Applies a function [func] to all rows. + */ +inline fun Dataset.forEach(noinline func: (T) -> Unit): Unit = foreach(ForeachFunction(func)) -inline fun Dataset.forEachPartition(noinline func: (Iterator) -> Unit) = +/** + * (Kotlin-specific) + * Runs [func] on each partition of this Dataset. + */ +inline fun Dataset.forEachPartition(noinline func: (Iterator) -> Unit): Unit = foreachPartition(ForeachPartitionFunction(func)) /** From 195da8daec512ca86710063573709525e940a51b Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 16:46:18 +0100 Subject: [PATCH 007/213] 2 todo's left, the rest of ApiV1.kt is now kdocced --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 69 ++++++++++++++----- 1 file changed, 53 insertions(+), 16 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 6c656046..09c64213 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -37,6 +37,7 @@ import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions import scala.Product import scala.Tuple2 import scala.reflect.ClassTag +import scala.reflect.api.TypeTags.TypeTag import java.beans.PropertyDescriptor import java.math.BigDecimal import java.sql.Date @@ -83,7 +84,7 @@ import kotlin.reflect.full.primaryConstructor import kotlin.to @JvmField -val ENCODERS = mapOf, Encoder<*>>( +val ENCODERS: Map, Encoder<*>> = mapOf( Boolean::class to BOOLEAN(), Byte::class to BYTE(), Short::class to SHORT(), @@ -165,6 +166,9 @@ inline fun List.toDS(spark: SparkSession): Dataset = @OptIn(ExperimentalStdlibApi::class) inline fun encoder(): Encoder = generateEncoder(typeOf(), T::class) +/** + * @see encoder + */ fun generateEncoder(type: KType, cls: KClass<*>): Encoder { @Suppress("UNCHECKED_CAST") return when { @@ -173,7 +177,8 @@ fun generateEncoder(type: KType, cls: KClass<*>): Encoder { } as Encoder } -private fun isSupportedClass(cls: KClass<*>): Boolean = cls.isData +private fun isSupportedClass(cls: KClass<*>): Boolean = + cls.isData || cls.isSubclassOf(Map::class) || cls.isSubclassOf(Iterable::class) || cls.isSubclassOf(Product::class) @@ -550,18 +555,25 @@ inline fun Dataset.forEachPartition(noinline func: (Iterator) /** * It's hard to call `Dataset.debugCodegen` from kotlin, so here is utility for that */ -fun Dataset.debugCodegen() = also { KSparkExtensions.debugCodegen(it) } +fun Dataset.debugCodegen(): Dataset = also { KSparkExtensions.debugCodegen(it) } -val SparkSession.sparkContext +/** + * Returns the Spark context associated with this Spark session. + */ +val SparkSession.sparkContext: SparkContext get() = KSparkExtensions.sparkContext(this) /** * It's hard to call `Dataset.debug` from kotlin, so here is utility for that */ -fun Dataset.debug() = also { KSparkExtensions.debug(it) } +fun Dataset.debug(): Dataset = also { KSparkExtensions.debug(it) } @Suppress("FunctionName") -@Deprecated("Changed to \"`===`\" to better reflect Scala API.", ReplaceWith("this `===` c")) +@Deprecated( + message = "Changed to \"`===`\" to better reflect Scala API.", + replaceWith = ReplaceWith("this `===` c"), + level = DeprecationLevel.ERROR, +) infix fun Column.`==`(c: Column) = `$eq$eq$eq`(c) /** @@ -889,7 +901,10 @@ operator fun Column.rem(other: Any): Column = `$percent`(other) */ operator fun Column.get(key: Any): Column = getItem(key) -fun lit(a: Any) = functions.lit(a) +// TODO deprecate? +fun lit(a: Any): Column = functions.lit(a) + +fun typedLit(literal: Any): Column = functions.lit(literal) /** * Provides a type hint about the expected return value of this column. This information can @@ -996,8 +1011,13 @@ inline fun Dataset.withCached( return cached.executeOnCached().also { cached.unpersist(blockingUnpersist) } } -inline fun Dataset.toList() = KSparkExtensions.collectAsList(to()) -inline fun Dataset<*>.toArray(): Array = to().collect() as Array +/** + * TODO + */ +inline fun Dataset<*>.toList(): List = to().collectAsList() as List +inline fun Dataset<*>.toArray(): Array = to().collect() as Array +//inline fun Dataset.toList() = KSparkExtensions.collectAsList(to()) +//inline fun Dataset<*>.toArray(): Array = to().collect() as Array /** * Selects column based on the column name and returns it as a [Column]. @@ -1014,7 +1034,6 @@ operator fun Dataset.invoke(colName: String): Column = col(colName) * ``` * @see invoke */ - @Suppress("UNCHECKED_CAST") inline fun Dataset.col(column: KProperty1): TypedColumn = col(column.name).`as`() as TypedColumn @@ -1129,6 +1148,14 @@ inline fun = mapOf()): DataType { val primitiveSchema = knownDataTypes[type.classifier] @@ -1228,15 +1255,24 @@ fun schema(type: KType, map: Map = mapOf()): DataType { } } +/** + * The entry point to programming Spark with the Dataset and DataFrame API. + * + * @see org.apache.spark.sql.SparkSession + */ typealias SparkSession = org.apache.spark.sql.SparkSession -fun SparkContext.setLogLevel(level: SparkLogLevel) = setLogLevel(level.name) +/** + * Control our logLevel. This overrides any user-defined log settings. + * @param level The desired log level as [SparkLogLevel]. + */ +fun SparkContext.setLogLevel(level: SparkLogLevel): Unit = setLogLevel(level.name) enum class SparkLogLevel { ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN } -private val knownDataTypes = mapOf( +private val knownDataTypes: Map, DataType> = mapOf( Byte::class to DataTypes.ByteType, Short::class to DataTypes.ShortType, Int::class to DataTypes.IntegerType, @@ -1248,7 +1284,7 @@ private val knownDataTypes = mapOf( LocalDate::class to `DateType$`.`MODULE$`, Date::class to `DateType$`.`MODULE$`, Timestamp::class to `TimestampType$`.`MODULE$`, - Instant::class to `TimestampType$`.`MODULE$` + Instant::class to `TimestampType$`.`MODULE$`, ) private fun transitiveMerge(a: Map, b: Map): Map { @@ -1258,11 +1294,12 @@ private fun transitiveMerge(a: Map, b: Map): Map(val f: (T) -> R) : (T) -> R { + private val values = ConcurrentHashMap() - override fun invoke(x: T) = - values.getOrPut(x, { f(x) }) + + override fun invoke(x: T): R = values.getOrPut(x) { f(x) } } private fun ((T) -> R).memoize(): (T) -> R = Memoize1(this) -private val memoizedSchema = { x: KType -> schema(x) }.memoize() +private val memoizedSchema: (KType) -> DataType = { x: KType -> schema(x) }.memoize() From 0b6c10c97f879175cc59fb0c4ba75fb8fa0a172b Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 20:11:58 +0100 Subject: [PATCH 008/213] 1 todo left, lit() --- .../kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 09c64213..85177d02 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -904,8 +904,6 @@ operator fun Column.get(key: Any): Column = getItem(key) // TODO deprecate? fun lit(a: Any): Column = functions.lit(a) -fun typedLit(literal: Any): Column = functions.lit(literal) - /** * Provides a type hint about the expected return value of this column. This information can * be used by operations such as `select` on a [Dataset] to automatically convert the @@ -1012,12 +1010,14 @@ inline fun Dataset.withCached( } /** - * TODO + * Collects the dataset as list where each item has been mapped to type [T]. */ inline fun Dataset<*>.toList(): List = to().collectAsList() as List + +/** + * Collects the dataset as Array where each item has been mapped to type [T]. + */ inline fun Dataset<*>.toArray(): Array = to().collect() as Array -//inline fun Dataset.toList() = KSparkExtensions.collectAsList(to()) -//inline fun Dataset<*>.toArray(): Array = to().collect() as Array /** * Selects column based on the column name and returns it as a [Column]. From 6ed3424459836479bdf1d45647c0b3602e2db7f4 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 16 Feb 2022 12:01:23 +0100 Subject: [PATCH 009/213] changed default master value to first check the system variables. Updated docs --- .../kotlinx/spark/api/SparkHelper.kt | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt index 3ef0b177..6188daae 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt @@ -19,22 +19,27 @@ */ package org.jetbrains.kotlinx.spark.api +import org.apache.spark.SparkConf import org.apache.spark.sql.SparkSession.Builder import org.apache.spark.sql.UDFRegistration import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR /** - * Wrapper for spark creation which allows to set different spark params + * Wrapper for spark creation which allows setting different spark params. * * @param props spark options, value types are runtime-checked for type-correctness - * @param master [SparkSession.Builder.master] - * @param appName [SparkSession.Builder.appName] + * @param master Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]" to + * run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster. By default, it + * tries to get the system value "spark.master", otherwise it uses "local[*]" + * @param appName Sets a name for the application, which will be shown in the Spark web UI. + * If no application name is set, a randomly generated name will be used. + * @param logLevel Control our logLevel. This overrides any user-defined log settings. * @param func function which will be executed in context of [KSparkSession] (it means that `this` inside block will point to [KSparkSession]) */ @JvmOverloads inline fun withSpark( props: Map = emptyMap(), - master: String = "local[*]", + master: String = SparkConf().get("spark.master", "local[*]"), appName: String = "Kotlin Spark Sample", logLevel: SparkLogLevel = ERROR, func: KSparkSession.() -> Unit, @@ -58,10 +63,17 @@ inline fun withSpark( } +/** + * Wrapper for spark creation which allows setting different spark params. + * + * @param builder A [SparkSession.Builder] object, configured how you want. + * @param logLevel Control our logLevel. This overrides any user-defined log settings. + * @param func function which will be executed in context of [KSparkSession] (it means that `this` inside block will point to [KSparkSession]) + */ @JvmOverloads inline fun withSpark(builder: Builder, logLevel: SparkLogLevel = ERROR, func: KSparkSession.() -> Unit) { builder - .orCreate + .getOrCreate() .apply { KSparkSession(this).apply { sparkContext.setLogLevel(logLevel) From a298239c0f9dd480a9f15d10524709af31712878 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 16 Feb 2022 14:45:23 +0100 Subject: [PATCH 010/213] last doc done --- .../kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 85177d02..e679f561 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -901,7 +901,16 @@ operator fun Column.rem(other: Any): Column = `$percent`(other) */ operator fun Column.get(key: Any): Column = getItem(key) -// TODO deprecate? +/** + * Creates a [Column] of literal value. + * + * The passed in object is returned directly if it is already a [Column]. + * If the object is a Scala Symbol, it is converted into a [Column] also. + * Otherwise, a new [Column] is created to represent the literal value. + * + * This is just a shortcut to the function from [org.apache.spark.sql.functions]. + * For all the functions, simply add `import org.apache.spark.sql.functions.*` to your file. + */ fun lit(a: Any): Column = functions.lit(a) /** From e81292e4663fdb5077d30ec6b6145679ca4175f6 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 21:31:28 +0100 Subject: [PATCH 011/213] updated badge to official as suggested in https://github.com/JetBrains/kotlin-spark-api/issues/110 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f090f0cf..79c669e4 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Kotlin for Apache® Spark™ [![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:org.jetbrains.kotlinx.spark%20AND%20v:1.0.2) [![official JetBrains project](http://jb.gg/badges/incubator.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub) +# Kotlin for Apache® Spark™ [![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:org.jetbrains.kotlinx.spark%20AND%20v:1.0.2) [![official JetBrains project](http://jb.gg/badges/official.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub) Your next API to work with [Apache Spark](https://spark.apache.org/). From 9c1addc06816bbd56f3b513e1ded61ab7f05c258 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 21:31:28 +0100 Subject: [PATCH 012/213] updated badge to official as suggested in https://github.com/JetBrains/kotlin-spark-api/issues/110 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f090f0cf..79c669e4 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Kotlin for Apache® Spark™ [![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:org.jetbrains.kotlinx.spark%20AND%20v:1.0.2) [![official JetBrains project](http://jb.gg/badges/incubator.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub) +# Kotlin for Apache® Spark™ [![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:org.jetbrains.kotlinx.spark%20AND%20v:1.0.2) [![official JetBrains project](http://jb.gg/badges/official.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub) Your next API to work with [Apache Spark](https://spark.apache.org/). From 238d6e8639b1ff0c49a0b2db75f634fe1310edee Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 16 Feb 2022 12:01:23 +0100 Subject: [PATCH 013/213] changed default master value to first check the system variables. Updated docs --- .../kotlinx/spark/api/SparkHelper.kt | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt index 3ef0b177..6188daae 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt @@ -19,22 +19,27 @@ */ package org.jetbrains.kotlinx.spark.api +import org.apache.spark.SparkConf import org.apache.spark.sql.SparkSession.Builder import org.apache.spark.sql.UDFRegistration import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR /** - * Wrapper for spark creation which allows to set different spark params + * Wrapper for spark creation which allows setting different spark params. * * @param props spark options, value types are runtime-checked for type-correctness - * @param master [SparkSession.Builder.master] - * @param appName [SparkSession.Builder.appName] + * @param master Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]" to + * run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster. By default, it + * tries to get the system value "spark.master", otherwise it uses "local[*]" + * @param appName Sets a name for the application, which will be shown in the Spark web UI. + * If no application name is set, a randomly generated name will be used. + * @param logLevel Control our logLevel. This overrides any user-defined log settings. * @param func function which will be executed in context of [KSparkSession] (it means that `this` inside block will point to [KSparkSession]) */ @JvmOverloads inline fun withSpark( props: Map = emptyMap(), - master: String = "local[*]", + master: String = SparkConf().get("spark.master", "local[*]"), appName: String = "Kotlin Spark Sample", logLevel: SparkLogLevel = ERROR, func: KSparkSession.() -> Unit, @@ -58,10 +63,17 @@ inline fun withSpark( } +/** + * Wrapper for spark creation which allows setting different spark params. + * + * @param builder A [SparkSession.Builder] object, configured how you want. + * @param logLevel Control our logLevel. This overrides any user-defined log settings. + * @param func function which will be executed in context of [KSparkSession] (it means that `this` inside block will point to [KSparkSession]) + */ @JvmOverloads inline fun withSpark(builder: Builder, logLevel: SparkLogLevel = ERROR, func: KSparkSession.() -> Unit) { builder - .orCreate + .getOrCreate() .apply { KSparkSession(this).apply { sparkContext.setLogLevel(logLevel) From ca37bc518d6cbce706a3c153ec29c1fd4c31876b Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 11:53:09 +0100 Subject: [PATCH 014/213] adding kdocs --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 32935f40..075d908c 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -21,6 +21,7 @@ package org.jetbrains.kotlinx.spark.api +import org.apache.hadoop.shaded.org.apache.commons.math3.exception.util.ArgUtils import org.apache.spark.SparkContext import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.api.java.function.* @@ -192,15 +193,33 @@ private fun kotlinClassEncoder(schema: DataType, kClass: KClass<*>): Encoder ) } +/** + * (Kotlin-specific) + * Returns a new Dataset that contains the result of applying [func] to each element. + */ inline fun Dataset.map(noinline func: (T) -> R): Dataset = map(MapFunction(func), encoder()) +/** + * (Kotlin-specific) + * Returns a new Dataset by first applying a function to all elements of this Dataset, + * and then flattening the results. + */ inline fun Dataset.flatMap(noinline func: (T) -> Iterator): Dataset = flatMap(func, encoder()) +/** + * (Kotlin-specific) + * Returns a new Dataset by flattening. This means that a Dataset of an iterable such as + * `listOf(listOf(1, 2, 3), listOf(4, 5, 6))` will be flattened to a Dataset of `listOf(1, 2, 3, 4, 5, 6).` + */ inline fun > Dataset.flatten(): Dataset = flatMap(FlatMapFunction { it.iterator() }, encoder()) +/** + * (Kotlin-specific) + * Returns a [KeyValueGroupedDataset] where the data is grouped by the given key [func]. + */ inline fun Dataset.groupByKey(noinline func: (T) -> R): KeyValueGroupedDataset = groupByKey(MapFunction(func), encoder()) From 7e622ec18b38ca16deab32c2203611a35a76d93d Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 12:05:23 +0100 Subject: [PATCH 015/213] more docs --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 43 ++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 075d908c..134f17a5 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -211,7 +211,7 @@ inline fun Dataset.flatMap(noinline func: (T) -> Iterator): /** * (Kotlin-specific) * Returns a new Dataset by flattening. This means that a Dataset of an iterable such as - * `listOf(listOf(1, 2, 3), listOf(4, 5, 6))` will be flattened to a Dataset of `listOf(1, 2, 3, 4, 5, 6).` + * `listOf(listOf(1, 2, 3), listOf(4, 5, 6))` will be flattened to a Dataset of `listOf(1, 2, 3, 4, 5, 6)`. */ inline fun > Dataset.flatten(): Dataset = flatMap(FlatMapFunction { it.iterator() }, encoder()) @@ -223,18 +223,59 @@ inline fun > Dataset.flatten(): Dataset = inline fun Dataset.groupByKey(noinline func: (T) -> R): KeyValueGroupedDataset = groupByKey(MapFunction(func), encoder()) +/** + * (Kotlin-specific) + * Returns a new Dataset that contains the result of applying [func] to each partition. + */ inline fun Dataset.mapPartitions(noinline func: (Iterator) -> Iterator): Dataset = mapPartitions(func, encoder()) +/** + * (Kotlin-specific) + * Filters rows to eliminate [null] values. + */ @Suppress("UNCHECKED_CAST") fun Dataset.filterNotNull(): Dataset = filter { it != null } as Dataset +/** + * Returns a new [KeyValueGroupedDataset] where the given function [func] has been applied + * to the data. The grouping key is unchanged by this. + * + * ```kotlin + * // Create values grouped by key from a Dataset> + * ds.groupByKey { it._1 }.mapValues { it._2 } + * ``` + */ inline fun KeyValueGroupedDataset.mapValues(noinline func: (VALUE) -> R): KeyValueGroupedDataset = mapValues(MapFunction(func), encoder()) +/** + * (Kotlin-specific) + * Applies the given function to each group of data. For each unique group, the function will + * be passed the group key and an iterator that contains all the elements in the group. The + * function can return an element of arbitrary type which will be returned as a new [Dataset]. + * + * This function does not support partial aggregation, and as a result requires shuffling all + * the data in the [Dataset]. If an application intends to perform an aggregation over each + * key, it is best to use the reduce function or an + * [org.apache.spark.sql.expressions.Aggregator]. + * + * Internally, the implementation will spill to disk if any given group is too large to fit into + * memory. However, users must take care to avoid materializing the whole iterator for a group + * (for example, by calling [toList]) unless they are sure that this is possible given the memory + * constraints of their cluster. + */ inline fun KeyValueGroupedDataset.mapGroups(noinline func: (KEY, Iterator) -> R): Dataset = mapGroups(MapGroupsFunction(func), encoder()) +/** + * (Kotlin-specific) + * Reduces the elements of each group of data using the specified binary function. + * The given function must be commutative and associative or the result may be non-deterministic. + * + * Note that you need to use [reduceGroupsK] always instead of the Java- or Scala-specific + * [KeyValueGroupedDataset.reduceGroups] to make the compiler work. + */ inline fun KeyValueGroupedDataset.reduceGroupsK(noinline func: (VALUE, VALUE) -> VALUE): Dataset> = reduceGroups(ReduceFunction(func)) .map { t -> t._1 to t._2 } From d97c654913862ab9e239eca4f9e3eee7da2f77f4 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 12:45:14 +0100 Subject: [PATCH 016/213] more docs --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 129 +++++++++++++++++- 1 file changed, 127 insertions(+), 2 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 134f17a5..59d12e6b 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -288,23 +288,63 @@ inline fun KeyValueGroupedDataset.reduc inline fun Dataset.reduceK(noinline func: (T, T) -> T): T = reduce(ReduceFunction(func)) +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "keys" or [Tuple2._1] values. + */ @JvmName("takeKeysTuple2") inline fun Dataset>.takeKeys(): Dataset = map { it._1() } +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "keys" or [Pair.first] values. + */ inline fun Dataset>.takeKeys(): Dataset = map { it.first } +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "keys" or [Arity2._1] values. + */ @JvmName("takeKeysArity2") inline fun Dataset>.takeKeys(): Dataset = map { it._1 } +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "values" or [Tuple2._2] values. + */ @JvmName("takeValuesTuple2") inline fun Dataset>.takeValues(): Dataset = map { it._2() } +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "values" or [Pair.second] values. + */ inline fun Dataset>.takeValues(): Dataset = map { it.second } +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "values" or [Arity2._2] values. + */ @JvmName("takeValuesArity2") inline fun Dataset>.takeValues(): Dataset = map { it._2 } - +/** + * (Kotlin-specific) + * Applies the given function to each group of data. For each unique group, the function will + * be passed the group key and an iterator that contains all the elements in the group. The + * function can return an iterator containing elements of an arbitrary type which will be returned + * as a new [Dataset]. + * + * This function does not support partial aggregation, and as a result requires shuffling all + * the data in the [Dataset]. If an application intends to perform an aggregation over each + * key, it is best to use the reduce function or an + * [org.apache.spark.sql.expressions.Aggregator]. + * + * Internally, the implementation will spill to disk if any given group is too large to fit into + * memory. However, users must take care to avoid materializing the whole iterator for a group + * (for example, by calling [toList]) unless they are sure that this is possible given the memory + * constraints of their cluster. + */ inline fun KeyValueGroupedDataset.flatMapGroups( noinline func: (key: K, values: Iterator) -> Iterator, ): Dataset = flatMapGroups( @@ -312,12 +352,57 @@ inline fun KeyValueGroupedDataset.flatMapGroups( encoder() ) +/** + * (Kotlin-specific) + * Returns the group state value if it exists, else [null]. + * This is comparable to [GroupState.getOption], but instead utilises Kotlin's nullability features + * to get the same result. + */ fun GroupState.getOrNull(): S? = if (exists()) get() else null +/** + * (Kotlin-specific) + * Allows the group state object to be used as a delegate. Will be [null] if it does not exist. + * + * For example: + * ```kotlin + * groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> + * var s by state + * ... + * } + * ``` + */ operator fun GroupState.getValue(thisRef: Any?, property: KProperty<*>): S? = getOrNull() -operator fun GroupState.setValue(thisRef: Any?, property: KProperty<*>, value: S?): Unit = update(value) +/** + * (Kotlin-specific) + * Allows the group state object to be used as a delegate. Will be [null] if it does not exist. + * + * For example: + * ```kotlin + * groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> + * var s by state + * ... + * } + * ``` + */ +operator fun GroupState.setValue(thisRef: Any?, property: KProperty<*>, value: S?): Unit = update(value) +/** + * (Kotlin-specific) + * Applies the given function to each group of data, while maintaining a user-defined per-group + * state. The result Dataset will represent the objects returned by the function. + * For a static batch Dataset, the function will be invoked once per group. For a streaming + * Dataset, the function will be invoked for each group repeatedly in every trigger, and + * updates to each group's state will be saved across invocations. + * See [org.apache.spark.sql.streaming.GroupState] for more details. + * + * @param S The type of the user-defined state. Must be encodable to Spark SQL types. + * @param U The type of the output objects. Must be encodable to Spark SQL types. + * @param func Function to be called on every group. + * + * See [Encoder] for more details on what types are encodable to Spark SQL. + */ inline fun KeyValueGroupedDataset.mapGroupsWithState( noinline func: (key: K, values: Iterator, state: GroupState) -> U, ): Dataset = mapGroupsWithState( @@ -326,6 +411,22 @@ inline fun KeyValueGroupedDataset.mapGroupsWi encoder() ) +/** + * (Kotlin-specific) + * Applies the given function to each group of data, while maintaining a user-defined per-group + * state. The result Dataset will represent the objects returned by the function. + * For a static batch Dataset, the function will be invoked once per group. For a streaming + * Dataset, the function will be invoked for each group repeatedly in every trigger, and + * updates to each group's state will be saved across invocations. + * See [org.apache.spark.sql.streaming.GroupState] for more details. + * + * @param S The type of the user-defined state. Must be encodable to Spark SQL types. + * @param U The type of the output objects. Must be encodable to Spark SQL types. + * @param func Function to be called on every group. + * @param timeoutConf Timeout configuration for groups that do not receive data for a while. + * + * See [Encoder] for more details on what types are encodable to Spark SQL. + */ inline fun KeyValueGroupedDataset.mapGroupsWithState( timeoutConf: GroupStateTimeout, noinline func: (key: K, values: Iterator, state: GroupState) -> U, @@ -336,6 +437,23 @@ inline fun KeyValueGroupedDataset.mapGroupsWi timeoutConf ) +/** + * (Kotlin-specific) + * Applies the given function to each group of data, while maintaining a user-defined per-group + * state. The result Dataset will represent the objects returned by the function. + * For a static batch Dataset, the function will be invoked once per group. For a streaming + * Dataset, the function will be invoked for each group repeatedly in every trigger, and + * updates to each group's state will be saved across invocations. + * See [GroupState] for more details. + * + * @param S The type of the user-defined state. Must be encodable to Spark SQL types. + * @param U The type of the output objects. Must be encodable to Spark SQL types. + * @param func Function to be called on every group. + * @param outputMode The output mode of the function. + * @param timeoutConf Timeout configuration for groups that do not receive data for a while. + * + * See [Encoder] for more details on what types are encodable to Spark SQL. + */ inline fun KeyValueGroupedDataset.flatMapGroupsWithState( outputMode: OutputMode, timeoutConf: GroupStateTimeout, @@ -348,6 +466,13 @@ inline fun KeyValueGroupedDataset.flatMapGrou timeoutConf ) +/** + * (Kotlin-specific) + * Applies the given function to each cogrouped data. For each unique group, the function will + * be passed the grouping key and 2 iterators containing all elements in the group from + * [Dataset] [this] and [other]. The function can return an iterator containing elements of an + * arbitrary type which will be returned as a new [Dataset]. + */ inline fun KeyValueGroupedDataset.cogroup( other: KeyValueGroupedDataset, noinline func: (key: K, left: Iterator, right: Iterator) -> Iterator, From 9b3239be2d7e2aeeaa77ac68991dfa0fc2d9e353 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 15:38:43 +0100 Subject: [PATCH 017/213] more docs and deprecated downcast() --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 60 ++++++++++++++++++- 1 file changed, 58 insertions(+), 2 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 59d12e6b..6c656046 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -482,13 +482,69 @@ inline fun KeyValueGroupedDataset.cogroup( encoder() ) +/** DEPRECATED: Use [as] or [to] for this. */ +@Deprecated( + message = "Deprecated, since we already have `as`() and to().", + replaceWith = ReplaceWith("this.to()"), + level = DeprecationLevel.ERROR, +) inline fun Dataset.downcast(): Dataset = `as`(encoder()) + +/** + * (Kotlin-specific) + * Returns a new Dataset where each record has been mapped on to the specified type. The + * method used to map columns depend on the type of [R]: + * - When [R] is a class, fields for the class will be mapped to columns of the same name + * (case sensitivity is determined by [spark.sql.caseSensitive]). + * - When [R] is a tuple, the columns will be mapped by ordinal (i.e. the first column will + * be assigned to `_1`). + * - When [R] is a primitive type (i.e. [String], [Int], etc.), then the first column of the + * `DataFrame` will be used. + * + * If the schema of the Dataset does not match the desired [R] type, you can use [Dataset.select]/[selectTyped] + * along with [Dataset.alias] or [as]/[to] to rearrange or rename as required. + * + * Note that [as]/[to] only changes the view of the data that is passed into typed operations, + * such as [map], and does not eagerly project away any columns that are not present in + * the specified class. + * + * @see to as alias for [as] + */ inline fun Dataset<*>.`as`(): Dataset = `as`(encoder()) + +/** + * (Kotlin-specific) + * Returns a new Dataset where each record has been mapped on to the specified type. The + * method used to map columns depend on the type of [R]: + * - When [R] is a class, fields for the class will be mapped to columns of the same name + * (case sensitivity is determined by [spark.sql.caseSensitive]). + * - When [R] is a tuple, the columns will be mapped by ordinal (i.e. the first column will + * be assigned to `_1`). + * - When [R] is a primitive type (i.e. [String], [Int], etc.), then the first column of the + * `DataFrame` will be used. + * + * If the schema of the Dataset does not match the desired [R] type, you can use [Dataset.select]/[selectTyped] + * along with [Dataset.alias] or [as]/[to] to rearrange or rename as required. + * + * Note that [as]/[to] only changes the view of the data that is passed into typed operations, + * such as [map], and does not eagerly project away any columns that are not present in + * the specified class. + * + * @see as as alias for [to] + */ inline fun Dataset<*>.to(): Dataset = `as`(encoder()) -inline fun Dataset.forEach(noinline func: (T) -> Unit) = foreach(ForeachFunction(func)) +/** + * (Kotlin-specific) + * Applies a function [func] to all rows. + */ +inline fun Dataset.forEach(noinline func: (T) -> Unit): Unit = foreach(ForeachFunction(func)) -inline fun Dataset.forEachPartition(noinline func: (Iterator) -> Unit) = +/** + * (Kotlin-specific) + * Runs [func] on each partition of this Dataset. + */ +inline fun Dataset.forEachPartition(noinline func: (Iterator) -> Unit): Unit = foreachPartition(ForeachPartitionFunction(func)) /** From a81a75b6e495718054d22c66c923b87d0017d272 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 16:46:18 +0100 Subject: [PATCH 018/213] 2 todo's left, the rest of ApiV1.kt is now kdocced --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 69 ++++++++++++++----- 1 file changed, 53 insertions(+), 16 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 6c656046..09c64213 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -37,6 +37,7 @@ import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions import scala.Product import scala.Tuple2 import scala.reflect.ClassTag +import scala.reflect.api.TypeTags.TypeTag import java.beans.PropertyDescriptor import java.math.BigDecimal import java.sql.Date @@ -83,7 +84,7 @@ import kotlin.reflect.full.primaryConstructor import kotlin.to @JvmField -val ENCODERS = mapOf, Encoder<*>>( +val ENCODERS: Map, Encoder<*>> = mapOf( Boolean::class to BOOLEAN(), Byte::class to BYTE(), Short::class to SHORT(), @@ -165,6 +166,9 @@ inline fun List.toDS(spark: SparkSession): Dataset = @OptIn(ExperimentalStdlibApi::class) inline fun encoder(): Encoder = generateEncoder(typeOf(), T::class) +/** + * @see encoder + */ fun generateEncoder(type: KType, cls: KClass<*>): Encoder { @Suppress("UNCHECKED_CAST") return when { @@ -173,7 +177,8 @@ fun generateEncoder(type: KType, cls: KClass<*>): Encoder { } as Encoder } -private fun isSupportedClass(cls: KClass<*>): Boolean = cls.isData +private fun isSupportedClass(cls: KClass<*>): Boolean = + cls.isData || cls.isSubclassOf(Map::class) || cls.isSubclassOf(Iterable::class) || cls.isSubclassOf(Product::class) @@ -550,18 +555,25 @@ inline fun Dataset.forEachPartition(noinline func: (Iterator) /** * It's hard to call `Dataset.debugCodegen` from kotlin, so here is utility for that */ -fun Dataset.debugCodegen() = also { KSparkExtensions.debugCodegen(it) } +fun Dataset.debugCodegen(): Dataset = also { KSparkExtensions.debugCodegen(it) } -val SparkSession.sparkContext +/** + * Returns the Spark context associated with this Spark session. + */ +val SparkSession.sparkContext: SparkContext get() = KSparkExtensions.sparkContext(this) /** * It's hard to call `Dataset.debug` from kotlin, so here is utility for that */ -fun Dataset.debug() = also { KSparkExtensions.debug(it) } +fun Dataset.debug(): Dataset = also { KSparkExtensions.debug(it) } @Suppress("FunctionName") -@Deprecated("Changed to \"`===`\" to better reflect Scala API.", ReplaceWith("this `===` c")) +@Deprecated( + message = "Changed to \"`===`\" to better reflect Scala API.", + replaceWith = ReplaceWith("this `===` c"), + level = DeprecationLevel.ERROR, +) infix fun Column.`==`(c: Column) = `$eq$eq$eq`(c) /** @@ -889,7 +901,10 @@ operator fun Column.rem(other: Any): Column = `$percent`(other) */ operator fun Column.get(key: Any): Column = getItem(key) -fun lit(a: Any) = functions.lit(a) +// TODO deprecate? +fun lit(a: Any): Column = functions.lit(a) + +fun typedLit(literal: Any): Column = functions.lit(literal) /** * Provides a type hint about the expected return value of this column. This information can @@ -996,8 +1011,13 @@ inline fun Dataset.withCached( return cached.executeOnCached().also { cached.unpersist(blockingUnpersist) } } -inline fun Dataset.toList() = KSparkExtensions.collectAsList(to()) -inline fun Dataset<*>.toArray(): Array = to().collect() as Array +/** + * TODO + */ +inline fun Dataset<*>.toList(): List = to().collectAsList() as List +inline fun Dataset<*>.toArray(): Array = to().collect() as Array +//inline fun Dataset.toList() = KSparkExtensions.collectAsList(to()) +//inline fun Dataset<*>.toArray(): Array = to().collect() as Array /** * Selects column based on the column name and returns it as a [Column]. @@ -1014,7 +1034,6 @@ operator fun Dataset.invoke(colName: String): Column = col(colName) * ``` * @see invoke */ - @Suppress("UNCHECKED_CAST") inline fun Dataset.col(column: KProperty1): TypedColumn = col(column.name).`as`() as TypedColumn @@ -1129,6 +1148,14 @@ inline fun = mapOf()): DataType { val primitiveSchema = knownDataTypes[type.classifier] @@ -1228,15 +1255,24 @@ fun schema(type: KType, map: Map = mapOf()): DataType { } } +/** + * The entry point to programming Spark with the Dataset and DataFrame API. + * + * @see org.apache.spark.sql.SparkSession + */ typealias SparkSession = org.apache.spark.sql.SparkSession -fun SparkContext.setLogLevel(level: SparkLogLevel) = setLogLevel(level.name) +/** + * Control our logLevel. This overrides any user-defined log settings. + * @param level The desired log level as [SparkLogLevel]. + */ +fun SparkContext.setLogLevel(level: SparkLogLevel): Unit = setLogLevel(level.name) enum class SparkLogLevel { ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN } -private val knownDataTypes = mapOf( +private val knownDataTypes: Map, DataType> = mapOf( Byte::class to DataTypes.ByteType, Short::class to DataTypes.ShortType, Int::class to DataTypes.IntegerType, @@ -1248,7 +1284,7 @@ private val knownDataTypes = mapOf( LocalDate::class to `DateType$`.`MODULE$`, Date::class to `DateType$`.`MODULE$`, Timestamp::class to `TimestampType$`.`MODULE$`, - Instant::class to `TimestampType$`.`MODULE$` + Instant::class to `TimestampType$`.`MODULE$`, ) private fun transitiveMerge(a: Map, b: Map): Map { @@ -1258,11 +1294,12 @@ private fun transitiveMerge(a: Map, b: Map): Map(val f: (T) -> R) : (T) -> R { + private val values = ConcurrentHashMap() - override fun invoke(x: T) = - values.getOrPut(x, { f(x) }) + + override fun invoke(x: T): R = values.getOrPut(x) { f(x) } } private fun ((T) -> R).memoize(): (T) -> R = Memoize1(this) -private val memoizedSchema = { x: KType -> schema(x) }.memoize() +private val memoizedSchema: (KType) -> DataType = { x: KType -> schema(x) }.memoize() From f813423d5c3f23fc41e372cdf858f9b7ade8e7f5 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 15 Feb 2022 20:11:58 +0100 Subject: [PATCH 019/213] 1 todo left, lit() --- .../kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 09c64213..85177d02 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -904,8 +904,6 @@ operator fun Column.get(key: Any): Column = getItem(key) // TODO deprecate? fun lit(a: Any): Column = functions.lit(a) -fun typedLit(literal: Any): Column = functions.lit(literal) - /** * Provides a type hint about the expected return value of this column. This information can * be used by operations such as `select` on a [Dataset] to automatically convert the @@ -1012,12 +1010,14 @@ inline fun Dataset.withCached( } /** - * TODO + * Collects the dataset as list where each item has been mapped to type [T]. */ inline fun Dataset<*>.toList(): List = to().collectAsList() as List + +/** + * Collects the dataset as Array where each item has been mapped to type [T]. + */ inline fun Dataset<*>.toArray(): Array = to().collect() as Array -//inline fun Dataset.toList() = KSparkExtensions.collectAsList(to()) -//inline fun Dataset<*>.toArray(): Array = to().collect() as Array /** * Selects column based on the column name and returns it as a [Column]. From 75db05ddf26f3143bd1c2481a1c74e270c229bb5 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 16 Feb 2022 14:45:23 +0100 Subject: [PATCH 020/213] last doc done --- .../kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 85177d02..e679f561 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -901,7 +901,16 @@ operator fun Column.rem(other: Any): Column = `$percent`(other) */ operator fun Column.get(key: Any): Column = getItem(key) -// TODO deprecate? +/** + * Creates a [Column] of literal value. + * + * The passed in object is returned directly if it is already a [Column]. + * If the object is a Scala Symbol, it is converted into a [Column] also. + * Otherwise, a new [Column] is created to represent the literal value. + * + * This is just a shortcut to the function from [org.apache.spark.sql.functions]. + * For all the functions, simply add `import org.apache.spark.sql.functions.*` to your file. + */ fun lit(a: Any): Column = functions.lit(a) /** From 0cc61c02b941935606977bd80d54141598e9c84c Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 17 Feb 2022 12:09:07 +0100 Subject: [PATCH 021/213] added docs for iterators --- .../main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/kotlin-spark-api/common/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt b/kotlin-spark-api/common/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt index b93ce377..9f7de351 100644 --- a/kotlin-spark-api/common/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt +++ b/kotlin-spark-api/common/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt @@ -57,9 +57,13 @@ class FilteringIterator( done() } } + +/** Maps the values of the iterator lazily using [func]. */ fun Iterator.map(func: (T) -> R): Iterator = MappingIterator(this, func) +/** Filters the values of the iterator lazily using [func]. */ fun Iterator.filter(func: (T) -> Boolean): Iterator = FilteringIterator(this, func) +/** Partitions the values of the iterator lazily in groups of [size]. */ fun Iterator.partition(size: Int): Iterator> = PartitioningIterator(this, size) From a9db561eb2d06378137b7f38b2c641ade593e28c Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 17 Feb 2022 12:47:38 +0100 Subject: [PATCH 022/213] attempting to create github action to generate dokka docs --- .github/workflows/generate_docs.yml | 25 +++++++++++++++++++++++++ pom.xml | 2 +- 2 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/generate_docs.yml diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml new file mode 100644 index 00000000..0017ad46 --- /dev/null +++ b/.github/workflows/generate_docs.yml @@ -0,0 +1,25 @@ +name: Generate and publish docs + +on: + push: + branches: + - "spark-3.2" + pull_request: + branches: + - "spark-3.2" + +jobs: + generate-and-publish-docs: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up JDK 11 + uses: actions/setup-java@v1 + with: + distributions: adopt + java-version: 11 + check-latest: true + - name: Generate docs + run: ./mvnw clean package site -Dmaven.test.skip=true + # TODO create branch and copy the docs over \ No newline at end of file diff --git a/pom.xml b/pom.xml index 4f0974c5..64cb115d 100644 --- a/pom.xml +++ b/pom.xml @@ -11,7 +11,7 @@ 1.5.30 - 1.4.32 + 1.6.10 0.16.0 4.6.0 1.0.1 From 015411f3042324dbc50c718e0164d5cb4acb535b Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 17 Feb 2022 13:32:11 +0100 Subject: [PATCH 023/213] merging common into the api so the docs are merged --- .github/workflows/generate_docs.yml | 2 +- kotlin-spark-api/3.2/pom_2.12.xml | 5 +- .../jetbrains/kotlinx/spark/api/Iterators.kt | 0 .../jetbrains/kotlinx/spark/api/VarArities.kt | 0 kotlin-spark-api/common/pom.xml | 56 ------------------- pom.xml | 6 -- 6 files changed, 2 insertions(+), 67 deletions(-) rename kotlin-spark-api/{common => 3.2}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt (100%) rename kotlin-spark-api/{common => 3.2}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt (100%) delete mode 100644 kotlin-spark-api/common/pom.xml diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 0017ad46..9ca5d938 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -22,4 +22,4 @@ jobs: check-latest: true - name: Generate docs run: ./mvnw clean package site -Dmaven.test.skip=true - # TODO create branch and copy the docs over \ No newline at end of file + # TODO create branch and copy the docs over from kotlin-spark-api/3.2/target/dokka \ No newline at end of file diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 7195f912..756d9c2b 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -27,10 +27,7 @@ org.jetbrains.kotlinx.spark core-3.2_${scala.compat.version} - - org.jetbrains.kotlinx.spark - kotlin-spark-api-common - + diff --git a/kotlin-spark-api/common/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt similarity index 100% rename from kotlin-spark-api/common/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt rename to kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt diff --git a/kotlin-spark-api/common/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt similarity index 100% rename from kotlin-spark-api/common/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt rename to kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt diff --git a/kotlin-spark-api/common/pom.xml b/kotlin-spark-api/common/pom.xml deleted file mode 100644 index 19959fdb..00000000 --- a/kotlin-spark-api/common/pom.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - 4.0.0 - - Kotlin Spark API: Common - kotlin-spark-api-common - Kotlin API for Apache Spark: common parts - - org.jetbrains.kotlinx.spark - kotlin-spark-api-parent - 1.0.4-SNAPSHOT - ../.. - - - - - org.jetbrains.kotlin - kotlin-stdlib-jdk8 - - - - - src/main/kotlin - src/test/kotlin - - - org.jetbrains.kotlin - kotlin-maven-plugin - - - org.jetbrains.dokka - dokka-maven-plugin - ${dokka.version} - - 8 - - - - dokka - - dokka - - pre-site - - - javadocjar - - javadocJar - - pre-integration-test - - - - - - diff --git a/pom.xml b/pom.xml index 64cb115d..47043737 100644 --- a/pom.xml +++ b/pom.xml @@ -35,7 +35,6 @@ - kotlin-spark-api/common dummy @@ -51,11 +50,6 @@ kotlin-reflect ${kotlin.version} - - org.jetbrains.kotlinx.spark - kotlin-spark-api-common - ${project.version} - From 301f46d36345af4a57c77c5fb82f62326cf482d3 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 17 Feb 2022 13:40:28 +0100 Subject: [PATCH 024/213] added copy docs action --- .github/workflows/generate_docs.yml | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 9ca5d938..efa17b88 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -22,4 +22,17 @@ jobs: check-latest: true - name: Generate docs run: ./mvnw clean package site -Dmaven.test.skip=true - # TODO create branch and copy the docs over from kotlin-spark-api/3.2/target/dokka \ No newline at end of file + - name: Copy docs to "docs" branch + env: + SRC_FOLDER: "kotlin-spark-api/3.2/target/dokka" + TARGET_BRANCH: "docs" + run: | + files=$(find $SRC_FOLDER -type f) # get the file list + git config --global user.name 'GitHub Action' + git config --global user.email 'action@github.com' + git fetch # fetch branches + git checkout $TARGET_BRANCH # checkout to your branch + git checkout ${GITHUB_REF##*/} -- $files # copy files from the source branch + git add -A + git diff-index --quiet HEAD || git commit -am "updated docs" # commit to the repository (ignore if no modification) + git push origin $TARGET_BRANCH # push to remote branch From dca5df879e8b568536860a0c7cc852aa4b794a68 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 17 Feb 2022 13:52:41 +0100 Subject: [PATCH 025/213] added copy docs action --- .github/workflows/generate_docs.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index efa17b88..4a42bc6a 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -3,10 +3,10 @@ name: Generate and publish docs on: push: branches: - - "spark-3.2" + - "origin/spark-3.2" pull_request: branches: - - "spark-3.2" + - "origin/spark-3.2" jobs: generate-and-publish-docs: @@ -25,7 +25,7 @@ jobs: - name: Copy docs to "docs" branch env: SRC_FOLDER: "kotlin-spark-api/3.2/target/dokka" - TARGET_BRANCH: "docs" + TARGET_BRANCH: "origin/docs" run: | files=$(find $SRC_FOLDER -type f) # get the file list git config --global user.name 'GitHub Action' From 905593225aaf91932e2601aae4f220617e01ebed Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 17 Feb 2022 14:25:23 +0100 Subject: [PATCH 026/213] alright, let's first test it for this branch. If that works, we can merge it with spark-3.2 --- .github/workflows/generate_docs.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 4a42bc6a..0f221f64 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -3,10 +3,12 @@ name: Generate and publish docs on: push: branches: - - "origin/spark-3.2" +# - "origin/spark-3.2" + - "origin/more-documentation" pull_request: branches: - - "origin/spark-3.2" +# - "origin/spark-3.2" + - "origin/more-documentation" jobs: generate-and-publish-docs: From adae99f1af77519ce0eca7ef212908986c6170e5 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 17 Feb 2022 14:31:58 +0100 Subject: [PATCH 027/213] alright, let's first test it for this branch. If that works, we can merge it with spark-3.2 --- .github/workflows/generate_docs.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 0f221f64..79ce086e 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -3,12 +3,13 @@ name: Generate and publish docs on: push: branches: -# - "origin/spark-3.2" - - "origin/more-documentation" + - "more-documentation" +# - "origin/spark-3.2"- pull_request: branches: + - "more-documentation" # - "origin/spark-3.2" - - "origin/more-documentation" + jobs: generate-and-publish-docs: From 104b4c302ab674b77510b934ca95128120a2b78e Mon Sep 17 00:00:00 2001 From: The Gitter Badger Date: Thu, 17 Feb 2022 18:53:39 +0000 Subject: [PATCH 028/213] Add Gitter badge --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 79c669e4..2ea4f90d 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,7 @@ The list of Kotlin for Apache Spark releases is available [here](https://github. The Kotlin for Spark artifacts adhere to the following convention: `[Apache Spark version]_[Scala core version]:[Kotlin for Apache Spark API version]` -[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api-3.0") +[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api-3.0") [![Join the chat at https://gitter.im/JetBrains/kotlin-spark-api](https://badges.gitter.im/JetBrains/kotlin-spark-api.svg)](https://gitter.im/JetBrains/kotlin-spark-api?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) ## How to configure Kotlin for Apache Spark in your project From f7ea531a69a86949989eed7ebf5b270e2a8e9bd1 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 18 Feb 2022 12:17:12 +0100 Subject: [PATCH 029/213] Update README.md moved to right position --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 2ea4f90d..380ee310 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Kotlin for Apache® Spark™ [![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:org.jetbrains.kotlinx.spark%20AND%20v:1.0.2) [![official JetBrains project](http://jb.gg/badges/official.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub) +# Kotlin for Apache® Spark™ [![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:org.jetbrains.kotlinx.spark%20AND%20v:1.0.2) [![official JetBrains project](http://jb.gg/badges/official.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub) [![Join the chat at https://gitter.im/JetBrains/kotlin-spark-api](https://badges.gitter.im/JetBrains/kotlin-spark-api.svg)](https://gitter.im/JetBrains/kotlin-spark-api?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) Your next API to work with [Apache Spark](https://spark.apache.org/). @@ -42,7 +42,7 @@ The list of Kotlin for Apache Spark releases is available [here](https://github. The Kotlin for Spark artifacts adhere to the following convention: `[Apache Spark version]_[Scala core version]:[Kotlin for Apache Spark API version]` -[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api-3.0") [![Join the chat at https://gitter.im/JetBrains/kotlin-spark-api](https://badges.gitter.im/JetBrains/kotlin-spark-api.svg)](https://gitter.im/JetBrains/kotlin-spark-api?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api-3.0") ## How to configure Kotlin for Apache Spark in your project From d4b8b5cd468ef2edf96d259373edef02de21c993 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 18 Feb 2022 12:21:33 +0100 Subject: [PATCH 030/213] alright, let's first test it for this branch. If that works, we can merge it with spark-3.2 --- .github/workflows/generate_docs.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 79ce086e..e5676b57 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -4,11 +4,11 @@ on: push: branches: - "more-documentation" -# - "origin/spark-3.2"- +# - "spark-3.2"- pull_request: branches: - "more-documentation" -# - "origin/spark-3.2" +# - "spark-3.2" jobs: @@ -31,6 +31,7 @@ jobs: TARGET_BRANCH: "origin/docs" run: | files=$(find $SRC_FOLDER -type f) # get the file list + git add ${files}/* git config --global user.name 'GitHub Action' git config --global user.email 'action@github.com' git fetch # fetch branches From 96b43da661b7ae28891885531d9c42a7c690fd37 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 18 Feb 2022 12:32:04 +0100 Subject: [PATCH 031/213] alright, let's first test it for this branch. If that works, we can merge it with spark-3.2 --- .github/workflows/generate_docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index e5676b57..37e03885 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -31,7 +31,7 @@ jobs: TARGET_BRANCH: "origin/docs" run: | files=$(find $SRC_FOLDER -type f) # get the file list - git add ${files}/* + git add ${SRC_FOLDER}/* git config --global user.name 'GitHub Action' git config --global user.email 'action@github.com' git fetch # fetch branches From 883bdc5bbe309e3c33655c6436c31656847a6796 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 18 Feb 2022 12:38:40 +0100 Subject: [PATCH 032/213] alright, let's first test it for this branch. If that works, we can merge it with spark-3.2 --- .github/workflows/generate_docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 37e03885..9d91fe8d 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -31,7 +31,7 @@ jobs: TARGET_BRANCH: "origin/docs" run: | files=$(find $SRC_FOLDER -type f) # get the file list - git add ${SRC_FOLDER}/* + git add -f ${SRC_FOLDER}/* git config --global user.name 'GitHub Action' git config --global user.email 'action@github.com' git fetch # fetch branches From 82d3940192a26dfbc1419111f00de8daf5534bef Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 18 Feb 2022 12:50:22 +0100 Subject: [PATCH 033/213] maybe copycat works better --- .github/workflows/generate_docs.yml | 27 +++++++++++---------------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 9d91fe8d..9ba0c76c 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -3,11 +3,11 @@ name: Generate and publish docs on: push: branches: - - "more-documentation" + - "more-documentation" # TODO make "spark-3.2" # - "spark-3.2"- pull_request: branches: - - "more-documentation" + - "more-documentation" # TODO make "spark-3.2" # - "spark-3.2" @@ -26,17 +26,12 @@ jobs: - name: Generate docs run: ./mvnw clean package site -Dmaven.test.skip=true - name: Copy docs to "docs" branch - env: - SRC_FOLDER: "kotlin-spark-api/3.2/target/dokka" - TARGET_BRANCH: "origin/docs" - run: | - files=$(find $SRC_FOLDER -type f) # get the file list - git add -f ${SRC_FOLDER}/* - git config --global user.name 'GitHub Action' - git config --global user.email 'action@github.com' - git fetch # fetch branches - git checkout $TARGET_BRANCH # checkout to your branch - git checkout ${GITHUB_REF##*/} -- $files # copy files from the source branch - git add -A - git diff-index --quiet HEAD || git commit -am "updated docs" # commit to the repository (ignore if no modification) - git push origin $TARGET_BRANCH # push to remote branch + uses: andstor/copycat-action@v3 + with: + personal_token: ${{ secrets.PERSONAL_TOKEN }} + src_path: "kotlin-spark-api/3.2/target/dokka" + src_branch: "more-documentation" # TODO make "spark-3.2" + dst_owner: "JetBrains" + dst_repo_name: "kotlin-spark-api" + dst_branch: "docs" + From 671fc3b1efcf983b87c9660e9a6e592a87e5e9a5 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 18 Feb 2022 13:08:06 +0100 Subject: [PATCH 034/213] using actions-gh-pages? --- .github/workflows/generate_docs.yml | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 9ba0c76c..07d8da73 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -26,12 +26,10 @@ jobs: - name: Generate docs run: ./mvnw clean package site -Dmaven.test.skip=true - name: Copy docs to "docs" branch - uses: andstor/copycat-action@v3 + uses: peaceiris/actions-gh-pages@v3 with: - personal_token: ${{ secrets.PERSONAL_TOKEN }} - src_path: "kotlin-spark-api/3.2/target/dokka" - src_branch: "more-documentation" # TODO make "spark-3.2" - dst_owner: "JetBrains" - dst_repo_name: "kotlin-spark-api" - dst_branch: "docs" + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_branch: docs + publish_dir: ./kotlin-spark-api/3.2/target/dokka + From b5ece11d0ea873aeeca37974f7d36b347fad8f8d Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 18 Feb 2022 13:15:53 +0100 Subject: [PATCH 035/213] it's working now! --- .github/workflows/generate_docs.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 07d8da73..8977f513 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -3,12 +3,10 @@ name: Generate and publish docs on: push: branches: - - "more-documentation" # TODO make "spark-3.2" -# - "spark-3.2"- + - "spark-3.2" pull_request: branches: - - "more-documentation" # TODO make "spark-3.2" -# - "spark-3.2" + - "spark-3.2" jobs: From 38003112ea17df5e26d0ca643215df6a5d0f6f00 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 18 Feb 2022 13:39:30 +0100 Subject: [PATCH 036/213] small docs fixes --- .github/workflows/generate_docs.yml | 4 ---- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 14 +++++++------- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 8977f513..0aee6d85 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -4,10 +4,6 @@ on: push: branches: - "spark-3.2" - pull_request: - branches: - - "spark-3.2" - jobs: generate-and-publish-docs: diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index e679f561..1061a21a 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -601,7 +601,7 @@ operator fun Column.unaryMinus(): Column = `unary_$minus`() * * // Kotlin: * import org.jetbrains.kotlinx.spark.api.* - * df.select( !df("amount") ) + * df.filter( !df("amount") ) * * // Java: * import static org.apache.spark.sql.functions.*; @@ -665,7 +665,7 @@ infix fun Column.`===`(other: Any): Column = `$eq$eq$eq`(other) * * // Java: * import static org.apache.spark.sql.functions.*; - * df.filter( col("colA").notEqual(col("colB")) ); + * df.select( col("colA").notEqual(col("colB")) ); * ``` */ infix fun Column.neq(other: Any): Column = `$eq$bang$eq`(other) @@ -687,7 +687,7 @@ infix fun Column.neq(other: Any): Column = `$eq$bang$eq`(other) * * // Java: * import static org.apache.spark.sql.functions.*; - * df.filter( col("colA").notEqual(col("colB")) ); + * df.select( col("colA").notEqual(col("colB")) ); * ``` */ infix fun Column.`=!=`(other: Any): Column = `$eq$bang$eq`(other) @@ -802,9 +802,9 @@ infix fun Column.or(other: Any): Column = `$bar$bar`(other) * * // Kotlin: * import org.jetbrains.kotlinx.spark.api.* - * people.filter( people("inSchool") and people("isEmployed") ) + * people.select( people("inSchool") and people("isEmployed") ) * // or - * people.filter( people("inSchool") `&&` people("isEmployed") ) + * people.select( people("inSchool") `&&` people("isEmployed") ) * * // Java: * import static org.apache.spark.sql.functions.*; @@ -821,9 +821,9 @@ infix fun Column.and(other: Any): Column = `$amp$amp`(other) * * // Kotlin: * import org.jetbrains.kotlinx.spark.api.* - * people.filter( people("inSchool") and people("isEmployed") ) + * people.select( people("inSchool") and people("isEmployed") ) * // or - * people.filter( people("inSchool") `&&` people("isEmployed") ) + * people.select( people("inSchool") `&&` people("isEmployed") ) * * // Java: * import static org.apache.spark.sql.functions.*; From c57eaaf5c2109b1d46eb7c93dc6fd49a5965a373 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 18 Feb 2022 21:00:04 +0100 Subject: [PATCH 037/213] added streaming and exploring how it works with kotlin spark api --- examples/pom-3.2_2.12.xml | 5 ++ .../kotlinx/spark/examples/Streaming.kt | 61 +++++++++++++++++++ kotlin-spark-api/3.2/pom_2.12.xml | 6 ++ 3 files changed, 72 insertions(+) create mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt diff --git a/examples/pom-3.2_2.12.xml b/examples/pom-3.2_2.12.xml index b5267352..5f214b69 100644 --- a/examples/pom-3.2_2.12.xml +++ b/examples/pom-3.2_2.12.xml @@ -24,6 +24,11 @@ spark-sql_${scala.compat.version} ${spark3.version} + + org.apache.spark + spark-streaming_${scala.compat.version} + ${spark3.version} + diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt new file mode 100644 index 00000000..7c562bd5 --- /dev/null +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt @@ -0,0 +1,61 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.examples + +import org.apache.spark.SparkConf +import org.apache.spark.api.java.JavaSparkContext +import org.apache.spark.streaming.Durations +import org.apache.spark.streaming.api.java.JavaStreamingContext +import org.jetbrains.kotlinx.spark.api.withSpark +import scala.Tuple2 +import java.io.Serializable + +data class Row @JvmOverloads constructor( + var word: String = "", +) : Serializable + +fun main() = withSpark { + + val context = JavaStreamingContext( + SparkConf() + .setMaster("local[*]") + .setAppName("Test"), + Durations.seconds(1), + ) + + val lines = context.socketTextStream("localhost", 9999) + + val words = lines.flatMap { it.split(" ").iterator() } + + words.foreachRDD { rdd, time -> + + // todo convert rdd to dataset using kotlin data class? + + val rowRdd = rdd.map { Row(it) } + + val dataframe = spark.createDataFrame(rowRdd, Row::class.java) + + + } + + + context.start() + context.awaitTermination() +} \ No newline at end of file diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 756d9c2b..826547d2 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -36,6 +36,12 @@ ${spark3.version} provided + + org.apache.spark + spark-streaming_${scala.compat.version} + ${spark3.version} + provided + From bb39fc79f4b528850337428a738761e84168c752 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 21 Feb 2022 15:26:31 +0100 Subject: [PATCH 038/213] Adds helpful rdd to dataset conversion, as well as a new withSpark function taking a SparkConf --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 14 +++++++++++ .../kotlinx/spark/api/SparkHelper.kt | 23 ++++++++++++++++++- 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 1061a21a..d41958ec 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -23,9 +23,11 @@ package org.jetbrains.kotlinx.spark.api import org.apache.hadoop.shaded.org.apache.commons.math3.exception.util.ArgUtils import org.apache.spark.SparkContext +import org.apache.spark.api.java.JavaRDD import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.api.java.function.* import org.apache.spark.broadcast.Broadcast +import org.apache.spark.rdd.RDD import org.apache.spark.sql.* import org.apache.spark.sql.Encoders.* import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder @@ -154,6 +156,18 @@ inline fun SparkSession.dsOf(vararg t: T): Dataset = inline fun List.toDS(spark: SparkSession): Dataset = spark.createDataset(this, encoder()) +/** + * Utility method to create dataset from RDD + */ +inline fun RDD.toDS(spark: SparkSession): Dataset = + spark.createDataset(this, encoder()) + +/** + * Utility method to create dataset from JavaRDD + */ +inline fun JavaRDD.toDS(spark: SparkSession): Dataset = + spark.createDataset(this.rdd(), encoder()) + /** * Main method of API, which gives you seamless integration with Spark: * It creates encoder for any given supported type T diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt index 6188daae..213636a8 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt @@ -20,6 +20,8 @@ package org.jetbrains.kotlinx.spark.api import org.apache.spark.SparkConf +import org.apache.spark.api.java.JavaRDD +import org.apache.spark.rdd.RDD import org.apache.spark.sql.SparkSession.Builder import org.apache.spark.sql.UDFRegistration import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR @@ -83,13 +85,32 @@ inline fun withSpark(builder: Builder, logLevel: SparkLogLevel = ERROR, func: KS .also { it.stop() } } +/** + * Wrapper for spark creation which copies params from [sparkConf]. + * + * @param sparkConf Sets a list of config options based on this. + * @param logLevel Control our logLevel. This overrides any user-defined log settings. + * @param func function which will be executed in context of [KSparkSession] (it means that `this` inside block will point to [KSparkSession]) + */ +@JvmOverloads +inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func: KSparkSession.() -> Unit) { + withSpark( + builder = SparkSession.builder().config(sparkConf), + logLevel = logLevel, + func = func, + ) +} + /** * This wrapper over [SparkSession] which provides several additional methods to create [org.apache.spark.sql.Dataset] */ +@JvmInline @Suppress("EXPERIMENTAL_FEATURE_WARNING", "unused") -inline class KSparkSession(val spark: SparkSession) { +value class KSparkSession(val spark: SparkSession) { inline fun List.toDS() = toDS(spark) inline fun Array.toDS() = spark.dsOf(*this) inline fun dsOf(vararg arg: T) = spark.dsOf(*arg) + inline fun RDD.toDS() = toDS(spark) + inline fun JavaRDD.toDS() = toDS(spark) val udf: UDFRegistration get() = spark.udf() } From 4bd3fe190146ce0fb9a089a3bd26e2018b2d9996 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 21 Feb 2022 16:16:30 +0100 Subject: [PATCH 039/213] makes javaRDD toDS function more generic. Adds sc JavaSparkContext to KSparkSession --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 5 +-- .../kotlinx/spark/api/SparkHelper.kt | 15 ++++--- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 42 +++++++++++++++++++ 3 files changed, 54 insertions(+), 8 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index d41958ec..7e9ef135 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -23,8 +23,7 @@ package org.jetbrains.kotlinx.spark.api import org.apache.hadoop.shaded.org.apache.commons.math3.exception.util.ArgUtils import org.apache.spark.SparkContext -import org.apache.spark.api.java.JavaRDD -import org.apache.spark.api.java.JavaSparkContext +import org.apache.spark.api.java.* import org.apache.spark.api.java.function.* import org.apache.spark.broadcast.Broadcast import org.apache.spark.rdd.RDD @@ -165,7 +164,7 @@ inline fun RDD.toDS(spark: SparkSession): Dataset = /** * Utility method to create dataset from JavaRDD */ -inline fun JavaRDD.toDS(spark: SparkSession): Dataset = +inline fun JavaRDDLike.toDS(spark: SparkSession): Dataset = spark.createDataset(this.rdd(), encoder()) /** diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt index 213636a8..d9b4823a 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt @@ -21,7 +21,10 @@ package org.jetbrains.kotlinx.spark.api import org.apache.spark.SparkConf import org.apache.spark.api.java.JavaRDD +import org.apache.spark.api.java.JavaRDDLike +import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.rdd.RDD +import org.apache.spark.sql.Dataset import org.apache.spark.sql.SparkSession.Builder import org.apache.spark.sql.UDFRegistration import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR @@ -80,9 +83,10 @@ inline fun withSpark(builder: Builder, logLevel: SparkLogLevel = ERROR, func: KS KSparkSession(this).apply { sparkContext.setLogLevel(logLevel) func() + sc.stop() + spark.stop() } } - .also { it.stop() } } /** @@ -104,13 +108,14 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func /** * This wrapper over [SparkSession] which provides several additional methods to create [org.apache.spark.sql.Dataset] */ -@JvmInline -@Suppress("EXPERIMENTAL_FEATURE_WARNING", "unused") -value class KSparkSession(val spark: SparkSession) { +class KSparkSession(val spark: SparkSession) { + + val sc: JavaSparkContext = JavaSparkContext(spark.sparkContext) + inline fun List.toDS() = toDS(spark) inline fun Array.toDS() = spark.dsOf(*this) inline fun dsOf(vararg arg: T) = spark.dsOf(*arg) inline fun RDD.toDS() = toDS(spark) - inline fun JavaRDD.toDS() = toDS(spark) + inline fun JavaRDDLike.toDS() = toDS(spark) val udf: UDFRegistration get() = spark.udf() } diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index ed784b13..936b5b2c 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -21,6 +21,11 @@ import ch.tutteli.atrium.api.fluent.en_GB.* import ch.tutteli.atrium.api.verbs.expect import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.shouldBe +import org.apache.spark.api.java.JavaDoubleRDD +import org.apache.spark.api.java.JavaPairRDD +import org.apache.spark.api.java.JavaRDD +import org.apache.spark.api.java.JavaSparkContext +import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset import org.apache.spark.sql.functions.* import org.apache.spark.sql.streaming.GroupState @@ -593,6 +598,43 @@ class ApiTest : ShouldSpec({ it.nullable() shouldBe true } } + should("Easily convert a (Java)RDD to a Dataset") { + // scala RDD + val rdd0: RDD = sc.parallelize( + listOf(1, 2, 3, 4, 5, 6) + ).rdd() + val dataset0: Dataset = rdd0.toDS() + dataset0.show() + + dataset0.toList() shouldBe listOf(1, 2, 3, 4, 5, 6) + + // normal JavaRDD + val rdd1: JavaRDD = sc.parallelize( + listOf(1, 2, 3, 4, 5, 6) + ) + val dataset1: Dataset = rdd1.toDS() + dataset1.show() + + dataset1.toList() shouldBe listOf(1, 2, 3, 4, 5, 6) + + // JavaDoubleRDD + val rdd2: JavaDoubleRDD = sc.parallelizeDoubles( + listOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0) + ) + val dataset2: Dataset = rdd2.toDS() + dataset2.show() + + dataset2.toList() shouldBe listOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0) + + // JavaPairRDD + val rdd3: JavaPairRDD = sc.parallelizePairs( + listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) + ) + val dataset3: Dataset> = rdd3.toDS() + dataset3.show() + + dataset3.toList>() shouldBe listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) + } } } }) From b24172f9c36f576c893f0e5e78df5720c77ceced Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 21 Feb 2022 17:11:49 +0100 Subject: [PATCH 040/213] added withSparkStreaming function and example. Let's see if something like this could form the basis of easy streaming support :) --- .../kotlinx/spark/examples/Streaming.kt | 39 ++++-------- .../kotlinx/spark/api/SparkHelper.kt | 59 ++++++++++++++++++- 2 files changed, 70 insertions(+), 28 deletions(-) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt index 7c562bd5..bc9284d5 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt @@ -20,42 +20,29 @@ package org.jetbrains.kotlinx.spark.examples import org.apache.spark.SparkConf -import org.apache.spark.api.java.JavaSparkContext +import org.apache.spark.sql.Dataset +import org.apache.spark.streaming.Duration import org.apache.spark.streaming.Durations import org.apache.spark.streaming.api.java.JavaStreamingContext -import org.jetbrains.kotlinx.spark.api.withSpark -import scala.Tuple2 -import java.io.Serializable +import org.jetbrains.kotlinx.spark.api.* -data class Row @JvmOverloads constructor( - var word: String = "", -) : Serializable +data class TestRow( + val word: String, +) -fun main() = withSpark { - - val context = JavaStreamingContext( - SparkConf() - .setMaster("local[*]") - .setAppName("Test"), - Durations.seconds(1), - ) - - val lines = context.socketTextStream("localhost", 9999) +fun main() = withSparkStreaming(Durations.seconds(1)) { + val lines = ssc.socketTextStream("localhost", 9999) val words = lines.flatMap { it.split(" ").iterator() } words.foreachRDD { rdd, time -> + val dataframe: Dataset = rdd.map { TestRow(it) }.toDS() - // todo convert rdd to dataset using kotlin data class? - - val rowRdd = rdd.map { Row(it) } - - val dataframe = spark.createDataFrame(rowRdd, Row::class.java) - + dataframe + .groupByKey { it.word } + .count() + .show() } - - context.start() - context.awaitTermination() } \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt index d9b4823a..db093cfc 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt @@ -27,6 +27,8 @@ import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset import org.apache.spark.sql.SparkSession.Builder import org.apache.spark.sql.UDFRegistration +import org.apache.spark.streaming.Duration +import org.apache.spark.streaming.api.java.JavaStreamingContext import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR /** @@ -105,10 +107,57 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func ) } + +/** + * Wrapper for spark streaming creation. `spark: SparkSession` and `ssc: JavaStreamingContext` are provided, started, + * awaited, and stopped automatically. + * + * @param batchDuration The time interval at which streaming data will be divided into batches + * @param props spark options, value types are runtime-checked for type-correctness + * @param master Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]" to + * run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster. By default, it + * tries to get the system value "spark.master", otherwise it uses "local[*]" + * @param appName Sets a name for the application, which will be shown in the Spark web UI. + * If no application name is set, a randomly generated name will be used. + * @param logLevel Control our logLevel. This overrides any user-defined log settings. + * @param func function which will be executed in context of [KSparkStreamingSession] (it means that `this` inside block will point to [KSparkStreamingSession]) + * todo: provide alternatives with path instead of batchDuration etc + */ +@JvmOverloads +inline fun withSparkStreaming( + batchDuration: Duration, + props: Map = emptyMap(), + master: String = SparkConf().get("spark.master", "local[*]"), + appName: String = "Kotlin Spark Sample", + logLevel: SparkLogLevel = SparkLogLevel.ERROR, + func: KSparkStreamingSession.() -> Unit, +) { + val conf = SparkConf() + .setMaster(master) + .setAppName(appName) + .apply { + props.forEach { + set(it.key, it.toString()) + } + } + + val ssc = JavaStreamingContext(conf, batchDuration) + val spark = SparkSession.builder().config(conf).getOrCreate() + + KSparkStreamingSession(spark, ssc).apply { + spark.sparkContext.setLogLevel(logLevel) + func() + ssc.start() + ssc.awaitTermination() + sc.stop() + spark.stop() + } +} + /** - * This wrapper over [SparkSession] which provides several additional methods to create [org.apache.spark.sql.Dataset] + * This wrapper over [SparkSession] provides several additional methods to create [org.apache.spark.sql.Dataset] */ -class KSparkSession(val spark: SparkSession) { +open class KSparkSession(val spark: SparkSession) { val sc: JavaSparkContext = JavaSparkContext(spark.sparkContext) @@ -119,3 +168,9 @@ class KSparkSession(val spark: SparkSession) { inline fun JavaRDDLike.toDS() = toDS(spark) val udf: UDFRegistration get() = spark.udf() } + +/** + * This wrapper over [SparkSession] and [JavaStreamingContext] provides several additional methods to create [org.apache.spark.sql.Dataset] + */ +class KSparkStreamingSession(spark: SparkSession, val ssc: JavaStreamingContext) : KSparkSession(spark) + From a378070c2368c770edf40bb575802abf5e783079 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 21 Feb 2022 17:47:53 +0100 Subject: [PATCH 041/213] makes withSparkStreaming reuse the normal withSpark --- .../kotlinx/spark/api/SparkHelper.kt | 37 +++++++------------ 1 file changed, 14 insertions(+), 23 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt index db093cfc..2e750c81 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt @@ -30,6 +30,7 @@ import org.apache.spark.sql.UDFRegistration import org.apache.spark.streaming.Duration import org.apache.spark.streaming.api.java.JavaStreamingContext import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR +import kotlin.math.log /** * Wrapper for spark creation which allows setting different spark params. @@ -132,35 +133,25 @@ inline fun withSparkStreaming( logLevel: SparkLogLevel = SparkLogLevel.ERROR, func: KSparkStreamingSession.() -> Unit, ) { - val conf = SparkConf() - .setMaster(master) - .setAppName(appName) - .apply { - props.forEach { - set(it.key, it.toString()) - } + withSpark( + props = props, + master = master, + appName = appName, + logLevel = logLevel, + ) { + val ssc = JavaStreamingContext(sc, batchDuration) + KSparkStreamingSession(session = this, ssc = ssc).apply { + func() + ssc.start() + ssc.awaitTermination() } - - val ssc = JavaStreamingContext(conf, batchDuration) - val spark = SparkSession.builder().config(conf).getOrCreate() - - KSparkStreamingSession(spark, ssc).apply { - spark.sparkContext.setLogLevel(logLevel) - func() - ssc.start() - ssc.awaitTermination() - sc.stop() - spark.stop() } } /** * This wrapper over [SparkSession] provides several additional methods to create [org.apache.spark.sql.Dataset] */ -open class KSparkSession(val spark: SparkSession) { - - val sc: JavaSparkContext = JavaSparkContext(spark.sparkContext) - +open class KSparkSession(val spark: SparkSession, val sc: JavaSparkContext = JavaSparkContext(spark.sparkContext)) { inline fun List.toDS() = toDS(spark) inline fun Array.toDS() = spark.dsOf(*this) inline fun dsOf(vararg arg: T) = spark.dsOf(*arg) @@ -172,5 +163,5 @@ open class KSparkSession(val spark: SparkSession) { /** * This wrapper over [SparkSession] and [JavaStreamingContext] provides several additional methods to create [org.apache.spark.sql.Dataset] */ -class KSparkStreamingSession(spark: SparkSession, val ssc: JavaStreamingContext) : KSparkSession(spark) +class KSparkStreamingSession(session: KSparkSession, val ssc: JavaStreamingContext) : KSparkSession(session.spark, session.sc) From c0ead09a3aabbd410fae069873a8325dbe918451 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 21 Feb 2022 19:49:36 +0100 Subject: [PATCH 042/213] added encoders: Duration, Period, ByteArray (Binary, now actually working) added tests for: Instant datatype, duration, period, binary, and others added datatypes: ByteArray (BinaryType), CalendarInterval, Nothing (All three not yet working) --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 40 +++++++++++++------ .../jetbrains/kotlinx/spark/api/ApiTest.kt | 31 ++++++++++++++ 2 files changed, 58 insertions(+), 13 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 1061a21a..0e7abe2d 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -33,17 +33,22 @@ import org.apache.spark.sql.streaming.GroupState import org.apache.spark.sql.streaming.GroupStateTimeout import org.apache.spark.sql.streaming.OutputMode import org.apache.spark.sql.types.* +import org.apache.spark.sql.types.DataTypes.DateType +import org.apache.spark.unsafe.types.CalendarInterval import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions import scala.Product import scala.Tuple2 +import scala.concurrent.duration.`Duration$` import scala.reflect.ClassTag import scala.reflect.api.TypeTags.TypeTag import java.beans.PropertyDescriptor import java.math.BigDecimal import java.sql.Date import java.sql.Timestamp +import java.time.Duration import java.time.Instant import java.time.LocalDate +import java.time.Period import java.util.* import java.util.concurrent.ConcurrentHashMap import kotlin.Any @@ -95,10 +100,12 @@ val ENCODERS: Map, Encoder<*>> = mapOf( String::class to STRING(), BigDecimal::class to DECIMAL(), Date::class to DATE(), - LocalDate::class to LOCALDATE(), // 3.0 only + LocalDate::class to LOCALDATE(), // 3.0+ Timestamp::class to TIMESTAMP(), - Instant::class to INSTANT(), // 3.0 only - ByteArray::class to BINARY() + Instant::class to INSTANT(), // 3.0+ + ByteArray::class to BINARY(), + Duration::class to DURATION(), // 3.2+ + Period::class to PERIOD(), // 3.2+ ) @@ -177,12 +184,16 @@ fun generateEncoder(type: KType, cls: KClass<*>): Encoder { } as Encoder } -private fun isSupportedClass(cls: KClass<*>): Boolean = - cls.isData - || cls.isSubclassOf(Map::class) - || cls.isSubclassOf(Iterable::class) - || cls.isSubclassOf(Product::class) - || cls.java.isArray +private fun isSupportedClass(cls: KClass<*>): Boolean = when { + cls == ByteArray::class -> false // uses binary encoder + cls.isData -> true + cls.isSubclassOf(Map::class) -> true + cls.isSubclassOf(Iterable::class) -> true + cls.isSubclassOf(Product::class) -> true + cls.java.isArray -> true + else -> false + } + private fun kotlinClassEncoder(schema: DataType, kClass: KClass<*>): Encoder { return ExpressionEncoder( @@ -1290,10 +1301,13 @@ private val knownDataTypes: Map, DataType> = mapOf( Float::class to DataTypes.FloatType, Double::class to DataTypes.DoubleType, String::class to DataTypes.StringType, - LocalDate::class to `DateType$`.`MODULE$`, - Date::class to `DateType$`.`MODULE$`, - Timestamp::class to `TimestampType$`.`MODULE$`, - Instant::class to `TimestampType$`.`MODULE$`, + LocalDate::class to DataTypes.DateType, + Date::class to DataTypes.DateType, + Timestamp::class to DataTypes.TimestampType, + Instant::class to DataTypes.TimestampType, + ByteArray::class to DataTypes.BinaryType, + CalendarInterval::class to DataTypes.CalendarIntervalType, + Nothing::class to DataTypes.NullType, ) private fun transitiveMerge(a: Map, b: Map): Map { diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index ed784b13..e9936e0b 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -25,6 +25,7 @@ import org.apache.spark.sql.Dataset import org.apache.spark.sql.functions.* import org.apache.spark.sql.streaming.GroupState import org.apache.spark.sql.streaming.GroupStateTimeout +import org.apache.spark.unsafe.types.CalendarInterval import scala.Product import scala.Tuple1 import scala.Tuple2 @@ -33,8 +34,10 @@ import scala.collection.Seq import java.io.Serializable import java.sql.Date import java.sql.Timestamp +import java.time.Duration import java.time.Instant import java.time.LocalDate +import java.time.Period import kotlin.collections.Iterator import scala.collection.Iterator as ScalaIterator import scala.collection.Map as ScalaMap @@ -325,6 +328,10 @@ class ApiTest : ShouldSpec({ val dataset: Dataset = dsOf(Instant.now(), Instant.now()) dataset.show() } + should("Be able to serialize Instant") { // uses knownDataTypes + val dataset = dsOf(Instant.now() to Instant.now()) + dataset.show() + } should("be able to serialize Date") { // uses knownDataTypes val dataset: Dataset> = dsOf(Date.valueOf("2020-02-10") to 5) dataset.show() @@ -337,6 +344,30 @@ class ApiTest : ShouldSpec({ val dataset = dsOf(Timestamp(0L) to 2) dataset.show() } + should("handle Duration Datasets") { // uses encoder + val dataset = dsOf(Duration.ZERO) + dataset.show() + } + should("handle Period Datasets") { // uses encoder + val dataset = dsOf(Period.ZERO) + dataset.show() + } + should("handle binary datasets") { // uses encoder + val dataset = dsOf(byteArrayOf(1, 0, 1, 0)) + dataset.show() + } + should("be able to serialize binary") { // uses knownDataTypes + val dataset = dsOf(byteArrayOf(1, 0, 1, 0) to 2) + dataset.show() + } + should("be able to serialize CalendarInterval") { // uses knownDataTypes + val dataset = dsOf(CalendarInterval(1, 0, 0L) to 2) + dataset.show() + } + should("be able to serialize null") { // uses knownDataTypes + val dataset: Dataset> = dsOf(null to 2) + dataset.show() + } should("Be able to serialize Scala Tuples including data classes") { val dataset = dsOf( Tuple2("a", Tuple3("a", 1, LonLat(1.0, 1.0))), From 09e9bb5438c929f798084935ced429be1a657e8b Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 22 Feb 2022 13:06:26 +0100 Subject: [PATCH 043/213] Arity is now Serializable, removed sc.stop(), sc is now lazy, updates tests, removed streaming example --- .../kotlinx/spark/examples/Streaming.kt | 61 ------------------- .../kotlinx/spark/api/SparkHelper.kt | 3 +- .../jetbrains/kotlinx/spark/api/VarArities.kt | 54 ++++++++-------- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 22 +++++++ 4 files changed, 51 insertions(+), 89 deletions(-) delete mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt deleted file mode 100644 index 7c562bd5..00000000 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt +++ /dev/null @@ -1,61 +0,0 @@ -/*- - * =LICENSE= - * Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12) - * ---------- - * Copyright (C) 2019 - 2022 JetBrains - * ---------- - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * =LICENSEEND= - */ -package org.jetbrains.kotlinx.spark.examples - -import org.apache.spark.SparkConf -import org.apache.spark.api.java.JavaSparkContext -import org.apache.spark.streaming.Durations -import org.apache.spark.streaming.api.java.JavaStreamingContext -import org.jetbrains.kotlinx.spark.api.withSpark -import scala.Tuple2 -import java.io.Serializable - -data class Row @JvmOverloads constructor( - var word: String = "", -) : Serializable - -fun main() = withSpark { - - val context = JavaStreamingContext( - SparkConf() - .setMaster("local[*]") - .setAppName("Test"), - Durations.seconds(1), - ) - - val lines = context.socketTextStream("localhost", 9999) - - val words = lines.flatMap { it.split(" ").iterator() } - - words.foreachRDD { rdd, time -> - - // todo convert rdd to dataset using kotlin data class? - - val rowRdd = rdd.map { Row(it) } - - val dataframe = spark.createDataFrame(rowRdd, Row::class.java) - - - } - - - context.start() - context.awaitTermination() -} \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt index d9b4823a..98fdae8d 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt @@ -83,7 +83,6 @@ inline fun withSpark(builder: Builder, logLevel: SparkLogLevel = ERROR, func: KS KSparkSession(this).apply { sparkContext.setLogLevel(logLevel) func() - sc.stop() spark.stop() } } @@ -110,7 +109,7 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func */ class KSparkSession(val spark: SparkSession) { - val sc: JavaSparkContext = JavaSparkContext(spark.sparkContext) + val sc: JavaSparkContext by lazy { JavaSparkContext(spark.sparkContext) } inline fun List.toDS() = toDS(spark) inline fun Array.toDS() = spark.dsOf(*this) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt index a4b2bdd7..af870038 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt @@ -22,32 +22,34 @@ */ package org.jetbrains.kotlinx.spark.api -data class Arity1(val _1: T1) -data class Arity2(val _1: T1, val _2: T2) -data class Arity3(val _1: T1, val _2: T2, val _3: T3) -data class Arity4(val _1: T1, val _2: T2, val _3: T3, val _4: T4) -data class Arity5(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5) -data class Arity6(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6) -data class Arity7(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7) -data class Arity8(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8) -data class Arity9(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9) -data class Arity10(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10) -data class Arity11(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11) -data class Arity12(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12) -data class Arity13(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13) -data class Arity14(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14) -data class Arity15(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15) -data class Arity16(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16) -data class Arity17(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17) -data class Arity18(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18) -data class Arity19(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19) -data class Arity20(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20) -data class Arity21(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21) -data class Arity22(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22) -data class Arity23(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23) -data class Arity24(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24) -data class Arity25(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24, val _25: T25) -data class Arity26(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24, val _25: T25, val _26: T26) +import java.io.Serializable + +data class Arity1(val _1: T1): Serializable +data class Arity2(val _1: T1, val _2: T2): Serializable +data class Arity3(val _1: T1, val _2: T2, val _3: T3): Serializable +data class Arity4(val _1: T1, val _2: T2, val _3: T3, val _4: T4): Serializable +data class Arity5(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5): Serializable +data class Arity6(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6): Serializable +data class Arity7(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7): Serializable +data class Arity8(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8): Serializable +data class Arity9(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9): Serializable +data class Arity10(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10): Serializable +data class Arity11(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11): Serializable +data class Arity12(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12): Serializable +data class Arity13(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13): Serializable +data class Arity14(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14): Serializable +data class Arity15(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15): Serializable +data class Arity16(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16): Serializable +data class Arity17(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17): Serializable +data class Arity18(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18): Serializable +data class Arity19(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19): Serializable +data class Arity20(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20): Serializable +data class Arity21(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21): Serializable +data class Arity22(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22): Serializable +data class Arity23(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23): Serializable +data class Arity24(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24): Serializable +data class Arity25(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24, val _25: T25): Serializable +data class Arity26(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24, val _25: T25, val _26: T26): Serializable fun c(_1: T1) = Arity1(_1) fun c(_1: T1, _2: T2) = Arity2(_1, _2) fun c(_1: T1, _2: T2, _3: T3) = Arity3(_1, _2, _3) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index 936b5b2c..149e6500 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -634,11 +634,33 @@ class ApiTest : ShouldSpec({ dataset3.show() dataset3.toList>() shouldBe listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) + + // Kotlin Serializable data class RDD + val rdd4 = sc.parallelize( + listOf(SomeClass(intArrayOf(1, 2), 0)) + ) + val dataset4 = rdd4.toDS() + dataset4.show() + + dataset4.toList().first().let { (a, b) -> + a contentEquals intArrayOf(1, 2) shouldBe true + b shouldBe 0 + } + + // Arity + val rdd5 = sc.parallelize( + listOf(c(1.0, 4)) + ) + val dataset5 = rdd5.toDS() + dataset5.show() + + dataset5.toList>() shouldBe listOf(c(1.0, 4)) } } } }) + data class DataClassWithTuple(val tuple: T) data class LonLat(val lon: Double, val lat: Double) From 597b6f1de4df16c8827e0cc43ce3047eac3be066 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 22 Feb 2022 22:00:26 +0100 Subject: [PATCH 044/213] copying over some other missing parts from ScalaReflection.scala. Did some refactoring. still trying to get serializing of binary to work --- .../apache/spark/sql/KotlinReflection.scala | 2233 +++++++++-------- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 14 +- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 4 +- 3 files changed, 1266 insertions(+), 985 deletions(-) diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala index be808af0..5c0b3cf7 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala +++ b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala @@ -22,6 +22,7 @@ package org.apache.spark.sql import org.apache.spark.internal.Logging import org.apache.spark.sql.catalyst.DeserializerBuildHelper._ +import org.apache.spark.sql.catalyst.ScalaReflection.{Schema, getClassFromType, isSubtype, javaBoxedType, localTypeOf} import org.apache.spark.sql.catalyst.SerializerBuildHelper._ import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal import org.apache.spark.sql.catalyst.expressions.objects._ @@ -30,6 +31,7 @@ import org.apache.spark.sql.catalyst.util.ArrayBasedMapData import org.apache.spark.sql.catalyst.{InternalRow, ScalaReflection, WalkedTypePath} import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String} +import org.apache.spark.util.Utils import java.beans.{Introspector, PropertyDescriptor} @@ -45,944 +47,1215 @@ trait DefinedByConstructorParams * KotlinReflection is heavily inspired by ScalaReflection and even extends it just to add several methods */ object KotlinReflection extends KotlinReflection { - /** - * Returns the Spark SQL DataType for a given java class. Where this is not an exact mapping - * to a native type, an ObjectType is returned. - * - * Unlike `inferDataType`, this function doesn't do any massaging of types into the Spark SQL type - * system. As a result, ObjectType will be returned for things like boxed Integers. - */ - private def inferExternalType(cls: Class[_]): DataType = cls match { - case c if c == java.lang.Boolean.TYPE => BooleanType - case c if c == java.lang.Byte.TYPE => ByteType - case c if c == java.lang.Short.TYPE => ShortType - case c if c == java.lang.Integer.TYPE => IntegerType - case c if c == java.lang.Long.TYPE => LongType - case c if c == java.lang.Float.TYPE => FloatType - case c if c == java.lang.Double.TYPE => DoubleType - case c if c == classOf[Array[Byte]] => BinaryType - case _ => ObjectType(cls) - } - - val universe: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe - - // Since we are creating a runtime mirror using the class loader of current thread, - // we need to use def at here. So, every time we call mirror, it is using the - // class loader of the current thread. - override def mirror: universe.Mirror = { - universe.runtimeMirror(Thread.currentThread().getContextClassLoader) - } - - import universe._ - - // The Predef.Map is scala.collection.immutable.Map. - // Since the map values can be mutable, we explicitly import scala.collection.Map at here. - import scala.collection.Map - - - def isSubtype(t: universe.Type, t2: universe.Type): Boolean = t <:< t2 - - /** - * Synchronize to prevent concurrent usage of `<:<` operator. - * This operator is not thread safe in any current version of scala; i.e. - * (2.11.12, 2.12.10, 2.13.0-M5). - * - * See https://github.com/scala/bug/issues/10766 - */ - /* - private[catalyst] def isSubtype(tpe1: `Type`, tpe2: `Type`): Boolean = { - ScalaReflection.ScalaSubtypeLock.synchronized { - tpe1 <:< tpe2 - } + /** + * Returns the Spark SQL DataType for a given java class. Where this is not an exact mapping + * to a native type, an ObjectType is returned. + * + * Unlike `inferDataType`, this function doesn't do any massaging of types into the Spark SQL type + * system. As a result, ObjectType will be returned for things like boxed Integers. + */ + private def inferExternalType(cls: Class[_]): DataType = cls match { + case c if c == java.lang.Boolean.TYPE => BooleanType + case c if c == java.lang.Byte.TYPE => ByteType + case c if c == java.lang.Short.TYPE => ShortType + case c if c == java.lang.Integer.TYPE => IntegerType + case c if c == java.lang.Long.TYPE => LongType + case c if c == java.lang.Float.TYPE => FloatType + case c if c == java.lang.Double.TYPE => DoubleType + case c if c == classOf[Array[Byte]] => BinaryType + case _ => ObjectType(cls) } - */ - - private def dataTypeFor(tpe: `Type`): DataType = cleanUpReflectionObjects { - tpe.dealias match { - case t if isSubtype(t, definitions.NullTpe) => NullType - case t if isSubtype(t, definitions.IntTpe) => IntegerType - case t if isSubtype(t, definitions.LongTpe) => LongType - case t if isSubtype(t, definitions.DoubleTpe) => DoubleType - case t if isSubtype(t, definitions.FloatTpe) => FloatType - case t if isSubtype(t, definitions.ShortTpe) => ShortType - case t if isSubtype(t, definitions.ByteTpe) => ByteType - case t if isSubtype(t, definitions.BooleanTpe) => BooleanType - case t if isSubtype(t, localTypeOf[Array[Byte]]) => BinaryType - case t if isSubtype(t, localTypeOf[CalendarInterval]) => CalendarIntervalType - case t if isSubtype(t, localTypeOf[Decimal]) => DecimalType.SYSTEM_DEFAULT - case _ => - val className = getClassNameFromType(tpe) - className match { - case "scala.Array" => - val TypeRef(_, _, Seq(elementType)) = tpe - arrayClassFor(elementType) - case _ => - val clazz = getClassFromType(tpe) - ObjectType(clazz) - } + + val universe: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe + + // Since we are creating a runtime mirror using the class loader of current thread, + // we need to use def at here. So, every time we call mirror, it is using the + // class loader of the current thread. + override def mirror: universe.Mirror = { + universe.runtimeMirror(Thread.currentThread().getContextClassLoader) } - } - - /** - * Given a type `T` this function constructs `ObjectType` that holds a class of type - * `Array[T]`. - * - * Special handling is performed for primitive types to map them back to their raw - * JVM form instead of the Scala Array that handles auto boxing. - */ - private def arrayClassFor(tpe: `Type`): ObjectType = cleanUpReflectionObjects { - val cls = tpe.dealias match { - case t if isSubtype(t, definitions.IntTpe) => classOf[Array[Int]] - case t if isSubtype(t, definitions.LongTpe) => classOf[Array[Long]] - case t if isSubtype(t, definitions.DoubleTpe) => classOf[Array[Double]] - case t if isSubtype(t, definitions.FloatTpe) => classOf[Array[Float]] - case t if isSubtype(t, definitions.ShortTpe) => classOf[Array[Short]] - case t if isSubtype(t, definitions.ByteTpe) => classOf[Array[Byte]] - case t if isSubtype(t, definitions.BooleanTpe) => classOf[Array[Boolean]] - case other => - // There is probably a better way to do this, but I couldn't find it... - val elementType = dataTypeFor(other).asInstanceOf[ObjectType].cls - java.lang.reflect.Array.newInstance(elementType, 0).getClass + import universe._ + + // The Predef.Map is scala.collection.immutable.Map. + // Since the map values can be mutable, we explicitly import scala.collection.Map at here. + import scala.collection.Map + + + def isSubtype(t: universe.Type, t2: universe.Type): Boolean = t <:< t2 + + /** + * Synchronize to prevent concurrent usage of `<:<` operator. + * This operator is not thread safe in any current version of scala; i.e. + * (2.11.12, 2.12.10, 2.13.0-M5). + * + * See https://github.com/scala/bug/issues/10766 + */ + /* + private[catalyst] def isSubtype(tpe1: `Type`, tpe2: `Type`): Boolean = { + ScalaReflection.ScalaSubtypeLock.synchronized { + tpe1 <:< tpe2 + } + } + */ + + private def dataTypeFor(tpe: `Type`): DataType = cleanUpReflectionObjects { + tpe.dealias match { + case t if isSubtype(t, definitions.NullTpe) => NullType + case t if isSubtype(t, definitions.IntTpe) => IntegerType + case t if isSubtype(t, definitions.LongTpe) => LongType + case t if isSubtype(t, definitions.DoubleTpe) => DoubleType + case t if isSubtype(t, definitions.FloatTpe) => FloatType + case t if isSubtype(t, definitions.ShortTpe) => ShortType + case t if isSubtype(t, definitions.ByteTpe) => ByteType + case t if isSubtype(t, definitions.BooleanTpe) => BooleanType + case t if isSubtype(t, localTypeOf[Array[Byte]]) => BinaryType + case t if isSubtype(t, localTypeOf[CalendarInterval]) => CalendarIntervalType + case t if isSubtype(t, localTypeOf[Decimal]) => DecimalType.SYSTEM_DEFAULT + case _ => { + val className = getClassNameFromType(tpe) + className match { + case "scala.Array" => { + val TypeRef(_, _, Seq(elementType)) = tpe + arrayClassFor(elementType) + } + case _ => { + val clazz = getClassFromType(tpe) + ObjectType(clazz) + } + } + } + } } - ObjectType(cls) - } - - /** - * Returns true if the value of this data type is same between internal and external. - */ - def isNativeType(dt: DataType): Boolean = dt match { - case NullType | BooleanType | ByteType | ShortType | IntegerType | LongType | - FloatType | DoubleType | BinaryType | CalendarIntervalType => true - case _ => false - } - - private def baseType(tpe: `Type`): `Type` = { - tpe.dealias match { - case annotatedType: AnnotatedType => annotatedType.underlying - case other => other + + /** + * Given a type `T` this function constructs `ObjectType` that holds a class of type + * `Array[T]`. + * + * Special handling is performed for primitive types to map them back to their raw + * JVM form instead of the Scala Array that handles auto boxing. + */ + private def arrayClassFor(tpe: `Type`): ObjectType = cleanUpReflectionObjects { + val cls = tpe.dealias match { + case t if isSubtype(t, definitions.IntTpe) => classOf[Array[Int]] + case t if isSubtype(t, definitions.LongTpe) => classOf[Array[Long]] + case t if isSubtype(t, definitions.DoubleTpe) => classOf[Array[Double]] + case t if isSubtype(t, definitions.FloatTpe) => classOf[Array[Float]] + case t if isSubtype(t, definitions.ShortTpe) => classOf[Array[Short]] + case t if isSubtype(t, definitions.ByteTpe) => classOf[Array[Byte]] + case t if isSubtype(t, definitions.BooleanTpe) => classOf[Array[Boolean]] + case t if isSubtype(t, localTypeOf[Array[Byte]]) => classOf[Array[Array[Byte]]] + case t if isSubtype(t, localTypeOf[CalendarInterval]) => classOf[Array[CalendarInterval]] + case t if isSubtype(t, localTypeOf[Decimal]) => classOf[Array[Decimal]] + case other => { + // There is probably a better way to do this, but I couldn't find it... + val elementType = dataTypeFor(other).asInstanceOf[ObjectType].cls + java.lang.reflect.Array.newInstance(elementType, 0).getClass + } + + } + ObjectType(cls) } - } - - /** - * Returns an expression that can be used to deserialize a Spark SQL representation to an object - * of type `T` with a compatible schema. The Spark SQL representation is located at ordinal 0 of - * a row, i.e., `GetColumnByOrdinal(0, _)`. Nested classes will have their fields accessed using - * `UnresolvedExtractValue`. - * - * The returned expression is used by `ExpressionEncoder`. The encoder will resolve and bind this - * deserializer expression when using it. - */ - def deserializerForType(tpe: `Type`): Expression = { - val clsName = getClassNameFromType(tpe) - val walkedTypePath = WalkedTypePath().recordRoot(clsName) - val Schema(dataType, nullable) = schemaFor(tpe) - - // Assumes we are deserializing the first column of a row. - deserializerForWithNullSafetyAndUpcast(GetColumnByOrdinal(0, dataType), dataType, - nullable = nullable, walkedTypePath, - (casted, typePath) => deserializerFor(tpe, casted, typePath)) - } - - - /** - * Returns an expression that can be used to deserialize an input expression to an object of type - * `T` with a compatible schema. - * - * @param tpe The `Type` of deserialized object. - * @param path The expression which can be used to extract serialized value. - * @param walkedTypePath The paths from top to bottom to access current field when deserializing. - */ - private def deserializerFor( - tpe: `Type`, - path: Expression, - walkedTypePath: WalkedTypePath, - predefinedDt: Option[DataTypeWithClass] = None - ): Expression = cleanUpReflectionObjects { - baseType(tpe) match { - - // - case t if isSubtype(t, localTypeOf[java.lang.Integer]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Integer]) - - case t if isSubtype(t, localTypeOf[Int]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Integer]) - - case t if isSubtype(t, localTypeOf[java.lang.Long]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Long]) - case t if isSubtype(t, localTypeOf[Long]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Long]) - - case t if isSubtype(t, localTypeOf[java.lang.Double]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Double]) - case t if isSubtype(t, localTypeOf[Double]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Double]) - - case t if isSubtype(t, localTypeOf[java.lang.Float]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Float]) - case t if isSubtype(t, localTypeOf[Float]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Float]) - - case t if isSubtype(t, localTypeOf[java.lang.Short]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Short]) - case t if isSubtype(t, localTypeOf[Short]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Short]) - - case t if isSubtype(t, localTypeOf[java.lang.Byte]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Byte]) - case t if isSubtype(t, localTypeOf[Byte]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Byte]) - - case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Boolean]) - case t if isSubtype(t, localTypeOf[Boolean]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Boolean]) - - case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => - createDeserializerForLocalDate(path) - - case t if isSubtype(t, localTypeOf[java.sql.Date]) => - createDeserializerForSqlDate(path) - // - - case t if isSubtype(t, localTypeOf[java.time.Instant]) => - createDeserializerForInstant(path) - - case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => - createDeserializerForSqlTimestamp(path) - - case t if isSubtype(t, localTypeOf[java.lang.String]) => - createDeserializerForString(path, returnNullable = false) - - case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => - createDeserializerForJavaBigDecimal(path, returnNullable = false) - - case t if isSubtype(t, localTypeOf[BigDecimal]) => - createDeserializerForScalaBigDecimal(path, returnNullable = false) - - case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => - createDeserializerForJavaBigInteger(path, returnNullable = false) - - case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => - createDeserializerForScalaBigInt(path) - - case t if isSubtype(t, localTypeOf[Array[_]]) => - var TypeRef(_, _, Seq(elementType)) = t - if (predefinedDt.isDefined && !elementType.dealias.typeSymbol.isClass) - elementType = getType(predefinedDt.get.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType.asInstanceOf[DataTypeWithClass].cls) - val Schema(dataType, elementNullable) = predefinedDt.map(it => { - val elementInfo = it.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType.asInstanceOf[DataTypeWithClass] - Schema(elementInfo.dt, elementInfo.nullable) - }) - .getOrElse(schemaFor(elementType)) - val className = getClassNameFromType(elementType) - val newTypePath = walkedTypePath.recordArray(className) - - val mapFunction: Expression => Expression = element => { - // upcast the array element to the data type the encoder expected. - deserializerForWithNullSafetyAndUpcast( - element, - dataType, - nullable = elementNullable, - newTypePath, - (casted, typePath) => deserializerFor(elementType, casted, typePath, predefinedDt.map(_.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType).filter(_.isInstanceOf[ComplexWrapper]).map(_.asInstanceOf[ComplexWrapper]))) + + /** + * Returns true if the value of this data type is same between internal and external. + */ + def isNativeType(dt: DataType): Boolean = dt match { + case NullType | BooleanType | ByteType | ShortType | IntegerType | LongType | + FloatType | DoubleType | BinaryType | CalendarIntervalType => { + true } + case _ => false + } - val arrayData = UnresolvedMapObjects(mapFunction, path) - val arrayCls = arrayClassFor(elementType) - - val methodName = elementType match { - case t if isSubtype(t, definitions.IntTpe) => "toIntArray" - case t if isSubtype(t, definitions.LongTpe) => "toLongArray" - case t if isSubtype(t, definitions.DoubleTpe) => "toDoubleArray" - case t if isSubtype(t, definitions.FloatTpe) => "toFloatArray" - case t if isSubtype(t, definitions.ShortTpe) => "toShortArray" - case t if isSubtype(t, definitions.ByteTpe) => "toByteArray" - case t if isSubtype(t, definitions.BooleanTpe) => "toBooleanArray" - // non-primitive - case _ => "array" + private def baseType(tpe: `Type`): `Type` = { + tpe.dealias match { + case annotatedType: AnnotatedType => annotatedType.underlying + case other => other } - Invoke(arrayData, methodName, arrayCls, returnNullable = false) + } - // We serialize a `Set` to Catalyst array. When we deserialize a Catalyst array - // to a `Set`, if there are duplicated elements, the elements will be de-duplicated. + /** + * Returns an expression that can be used to deserialize a Spark SQL representation to an object + * of type `T` with a compatible schema. The Spark SQL representation is located at ordinal 0 of + * a row, i.e., `GetColumnByOrdinal(0, _)`. Nested classes will have their fields accessed using + * `UnresolvedExtractValue`. + * + * The returned expression is used by `ExpressionEncoder`. The encoder will resolve and bind this + * deserializer expression when using it. + */ + def deserializerForType(tpe: `Type`): Expression = { + val clsName = getClassNameFromType(tpe) + val walkedTypePath = WalkedTypePath().recordRoot(clsName) + val Schema(dataType, nullable) = schemaFor(tpe) + + // Assumes we are deserializing the first column of a row. + deserializerForWithNullSafetyAndUpcast( + GetColumnByOrdinal(0, dataType), dataType, + nullable = nullable, walkedTypePath, + (casted, typePath) => deserializerFor(tpe, casted, typePath) + ) + } - case t if isSubtype(t, localTypeOf[Map[_, _]]) => - val TypeRef(_, _, Seq(keyType, valueType)) = t - val classNameForKey = getClassNameFromType(keyType) - val classNameForValue = getClassNameFromType(valueType) + /** + * Returns an expression that can be used to deserialize an input expression to an object of type + * `T` with a compatible schema. + * + * @param tpe The `Type` of deserialized object. + * @param path The expression which can be used to extract serialized value. + * @param walkedTypePath The paths from top to bottom to access current field when deserializing. + */ + private def deserializerFor( + tpe: `Type`, + path: Expression, + walkedTypePath: WalkedTypePath, + predefinedDt: Option[DataTypeWithClass] = None + ): Expression = cleanUpReflectionObjects { + baseType(tpe) match { + + // + case t if isSubtype(t, localTypeOf[java.lang.Integer]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Integer]) + } + case t if isSubtype(t, localTypeOf[Int]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Integer]) + } + case t if isSubtype(t, localTypeOf[java.lang.Long]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Long]) + } + case t if isSubtype(t, localTypeOf[Long]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Long]) + } + case t if isSubtype(t, localTypeOf[java.lang.Double]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Double]) + } + case t if isSubtype(t, localTypeOf[Double]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Double]) + } + case t if isSubtype(t, localTypeOf[java.lang.Float]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Float]) + } + case t if isSubtype(t, localTypeOf[Float]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Float]) + } + case t if isSubtype(t, localTypeOf[java.lang.Short]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Short]) + } + case t if isSubtype(t, localTypeOf[Short]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Short]) + } + case t if isSubtype(t, localTypeOf[java.lang.Byte]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Byte]) + } + case t if isSubtype(t, localTypeOf[Byte]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Byte]) + } + case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Boolean]) + } + case t if isSubtype(t, localTypeOf[Boolean]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Boolean]) + } + case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => { + createDeserializerForLocalDate(path) + } + case t if isSubtype(t, localTypeOf[java.sql.Date]) => { + createDeserializerForSqlDate(path) + } // - val newTypePath = walkedTypePath.recordMap(classNameForKey, classNameForValue) + case t if isSubtype(t, localTypeOf[java.time.Instant]) => { + createDeserializerForInstant(path) + } + case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => { + createDeserializerForTypesSupportValueOf( + Invoke(path, "toString", ObjectType(classOf[String]), returnNullable = false), + getClassFromType(t), + ) + } + case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => { + createDeserializerForSqlTimestamp(path) + } + case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) => { + createDeserializerForLocalDateTime(path) + } + case t if isSubtype(t, localTypeOf[java.time.Duration]) => { + createDeserializerForDuration(path) + } + case t if isSubtype(t, localTypeOf[java.time.Period]) => { + createDeserializerForPeriod(path) + } + case t if isSubtype(t, localTypeOf[java.lang.String]) => { + createDeserializerForString(path, returnNullable = false) + } + case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => { + createDeserializerForJavaBigDecimal(path, returnNullable = false) + } + case t if isSubtype(t, localTypeOf[BigDecimal]) => { + createDeserializerForScalaBigDecimal(path, returnNullable = false) + } + case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => { + createDeserializerForJavaBigInteger(path, returnNullable = false) + } + case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => { + createDeserializerForScalaBigInt(path) + } // TODO case t if isSubtype(t, localTypeOf[Array[Byte]]) => + // createDeserializerForTypesSupportValueOf(path, classOf[Array[Byte]]) + + case t if isSubtype(t, localTypeOf[Array[_]]) => { + var TypeRef(_, _, Seq(elementType)) = t + if (predefinedDt.isDefined && !elementType.dealias.typeSymbol.isClass) + elementType = getType(predefinedDt.get.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType] + .elementType.asInstanceOf[DataTypeWithClass].cls + ) + val Schema(dataType, elementNullable) = predefinedDt.map { it => + val elementInfo = it.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType + .asInstanceOf[DataTypeWithClass] + Schema(elementInfo.dt, elementInfo.nullable) + }.getOrElse(schemaFor(elementType)) + val className = getClassNameFromType(elementType) + val newTypePath = walkedTypePath.recordArray(className) - UnresolvedCatalystToExternalMap( - path, - p => deserializerFor(keyType, p, newTypePath), - p => deserializerFor(valueType, p, newTypePath), - mirror.runtimeClass(t.typeSymbol.asClass) - ) + val mapFunction: Expression => Expression = element => { + // upcast the array element to the data type the encoder expected. + deserializerForWithNullSafetyAndUpcast( + element, + dataType, + nullable = elementNullable, + newTypePath, + (casted, typePath) => deserializerFor( + tpe = elementType, + path = casted, + walkedTypePath = typePath, + predefinedDt = predefinedDt + .map(_.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType) + .filter(_.isInstanceOf[ComplexWrapper]) + .map(_.asInstanceOf[ComplexWrapper]) + ) + ) + } - case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => - createDeserializerForTypesSupportValueOf( - createDeserializerForString(path, returnNullable = false), Class.forName(t.toString)) - - case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => - val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt(). - getConstructor().newInstance() - val obj = NewInstance( - udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt(), - Nil, - dataType = ObjectType(udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt())) - Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil) - - case t if UDTRegistration.exists(getClassNameFromType(t)) => - val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). - newInstance().asInstanceOf[UserDefinedType[_]] - val obj = NewInstance( - udt.getClass, - Nil, - dataType = ObjectType(udt.getClass)) - Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil) - - case _ if predefinedDt.isDefined => - predefinedDt.get match { - case wrapper: KDataTypeWrapper => - val structType = wrapper.dt - val cls = wrapper.cls - val arguments = structType - .fields - .map(field => { - val dataType = field.dataType.asInstanceOf[DataTypeWithClass] - val nullable = dataType.nullable - val clsName = getClassNameFromType(getType(dataType.cls)) - val newTypePath = walkedTypePath.recordField(clsName, field.name) - - // For tuples, we based grab the inner fields by ordinal instead of name. - val newPath = deserializerFor( - getType(dataType.cls), - addToPath(path, field.name, dataType.dt, newTypePath), - newTypePath, - Some(dataType).filter(_.isInstanceOf[ComplexWrapper]) - ) - expressionWithNullSafety( - newPath, - nullable = nullable, - newTypePath - ) + val arrayData = UnresolvedMapObjects(mapFunction, path) + val arrayCls = arrayClassFor(elementType) + + val methodName = elementType match { + case t if isSubtype(t, definitions.IntTpe) => "toIntArray" + case t if isSubtype(t, definitions.LongTpe) => "toLongArray" + case t if isSubtype(t, definitions.DoubleTpe) => "toDoubleArray" + case t if isSubtype(t, definitions.FloatTpe) => "toFloatArray" + case t if isSubtype(t, definitions.ShortTpe) => "toShortArray" + case t if isSubtype(t, definitions.ByteTpe) => "toByteArray" + case t if isSubtype(t, definitions.BooleanTpe) => "toBooleanArray" + // non-primitive + case _ => "array" + } + Invoke(arrayData, methodName, arrayCls, returnNullable = false) + } - }) - val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) + // We serialize a `Set` to Catalyst array. When we deserialize a Catalyst array + // to a `Set`, if there are duplicated elements, the elements will be de-duplicated. - org.apache.spark.sql.catalyst.expressions.If( - IsNull(path), - org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), - newInstance - ) + case t if isSubtype(t, localTypeOf[Map[_, _]]) => { + val TypeRef(_, _, Seq(keyType, valueType)) = t - case t: ComplexWrapper => - t.dt match { - case MapType(kt, vt, _) => - val Seq(keyType, valueType) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass].cls).map(getType(_)) - val Seq(keyDT, valueDT) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass]) val classNameForKey = getClassNameFromType(keyType) val classNameForValue = getClassNameFromType(valueType) val newTypePath = walkedTypePath.recordMap(classNameForKey, classNameForValue) - val keyData = - Invoke( - UnresolvedMapObjects( - p => deserializerFor(keyType, p, newTypePath, Some(keyDT).filter(_.isInstanceOf[ComplexWrapper])), - MapKeys(path)), - "array", - ObjectType(classOf[Array[Any]])) - - val valueData = - Invoke( - UnresolvedMapObjects( - p => deserializerFor(valueType, p, newTypePath, Some(valueDT).filter(_.isInstanceOf[ComplexWrapper])), - MapValues(path)), - "array", - ObjectType(classOf[Array[Any]])) - - StaticInvoke( - ArrayBasedMapData.getClass, - ObjectType(classOf[java.util.Map[_, _]]), - "toJavaMap", - keyData :: valueData :: Nil, - returnNullable = false) - - case ArrayType(elementType, containsNull) => - val dataTypeWithClass = elementType.asInstanceOf[DataTypeWithClass] - val mapFunction: Expression => Expression = element => { - // upcast the array element to the data type the encoder expected. - val et = getType(dataTypeWithClass.cls) - val className = getClassNameFromType(et) - val newTypePath = walkedTypePath.recordArray(className) - deserializerForWithNullSafetyAndUpcast( - element, - dataTypeWithClass.dt, - nullable = dataTypeWithClass.nullable, - newTypePath, - (casted, typePath) => { - deserializerFor(et, casted, typePath, Some(dataTypeWithClass).filter(_.isInstanceOf[ComplexWrapper]).map(_.asInstanceOf[ComplexWrapper])) - }) + UnresolvedCatalystToExternalMap( + path, + p => deserializerFor(keyType, p, newTypePath), + p => deserializerFor(valueType, p, newTypePath), + mirror.runtimeClass(t.typeSymbol.asClass) + ) + } + + case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => { + createDeserializerForTypesSupportValueOf( + createDeserializerForString(path, returnNullable = false), + Class.forName(t.toString), + ) + } + case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => { + val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt(). + getConstructor().newInstance() + val obj = NewInstance( + udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt(), + Nil, + dataType = ObjectType(udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt()) + ) + Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil) + } + + case t if UDTRegistration.exists(getClassNameFromType(t)) => { + val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). + newInstance().asInstanceOf[UserDefinedType[_]] + val obj = NewInstance( + udt.getClass, + Nil, + dataType = ObjectType(udt.getClass) + ) + Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil) + } + + case _ if predefinedDt.isDefined => { + predefinedDt.get match { + + case wrapper: KDataTypeWrapper => { + val structType = wrapper.dt + val cls = wrapper.cls + val arguments = structType + .fields + .map { field => + val dataType = field.dataType.asInstanceOf[DataTypeWithClass] + val nullable = dataType.nullable + val clsName = getClassNameFromType(getType(dataType.cls)) + val newTypePath = walkedTypePath.recordField(clsName, field.name) + + // For tuples, we based grab the inner fields by ordinal instead of name. + val newPath = deserializerFor( + tpe = getType(dataType.cls), + path = addToPath(path, field.name, dataType.dt, newTypePath), + walkedTypePath = newTypePath, + predefinedDt = Some(dataType).filter(_.isInstanceOf[ComplexWrapper]) + ) + expressionWithNullSafety( + newPath, + nullable = nullable, + newTypePath + ) + } + val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) + + org.apache.spark.sql.catalyst.expressions.If( + IsNull(path), + org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), + newInstance + ) + } + + case t: ComplexWrapper => { + + t.dt match { + case MapType(kt, vt, _) => { + val Seq(keyType, valueType) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass].cls) + .map(getType(_)) + val Seq(keyDT, valueDT) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass]) + val classNameForKey = getClassNameFromType(keyType) + val classNameForValue = getClassNameFromType(valueType) + + val newTypePath = walkedTypePath.recordMap(classNameForKey, classNameForValue) + + val keyData = + Invoke( + UnresolvedMapObjects( + p => deserializerFor( + keyType, p, newTypePath, Some(keyDT) + .filter(_.isInstanceOf[ComplexWrapper]) + ), + MapKeys(path) + ), + "array", + ObjectType(classOf[Array[Any]]) + ) + + val valueData = + Invoke( + UnresolvedMapObjects( + p => deserializerFor( + valueType, p, newTypePath, Some(valueDT) + .filter(_.isInstanceOf[ComplexWrapper]) + ), + MapValues(path) + ), + "array", + ObjectType(classOf[Array[Any]]) + ) + + StaticInvoke( + ArrayBasedMapData.getClass, + ObjectType(classOf[java.util.Map[_, _]]), + "toJavaMap", + keyData :: valueData :: Nil, + returnNullable = false + ) + } + + case ArrayType(elementType, containsNull) => { + val dataTypeWithClass = elementType.asInstanceOf[DataTypeWithClass] + val mapFunction: Expression => Expression = element => { + // upcast the array element to the data type the encoder expected. + val et = getType(dataTypeWithClass.cls) + val className = getClassNameFromType(et) + val newTypePath = walkedTypePath.recordArray(className) + deserializerForWithNullSafetyAndUpcast( + element, + dataTypeWithClass.dt, + nullable = dataTypeWithClass.nullable, + newTypePath, + (casted, typePath) => { + deserializerFor( + et, casted, typePath, Some(dataTypeWithClass) + .filter(_.isInstanceOf[ComplexWrapper]) + .map(_.asInstanceOf[ComplexWrapper]) + ) + } + ) + } + + UnresolvedMapObjects(mapFunction, path, customCollectionCls = Some(t.cls)) + } + + case StructType(elementType: Array[StructField]) => { + val cls = t.cls + + val arguments = elementType.map { field => + val dataType = field.dataType.asInstanceOf[DataTypeWithClass] + val nullable = dataType.nullable + val clsName = getClassNameFromType(getType(dataType.cls)) + val newTypePath = walkedTypePath.recordField(clsName, field.name) + + // For tuples, we based grab the inner fields by ordinal instead of name. + val newPath = deserializerFor( + getType(dataType.cls), + addToPath(path, field.name, dataType.dt, newTypePath), + newTypePath, + Some(dataType).filter(_.isInstanceOf[ComplexWrapper]) + ) + expressionWithNullSafety( + newPath, + nullable = nullable, + newTypePath + ) + } + val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) + + org.apache.spark.sql.catalyst.expressions.If( + IsNull(path), + org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), + newInstance + ) + } + + case _ => { + throw new UnsupportedOperationException( + s"No Encoder found for $tpe\n" + walkedTypePath + ) + } + } + } } + } - UnresolvedMapObjects(mapFunction, path, customCollectionCls = Some(t.cls)) - - case StructType(elementType: Array[StructField]) => - val cls = t.cls - - val arguments = elementType.map { field => - val dataType = field.dataType.asInstanceOf[DataTypeWithClass] - val nullable = dataType.nullable - val clsName = getClassNameFromType(getType(dataType.cls)) - val newTypePath = walkedTypePath.recordField(clsName, field.name) - - // For tuples, we based grab the inner fields by ordinal instead of name. - val newPath = deserializerFor( - getType(dataType.cls), - addToPath(path, field.name, dataType.dt, newTypePath), - newTypePath, - Some(dataType).filter(_.isInstanceOf[ComplexWrapper]) - ) - expressionWithNullSafety( - newPath, - nullable = nullable, - newTypePath - ) + case t if definedByConstructorParams(t) => { + val params = getConstructorParameters(t) + + val cls = getClassFromType(tpe) + + val arguments = params.zipWithIndex.map { case ((fieldName, fieldType), i) => + val Schema(dataType, nullable) = schemaFor(fieldType) + val clsName = getClassNameFromType(fieldType) + val newTypePath = walkedTypePath.recordField(clsName, fieldName) + + // For tuples, we based grab the inner fields by ordinal instead of name. + val newPath = if (cls.getName startsWith "scala.Tuple") { + deserializerFor( + fieldType, + addToPathOrdinal(path, i, dataType, newTypePath), + newTypePath + ) + } else { + deserializerFor( + fieldType, + addToPath(path, fieldName, dataType, newTypePath), + newTypePath + ) + } + expressionWithNullSafety( + newPath, + nullable = nullable, + newTypePath + ) } + val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) org.apache.spark.sql.catalyst.expressions.If( - IsNull(path), - org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), - newInstance + IsNull(path), + org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), + newInstance ) + } - - case _ => + case _ => { throw new UnsupportedOperationException( - s"No Encoder found for $tpe\n" + walkedTypePath) + s"No Encoder found for $tpe\n" + walkedTypePath + ) } } + } - case t if definedByConstructorParams(t) => - val params = getConstructorParameters(t) - - val cls = getClassFromType(tpe) - - val arguments = params.zipWithIndex.map { case ((fieldName, fieldType), i) => - val Schema(dataType, nullable) = schemaFor(fieldType) - val clsName = getClassNameFromType(fieldType) - val newTypePath = walkedTypePath.recordField(clsName, fieldName) - - // For tuples, we based grab the inner fields by ordinal instead of name. - val newPath = if (cls.getName startsWith "scala.Tuple") { - deserializerFor( - fieldType, - addToPathOrdinal(path, i, dataType, newTypePath), - newTypePath) - } else { - deserializerFor( - fieldType, - addToPath(path, fieldName, dataType, newTypePath), - newTypePath) - } - expressionWithNullSafety( - newPath, - nullable = nullable, - newTypePath) - } + /** + * Returns an expression for serializing an object of type T to Spark SQL representation. The + * input object is located at ordinal 0 of a row, i.e., `BoundReference(0, _)`. + * + * If the given type is not supported, i.e. there is no encoder can be built for this type, + * an [[UnsupportedOperationException]] will be thrown with detailed error message to explain + * the type path walked so far and which class we are not supporting. + * There are 4 kinds of type path: + * * the root type: `root class: "abc.xyz.MyClass"` + * * the value type of [[Option]]: `option value class: "abc.xyz.MyClass"` + * * the element type of [[Array]] or [[Seq]]: `array element class: "abc.xyz.MyClass"` + * * the field of [[Product]]: `field (class: "abc.xyz.MyClass", name: "myField")` + */ + def serializerForType(tpe: `Type`): Expression = ScalaReflection.cleanUpReflectionObjects { + val clsName = getClassNameFromType(tpe) + val walkedTypePath = WalkedTypePath().recordRoot(clsName) + + // The input object to `ExpressionEncoder` is located at first column of an row. + val isPrimitive = tpe.typeSymbol.asClass.isPrimitive + val inputObject = BoundReference(0, dataTypeFor(tpe), nullable = !isPrimitive) + + serializerFor(inputObject, tpe, walkedTypePath) + } - val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) + def getType[T](clazz: Class[T]): universe.Type = { + val mir = runtimeMirror(clazz.getClassLoader) + mir.classSymbol(clazz).toType + } - org.apache.spark.sql.catalyst.expressions.If( - IsNull(path), - org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), - newInstance + def deserializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = { + val tpe = getType(cls) + val clsName = getClassNameFromType(tpe) + val walkedTypePath = WalkedTypePath().recordRoot(clsName) + + // Assumes we are deserializing the first column of a row. + deserializerForWithNullSafetyAndUpcast( + GetColumnByOrdinal(0, dt.dt), + dt.dt, + nullable = dt.nullable, + walkedTypePath, + (casted, typePath) => deserializerFor(tpe, casted, typePath, Some(dt)) ) - - case _ => - throw new UnsupportedOperationException( - s"No Encoder found for $tpe\n" + walkedTypePath) } - } - - /** - * Returns an expression for serializing an object of type T to Spark SQL representation. The - * input object is located at ordinal 0 of a row, i.e., `BoundReference(0, _)`. - * - * If the given type is not supported, i.e. there is no encoder can be built for this type, - * an [[UnsupportedOperationException]] will be thrown with detailed error message to explain - * the type path walked so far and which class we are not supporting. - * There are 4 kinds of type path: - * * the root type: `root class: "abc.xyz.MyClass"` - * * the value type of [[Option]]: `option value class: "abc.xyz.MyClass"` - * * the element type of [[Array]] or [[Seq]]: `array element class: "abc.xyz.MyClass"` - * * the field of [[Product]]: `field (class: "abc.xyz.MyClass", name: "myField")` - */ - def serializerForType(tpe: `Type`): Expression = ScalaReflection.cleanUpReflectionObjects { - val clsName = getClassNameFromType(tpe) - val walkedTypePath = WalkedTypePath().recordRoot(clsName) - - // The input object to `ExpressionEncoder` is located at first column of an row. - val isPrimitive = tpe.typeSymbol.asClass.isPrimitive - val inputObject = BoundReference(0, dataTypeFor(tpe), nullable = !isPrimitive) - - serializerFor(inputObject, tpe, walkedTypePath) - } - - def getType[T](clazz: Class[T]): universe.Type = { - val mir = runtimeMirror(clazz.getClassLoader) - mir.classSymbol(clazz).toType - } - - def deserializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = { - val tpe = getType(cls) - val clsName = getClassNameFromType(tpe) - val walkedTypePath = WalkedTypePath().recordRoot(clsName) - - // Assumes we are deserializing the first column of a row. - deserializerForWithNullSafetyAndUpcast( - GetColumnByOrdinal(0, dt.dt), - dt.dt, - nullable = dt.nullable, - walkedTypePath, - (casted, typePath) => deserializerFor(tpe, casted, typePath, Some(dt)) - ) - } - - - def serializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = { - - val tpe = getType(cls) - val clsName = getClassNameFromType(tpe) - val walkedTypePath = WalkedTypePath().recordRoot(clsName) - val inputObject = BoundReference(0, ObjectType(cls), nullable = true) - serializerFor(inputObject, tpe, walkedTypePath, predefinedDt = Some(dt)) - } - - /** - * Returns an expression for serializing the value of an input expression into Spark SQL - * internal representation. - */ - private def serializerFor( - inputObject: Expression, - tpe: `Type`, - walkedTypePath: WalkedTypePath, - seenTypeSet: Set[`Type`] = Set.empty, - predefinedDt: Option[DataTypeWithClass] = None - ): Expression = cleanUpReflectionObjects { - - def toCatalystArray(input: Expression, elementType: `Type`, predefinedDt: Option[DataTypeWithClass] = None): Expression = { - predefinedDt.map(_.dt).getOrElse(dataTypeFor(elementType)) match { - - case dt@(MapType(_, _, _) | ArrayType(_, _) | StructType(_)) => - val clsName = getClassNameFromType(elementType) - val newPath = walkedTypePath.recordArray(clsName) - createSerializerForMapObjects(input, ObjectType(predefinedDt.get.cls), - serializerFor(_, elementType, newPath, seenTypeSet, predefinedDt)) - - case dt: ObjectType => - val clsName = getClassNameFromType(elementType) - val newPath = walkedTypePath.recordArray(clsName) - createSerializerForMapObjects(input, dt, - serializerFor(_, elementType, newPath, seenTypeSet)) - - case dt@(BooleanType | ByteType | ShortType | IntegerType | LongType | - FloatType | DoubleType) => - val cls = input.dataType.asInstanceOf[ObjectType].cls - if (cls.isArray && cls.getComponentType.isPrimitive) { - createSerializerForPrimitiveArray(input, dt) - } else { - createSerializerForGenericArray(input, dt, nullable = predefinedDt.map(_.nullable).getOrElse(schemaFor(elementType).nullable)) - } - - case _: StringType => - val clsName = getClassNameFromType(typeOf[String]) - val newPath = walkedTypePath.recordArray(clsName) - createSerializerForMapObjects(input, ObjectType(Class.forName(getClassNameFromType(elementType))), - serializerFor(_, elementType, newPath, seenTypeSet)) - - - case dt => - createSerializerForGenericArray(input, dt, nullable = predefinedDt.map(_.nullable).getOrElse(schemaFor(elementType).nullable)) - } + + + def serializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = { + val tpe = getType(cls) + val clsName = getClassNameFromType(tpe) + val walkedTypePath = WalkedTypePath().recordRoot(clsName) + val inputObject = BoundReference(0, ObjectType(cls), nullable = true) + serializerFor(inputObject, tpe, walkedTypePath, predefinedDt = Some(dt)) } - baseType(tpe) match { - - // - case _ if !inputObject.dataType.isInstanceOf[ObjectType] && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => inputObject - - case t if isSubtype(t, localTypeOf[Option[_]]) => - val TypeRef(_, _, Seq(optType)) = t - val className = getClassNameFromType(optType) - val newPath = walkedTypePath.recordOption(className) - val unwrapped = UnwrapOption(dataTypeFor(optType), inputObject) - serializerFor(unwrapped, optType, newPath, seenTypeSet) - - // Since List[_] also belongs to localTypeOf[Product], we put this case before - // "case t if definedByConstructorParams(t)" to make sure it will match to the - // case "localTypeOf[Seq[_]]" - case t if isSubtype(t, localTypeOf[Seq[_]]) => - val TypeRef(_, _, Seq(elementType)) = t - toCatalystArray(inputObject, elementType) - - case t if isSubtype(t, localTypeOf[Array[_]]) && predefinedDt.isEmpty => - val TypeRef(_, _, Seq(elementType)) = t - toCatalystArray(inputObject, elementType) - - case t if isSubtype(t, localTypeOf[Map[_, _]]) => - val TypeRef(_, _, Seq(keyType, valueType)) = t - val keyClsName = getClassNameFromType(keyType) - val valueClsName = getClassNameFromType(valueType) - val keyPath = walkedTypePath.recordKeyForMap(keyClsName) - val valuePath = walkedTypePath.recordValueForMap(valueClsName) - - createSerializerForMap( - inputObject, - MapElementInformation( - dataTypeFor(keyType), - nullable = !keyType.typeSymbol.asClass.isPrimitive, - serializerFor(_, keyType, keyPath, seenTypeSet)), - MapElementInformation( - dataTypeFor(valueType), - nullable = !valueType.typeSymbol.asClass.isPrimitive, - serializerFor(_, valueType, valuePath, seenTypeSet)) - ) + /** + * Returns an expression for serializing the value of an input expression into Spark SQL + * internal representation. + */ + private def serializerFor( + inputObject: Expression, + tpe: `Type`, + walkedTypePath: WalkedTypePath, + seenTypeSet: Set[`Type`] = Set.empty, + predefinedDt: Option[DataTypeWithClass] = None, + ): Expression = cleanUpReflectionObjects { + + def toCatalystArray( + input: Expression, + elementType: `Type`, + predefinedDt: Option[DataTypeWithClass] = None, + ): Expression = { + val dataType = predefinedDt + .map(_.dt) + .getOrElse { + dataTypeFor(elementType) + } - case t if isSubtype(t, localTypeOf[scala.collection.Set[_]]) => - val TypeRef(_, _, Seq(elementType)) = t + dataType match { - // There's no corresponding Catalyst type for `Set`, we serialize a `Set` to Catalyst array. - // Note that the property of `Set` is only kept when manipulating the data as domain object. - val newInput = - Invoke( - inputObject, - "toSeq", - ObjectType(classOf[Seq[_]])) - - toCatalystArray(newInput, elementType) - - case t if isSubtype(t, localTypeOf[String]) => - createSerializerForString(inputObject) - case t if isSubtype(t, localTypeOf[java.time.Instant]) => - createSerializerForJavaInstant(inputObject) - - case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => - createSerializerForSqlTimestamp(inputObject) - - case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => - createSerializerForJavaLocalDate(inputObject) - - case t if isSubtype(t, localTypeOf[java.sql.Date]) => createSerializerForSqlDate(inputObject) - - case t if isSubtype(t, localTypeOf[BigDecimal]) => - createSerializerForScalaBigDecimal(inputObject) - - case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => - createSerializerForJavaBigDecimal(inputObject) - - case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => - createSerializerForJavaBigInteger(inputObject) - - case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => - createSerializerForScalaBigInt(inputObject) - - case t if isSubtype(t, localTypeOf[java.lang.Integer]) => - createSerializerForInteger(inputObject) - case t if isSubtype(t, localTypeOf[Int]) => - createSerializerForInteger(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Long]) => createSerializerForLong(inputObject) - case t if isSubtype(t, localTypeOf[Long]) => createSerializerForLong(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Double]) => createSerializerForDouble(inputObject) - case t if isSubtype(t, localTypeOf[Double]) => createSerializerForDouble(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Float]) => createSerializerForFloat(inputObject) - case t if isSubtype(t, localTypeOf[Float]) => createSerializerForFloat(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Short]) => createSerializerForShort(inputObject) - case t if isSubtype(t, localTypeOf[Short]) => createSerializerForShort(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Byte]) => createSerializerForByte(inputObject) - case t if isSubtype(t, localTypeOf[Byte]) => createSerializerForByte(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => createSerializerForBoolean(inputObject) - case t if isSubtype(t, localTypeOf[Boolean]) => createSerializerForBoolean(inputObject) - - case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => - createSerializerForString( - Invoke(inputObject, "name", ObjectType(classOf[String]), returnNullable = false)) - - case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => - val udt = getClassFromType(t) - .getAnnotation(classOf[SQLUserDefinedType]).udt().getConstructor().newInstance() - val udtClass = udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt() - createSerializerForUserDefinedType(inputObject, udt, udtClass) - - case t if UDTRegistration.exists(getClassNameFromType(t)) => - val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). - newInstance().asInstanceOf[UserDefinedType[_]] - val udtClass = udt.getClass - createSerializerForUserDefinedType(inputObject, udt, udtClass) - // - - case _ if predefinedDt.isDefined => - predefinedDt.get match { - case dataType: KDataTypeWrapper => - val cls = dataType.cls - val properties = getJavaBeanReadableProperties(cls) - val structFields = dataType.dt.fields.map(_.asInstanceOf[KStructField]) - val fields = structFields.map { structField => - val maybeProp = properties.find(it => it.getReadMethod.getName == structField.getterName) - if (maybeProp.isEmpty) throw new IllegalArgumentException(s"Field ${structField.name} is not found among available props, which are: ${properties.map(_.getName).mkString(", ")}") - val fieldName = structField.name - val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls - val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] - val fieldValue = Invoke( - inputObject, - maybeProp.get.getReadMethod.getName, - inferExternalType(propClass), - returnNullable = structField.nullable - ) - val newPath = walkedTypePath.recordField(propClass.getName, fieldName) - (fieldName, serializerFor(fieldValue, getType(propClass), newPath, seenTypeSet, if (propDt.isInstanceOf[ComplexWrapper]) Some(propDt) else None)) - - } - createSerializerForObject(inputObject, fields) - - case otherTypeWrapper: ComplexWrapper => - otherTypeWrapper.dt match { - case MapType(kt, vt, _) => - val Seq(keyType, valueType) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass].cls).map(getType(_)) - val Seq(keyDT, valueDT) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass]) + case dt @ (MapType(_, _, _) | ArrayType(_, _) | StructType(_)) => { + val clsName = getClassNameFromType(elementType) + val newPath = walkedTypePath.recordArray(clsName) + createSerializerForMapObjects( + input, ObjectType(predefinedDt.get.cls), + serializerFor(_, elementType, newPath, seenTypeSet, predefinedDt) + ) + } + + case dt: ObjectType => { + val clsName = getClassNameFromType(elementType) + val newPath = walkedTypePath.recordArray(clsName) + createSerializerForMapObjects( + input, dt, + serializerFor(_, elementType, newPath, seenTypeSet) + ) + } + + // case dt: ByteType => + // createSerializerForPrimitiveArray(input, dt) + + case dt @ (BooleanType | ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType) => { + val cls = input.dataType.asInstanceOf[ObjectType].cls + if (cls.isArray && cls.getComponentType.isPrimitive) { + createSerializerForPrimitiveArray(input, dt) + } else { + createSerializerForGenericArray( + inputObject = input, + dataType = dt, + nullable = predefinedDt + .map(_.nullable) + .getOrElse( + schemaFor(elementType).nullable + ), + ) + } + } + + case _: StringType => { + val clsName = getClassNameFromType(typeOf[String]) + val newPath = walkedTypePath.recordArray(clsName) + createSerializerForMapObjects( + input, ObjectType(Class.forName(getClassNameFromType(elementType))), + serializerFor(_, elementType, newPath, seenTypeSet) + ) + } + + case dt => { + createSerializerForGenericArray( + inputObject = input, + dataType = dt, + nullable = predefinedDt + .map(_.nullable) + .getOrElse { + schemaFor(elementType).nullable + }, + ) + } + } + } + + baseType(tpe) match { + + // + case _ if !inputObject.dataType.isInstanceOf[ObjectType] + && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => { + inputObject + } + case t if isSubtype(t, localTypeOf[Option[_]]) => { + val TypeRef(_, _, Seq(optType)) = t + val className = getClassNameFromType(optType) + val newPath = walkedTypePath.recordOption(className) + val unwrapped = UnwrapOption(dataTypeFor(optType), inputObject) + serializerFor(unwrapped, optType, newPath, seenTypeSet) + } + + // Since List[_] also belongs to localTypeOf[Product], we put this case before + // "case t if definedByConstructorParams(t)" to make sure it will match to the + // case "localTypeOf[Seq[_]]" + case t if isSubtype(t, localTypeOf[Seq[_]]) => { + val TypeRef(_, _, Seq(elementType)) = t + toCatalystArray(inputObject, elementType) + } + + case t if isSubtype(t, localTypeOf[Array[_]]) && predefinedDt.isEmpty => { + val TypeRef(_, _, Seq(elementType)) = t + toCatalystArray(inputObject, elementType) + } + + case t if isSubtype(t, localTypeOf[Map[_, _]]) => { + val TypeRef(_, _, Seq(keyType, valueType)) = t val keyClsName = getClassNameFromType(keyType) val valueClsName = getClassNameFromType(valueType) val keyPath = walkedTypePath.recordKeyForMap(keyClsName) val valuePath = walkedTypePath.recordValueForMap(valueClsName) createSerializerForMap( - inputObject, - MapElementInformation( - dataTypeFor(keyType), - nullable = !keyType.typeSymbol.asClass.isPrimitive, - serializerFor(_, keyType, keyPath, seenTypeSet, Some(keyDT).filter(_.isInstanceOf[ComplexWrapper]))), - MapElementInformation( - dataTypeFor(valueType), - nullable = !valueType.typeSymbol.asClass.isPrimitive, - serializerFor(_, valueType, valuePath, seenTypeSet, Some(valueDT).filter(_.isInstanceOf[ComplexWrapper]))) + inputObject, + MapElementInformation( + dataTypeFor(keyType), + nullable = !keyType.typeSymbol.asClass.isPrimitive, + serializerFor(_, keyType, keyPath, seenTypeSet) + ), + MapElementInformation( + dataTypeFor(valueType), + nullable = !valueType.typeSymbol.asClass.isPrimitive, + serializerFor(_, valueType, valuePath, seenTypeSet) + ) ) - case ArrayType(elementType, _) => - toCatalystArray(inputObject, getType(elementType.asInstanceOf[DataTypeWithClass].cls), Some(elementType.asInstanceOf[DataTypeWithClass])) + } - case StructType(elementType: Array[StructField]) => - val cls = otherTypeWrapper.cls - val names = elementType.map(_.name) + case t if isSubtype(t, localTypeOf[scala.collection.Set[_]]) => { + val TypeRef(_, _, Seq(elementType)) = t - val beanInfo = Introspector.getBeanInfo(cls) - val methods = beanInfo.getMethodDescriptors.filter(it => names.contains(it.getName)) + // There's no corresponding Catalyst type for `Set`, we serialize a `Set` to Catalyst array. + // Note that the property of `Set` is only kept when manipulating the data as domain object. + val newInput = + Invoke( + inputObject, + "toSeq", + ObjectType(classOf[Seq[_]]) + ) + toCatalystArray(newInput, elementType) + } - val fields = elementType.map { structField => + case t if isSubtype(t, localTypeOf[String]) => { + createSerializerForString(inputObject) + } + case t if isSubtype(t, localTypeOf[java.time.Instant]) => { + createSerializerForJavaInstant(inputObject) + } + case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => { + createSerializerForSqlTimestamp(inputObject) + } + case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) => { + createSerializerForLocalDateTime(inputObject) + } + case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => { + createSerializerForJavaLocalDate(inputObject) + } + case t if isSubtype(t, localTypeOf[java.sql.Date]) => { + createSerializerForSqlDate(inputObject) + } + case t if isSubtype(t, localTypeOf[java.time.Duration]) => { + createSerializerForJavaDuration(inputObject) + } + case t if isSubtype(t, localTypeOf[java.time.Period]) => { + createSerializerForJavaPeriod(inputObject) + } + case t if isSubtype(t, localTypeOf[BigDecimal]) => { + createSerializerForScalaBigDecimal(inputObject) + } + case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => { + createSerializerForJavaBigDecimal(inputObject) + } + case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => { + createSerializerForJavaBigInteger(inputObject) + } + case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => { + createSerializerForScalaBigInt(inputObject) + } - val maybeProp = methods.find(it => it.getName == structField.name) - if (maybeProp.isEmpty) throw new IllegalArgumentException(s"Field ${structField.name} is not found among available props, which are: ${methods.map(_.getName).mkString(", ")}") - val fieldName = structField.name - val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls - val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] - val fieldValue = Invoke( - inputObject, - maybeProp.get.getName, - inferExternalType(propClass), - returnNullable = propDt.nullable - ) - val newPath = walkedTypePath.recordField(propClass.getName, fieldName) - (fieldName, serializerFor(fieldValue, getType(propClass), newPath, seenTypeSet, if (propDt.isInstanceOf[ComplexWrapper]) Some(propDt) else None)) + case t if isSubtype(t, localTypeOf[java.lang.Integer]) => { + createSerializerForInteger(inputObject) + } + case t if isSubtype(t, localTypeOf[Int]) => { + createSerializerForInteger(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Long]) => { + createSerializerForLong(inputObject) + } + case t if isSubtype(t, localTypeOf[Long]) => { + createSerializerForLong(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Double]) => { + createSerializerForDouble(inputObject) + } + case t if isSubtype(t, localTypeOf[Double]) => { + createSerializerForDouble(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Float]) => { + createSerializerForFloat(inputObject) + } + case t if isSubtype(t, localTypeOf[Float]) => { + createSerializerForFloat(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Short]) => { + createSerializerForShort(inputObject) + } + case t if isSubtype(t, localTypeOf[Short]) => { + createSerializerForShort(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Byte]) => { + createSerializerForByte(inputObject) + } + case t if isSubtype(t, localTypeOf[Byte]) => { + createSerializerForByte(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => { + createSerializerForBoolean(inputObject) + } + case t if isSubtype(t, localTypeOf[Boolean]) => { + createSerializerForBoolean(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => { + createSerializerForString( + Invoke(inputObject, "name", ObjectType(classOf[String]), returnNullable = false) + ) + } + case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => { + val udt = getClassFromType(t) + .getAnnotation(classOf[SQLUserDefinedType]).udt().getConstructor().newInstance() + val udtClass = udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt() + createSerializerForUserDefinedType(inputObject, udt, udtClass) + } + + case t if UDTRegistration.exists(getClassNameFromType(t)) => { + val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). + newInstance().asInstanceOf[UserDefinedType[_]] + val udtClass = udt.getClass + createSerializerForUserDefinedType(inputObject, udt, udtClass) + } + // + + case _ if predefinedDt.isDefined => { + predefinedDt.get match { + + case dataType: KDataTypeWrapper => { + val cls = dataType.cls + val properties = getJavaBeanReadableProperties(cls) + val structFields = dataType.dt.fields.map(_.asInstanceOf[KStructField]) + val fields: Array[(String, Expression)] = structFields.map { structField => + val maybeProp = properties.find(it => it.getReadMethod.getName == structField.getterName) + if (maybeProp.isEmpty) throw new IllegalArgumentException(s"Field ${ + structField.name + } is not found among available props, which are: ${properties.map(_.getName).mkString(", ")}" + ) + val fieldName = structField.name + val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls + val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] + val fieldValue = Invoke( + inputObject, + maybeProp.get.getReadMethod.getName, + inferExternalType(propClass), + returnNullable = structField.nullable + ) + val newPath = walkedTypePath.recordField(propClass.getName, fieldName) + + val tpe = + // if (propClass == classOf[Array[Byte]]) localTypeOf[Array[Byte]] + // else + getType(propClass) + + val serializer = serializerFor( + inputObject = fieldValue, + tpe = tpe, + walkedTypePath = newPath, + seenTypeSet = seenTypeSet, + predefinedDt = if (propDt + .isInstanceOf[ComplexWrapper] /*&& propClass != classOf[Array[Byte]]*/ ) Some(propDt) else None + ) + + (fieldName, serializer) + } + createSerializerForObject(inputObject, fields) + } + + case otherTypeWrapper: ComplexWrapper => { + + otherTypeWrapper.dt match { + + case MapType(kt, vt, _) => { + val Seq(keyType, valueType) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass].cls) + .map(getType(_)) + val Seq(keyDT, valueDT) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass]) + val keyClsName = getClassNameFromType(keyType) + val valueClsName = getClassNameFromType(valueType) + val keyPath = walkedTypePath.recordKeyForMap(keyClsName) + val valuePath = walkedTypePath.recordValueForMap(valueClsName) + + createSerializerForMap( + inputObject, + MapElementInformation( + dataTypeFor(keyType), + nullable = !keyType.typeSymbol.asClass.isPrimitive, + serializerFor( + _, keyType, keyPath, seenTypeSet, Some(keyDT) + .filter(_.isInstanceOf[ComplexWrapper]) + ) + ), + MapElementInformation( + dataTypeFor(valueType), + nullable = !valueType.typeSymbol.asClass.isPrimitive, + serializerFor( + _, valueType, valuePath, seenTypeSet, Some(valueDT) + .filter(_.isInstanceOf[ComplexWrapper]) + ) + ) + ) + } + + case ArrayType(elementType, _) => { + toCatalystArray( + inputObject, + getType(elementType.asInstanceOf[DataTypeWithClass].cls + ), Some(elementType.asInstanceOf[DataTypeWithClass]) + ) + } + + case StructType(elementType: Array[StructField]) => { + val cls = otherTypeWrapper.cls + val names = elementType.map(_.name) + + val beanInfo = Introspector.getBeanInfo(cls) + val methods = beanInfo.getMethodDescriptors.filter(it => names.contains(it.getName)) + + + val fields = elementType.map { structField => + + val maybeProp = methods.find(it => it.getName == structField.name) + if (maybeProp.isEmpty) throw new IllegalArgumentException(s"Field ${ + structField.name + } is not found among available props, which are: ${ + methods.map(_.getName).mkString(", ") + }" + ) + val fieldName = structField.name + val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls + val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] + val fieldValue = Invoke( + inputObject, + maybeProp.get.getName, + inferExternalType(propClass), + returnNullable = propDt.nullable + ) + val newPath = walkedTypePath.recordField(propClass.getName, fieldName) + (fieldName, serializerFor( + fieldValue, getType(propClass), newPath, seenTypeSet, if (propDt + .isInstanceOf[ComplexWrapper]) Some(propDt) else None + )) + + } + createSerializerForObject(inputObject, fields) + } + + case _ => { + throw new UnsupportedOperationException( + s"No Encoder found for $tpe\n" + walkedTypePath + ) + } + } + } + } + } + + case t if definedByConstructorParams(t) => { + if (seenTypeSet.contains(t)) { + throw new UnsupportedOperationException( + s"cannot have circular references in class, but got the circular reference of class $t" + ) + } + val params = getConstructorParameters(t) + val fields = params.map { case (fieldName, fieldType) => + if (javaKeywords.contains(fieldName)) { + throw new UnsupportedOperationException(s"`$fieldName` is a reserved keyword and " + + "cannot be used as field name\n" + walkedTypePath + ) + } + + // SPARK-26730 inputObject won't be null with If's guard below. And KnownNotNul + // is necessary here. Because for a nullable nested inputObject with struct data + // type, e.g. StructType(IntegerType, StringType), it will return nullable=true + // for IntegerType without KnownNotNull. And that's what we do not expect to. + val fieldValue = Invoke( + KnownNotNull(inputObject), fieldName, dataTypeFor(fieldType), + returnNullable = !fieldType.typeSymbol.asClass.isPrimitive + ) + val clsName = getClassNameFromType(fieldType) + val newPath = walkedTypePath.recordField(clsName, fieldName) + (fieldName, serializerFor(fieldValue, fieldType, newPath, seenTypeSet + t)) } createSerializerForObject(inputObject, fields) + } - case _ => + case _ => { throw new UnsupportedOperationException( - s"No Encoder found for $tpe\n" + walkedTypePath) - + s"No Encoder found for $tpe\n" + walkedTypePath + ) } } + } - case t if definedByConstructorParams(t) => - if (seenTypeSet.contains(t)) { - throw new UnsupportedOperationException( - s"cannot have circular references in class, but got the circular reference of class $t") - } + def createDeserializerForString(path: Expression, returnNullable: Boolean): Expression = { + Invoke( + path, "toString", ObjectType(classOf[java.lang.String]), + returnNullable = returnNullable + ) + } - val params = getConstructorParameters(t) - val fields = params.map { case (fieldName, fieldType) => - if (javaKeywords.contains(fieldName)) { - throw new UnsupportedOperationException(s"`$fieldName` is a reserved keyword and " + - "cannot be used as field name\n" + walkedTypePath) - } - - // SPARK-26730 inputObject won't be null with If's guard below. And KnownNotNul - // is necessary here. Because for a nullable nested inputObject with struct data - // type, e.g. StructType(IntegerType, StringType), it will return nullable=true - // for IntegerType without KnownNotNull. And that's what we do not expect to. - val fieldValue = Invoke(KnownNotNull(inputObject), fieldName, dataTypeFor(fieldType), - returnNullable = !fieldType.typeSymbol.asClass.isPrimitive) - val clsName = getClassNameFromType(fieldType) - val newPath = walkedTypePath.recordField(clsName, fieldName) - (fieldName, serializerFor(fieldValue, fieldType, newPath, seenTypeSet + t)) - } - createSerializerForObject(inputObject, fields) + def getJavaBeanReadableProperties(beanClass: Class[_]): Array[PropertyDescriptor] = { + val beanInfo = Introspector.getBeanInfo(beanClass) + beanInfo.getPropertyDescriptors.filterNot(_.getName == "class") + .filterNot(_.getName == "declaringClass") + .filter(_.getReadMethod != null) + } - case _ => - throw new UnsupportedOperationException( - s"No Encoder found for $tpe\n" + walkedTypePath) + /* + * Retrieves the runtime class corresponding to the provided type. + */ + def getClassFromType(tpe: Type): Class[_] = mirror.runtimeClass(tpe.dealias.typeSymbol.asClass) + + case class Schema(dataType: DataType, nullable: Boolean) + + /** Returns a catalyst DataType and its nullability for the given Scala Type using reflection. */ + def schemaFor(tpe: `Type`): Schema = cleanUpReflectionObjects { + + baseType(tpe) match { + // this must be the first case, since all objects in scala are instances of Null, therefore + // Null type would wrongly match the first of them, which is Option as of now + case t if isSubtype(t, definitions.NullTpe) => Schema(NullType, nullable = true) + + case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => { + val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt(). + getConstructor().newInstance() + Schema(udt, nullable = true) + } + case t if UDTRegistration.exists(getClassNameFromType(t)) => { + val udt = UDTRegistration + .getUDTFor(getClassNameFromType(t)) + .get + .getConstructor() + .newInstance() + .asInstanceOf[UserDefinedType[_]] + Schema(udt, nullable = true) + } + case t if isSubtype(t, localTypeOf[Option[_]]) => { + val TypeRef(_, _, Seq(optType)) = t + Schema(schemaFor(optType).dataType, nullable = true) + } + case t if isSubtype(t, localTypeOf[Array[Byte]]) => { + Schema(BinaryType, nullable = true) + } + case t if isSubtype(t, localTypeOf[Array[_]]) => { + val TypeRef(_, _, Seq(elementType)) = t + val Schema(dataType, nullable) = schemaFor(elementType) + Schema(ArrayType(dataType, containsNull = nullable), nullable = true) + } + case t if isSubtype(t, localTypeOf[Seq[_]]) => { + val TypeRef(_, _, Seq(elementType)) = t + val Schema(dataType, nullable) = schemaFor(elementType) + Schema(ArrayType(dataType, containsNull = nullable), nullable = true) + } + case t if isSubtype(t, localTypeOf[Map[_, _]]) => { + val TypeRef(_, _, Seq(keyType, valueType)) = t + val Schema(valueDataType, valueNullable) = schemaFor(valueType) + Schema( + MapType( + schemaFor(keyType).dataType, + valueDataType, valueContainsNull = valueNullable + ), nullable = true + ) + } + case t if isSubtype(t, localTypeOf[Set[_]]) => { + val TypeRef(_, _, Seq(elementType)) = t + val Schema(dataType, nullable) = schemaFor(elementType) + Schema(ArrayType(dataType, containsNull = nullable), nullable = true) + } + case t if isSubtype(t, localTypeOf[String]) => { + Schema(StringType, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.time.Instant]) => { + Schema(TimestampType, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => { + Schema(TimestampType, nullable = true) + } + // SPARK-36227: Remove TimestampNTZ type support in Spark 3.2 with minimal code changes. + case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) && Utils.isTesting => { + Schema(TimestampNTZType, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => { + Schema(DateType, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.sql.Date]) => { + Schema(DateType, nullable = true) + } + case t if isSubtype(t, localTypeOf[CalendarInterval]) => { + Schema(CalendarIntervalType, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.time.Duration]) => { + Schema(DayTimeIntervalType(), nullable = true) + } + case t if isSubtype(t, localTypeOf[java.time.Period]) => { + Schema(YearMonthIntervalType(), nullable = true) + } + case t if isSubtype(t, localTypeOf[BigDecimal]) => { + Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => { + Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => { + Schema(DecimalType.BigIntDecimal, nullable = true) + } + case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => { + Schema(DecimalType.BigIntDecimal, nullable = true) + } + case t if isSubtype(t, localTypeOf[Decimal]) => { + Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.lang.Integer]) => Schema(IntegerType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Long]) => Schema(LongType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Double]) => Schema(DoubleType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Float]) => Schema(FloatType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Short]) => Schema(ShortType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Byte]) => Schema(ByteType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => Schema(BooleanType, nullable = true) + case t if isSubtype(t, definitions.IntTpe) => Schema(IntegerType, nullable = false) + case t if isSubtype(t, definitions.LongTpe) => Schema(LongType, nullable = false) + case t if isSubtype(t, definitions.DoubleTpe) => Schema(DoubleType, nullable = false) + case t if isSubtype(t, definitions.FloatTpe) => Schema(FloatType, nullable = false) + case t if isSubtype(t, definitions.ShortTpe) => Schema(ShortType, nullable = false) + case t if isSubtype(t, definitions.ByteTpe) => Schema(ByteType, nullable = false) + case t if isSubtype(t, definitions.BooleanTpe) => Schema(BooleanType, nullable = false) + case t if definedByConstructorParams(t) => { + val params = getConstructorParameters(t) + Schema( + StructType( + params.map { case (fieldName, fieldType) => + val Schema(dataType, nullable) = schemaFor(fieldType) + StructField(fieldName, dataType, nullable) + } + ), nullable = true + ) + } + case other => { + throw new UnsupportedOperationException(s"Schema for type $other is not supported") + } + } } - } - - def createDeserializerForString(path: Expression, returnNullable: Boolean): Expression = { - Invoke(path, "toString", ObjectType(classOf[java.lang.String]), - returnNullable = returnNullable) - } - - def getJavaBeanReadableProperties(beanClass: Class[_]): Array[PropertyDescriptor] = { - val beanInfo = Introspector.getBeanInfo(beanClass) - beanInfo.getPropertyDescriptors.filterNot(_.getName == "class") - .filterNot(_.getName == "declaringClass") - .filter(_.getReadMethod != null) - } - - /* - * Retrieves the runtime class corresponding to the provided type. - */ - def getClassFromType(tpe: Type): Class[_] = mirror.runtimeClass(tpe.dealias.typeSymbol.asClass) - - case class Schema(dataType: DataType, nullable: Boolean) - - /** Returns a catalyst DataType and its nullability for the given Scala Type using reflection. */ - def schemaFor(tpe: `Type`): Schema = cleanUpReflectionObjects { - baseType(tpe) match { - // this must be the first case, since all objects in scala are instances of Null, therefore - // Null type would wrongly match the first of them, which is Option as of now - case t if isSubtype(t, definitions.NullTpe) => Schema(NullType, nullable = true) - case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => - val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt(). - getConstructor().newInstance() - Schema(udt, nullable = true) - case t if UDTRegistration.exists(getClassNameFromType(t)) => - val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). - newInstance().asInstanceOf[UserDefinedType[_]] - Schema(udt, nullable = true) - case t if isSubtype(t, localTypeOf[Option[_]]) => - val TypeRef(_, _, Seq(optType)) = t - Schema(schemaFor(optType).dataType, nullable = true) - case t if isSubtype(t, localTypeOf[Array[Byte]]) => Schema(BinaryType, nullable = true) - case t if isSubtype(t, localTypeOf[Array[_]]) => - val TypeRef(_, _, Seq(elementType)) = t - val Schema(dataType, nullable) = schemaFor(elementType) - Schema(ArrayType(dataType, containsNull = nullable), nullable = true) - case t if isSubtype(t, localTypeOf[Seq[_]]) => - val TypeRef(_, _, Seq(elementType)) = t - val Schema(dataType, nullable) = schemaFor(elementType) - Schema(ArrayType(dataType, containsNull = nullable), nullable = true) - case t if isSubtype(t, localTypeOf[Map[_, _]]) => - val TypeRef(_, _, Seq(keyType, valueType)) = t - val Schema(valueDataType, valueNullable) = schemaFor(valueType) - Schema(MapType(schemaFor(keyType).dataType, - valueDataType, valueContainsNull = valueNullable), nullable = true) - case t if isSubtype(t, localTypeOf[Set[_]]) => - val TypeRef(_, _, Seq(elementType)) = t - val Schema(dataType, nullable) = schemaFor(elementType) - Schema(ArrayType(dataType, containsNull = nullable), nullable = true) - case t if isSubtype(t, localTypeOf[String]) => Schema(StringType, nullable = true) - case t if isSubtype(t, localTypeOf[java.time.Instant]) => - Schema(TimestampType, nullable = true) - case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => - Schema(TimestampType, nullable = true) - case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => Schema(DateType, nullable = true) - case t if isSubtype(t, localTypeOf[java.sql.Date]) => Schema(DateType, nullable = true) - case t if isSubtype(t, localTypeOf[BigDecimal]) => - Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) - case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => - Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) - case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => - Schema(DecimalType.BigIntDecimal, nullable = true) - case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => - Schema(DecimalType.BigIntDecimal, nullable = true) - case t if isSubtype(t, localTypeOf[Decimal]) => - Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Integer]) => Schema(IntegerType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Long]) => Schema(LongType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Double]) => Schema(DoubleType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Float]) => Schema(FloatType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Short]) => Schema(ShortType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Byte]) => Schema(ByteType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => Schema(BooleanType, nullable = true) - case t if isSubtype(t, definitions.IntTpe) => Schema(IntegerType, nullable = false) - case t if isSubtype(t, definitions.LongTpe) => Schema(LongType, nullable = false) - case t if isSubtype(t, definitions.DoubleTpe) => Schema(DoubleType, nullable = false) - case t if isSubtype(t, definitions.FloatTpe) => Schema(FloatType, nullable = false) - case t if isSubtype(t, definitions.ShortTpe) => Schema(ShortType, nullable = false) - case t if isSubtype(t, definitions.ByteTpe) => Schema(ByteType, nullable = false) - case t if isSubtype(t, definitions.BooleanTpe) => Schema(BooleanType, nullable = false) - case t if definedByConstructorParams(t) => - val params = getConstructorParameters(t) - Schema(StructType( - params.map { case (fieldName, fieldType) => - val Schema(dataType, nullable) = schemaFor(fieldType) - StructField(fieldName, dataType, nullable) - }), nullable = true) - case other => - throw new UnsupportedOperationException(s"Schema for type $other is not supported") + + /** + * Whether the fields of the given type is defined entirely by its constructor parameters. + */ + def definedByConstructorParams(tpe: Type): Boolean = cleanUpReflectionObjects { + tpe.dealias match { + // `Option` is a `Product`, but we don't wanna treat `Option[Int]` as a struct type. + case t if isSubtype(t, localTypeOf[Option[_]]) => definedByConstructorParams(t.typeArgs.head) + case _ => { + isSubtype(tpe.dealias, localTypeOf[Product]) || + isSubtype(tpe.dealias, localTypeOf[DefinedByConstructorParams]) + } + } } - } - - /** - * Whether the fields of the given type is defined entirely by its constructor parameters. - */ - def definedByConstructorParams(tpe: Type): Boolean = cleanUpReflectionObjects { - tpe.dealias match { - // `Option` is a `Product`, but we don't wanna treat `Option[Int]` as a struct type. - case t if isSubtype(t, localTypeOf[Option[_]]) => definedByConstructorParams(t.typeArgs.head) - case _ => isSubtype(tpe.dealias, localTypeOf[Product]) || - isSubtype(tpe.dealias, localTypeOf[DefinedByConstructorParams]) + + private val javaKeywords = Set( + "abstract", "assert", "boolean", "break", "byte", "case", "catch", + "char", "class", "const", "continue", "default", "do", "double", "else", "extends", "false", + "final", "finally", "float", "for", "goto", "if", "implements", "import", "instanceof", "int", + "interface", "long", "native", "new", "null", "package", "private", "protected", "public", + "return", "short", "static", "strictfp", "super", "switch", "synchronized", "this", "throw", + "throws", "transient", "true", "try", "void", "volatile", "while" + ) + + + @scala.annotation.tailrec + def javaBoxedType(dt: DataType): Class[_] = dt match { + case _: DecimalType => classOf[Decimal] + case _: DayTimeIntervalType => classOf[java.lang.Long] + case _: YearMonthIntervalType => classOf[java.lang.Integer] + case BinaryType => classOf[Array[Byte]] + case StringType => classOf[UTF8String] + case CalendarIntervalType => classOf[CalendarInterval] + case _: StructType => classOf[InternalRow] + case _: ArrayType => classOf[ArrayType] + case _: MapType => classOf[MapType] + case udt: UserDefinedType[_] => javaBoxedType(udt.sqlType) + case ObjectType(cls) => cls + case _ => ScalaReflection.typeBoxedJavaMapping.getOrElse(dt, classOf[java.lang.Object]) } - } - - private val javaKeywords = Set("abstract", "assert", "boolean", "break", "byte", "case", "catch", - "char", "class", "const", "continue", "default", "do", "double", "else", "extends", "false", - "final", "finally", "float", "for", "goto", "if", "implements", "import", "instanceof", "int", - "interface", "long", "native", "new", "null", "package", "private", "protected", "public", - "return", "short", "static", "strictfp", "super", "switch", "synchronized", "this", "throw", - "throws", "transient", "true", "try", "void", "volatile", "while") - - - @scala.annotation.tailrec - def javaBoxedType(dt: DataType): Class[_] = dt match { - case _: DecimalType => classOf[Decimal] - case BinaryType => classOf[Array[Byte]] - case StringType => classOf[UTF8String] - case CalendarIntervalType => classOf[CalendarInterval] - case _: StructType => classOf[InternalRow] - case _: ArrayType => classOf[ArrayType] - case _: MapType => classOf[MapType] - case udt: UserDefinedType[_] => javaBoxedType(udt.sqlType) - case ObjectType(cls) => cls - case _ => ScalaReflection.typeBoxedJavaMapping.getOrElse(dt, classOf[java.lang.Object]) - } } @@ -991,120 +1264,124 @@ object KotlinReflection extends KotlinReflection { * object, this trait able to work in both the runtime and the compile time (macro) universe. */ trait KotlinReflection extends Logging { - /** The universe we work in (runtime or macro) */ - val universe: scala.reflect.api.Universe - - /** The mirror used to access types in the universe */ - def mirror: universe.Mirror - - import universe._ - - // The Predef.Map is scala.collection.immutable.Map. - // Since the map values can be mutable, we explicitly import scala.collection.Map at here. - - /** - * Any codes calling `scala.reflect.api.Types.TypeApi.<:<` should be wrapped by this method to - * clean up the Scala reflection garbage automatically. Otherwise, it will leak some objects to - * `scala.reflect.runtime.JavaUniverse.undoLog`. - * - * @see https://github.com/scala/bug/issues/8302 - */ - def cleanUpReflectionObjects[T](func: => T): T = { - universe.asInstanceOf[scala.reflect.runtime.JavaUniverse].undoLog.undo(func) - } - - /** - * Return the Scala Type for `T` in the current classloader mirror. - * - * Use this method instead of the convenience method `universe.typeOf`, which - * assumes that all types can be found in the classloader that loaded scala-reflect classes. - * That's not necessarily the case when running using Eclipse launchers or even - * Sbt console or test (without `fork := true`). - * - * @see SPARK-5281 - */ - def localTypeOf[T: TypeTag]: `Type` = { - val tag = implicitly[TypeTag[T]] - tag.in(mirror).tpe.dealias - } - - /** - * Returns the full class name for a type. The returned name is the canonical - * Scala name, where each component is separated by a period. It is NOT the - * Java-equivalent runtime name (no dollar signs). - * - * In simple cases, both the Scala and Java names are the same, however when Scala - * generates constructs that do not map to a Java equivalent, such as singleton objects - * or nested classes in package objects, it uses the dollar sign ($) to create - * synthetic classes, emulating behaviour in Java bytecode. - */ - def getClassNameFromType(tpe: `Type`): String = { - tpe.dealias.erasure.typeSymbol.asClass.fullName - } - - /** - * Returns the parameter names and types for the primary constructor of this type. - * - * Note that it only works for scala classes with primary constructor, and currently doesn't - * support inner class. - */ - def getConstructorParameters(tpe: Type): Seq[(String, Type)] = { - val dealiasedTpe = tpe.dealias - val formalTypeArgs = dealiasedTpe.typeSymbol.asClass.typeParams - val TypeRef(_, _, actualTypeArgs) = dealiasedTpe - val params = constructParams(dealiasedTpe) - // if there are type variables to fill in, do the substitution (SomeClass[T] -> SomeClass[Int]) - if (actualTypeArgs.nonEmpty) { - params.map { p => - p.name.decodedName.toString -> - p.typeSignature.substituteTypes(formalTypeArgs, actualTypeArgs) - } - } else { - params.map { p => - p.name.decodedName.toString -> p.typeSignature - } + /** The universe we work in (runtime or macro) */ + val universe: scala.reflect.api.Universe + + /** The mirror used to access types in the universe */ + def mirror: universe.Mirror + + import universe._ + + // The Predef.Map is scala.collection.immutable.Map. + // Since the map values can be mutable, we explicitly import scala.collection.Map at here. + + /** + * Any codes calling `scala.reflect.api.Types.TypeApi.<:<` should be wrapped by this method to + * clean up the Scala reflection garbage automatically. Otherwise, it will leak some objects to + * `scala.reflect.runtime.JavaUniverse.undoLog`. + * + * @see https://github.com/scala/bug/issues/8302 + */ + def cleanUpReflectionObjects[T](func: => T): T = { + universe.asInstanceOf[scala.reflect.runtime.JavaUniverse].undoLog.undo(func) } - } - - /** - * If our type is a Scala trait it may have a companion object that - * only defines a constructor via `apply` method. - */ - private def getCompanionConstructor(tpe: Type): Symbol = { - def throwUnsupportedOperation = { - throw new UnsupportedOperationException(s"Unable to find constructor for $tpe. " + - s"This could happen if $tpe is an interface, or a trait without companion object " + - "constructor.") + + /** + * Return the Scala Type for `T` in the current classloader mirror. + * + * Use this method instead of the convenience method `universe.typeOf`, which + * assumes that all types can be found in the classloader that loaded scala-reflect classes. + * That's not necessarily the case when running using Eclipse launchers or even + * Sbt console or test (without `fork := true`). + * + * @see SPARK-5281 + */ + def localTypeOf[T: TypeTag]: `Type` = { + val tag = implicitly[TypeTag[T]] + tag.in(mirror).tpe.dealias } - tpe.typeSymbol.asClass.companion match { - case NoSymbol => throwUnsupportedOperation - case sym => sym.asTerm.typeSignature.member(universe.TermName("apply")) match { - case NoSymbol => throwUnsupportedOperation - case constructorSym => constructorSym - } + /** + * Returns the full class name for a type. The returned name is the canonical + * Scala name, where each component is separated by a period. It is NOT the + * Java-equivalent runtime name (no dollar signs). + * + * In simple cases, both the Scala and Java names are the same, however when Scala + * generates constructs that do not map to a Java equivalent, such as singleton objects + * or nested classes in package objects, it uses the dollar sign ($) to create + * synthetic classes, emulating behaviour in Java bytecode. + */ + def getClassNameFromType(tpe: `Type`): String = { + tpe.dealias.erasure.typeSymbol.asClass.fullName } - } - protected def constructParams(tpe: Type): Seq[Symbol] = { - val constructorSymbol = tpe.member(termNames.CONSTRUCTOR) match { - case NoSymbol => getCompanionConstructor(tpe) - case sym => sym + /** + * Returns the parameter names and types for the primary constructor of this type. + * + * Note that it only works for scala classes with primary constructor, and currently doesn't + * support inner class. + */ + def getConstructorParameters(tpe: Type): Seq[(String, Type)] = { + val dealiasedTpe = tpe.dealias + val formalTypeArgs = dealiasedTpe.typeSymbol.asClass.typeParams + val TypeRef(_, _, actualTypeArgs) = dealiasedTpe + val params = constructParams(dealiasedTpe) + // if there are type variables to fill in, do the substitution (SomeClass[T] -> SomeClass[Int]) + if (actualTypeArgs.nonEmpty) { + params.map { p => + p.name.decodedName.toString -> + p.typeSignature.substituteTypes(formalTypeArgs, actualTypeArgs) + } + } else { + params.map { p => + p.name.decodedName.toString -> p.typeSignature + } + } } - val params = if (constructorSymbol.isMethod) { - constructorSymbol.asMethod.paramLists - } else { - // Find the primary constructor, and use its parameter ordering. - val primaryConstructorSymbol: Option[Symbol] = constructorSymbol.asTerm.alternatives.find( - s => s.isMethod && s.asMethod.isPrimaryConstructor) - if (primaryConstructorSymbol.isEmpty) { - sys.error("Internal SQL error: Product object did not have a primary constructor.") - } else { - primaryConstructorSymbol.get.asMethod.paramLists - } + + /** + * If our type is a Scala trait it may have a companion object that + * only defines a constructor via `apply` method. + */ + private def getCompanionConstructor(tpe: Type): Symbol = { + def throwUnsupportedOperation = { + throw new UnsupportedOperationException(s"Unable to find constructor for $tpe. " + + s"This could happen if $tpe is an interface, or a trait without companion object " + + "constructor." + ) + } + + tpe.typeSymbol.asClass.companion match { + case NoSymbol => throwUnsupportedOperation + case sym => { + sym.asTerm.typeSignature.member(universe.TermName("apply")) match { + case NoSymbol => throwUnsupportedOperation + case constructorSym => constructorSym + } + } + } + } + + protected def constructParams(tpe: Type): Seq[Symbol] = { + val constructorSymbol = tpe.member(termNames.CONSTRUCTOR) match { + case NoSymbol => getCompanionConstructor(tpe) + case sym => sym + } + val params = if (constructorSymbol.isMethod) { + constructorSymbol.asMethod.paramLists + } else { + // Find the primary constructor, and use its parameter ordering. + val primaryConstructorSymbol: Option[Symbol] = constructorSymbol.asTerm.alternatives.find( + s => s.isMethod && s.asMethod.isPrimaryConstructor + ) + if (primaryConstructorSymbol.isEmpty) { + sys.error("Internal SQL error: Product object did not have a primary constructor.") + } else { + primaryConstructorSymbol.get.asMethod.paramLists + } + } + params.flatten } - params.flatten - } } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 0e7abe2d..7d208637 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -21,7 +21,6 @@ package org.jetbrains.kotlinx.spark.api -import org.apache.hadoop.shaded.org.apache.commons.math3.exception.util.ArgUtils import org.apache.spark.SparkContext import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.api.java.function.* @@ -33,14 +32,12 @@ import org.apache.spark.sql.streaming.GroupState import org.apache.spark.sql.streaming.GroupStateTimeout import org.apache.spark.sql.streaming.OutputMode import org.apache.spark.sql.types.* -import org.apache.spark.sql.types.DataTypes.DateType import org.apache.spark.unsafe.types.CalendarInterval import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions import scala.Product import scala.Tuple2 -import scala.concurrent.duration.`Duration$` import scala.reflect.ClassTag -import scala.reflect.api.TypeTags.TypeTag +import scala.reflect.api.StandardDefinitions import java.beans.PropertyDescriptor import java.math.BigDecimal import java.sql.Date @@ -1178,6 +1175,12 @@ inline fun = mapOf()): DataType { + if (type.classifier == ByteArray::class) return KComplexTypeWrapper( + DataTypes.BinaryType, + ByteArray::class.java, + type.isMarkedNullable, + ) + val primitiveSchema = knownDataTypes[type.classifier] if (primitiveSchema != null) return KSimpleTypeWrapper( primitiveSchema, @@ -1203,7 +1206,7 @@ fun schema(type: KType, map: Map = mapOf()): DataType { DoubleArray::class -> typeOf() BooleanArray::class -> typeOf() ShortArray::class -> typeOf() - ByteArray::class -> typeOf() +// ByteArray::class -> typeOf() else -> types.getValue(klass.typeParameters[0].name) } } else types.getValue(klass.typeParameters[0].name) @@ -1306,6 +1309,7 @@ private val knownDataTypes: Map, DataType> = mapOf( Timestamp::class to DataTypes.TimestampType, Instant::class to DataTypes.TimestampType, ByteArray::class to DataTypes.BinaryType, + Decimal::class to DecimalType.SYSTEM_DEFAULT(), CalendarInterval::class to DataTypes.CalendarIntervalType, Nothing::class to DataTypes.NullType, ) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index e9936e0b..331b1c6c 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -353,11 +353,11 @@ class ApiTest : ShouldSpec({ dataset.show() } should("handle binary datasets") { // uses encoder - val dataset = dsOf(byteArrayOf(1, 0, 1, 0)) + val dataset = dsOf("Hello there".encodeToByteArray()) dataset.show() } should("be able to serialize binary") { // uses knownDataTypes - val dataset = dsOf(byteArrayOf(1, 0, 1, 0) to 2) + val dataset = dsOf(c(byteArrayOf(1, 0, 12), 1, intArrayOf(1, 2, 3))) dataset.show() } should("be able to serialize CalendarInterval") { // uses knownDataTypes From 0f585bdbabaa89abd5280410e69c3dc17f552e87 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 23 Feb 2022 13:40:15 +0100 Subject: [PATCH 045/213] serializing binary works! --- .../apache/spark/sql/KotlinReflection.scala | 38 ++++++++++++++----- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 2 +- 2 files changed, 29 insertions(+), 11 deletions(-) diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala index 5c0b3cf7..74cdf290 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala +++ b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala @@ -22,7 +22,7 @@ package org.apache.spark.sql import org.apache.spark.internal.Logging import org.apache.spark.sql.catalyst.DeserializerBuildHelper._ -import org.apache.spark.sql.catalyst.ScalaReflection.{Schema, getClassFromType, isSubtype, javaBoxedType, localTypeOf} +import org.apache.spark.sql.catalyst.ScalaReflection.{Schema, dataTypeFor, getClassFromType, isSubtype, javaBoxedType, localTypeOf} import org.apache.spark.sql.catalyst.SerializerBuildHelper._ import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal import org.apache.spark.sql.catalyst.expressions.objects._ @@ -34,6 +34,7 @@ import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String} import org.apache.spark.util.Utils import java.beans.{Introspector, PropertyDescriptor} +import java.lang.Exception /** @@ -215,6 +216,15 @@ object KotlinReflection extends KotlinReflection { baseType(tpe) match { // + case t if ( + try { + !dataTypeFor(t).isInstanceOf[ObjectType] + } catch { + case _: Throwable => false + }) && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) || tpe == localTypeOf[Array[Byte]] => { + path + } + case t if isSubtype(t, localTypeOf[java.lang.Integer]) => { createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Integer]) } @@ -621,8 +631,14 @@ object KotlinReflection extends KotlinReflection { } def getType[T](clazz: Class[T]): universe.Type = { - val mir = runtimeMirror(clazz.getClassLoader) - mir.classSymbol(clazz).toType + clazz match { + case _ if clazz == classOf[Array[Byte]] => localTypeOf[Array[Byte]] + case _ => { + val mir = runtimeMirror(clazz.getClassLoader) + mir.classSymbol(clazz).toType + } + } + } def deserializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = { @@ -737,9 +753,10 @@ object KotlinReflection extends KotlinReflection { baseType(tpe) match { - // - case _ if !inputObject.dataType.isInstanceOf[ObjectType] - && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => { + // // TODO binary should go though objectType + case _ if !inputObject.dataType.isInstanceOf[ObjectType] && (!predefinedDt.exists { + _.isInstanceOf[ComplexWrapper] + } || tpe == localTypeOf[Array[Byte]]) => { inputObject } case t if isSubtype(t, localTypeOf[Option[_]]) => { @@ -915,18 +932,19 @@ object KotlinReflection extends KotlinReflection { val fieldName = structField.name val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] + + val fieldType: Type = getType(propClass) // TODO this must also return the type Array[Byte] + // val fieldValue = Invoke( inputObject, maybeProp.get.getReadMethod.getName, + // dataTypeFor(fieldType), inferExternalType(propClass), returnNullable = structField.nullable ) val newPath = walkedTypePath.recordField(propClass.getName, fieldName) - val tpe = - // if (propClass == classOf[Array[Byte]]) localTypeOf[Array[Byte]] - // else - getType(propClass) + val tpe = getType(propClass) val serializer = serializerFor( inputObject = fieldValue, diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index 331b1c6c..a12a48e2 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -357,7 +357,7 @@ class ApiTest : ShouldSpec({ dataset.show() } should("be able to serialize binary") { // uses knownDataTypes - val dataset = dsOf(c(byteArrayOf(1, 0, 12), 1, intArrayOf(1, 2, 3))) + val dataset = dsOf(c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3))) dataset.show() } should("be able to serialize CalendarInterval") { // uses knownDataTypes From 92ed60ef3046e1259b3dca8bbd41681030246fec Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 23 Feb 2022 13:43:11 +0100 Subject: [PATCH 046/213] serializing binary works! --- .../scala/org/apache/spark/sql/KotlinReflection.scala | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala index 74cdf290..7e098d00 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala +++ b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala @@ -309,8 +309,7 @@ object KotlinReflection extends KotlinReflection { } case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => { createDeserializerForScalaBigInt(path) - } // TODO case t if isSubtype(t, localTypeOf[Array[Byte]]) => - // createDeserializerForTypesSupportValueOf(path, classOf[Array[Byte]]) + } case t if isSubtype(t, localTypeOf[Array[_]]) => { var TypeRef(_, _, Seq(elementType)) = t @@ -753,7 +752,7 @@ object KotlinReflection extends KotlinReflection { baseType(tpe) match { - // // TODO binary should go though objectType + // case _ if !inputObject.dataType.isInstanceOf[ObjectType] && (!predefinedDt.exists { _.isInstanceOf[ComplexWrapper] } || tpe == localTypeOf[Array[Byte]]) => { @@ -933,12 +932,9 @@ object KotlinReflection extends KotlinReflection { val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] - val fieldType: Type = getType(propClass) // TODO this must also return the type Array[Byte] - // val fieldValue = Invoke( inputObject, maybeProp.get.getReadMethod.getName, - // dataTypeFor(fieldType), inferExternalType(propClass), returnNullable = structField.nullable ) @@ -951,8 +947,7 @@ object KotlinReflection extends KotlinReflection { tpe = tpe, walkedTypePath = newPath, seenTypeSet = seenTypeSet, - predefinedDt = if (propDt - .isInstanceOf[ComplexWrapper] /*&& propClass != classOf[Array[Byte]]*/ ) Some(propDt) else None + predefinedDt = if (propDt.isInstanceOf[ComplexWrapper]) Some(propDt) else None ) (fieldName, serializer) From 1fb680b98ffb3485186d9009fd62d760943f3a40 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 23 Feb 2022 14:17:52 +0100 Subject: [PATCH 047/213] fixed serializing CalendarInterval, added tests and fixes for Decimal and BigDecimal --- .../apache/spark/sql/KotlinReflection.scala | 9 +++++---- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 9 ++------- .../org/jetbrains/kotlinx/spark/api/ApiTest.kt | 18 ++++++++++++++++++ 3 files changed, 25 insertions(+), 11 deletions(-) diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala index 7e098d00..05ff330b 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala +++ b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala @@ -64,6 +64,8 @@ object KotlinReflection extends KotlinReflection { case c if c == java.lang.Float.TYPE => FloatType case c if c == java.lang.Double.TYPE => DoubleType case c if c == classOf[Array[Byte]] => BinaryType + case c if c == classOf[Decimal] => DecimalType.SYSTEM_DEFAULT + case c if c == classOf[CalendarInterval] => CalendarIntervalType case _ => ObjectType(cls) } @@ -221,7 +223,7 @@ object KotlinReflection extends KotlinReflection { !dataTypeFor(t).isInstanceOf[ObjectType] } catch { case _: Throwable => false - }) && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) || tpe == localTypeOf[Array[Byte]] => { + }) && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => { path } @@ -753,9 +755,8 @@ object KotlinReflection extends KotlinReflection { baseType(tpe) match { // - case _ if !inputObject.dataType.isInstanceOf[ObjectType] && (!predefinedDt.exists { - _.isInstanceOf[ComplexWrapper] - } || tpe == localTypeOf[Array[Byte]]) => { + case _ if !inputObject.dataType.isInstanceOf[ObjectType] && + !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => { inputObject } case t if isSubtype(t, localTypeOf[Option[_]]) => { diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 7d208637..e94a0f8f 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -1175,12 +1175,6 @@ inline fun = mapOf()): DataType { - if (type.classifier == ByteArray::class) return KComplexTypeWrapper( - DataTypes.BinaryType, - ByteArray::class.java, - type.isMarkedNullable, - ) - val primitiveSchema = knownDataTypes[type.classifier] if (primitiveSchema != null) return KSimpleTypeWrapper( primitiveSchema, @@ -1206,7 +1200,7 @@ fun schema(type: KType, map: Map = mapOf()): DataType { DoubleArray::class -> typeOf() BooleanArray::class -> typeOf() ShortArray::class -> typeOf() -// ByteArray::class -> typeOf() +// ByteArray::class -> typeOf() handled by BinaryType else -> types.getValue(klass.typeParameters[0].name) } } else types.getValue(klass.typeParameters[0].name) @@ -1310,6 +1304,7 @@ private val knownDataTypes: Map, DataType> = mapOf( Instant::class to DataTypes.TimestampType, ByteArray::class to DataTypes.BinaryType, Decimal::class to DecimalType.SYSTEM_DEFAULT(), + BigDecimal::class to DecimalType.SYSTEM_DEFAULT(), CalendarInterval::class to DataTypes.CalendarIntervalType, Nothing::class to DataTypes.NullType, ) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index a12a48e2..16c2bfe7 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -25,6 +25,7 @@ import org.apache.spark.sql.Dataset import org.apache.spark.sql.functions.* import org.apache.spark.sql.streaming.GroupState import org.apache.spark.sql.streaming.GroupStateTimeout +import org.apache.spark.sql.types.Decimal import org.apache.spark.unsafe.types.CalendarInterval import scala.Product import scala.Tuple1 @@ -32,6 +33,7 @@ import scala.Tuple2 import scala.Tuple3 import scala.collection.Seq import java.io.Serializable +import java.math.BigDecimal import java.sql.Date import java.sql.Timestamp import java.time.Duration @@ -360,6 +362,22 @@ class ApiTest : ShouldSpec({ val dataset = dsOf(c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3))) dataset.show() } + should("handle Decimal datasets") { // uses encoder + val dataset = dsOf(Decimal().set(50)) + dataset.show() + } + should("be able to serialize Decimal") { // uses knownDataTypes + val dataset = dsOf(c(Decimal().set(50), 12)) + dataset.show() + } + should("handle BigDecimal datasets") { // uses encoder + val dataset = dsOf(BigDecimal.TEN) + dataset.show() + } + should("be able to serialize BigDecimal") { // uses knownDataTypes + val dataset = dsOf(c(BigDecimal.TEN, 12)) + dataset.show() + } should("be able to serialize CalendarInterval") { // uses knownDataTypes val dataset = dsOf(CalendarInterval(1, 0, 0L) to 2) dataset.show() From 4f8ae68fed1e2a8e7086ef68a91e52d70aabb59d Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 23 Feb 2022 17:17:26 +0100 Subject: [PATCH 048/213] updating all tests to shouldBe instead of just show --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 2 +- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 149 ++++++++++++------ 2 files changed, 105 insertions(+), 46 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index e94a0f8f..2860d6da 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -36,6 +36,7 @@ import org.apache.spark.unsafe.types.CalendarInterval import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions import scala.Product import scala.Tuple2 +import scala.concurrent.duration.`Duration$` import scala.reflect.ClassTag import scala.reflect.api.StandardDefinitions import java.beans.PropertyDescriptor @@ -1306,7 +1307,6 @@ private val knownDataTypes: Map, DataType> = mapOf( Decimal::class to DecimalType.SYSTEM_DEFAULT(), BigDecimal::class to DecimalType.SYSTEM_DEFAULT(), CalendarInterval::class to DataTypes.CalendarIntervalType, - Nothing::class to DataTypes.NullType, ) private fun transitiveMerge(a: Map, b: Map): Map { diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index 16c2bfe7..b66fadbd 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -20,6 +20,7 @@ package org.jetbrains.kotlinx.spark.api/*- import ch.tutteli.atrium.api.fluent.en_GB.* import ch.tutteli.atrium.api.verbs.expect import io.kotest.core.spec.style.ShouldSpec +import io.kotest.matchers.should import io.kotest.matchers.shouldBe import org.apache.spark.sql.Dataset import org.apache.spark.sql.functions.* @@ -323,68 +324,97 @@ class ApiTest : ShouldSpec({ cogrouped.count() shouldBe 4 } should("handle LocalDate Datasets") { // uses encoder - val dataset: Dataset = dsOf(LocalDate.now(), LocalDate.now()) - dataset.show() + val dates = listOf(LocalDate.now(), LocalDate.now()) + val dataset: Dataset = dates.toDS() + dataset.collectAsList() shouldBe dates } should("handle Instant Datasets") { // uses encoder - val dataset: Dataset = dsOf(Instant.now(), Instant.now()) - dataset.show() + val instants = listOf(Instant.now(), Instant.now()) + val dataset: Dataset = instants.toDS() + dataset.collectAsList() shouldBe instants } should("Be able to serialize Instant") { // uses knownDataTypes - val dataset = dsOf(Instant.now() to Instant.now()) - dataset.show() + val instantPair = Instant.now() to Instant.now() + val dataset = dsOf(instantPair) + dataset.collectAsList() shouldBe listOf(instantPair) } should("be able to serialize Date") { // uses knownDataTypes - val dataset: Dataset> = dsOf(Date.valueOf("2020-02-10") to 5) - dataset.show() + val datePair = Date.valueOf("2020-02-10") to 5 + val dataset: Dataset> = dsOf(datePair) + dataset.collectAsList() shouldBe listOf(datePair) } should("handle Timestamp Datasets") { // uses encoder - val dataset = dsOf(Timestamp(0L)) - dataset.show() + val timeStamps = listOf(Timestamp(0L), Timestamp(1L)) + val dataset = timeStamps.toDS() + dataset.collectAsList() shouldBe timeStamps } should("be able to serialize Timestamp") { // uses knownDataTypes - val dataset = dsOf(Timestamp(0L) to 2) - dataset.show() + val timestampPair = Timestamp(0L) to 2 + val dataset = dsOf(timestampPair) + dataset.collectAsList() shouldBe listOf(timestampPair) } should("handle Duration Datasets") { // uses encoder val dataset = dsOf(Duration.ZERO) - dataset.show() + dataset.collectAsList() shouldBe listOf(Duration.ZERO) } should("handle Period Datasets") { // uses encoder - val dataset = dsOf(Period.ZERO) - dataset.show() + val periods = listOf(Period.ZERO, Period.ofDays(2)) + val dataset = periods.toDS() + + dataset.show(false) + + dataset.collectAsList().let { + it[0] shouldBe Period.ZERO + + // TODO this is also broken in Scala. It reports a Period of 0 instead of 2 days +// it[1] shouldBe Period.ofDays(2) + it[1] shouldBe Period.ofDays(0) + } + } should("handle binary datasets") { // uses encoder - val dataset = dsOf("Hello there".encodeToByteArray()) - dataset.show() + val byteArray = "Hello there".encodeToByteArray() + val dataset = dsOf(byteArray) + dataset.collectAsList() shouldBe listOf(byteArray) } should("be able to serialize binary") { // uses knownDataTypes - val dataset = dsOf(c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3))) - dataset.show() - } - should("handle Decimal datasets") { // uses encoder - val dataset = dsOf(Decimal().set(50)) - dataset.show() + val byteArrayTriple = c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3)) + val dataset = dsOf(byteArrayTriple) + + val (a, b, c) = dataset.collectAsList().single() + a contentEquals "Hello there".encodeToByteArray() shouldBe true + b shouldBe 1 + c contentEquals intArrayOf(1, 2, 3) shouldBe true } should("be able to serialize Decimal") { // uses knownDataTypes - val dataset = dsOf(c(Decimal().set(50), 12)) - dataset.show() + val decimalPair = c(Decimal().set(50), 12) + val dataset = dsOf(decimalPair) + dataset.collectAsList() shouldBe listOf(decimalPair) } should("handle BigDecimal datasets") { // uses encoder - val dataset = dsOf(BigDecimal.TEN) - dataset.show() + val decimals = listOf(BigDecimal.ONE, BigDecimal.TEN) + val dataset = decimals.toDS() + dataset.collectAsList().let { (one, ten) -> + one.compareTo(BigDecimal.ONE) shouldBe 0 + ten.compareTo(BigDecimal.TEN) shouldBe 0 + } } should("be able to serialize BigDecimal") { // uses knownDataTypes - val dataset = dsOf(c(BigDecimal.TEN, 12)) - dataset.show() + val decimalPair = c(BigDecimal.TEN, 12) + val dataset = dsOf(decimalPair) + val (a, b) = dataset.collectAsList().single() + a.compareTo(BigDecimal.TEN) shouldBe 0 + b shouldBe 12 } should("be able to serialize CalendarInterval") { // uses knownDataTypes - val dataset = dsOf(CalendarInterval(1, 0, 0L) to 2) - dataset.show() + val calendarIntervalPair = CalendarInterval(1, 0, 0L) to 2 + val dataset = dsOf(calendarIntervalPair) + dataset.collectAsList() shouldBe listOf(calendarIntervalPair) } - should("be able to serialize null") { // uses knownDataTypes - val dataset: Dataset> = dsOf(null to 2) - dataset.show() + should("handle nullable datasets") { + val ints = listOf(1, 2, 3, null) + val dataset = ints.toDS() + dataset.collectAsList() shouldBe ints } should("Be able to serialize Scala Tuples including data classes") { val dataset = dsOf( @@ -415,20 +445,20 @@ class ApiTest : ShouldSpec({ val newDS1WithAs: Dataset = dataset.selectTyped( col("a").`as`(), ) - newDS1WithAs.show() + newDS1WithAs.collectAsList() val newDS2: Dataset> = dataset.selectTyped( col(SomeClass::a), // NOTE: this only works on 3.0, returning a data class with an array in it col(SomeClass::b), ) - newDS2.show() + newDS2.collectAsList() val newDS3: Dataset> = dataset.selectTyped( col(SomeClass::a), col(SomeClass::b), col(SomeClass::b), ) - newDS3.show() + newDS3.collectAsList() val newDS4: Dataset> = dataset.selectTyped( col(SomeClass::a), @@ -436,7 +466,7 @@ class ApiTest : ShouldSpec({ col(SomeClass::b), col(SomeClass::b), ) - newDS4.show() + newDS4.collectAsList() val newDS5: Dataset> = dataset.selectTyped( col(SomeClass::a), @@ -445,7 +475,7 @@ class ApiTest : ShouldSpec({ col(SomeClass::b), col(SomeClass::b), ) - newDS5.show() + newDS5.collectAsList() } should("Access columns using invoke on datasets") { val dataset = dsOf( @@ -498,19 +528,18 @@ class ApiTest : ShouldSpec({ dataset(SomeOtherClass::a), col(SomeOtherClass::c), ) - b.show() + b.collectAsList() } should("Handle some where queries using column operator functions") { val dataset = dsOf( SomeOtherClass(intArrayOf(1, 2, 3), 4, true), SomeOtherClass(intArrayOf(4, 3, 2), 1, true), ) - dataset.show() + dataset.collectAsList() val column = col("b").`as`() val b = dataset.where(column gt 3 and col(SomeOtherClass::c)) - b.show() b.count() shouldBe 1 } @@ -519,21 +548,51 @@ class ApiTest : ShouldSpec({ listOf(SomeClass(intArrayOf(1, 2, 3), 4)), listOf(SomeClass(intArrayOf(3, 2, 1), 0)), ) - dataset.show() + + val (first, second) = dataset.collectAsList() + + first.single().let { (a, b) -> + a.contentEquals(intArrayOf(1, 2, 3)) shouldBe true + b shouldBe 4 + } + second.single().let { (a, b) -> + a.contentEquals(intArrayOf(3, 2, 1)) shouldBe true + b shouldBe 0 + } } should("Be able to serialize arrays of data classes") { val dataset = dsOf( arrayOf(SomeClass(intArrayOf(1, 2, 3), 4)), arrayOf(SomeClass(intArrayOf(3, 2, 1), 0)), ) - dataset.show() + + val (first, second) = dataset.collectAsList() + + first.single().let { (a, b) -> + a.contentEquals(intArrayOf(1, 2, 3)) shouldBe true + b shouldBe 4 + } + second.single().let { (a, b) -> + a.contentEquals(intArrayOf(3, 2, 1)) shouldBe true + b shouldBe 0 + } } should("Be able to serialize lists of tuples") { val dataset = dsOf( listOf(Tuple2(intArrayOf(1, 2, 3), 4)), listOf(Tuple2(intArrayOf(3, 2, 1), 0)), ) - dataset.show() + + val (first, second) = dataset.collectAsList() + + first.single().let { + it._1().contentEquals(intArrayOf(1, 2, 3)) shouldBe true + it._2() shouldBe 4 + } + second.single().let { + it._1().contentEquals(intArrayOf(3, 2, 1)) shouldBe true + it._2() shouldBe 0 + } } should("Allow simple forEachPartition in datasets") { val dataset = dsOf( From 38486bbd5931ff4e9e9ee27696969c81798335df Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 23 Feb 2022 17:25:09 +0100 Subject: [PATCH 049/213] removed .show() from rdd test --- .../test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt | 6 ------ 1 file changed, 6 deletions(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index 149e6500..c8ad1a41 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -604,7 +604,6 @@ class ApiTest : ShouldSpec({ listOf(1, 2, 3, 4, 5, 6) ).rdd() val dataset0: Dataset = rdd0.toDS() - dataset0.show() dataset0.toList() shouldBe listOf(1, 2, 3, 4, 5, 6) @@ -613,7 +612,6 @@ class ApiTest : ShouldSpec({ listOf(1, 2, 3, 4, 5, 6) ) val dataset1: Dataset = rdd1.toDS() - dataset1.show() dataset1.toList() shouldBe listOf(1, 2, 3, 4, 5, 6) @@ -622,7 +620,6 @@ class ApiTest : ShouldSpec({ listOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0) ) val dataset2: Dataset = rdd2.toDS() - dataset2.show() dataset2.toList() shouldBe listOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0) @@ -631,7 +628,6 @@ class ApiTest : ShouldSpec({ listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) ) val dataset3: Dataset> = rdd3.toDS() - dataset3.show() dataset3.toList>() shouldBe listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) @@ -640,7 +636,6 @@ class ApiTest : ShouldSpec({ listOf(SomeClass(intArrayOf(1, 2), 0)) ) val dataset4 = rdd4.toDS() - dataset4.show() dataset4.toList().first().let { (a, b) -> a contentEquals intArrayOf(1, 2) shouldBe true @@ -652,7 +647,6 @@ class ApiTest : ShouldSpec({ listOf(c(1.0, 4)) ) val dataset5 = rdd5.toDS() - dataset5.show() dataset5.toList>() shouldBe listOf(c(1.0, 4)) } From 0acd3e2debbd6ee6013ca60339f8d280cc068032 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 23 Feb 2022 17:36:18 +0100 Subject: [PATCH 050/213] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 380ee310..498334d2 100644 --- a/README.md +++ b/README.md @@ -27,14 +27,14 @@ We have opened a Spark Project Improvement Proposal: [Kotlin support for Apache - [Code of Conduct](#code-of-conduct) - [License](#license) -## Supported versions of Apache Spark #TODO +## Supported versions of Apache Spark | Apache Spark | Scala | Kotlin for Apache Spark | |:------------:|:-----:|:-------------------------------:| | 3.0.0+ | 2.12 | kotlin-spark-api-3.0:1.0.2 | | 2.4.1+ | 2.12 | kotlin-spark-api-2.4_2.12:1.0.2 | | 2.4.1+ | 2.11 | kotlin-spark-api-2.4_2.11:1.0.2 | -| 3.2.0+ | 2.12 | kotlin-spark-api-2.4_2.12:1.0.3 | +| 3.2.0+ | 2.12 | kotlin-spark-api-3.2:1.0.3 | ## Releases From bcf99b8fe3b3950e99a9b90cb17bcd1add8f810e Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 24 Feb 2022 15:07:42 +0100 Subject: [PATCH 051/213] split up rdd tests, added list test. Added kotlin official code style prop for later --- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 30 ++++++++++++------- pom.xml | 1 + 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index c8ad1a41..06c5628f 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -598,22 +598,24 @@ class ApiTest : ShouldSpec({ it.nullable() shouldBe true } } - should("Easily convert a (Java)RDD to a Dataset") { - // scala RDD + should("Convert Scala RDD to Dataset") { val rdd0: RDD = sc.parallelize( listOf(1, 2, 3, 4, 5, 6) ).rdd() val dataset0: Dataset = rdd0.toDS() dataset0.toList() shouldBe listOf(1, 2, 3, 4, 5, 6) + } - // normal JavaRDD + should("Convert a JavaRDD to a Dataset") { val rdd1: JavaRDD = sc.parallelize( listOf(1, 2, 3, 4, 5, 6) ) val dataset1: Dataset = rdd1.toDS() dataset1.toList() shouldBe listOf(1, 2, 3, 4, 5, 6) + } + should("Convert JavaDoubleRDD to Dataset") { // JavaDoubleRDD val rdd2: JavaDoubleRDD = sc.parallelizeDoubles( @@ -622,16 +624,16 @@ class ApiTest : ShouldSpec({ val dataset2: Dataset = rdd2.toDS() dataset2.toList() shouldBe listOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0) - - // JavaPairRDD + } + should("Convert JavaPairRDD to Dataset") { val rdd3: JavaPairRDD = sc.parallelizePairs( listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) ) val dataset3: Dataset> = rdd3.toDS() dataset3.toList>() shouldBe listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) - - // Kotlin Serializable data class RDD + } + should("Convert Kotlin Serializable data class RDD to Dataset") { val rdd4 = sc.parallelize( listOf(SomeClass(intArrayOf(1, 2), 0)) ) @@ -641,8 +643,8 @@ class ApiTest : ShouldSpec({ a contentEquals intArrayOf(1, 2) shouldBe true b shouldBe 0 } - - // Arity + } + should("Convert Arity RDD to Dataset") { val rdd5 = sc.parallelize( listOf(c(1.0, 4)) ) @@ -650,6 +652,14 @@ class ApiTest : ShouldSpec({ dataset5.toList>() shouldBe listOf(c(1.0, 4)) } + should("Convert List RDD to Dataset") { + val rdd6 = sc.parallelize( + listOf(listOf(1, 2, 3), listOf(4, 5, 6)) + ) + val dataset6 = rdd6.toDS() + + dataset6.toList>() shouldBe listOf(listOf(1, 2, 3), listOf(4, 5, 6)) + } } } }) @@ -684,5 +694,5 @@ data class ComplexEnumDataClass( data class NullFieldAbleDataClass( val optionList: List?, - val optionMap: Map? + val optionMap: Map?, ) \ No newline at end of file diff --git a/pom.xml b/pom.xml index 47043737..2ced5eb7 100644 --- a/pom.xml +++ b/pom.xml @@ -32,6 +32,7 @@ 3.0.0-M5 1.6.8 4.5.6 + official From 47e1ccb0df52a8d00059485c56b1d3780e612ff3 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 24 Feb 2022 15:16:06 +0100 Subject: [PATCH 052/213] removed sc.stop() --- .../kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt index 2e750c81..039b14b3 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt @@ -78,6 +78,7 @@ inline fun withSpark( * @param logLevel Control our logLevel. This overrides any user-defined log settings. * @param func function which will be executed in context of [KSparkSession] (it means that `this` inside block will point to [KSparkSession]) */ +@Suppress("UsePropertyAccessSyntax") @JvmOverloads inline fun withSpark(builder: Builder, logLevel: SparkLogLevel = ERROR, func: KSparkSession.() -> Unit) { builder @@ -86,7 +87,6 @@ inline fun withSpark(builder: Builder, logLevel: SparkLogLevel = ERROR, func: KS KSparkSession(this).apply { sparkContext.setLogLevel(logLevel) func() - sc.stop() spark.stop() } } @@ -163,5 +163,6 @@ open class KSparkSession(val spark: SparkSession, val sc: JavaSparkContext = Jav /** * This wrapper over [SparkSession] and [JavaStreamingContext] provides several additional methods to create [org.apache.spark.sql.Dataset] */ -class KSparkStreamingSession(session: KSparkSession, val ssc: JavaStreamingContext) : KSparkSession(session.spark, session.sc) +class KSparkStreamingSession(session: KSparkSession, val ssc: JavaStreamingContext) : + KSparkSession(session.spark, session.sc) From 2fdba6a0fcc9742a6ba472fcc5bb540f1b0baf0e Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 24 Feb 2022 16:21:20 +0100 Subject: [PATCH 053/213] Update docs generation added force_orphan --- .github/workflows/generate_docs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/generate_docs.yml b/.github/workflows/generate_docs.yml index 0aee6d85..cdbd1949 100644 --- a/.github/workflows/generate_docs.yml +++ b/.github/workflows/generate_docs.yml @@ -25,5 +25,6 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} publish_branch: docs publish_dir: ./kotlin-spark-api/3.2/target/dokka + force_orphan: true From 5d785e0a36995abeb67fb888965fce8fd6f5ce55 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 24 Feb 2022 16:27:38 +0100 Subject: [PATCH 054/213] added jira issue --- .../src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt | 1 + 1 file changed, 1 insertion(+) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index b66fadbd..df58e874 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -367,6 +367,7 @@ class ApiTest : ShouldSpec({ it[0] shouldBe Period.ZERO // TODO this is also broken in Scala. It reports a Period of 0 instead of 2 days + // https://issues.apache.org/jira/browse/SPARK-38317 // it[1] shouldBe Period.ofDays(2) it[1] shouldBe Period.ofDays(0) } From 518d1a153bea33d8b206e08ab89a1e5db8e76868 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 25 Feb 2022 12:53:06 +0100 Subject: [PATCH 055/213] Update ApiTest.kt Apparently this is expected behavior: https://issues.apache.org/jira/browse/SPARK-38317 --- .../test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index df58e874..79d2407c 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -366,9 +366,7 @@ class ApiTest : ShouldSpec({ dataset.collectAsList().let { it[0] shouldBe Period.ZERO - // TODO this is also broken in Scala. It reports a Period of 0 instead of 2 days - // https://issues.apache.org/jira/browse/SPARK-38317 -// it[1] shouldBe Period.ofDays(2) + // NOTE Spark truncates java.time.Period to months. it[1] shouldBe Period.ofDays(0) } @@ -736,4 +734,4 @@ data class ComplexEnumDataClass( data class NullFieldAbleDataClass( val optionList: List?, val optionMap: Map? -) \ No newline at end of file +) From e62089676a2915feed0ed517e4d50a9a7e2226bf Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 21 Feb 2022 19:49:36 +0100 Subject: [PATCH 056/213] added encoders: Duration, Period, ByteArray (Binary, now actually working) added tests for: Instant datatype, duration, period, binary, and others added datatypes: ByteArray (BinaryType), CalendarInterval, Nothing (All three not yet working) --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 40 +++++++++++++------ .../jetbrains/kotlinx/spark/api/ApiTest.kt | 31 ++++++++++++++ 2 files changed, 58 insertions(+), 13 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 7e9ef135..4ef15444 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -34,17 +34,22 @@ import org.apache.spark.sql.streaming.GroupState import org.apache.spark.sql.streaming.GroupStateTimeout import org.apache.spark.sql.streaming.OutputMode import org.apache.spark.sql.types.* +import org.apache.spark.sql.types.DataTypes.DateType +import org.apache.spark.unsafe.types.CalendarInterval import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions import scala.Product import scala.Tuple2 +import scala.concurrent.duration.`Duration$` import scala.reflect.ClassTag import scala.reflect.api.TypeTags.TypeTag import java.beans.PropertyDescriptor import java.math.BigDecimal import java.sql.Date import java.sql.Timestamp +import java.time.Duration import java.time.Instant import java.time.LocalDate +import java.time.Period import java.util.* import java.util.concurrent.ConcurrentHashMap import kotlin.Any @@ -96,10 +101,12 @@ val ENCODERS: Map, Encoder<*>> = mapOf( String::class to STRING(), BigDecimal::class to DECIMAL(), Date::class to DATE(), - LocalDate::class to LOCALDATE(), // 3.0 only + LocalDate::class to LOCALDATE(), // 3.0+ Timestamp::class to TIMESTAMP(), - Instant::class to INSTANT(), // 3.0 only - ByteArray::class to BINARY() + Instant::class to INSTANT(), // 3.0+ + ByteArray::class to BINARY(), + Duration::class to DURATION(), // 3.2+ + Period::class to PERIOD(), // 3.2+ ) @@ -190,12 +197,16 @@ fun generateEncoder(type: KType, cls: KClass<*>): Encoder { } as Encoder } -private fun isSupportedClass(cls: KClass<*>): Boolean = - cls.isData - || cls.isSubclassOf(Map::class) - || cls.isSubclassOf(Iterable::class) - || cls.isSubclassOf(Product::class) - || cls.java.isArray +private fun isSupportedClass(cls: KClass<*>): Boolean = when { + cls == ByteArray::class -> false // uses binary encoder + cls.isData -> true + cls.isSubclassOf(Map::class) -> true + cls.isSubclassOf(Iterable::class) -> true + cls.isSubclassOf(Product::class) -> true + cls.java.isArray -> true + else -> false + } + private fun kotlinClassEncoder(schema: DataType, kClass: KClass<*>): Encoder { return ExpressionEncoder( @@ -1303,10 +1314,13 @@ private val knownDataTypes: Map, DataType> = mapOf( Float::class to DataTypes.FloatType, Double::class to DataTypes.DoubleType, String::class to DataTypes.StringType, - LocalDate::class to `DateType$`.`MODULE$`, - Date::class to `DateType$`.`MODULE$`, - Timestamp::class to `TimestampType$`.`MODULE$`, - Instant::class to `TimestampType$`.`MODULE$`, + LocalDate::class to DataTypes.DateType, + Date::class to DataTypes.DateType, + Timestamp::class to DataTypes.TimestampType, + Instant::class to DataTypes.TimestampType, + ByteArray::class to DataTypes.BinaryType, + CalendarInterval::class to DataTypes.CalendarIntervalType, + Nothing::class to DataTypes.NullType, ) private fun transitiveMerge(a: Map, b: Map): Map { diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index 06c5628f..bcb53bcb 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -30,6 +30,7 @@ import org.apache.spark.sql.Dataset import org.apache.spark.sql.functions.* import org.apache.spark.sql.streaming.GroupState import org.apache.spark.sql.streaming.GroupStateTimeout +import org.apache.spark.unsafe.types.CalendarInterval import scala.Product import scala.Tuple1 import scala.Tuple2 @@ -38,8 +39,10 @@ import scala.collection.Seq import java.io.Serializable import java.sql.Date import java.sql.Timestamp +import java.time.Duration import java.time.Instant import java.time.LocalDate +import java.time.Period import kotlin.collections.Iterator import scala.collection.Iterator as ScalaIterator import scala.collection.Map as ScalaMap @@ -330,6 +333,10 @@ class ApiTest : ShouldSpec({ val dataset: Dataset = dsOf(Instant.now(), Instant.now()) dataset.show() } + should("Be able to serialize Instant") { // uses knownDataTypes + val dataset = dsOf(Instant.now() to Instant.now()) + dataset.show() + } should("be able to serialize Date") { // uses knownDataTypes val dataset: Dataset> = dsOf(Date.valueOf("2020-02-10") to 5) dataset.show() @@ -342,6 +349,30 @@ class ApiTest : ShouldSpec({ val dataset = dsOf(Timestamp(0L) to 2) dataset.show() } + should("handle Duration Datasets") { // uses encoder + val dataset = dsOf(Duration.ZERO) + dataset.show() + } + should("handle Period Datasets") { // uses encoder + val dataset = dsOf(Period.ZERO) + dataset.show() + } + should("handle binary datasets") { // uses encoder + val dataset = dsOf(byteArrayOf(1, 0, 1, 0)) + dataset.show() + } + should("be able to serialize binary") { // uses knownDataTypes + val dataset = dsOf(byteArrayOf(1, 0, 1, 0) to 2) + dataset.show() + } + should("be able to serialize CalendarInterval") { // uses knownDataTypes + val dataset = dsOf(CalendarInterval(1, 0, 0L) to 2) + dataset.show() + } + should("be able to serialize null") { // uses knownDataTypes + val dataset: Dataset> = dsOf(null to 2) + dataset.show() + } should("Be able to serialize Scala Tuples including data classes") { val dataset = dsOf( Tuple2("a", Tuple3("a", 1, LonLat(1.0, 1.0))), From e466df609fa24dbbd027c8a45a9220950e0141e5 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Tue, 22 Feb 2022 22:00:26 +0100 Subject: [PATCH 057/213] copying over some other missing parts from ScalaReflection.scala. Did some refactoring. still trying to get serializing of binary to work --- .../apache/spark/sql/KotlinReflection.scala | 2233 +++++++++-------- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 14 +- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 4 +- 3 files changed, 1266 insertions(+), 985 deletions(-) diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala index be808af0..5c0b3cf7 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala +++ b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala @@ -22,6 +22,7 @@ package org.apache.spark.sql import org.apache.spark.internal.Logging import org.apache.spark.sql.catalyst.DeserializerBuildHelper._ +import org.apache.spark.sql.catalyst.ScalaReflection.{Schema, getClassFromType, isSubtype, javaBoxedType, localTypeOf} import org.apache.spark.sql.catalyst.SerializerBuildHelper._ import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal import org.apache.spark.sql.catalyst.expressions.objects._ @@ -30,6 +31,7 @@ import org.apache.spark.sql.catalyst.util.ArrayBasedMapData import org.apache.spark.sql.catalyst.{InternalRow, ScalaReflection, WalkedTypePath} import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String} +import org.apache.spark.util.Utils import java.beans.{Introspector, PropertyDescriptor} @@ -45,944 +47,1215 @@ trait DefinedByConstructorParams * KotlinReflection is heavily inspired by ScalaReflection and even extends it just to add several methods */ object KotlinReflection extends KotlinReflection { - /** - * Returns the Spark SQL DataType for a given java class. Where this is not an exact mapping - * to a native type, an ObjectType is returned. - * - * Unlike `inferDataType`, this function doesn't do any massaging of types into the Spark SQL type - * system. As a result, ObjectType will be returned for things like boxed Integers. - */ - private def inferExternalType(cls: Class[_]): DataType = cls match { - case c if c == java.lang.Boolean.TYPE => BooleanType - case c if c == java.lang.Byte.TYPE => ByteType - case c if c == java.lang.Short.TYPE => ShortType - case c if c == java.lang.Integer.TYPE => IntegerType - case c if c == java.lang.Long.TYPE => LongType - case c if c == java.lang.Float.TYPE => FloatType - case c if c == java.lang.Double.TYPE => DoubleType - case c if c == classOf[Array[Byte]] => BinaryType - case _ => ObjectType(cls) - } - - val universe: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe - - // Since we are creating a runtime mirror using the class loader of current thread, - // we need to use def at here. So, every time we call mirror, it is using the - // class loader of the current thread. - override def mirror: universe.Mirror = { - universe.runtimeMirror(Thread.currentThread().getContextClassLoader) - } - - import universe._ - - // The Predef.Map is scala.collection.immutable.Map. - // Since the map values can be mutable, we explicitly import scala.collection.Map at here. - import scala.collection.Map - - - def isSubtype(t: universe.Type, t2: universe.Type): Boolean = t <:< t2 - - /** - * Synchronize to prevent concurrent usage of `<:<` operator. - * This operator is not thread safe in any current version of scala; i.e. - * (2.11.12, 2.12.10, 2.13.0-M5). - * - * See https://github.com/scala/bug/issues/10766 - */ - /* - private[catalyst] def isSubtype(tpe1: `Type`, tpe2: `Type`): Boolean = { - ScalaReflection.ScalaSubtypeLock.synchronized { - tpe1 <:< tpe2 - } + /** + * Returns the Spark SQL DataType for a given java class. Where this is not an exact mapping + * to a native type, an ObjectType is returned. + * + * Unlike `inferDataType`, this function doesn't do any massaging of types into the Spark SQL type + * system. As a result, ObjectType will be returned for things like boxed Integers. + */ + private def inferExternalType(cls: Class[_]): DataType = cls match { + case c if c == java.lang.Boolean.TYPE => BooleanType + case c if c == java.lang.Byte.TYPE => ByteType + case c if c == java.lang.Short.TYPE => ShortType + case c if c == java.lang.Integer.TYPE => IntegerType + case c if c == java.lang.Long.TYPE => LongType + case c if c == java.lang.Float.TYPE => FloatType + case c if c == java.lang.Double.TYPE => DoubleType + case c if c == classOf[Array[Byte]] => BinaryType + case _ => ObjectType(cls) } - */ - - private def dataTypeFor(tpe: `Type`): DataType = cleanUpReflectionObjects { - tpe.dealias match { - case t if isSubtype(t, definitions.NullTpe) => NullType - case t if isSubtype(t, definitions.IntTpe) => IntegerType - case t if isSubtype(t, definitions.LongTpe) => LongType - case t if isSubtype(t, definitions.DoubleTpe) => DoubleType - case t if isSubtype(t, definitions.FloatTpe) => FloatType - case t if isSubtype(t, definitions.ShortTpe) => ShortType - case t if isSubtype(t, definitions.ByteTpe) => ByteType - case t if isSubtype(t, definitions.BooleanTpe) => BooleanType - case t if isSubtype(t, localTypeOf[Array[Byte]]) => BinaryType - case t if isSubtype(t, localTypeOf[CalendarInterval]) => CalendarIntervalType - case t if isSubtype(t, localTypeOf[Decimal]) => DecimalType.SYSTEM_DEFAULT - case _ => - val className = getClassNameFromType(tpe) - className match { - case "scala.Array" => - val TypeRef(_, _, Seq(elementType)) = tpe - arrayClassFor(elementType) - case _ => - val clazz = getClassFromType(tpe) - ObjectType(clazz) - } + + val universe: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe + + // Since we are creating a runtime mirror using the class loader of current thread, + // we need to use def at here. So, every time we call mirror, it is using the + // class loader of the current thread. + override def mirror: universe.Mirror = { + universe.runtimeMirror(Thread.currentThread().getContextClassLoader) } - } - - /** - * Given a type `T` this function constructs `ObjectType` that holds a class of type - * `Array[T]`. - * - * Special handling is performed for primitive types to map them back to their raw - * JVM form instead of the Scala Array that handles auto boxing. - */ - private def arrayClassFor(tpe: `Type`): ObjectType = cleanUpReflectionObjects { - val cls = tpe.dealias match { - case t if isSubtype(t, definitions.IntTpe) => classOf[Array[Int]] - case t if isSubtype(t, definitions.LongTpe) => classOf[Array[Long]] - case t if isSubtype(t, definitions.DoubleTpe) => classOf[Array[Double]] - case t if isSubtype(t, definitions.FloatTpe) => classOf[Array[Float]] - case t if isSubtype(t, definitions.ShortTpe) => classOf[Array[Short]] - case t if isSubtype(t, definitions.ByteTpe) => classOf[Array[Byte]] - case t if isSubtype(t, definitions.BooleanTpe) => classOf[Array[Boolean]] - case other => - // There is probably a better way to do this, but I couldn't find it... - val elementType = dataTypeFor(other).asInstanceOf[ObjectType].cls - java.lang.reflect.Array.newInstance(elementType, 0).getClass + import universe._ + + // The Predef.Map is scala.collection.immutable.Map. + // Since the map values can be mutable, we explicitly import scala.collection.Map at here. + import scala.collection.Map + + + def isSubtype(t: universe.Type, t2: universe.Type): Boolean = t <:< t2 + + /** + * Synchronize to prevent concurrent usage of `<:<` operator. + * This operator is not thread safe in any current version of scala; i.e. + * (2.11.12, 2.12.10, 2.13.0-M5). + * + * See https://github.com/scala/bug/issues/10766 + */ + /* + private[catalyst] def isSubtype(tpe1: `Type`, tpe2: `Type`): Boolean = { + ScalaReflection.ScalaSubtypeLock.synchronized { + tpe1 <:< tpe2 + } + } + */ + + private def dataTypeFor(tpe: `Type`): DataType = cleanUpReflectionObjects { + tpe.dealias match { + case t if isSubtype(t, definitions.NullTpe) => NullType + case t if isSubtype(t, definitions.IntTpe) => IntegerType + case t if isSubtype(t, definitions.LongTpe) => LongType + case t if isSubtype(t, definitions.DoubleTpe) => DoubleType + case t if isSubtype(t, definitions.FloatTpe) => FloatType + case t if isSubtype(t, definitions.ShortTpe) => ShortType + case t if isSubtype(t, definitions.ByteTpe) => ByteType + case t if isSubtype(t, definitions.BooleanTpe) => BooleanType + case t if isSubtype(t, localTypeOf[Array[Byte]]) => BinaryType + case t if isSubtype(t, localTypeOf[CalendarInterval]) => CalendarIntervalType + case t if isSubtype(t, localTypeOf[Decimal]) => DecimalType.SYSTEM_DEFAULT + case _ => { + val className = getClassNameFromType(tpe) + className match { + case "scala.Array" => { + val TypeRef(_, _, Seq(elementType)) = tpe + arrayClassFor(elementType) + } + case _ => { + val clazz = getClassFromType(tpe) + ObjectType(clazz) + } + } + } + } } - ObjectType(cls) - } - - /** - * Returns true if the value of this data type is same between internal and external. - */ - def isNativeType(dt: DataType): Boolean = dt match { - case NullType | BooleanType | ByteType | ShortType | IntegerType | LongType | - FloatType | DoubleType | BinaryType | CalendarIntervalType => true - case _ => false - } - - private def baseType(tpe: `Type`): `Type` = { - tpe.dealias match { - case annotatedType: AnnotatedType => annotatedType.underlying - case other => other + + /** + * Given a type `T` this function constructs `ObjectType` that holds a class of type + * `Array[T]`. + * + * Special handling is performed for primitive types to map them back to their raw + * JVM form instead of the Scala Array that handles auto boxing. + */ + private def arrayClassFor(tpe: `Type`): ObjectType = cleanUpReflectionObjects { + val cls = tpe.dealias match { + case t if isSubtype(t, definitions.IntTpe) => classOf[Array[Int]] + case t if isSubtype(t, definitions.LongTpe) => classOf[Array[Long]] + case t if isSubtype(t, definitions.DoubleTpe) => classOf[Array[Double]] + case t if isSubtype(t, definitions.FloatTpe) => classOf[Array[Float]] + case t if isSubtype(t, definitions.ShortTpe) => classOf[Array[Short]] + case t if isSubtype(t, definitions.ByteTpe) => classOf[Array[Byte]] + case t if isSubtype(t, definitions.BooleanTpe) => classOf[Array[Boolean]] + case t if isSubtype(t, localTypeOf[Array[Byte]]) => classOf[Array[Array[Byte]]] + case t if isSubtype(t, localTypeOf[CalendarInterval]) => classOf[Array[CalendarInterval]] + case t if isSubtype(t, localTypeOf[Decimal]) => classOf[Array[Decimal]] + case other => { + // There is probably a better way to do this, but I couldn't find it... + val elementType = dataTypeFor(other).asInstanceOf[ObjectType].cls + java.lang.reflect.Array.newInstance(elementType, 0).getClass + } + + } + ObjectType(cls) } - } - - /** - * Returns an expression that can be used to deserialize a Spark SQL representation to an object - * of type `T` with a compatible schema. The Spark SQL representation is located at ordinal 0 of - * a row, i.e., `GetColumnByOrdinal(0, _)`. Nested classes will have their fields accessed using - * `UnresolvedExtractValue`. - * - * The returned expression is used by `ExpressionEncoder`. The encoder will resolve and bind this - * deserializer expression when using it. - */ - def deserializerForType(tpe: `Type`): Expression = { - val clsName = getClassNameFromType(tpe) - val walkedTypePath = WalkedTypePath().recordRoot(clsName) - val Schema(dataType, nullable) = schemaFor(tpe) - - // Assumes we are deserializing the first column of a row. - deserializerForWithNullSafetyAndUpcast(GetColumnByOrdinal(0, dataType), dataType, - nullable = nullable, walkedTypePath, - (casted, typePath) => deserializerFor(tpe, casted, typePath)) - } - - - /** - * Returns an expression that can be used to deserialize an input expression to an object of type - * `T` with a compatible schema. - * - * @param tpe The `Type` of deserialized object. - * @param path The expression which can be used to extract serialized value. - * @param walkedTypePath The paths from top to bottom to access current field when deserializing. - */ - private def deserializerFor( - tpe: `Type`, - path: Expression, - walkedTypePath: WalkedTypePath, - predefinedDt: Option[DataTypeWithClass] = None - ): Expression = cleanUpReflectionObjects { - baseType(tpe) match { - - // - case t if isSubtype(t, localTypeOf[java.lang.Integer]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Integer]) - - case t if isSubtype(t, localTypeOf[Int]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Integer]) - - case t if isSubtype(t, localTypeOf[java.lang.Long]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Long]) - case t if isSubtype(t, localTypeOf[Long]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Long]) - - case t if isSubtype(t, localTypeOf[java.lang.Double]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Double]) - case t if isSubtype(t, localTypeOf[Double]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Double]) - - case t if isSubtype(t, localTypeOf[java.lang.Float]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Float]) - case t if isSubtype(t, localTypeOf[Float]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Float]) - - case t if isSubtype(t, localTypeOf[java.lang.Short]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Short]) - case t if isSubtype(t, localTypeOf[Short]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Short]) - - case t if isSubtype(t, localTypeOf[java.lang.Byte]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Byte]) - case t if isSubtype(t, localTypeOf[Byte]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Byte]) - - case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Boolean]) - case t if isSubtype(t, localTypeOf[Boolean]) => - createDeserializerForTypesSupportValueOf(path, - classOf[java.lang.Boolean]) - - case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => - createDeserializerForLocalDate(path) - - case t if isSubtype(t, localTypeOf[java.sql.Date]) => - createDeserializerForSqlDate(path) - // - - case t if isSubtype(t, localTypeOf[java.time.Instant]) => - createDeserializerForInstant(path) - - case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => - createDeserializerForSqlTimestamp(path) - - case t if isSubtype(t, localTypeOf[java.lang.String]) => - createDeserializerForString(path, returnNullable = false) - - case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => - createDeserializerForJavaBigDecimal(path, returnNullable = false) - - case t if isSubtype(t, localTypeOf[BigDecimal]) => - createDeserializerForScalaBigDecimal(path, returnNullable = false) - - case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => - createDeserializerForJavaBigInteger(path, returnNullable = false) - - case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => - createDeserializerForScalaBigInt(path) - - case t if isSubtype(t, localTypeOf[Array[_]]) => - var TypeRef(_, _, Seq(elementType)) = t - if (predefinedDt.isDefined && !elementType.dealias.typeSymbol.isClass) - elementType = getType(predefinedDt.get.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType.asInstanceOf[DataTypeWithClass].cls) - val Schema(dataType, elementNullable) = predefinedDt.map(it => { - val elementInfo = it.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType.asInstanceOf[DataTypeWithClass] - Schema(elementInfo.dt, elementInfo.nullable) - }) - .getOrElse(schemaFor(elementType)) - val className = getClassNameFromType(elementType) - val newTypePath = walkedTypePath.recordArray(className) - - val mapFunction: Expression => Expression = element => { - // upcast the array element to the data type the encoder expected. - deserializerForWithNullSafetyAndUpcast( - element, - dataType, - nullable = elementNullable, - newTypePath, - (casted, typePath) => deserializerFor(elementType, casted, typePath, predefinedDt.map(_.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType).filter(_.isInstanceOf[ComplexWrapper]).map(_.asInstanceOf[ComplexWrapper]))) + + /** + * Returns true if the value of this data type is same between internal and external. + */ + def isNativeType(dt: DataType): Boolean = dt match { + case NullType | BooleanType | ByteType | ShortType | IntegerType | LongType | + FloatType | DoubleType | BinaryType | CalendarIntervalType => { + true } + case _ => false + } - val arrayData = UnresolvedMapObjects(mapFunction, path) - val arrayCls = arrayClassFor(elementType) - - val methodName = elementType match { - case t if isSubtype(t, definitions.IntTpe) => "toIntArray" - case t if isSubtype(t, definitions.LongTpe) => "toLongArray" - case t if isSubtype(t, definitions.DoubleTpe) => "toDoubleArray" - case t if isSubtype(t, definitions.FloatTpe) => "toFloatArray" - case t if isSubtype(t, definitions.ShortTpe) => "toShortArray" - case t if isSubtype(t, definitions.ByteTpe) => "toByteArray" - case t if isSubtype(t, definitions.BooleanTpe) => "toBooleanArray" - // non-primitive - case _ => "array" + private def baseType(tpe: `Type`): `Type` = { + tpe.dealias match { + case annotatedType: AnnotatedType => annotatedType.underlying + case other => other } - Invoke(arrayData, methodName, arrayCls, returnNullable = false) + } - // We serialize a `Set` to Catalyst array. When we deserialize a Catalyst array - // to a `Set`, if there are duplicated elements, the elements will be de-duplicated. + /** + * Returns an expression that can be used to deserialize a Spark SQL representation to an object + * of type `T` with a compatible schema. The Spark SQL representation is located at ordinal 0 of + * a row, i.e., `GetColumnByOrdinal(0, _)`. Nested classes will have their fields accessed using + * `UnresolvedExtractValue`. + * + * The returned expression is used by `ExpressionEncoder`. The encoder will resolve and bind this + * deserializer expression when using it. + */ + def deserializerForType(tpe: `Type`): Expression = { + val clsName = getClassNameFromType(tpe) + val walkedTypePath = WalkedTypePath().recordRoot(clsName) + val Schema(dataType, nullable) = schemaFor(tpe) + + // Assumes we are deserializing the first column of a row. + deserializerForWithNullSafetyAndUpcast( + GetColumnByOrdinal(0, dataType), dataType, + nullable = nullable, walkedTypePath, + (casted, typePath) => deserializerFor(tpe, casted, typePath) + ) + } - case t if isSubtype(t, localTypeOf[Map[_, _]]) => - val TypeRef(_, _, Seq(keyType, valueType)) = t - val classNameForKey = getClassNameFromType(keyType) - val classNameForValue = getClassNameFromType(valueType) + /** + * Returns an expression that can be used to deserialize an input expression to an object of type + * `T` with a compatible schema. + * + * @param tpe The `Type` of deserialized object. + * @param path The expression which can be used to extract serialized value. + * @param walkedTypePath The paths from top to bottom to access current field when deserializing. + */ + private def deserializerFor( + tpe: `Type`, + path: Expression, + walkedTypePath: WalkedTypePath, + predefinedDt: Option[DataTypeWithClass] = None + ): Expression = cleanUpReflectionObjects { + baseType(tpe) match { + + // + case t if isSubtype(t, localTypeOf[java.lang.Integer]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Integer]) + } + case t if isSubtype(t, localTypeOf[Int]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Integer]) + } + case t if isSubtype(t, localTypeOf[java.lang.Long]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Long]) + } + case t if isSubtype(t, localTypeOf[Long]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Long]) + } + case t if isSubtype(t, localTypeOf[java.lang.Double]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Double]) + } + case t if isSubtype(t, localTypeOf[Double]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Double]) + } + case t if isSubtype(t, localTypeOf[java.lang.Float]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Float]) + } + case t if isSubtype(t, localTypeOf[Float]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Float]) + } + case t if isSubtype(t, localTypeOf[java.lang.Short]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Short]) + } + case t if isSubtype(t, localTypeOf[Short]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Short]) + } + case t if isSubtype(t, localTypeOf[java.lang.Byte]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Byte]) + } + case t if isSubtype(t, localTypeOf[Byte]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Byte]) + } + case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Boolean]) + } + case t if isSubtype(t, localTypeOf[Boolean]) => { + createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Boolean]) + } + case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => { + createDeserializerForLocalDate(path) + } + case t if isSubtype(t, localTypeOf[java.sql.Date]) => { + createDeserializerForSqlDate(path) + } // - val newTypePath = walkedTypePath.recordMap(classNameForKey, classNameForValue) + case t if isSubtype(t, localTypeOf[java.time.Instant]) => { + createDeserializerForInstant(path) + } + case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => { + createDeserializerForTypesSupportValueOf( + Invoke(path, "toString", ObjectType(classOf[String]), returnNullable = false), + getClassFromType(t), + ) + } + case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => { + createDeserializerForSqlTimestamp(path) + } + case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) => { + createDeserializerForLocalDateTime(path) + } + case t if isSubtype(t, localTypeOf[java.time.Duration]) => { + createDeserializerForDuration(path) + } + case t if isSubtype(t, localTypeOf[java.time.Period]) => { + createDeserializerForPeriod(path) + } + case t if isSubtype(t, localTypeOf[java.lang.String]) => { + createDeserializerForString(path, returnNullable = false) + } + case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => { + createDeserializerForJavaBigDecimal(path, returnNullable = false) + } + case t if isSubtype(t, localTypeOf[BigDecimal]) => { + createDeserializerForScalaBigDecimal(path, returnNullable = false) + } + case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => { + createDeserializerForJavaBigInteger(path, returnNullable = false) + } + case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => { + createDeserializerForScalaBigInt(path) + } // TODO case t if isSubtype(t, localTypeOf[Array[Byte]]) => + // createDeserializerForTypesSupportValueOf(path, classOf[Array[Byte]]) + + case t if isSubtype(t, localTypeOf[Array[_]]) => { + var TypeRef(_, _, Seq(elementType)) = t + if (predefinedDt.isDefined && !elementType.dealias.typeSymbol.isClass) + elementType = getType(predefinedDt.get.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType] + .elementType.asInstanceOf[DataTypeWithClass].cls + ) + val Schema(dataType, elementNullable) = predefinedDt.map { it => + val elementInfo = it.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType + .asInstanceOf[DataTypeWithClass] + Schema(elementInfo.dt, elementInfo.nullable) + }.getOrElse(schemaFor(elementType)) + val className = getClassNameFromType(elementType) + val newTypePath = walkedTypePath.recordArray(className) - UnresolvedCatalystToExternalMap( - path, - p => deserializerFor(keyType, p, newTypePath), - p => deserializerFor(valueType, p, newTypePath), - mirror.runtimeClass(t.typeSymbol.asClass) - ) + val mapFunction: Expression => Expression = element => { + // upcast the array element to the data type the encoder expected. + deserializerForWithNullSafetyAndUpcast( + element, + dataType, + nullable = elementNullable, + newTypePath, + (casted, typePath) => deserializerFor( + tpe = elementType, + path = casted, + walkedTypePath = typePath, + predefinedDt = predefinedDt + .map(_.asInstanceOf[KComplexTypeWrapper].dt.asInstanceOf[ArrayType].elementType) + .filter(_.isInstanceOf[ComplexWrapper]) + .map(_.asInstanceOf[ComplexWrapper]) + ) + ) + } - case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => - createDeserializerForTypesSupportValueOf( - createDeserializerForString(path, returnNullable = false), Class.forName(t.toString)) - - case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => - val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt(). - getConstructor().newInstance() - val obj = NewInstance( - udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt(), - Nil, - dataType = ObjectType(udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt())) - Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil) - - case t if UDTRegistration.exists(getClassNameFromType(t)) => - val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). - newInstance().asInstanceOf[UserDefinedType[_]] - val obj = NewInstance( - udt.getClass, - Nil, - dataType = ObjectType(udt.getClass)) - Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil) - - case _ if predefinedDt.isDefined => - predefinedDt.get match { - case wrapper: KDataTypeWrapper => - val structType = wrapper.dt - val cls = wrapper.cls - val arguments = structType - .fields - .map(field => { - val dataType = field.dataType.asInstanceOf[DataTypeWithClass] - val nullable = dataType.nullable - val clsName = getClassNameFromType(getType(dataType.cls)) - val newTypePath = walkedTypePath.recordField(clsName, field.name) - - // For tuples, we based grab the inner fields by ordinal instead of name. - val newPath = deserializerFor( - getType(dataType.cls), - addToPath(path, field.name, dataType.dt, newTypePath), - newTypePath, - Some(dataType).filter(_.isInstanceOf[ComplexWrapper]) - ) - expressionWithNullSafety( - newPath, - nullable = nullable, - newTypePath - ) + val arrayData = UnresolvedMapObjects(mapFunction, path) + val arrayCls = arrayClassFor(elementType) + + val methodName = elementType match { + case t if isSubtype(t, definitions.IntTpe) => "toIntArray" + case t if isSubtype(t, definitions.LongTpe) => "toLongArray" + case t if isSubtype(t, definitions.DoubleTpe) => "toDoubleArray" + case t if isSubtype(t, definitions.FloatTpe) => "toFloatArray" + case t if isSubtype(t, definitions.ShortTpe) => "toShortArray" + case t if isSubtype(t, definitions.ByteTpe) => "toByteArray" + case t if isSubtype(t, definitions.BooleanTpe) => "toBooleanArray" + // non-primitive + case _ => "array" + } + Invoke(arrayData, methodName, arrayCls, returnNullable = false) + } - }) - val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) + // We serialize a `Set` to Catalyst array. When we deserialize a Catalyst array + // to a `Set`, if there are duplicated elements, the elements will be de-duplicated. - org.apache.spark.sql.catalyst.expressions.If( - IsNull(path), - org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), - newInstance - ) + case t if isSubtype(t, localTypeOf[Map[_, _]]) => { + val TypeRef(_, _, Seq(keyType, valueType)) = t - case t: ComplexWrapper => - t.dt match { - case MapType(kt, vt, _) => - val Seq(keyType, valueType) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass].cls).map(getType(_)) - val Seq(keyDT, valueDT) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass]) val classNameForKey = getClassNameFromType(keyType) val classNameForValue = getClassNameFromType(valueType) val newTypePath = walkedTypePath.recordMap(classNameForKey, classNameForValue) - val keyData = - Invoke( - UnresolvedMapObjects( - p => deserializerFor(keyType, p, newTypePath, Some(keyDT).filter(_.isInstanceOf[ComplexWrapper])), - MapKeys(path)), - "array", - ObjectType(classOf[Array[Any]])) - - val valueData = - Invoke( - UnresolvedMapObjects( - p => deserializerFor(valueType, p, newTypePath, Some(valueDT).filter(_.isInstanceOf[ComplexWrapper])), - MapValues(path)), - "array", - ObjectType(classOf[Array[Any]])) - - StaticInvoke( - ArrayBasedMapData.getClass, - ObjectType(classOf[java.util.Map[_, _]]), - "toJavaMap", - keyData :: valueData :: Nil, - returnNullable = false) - - case ArrayType(elementType, containsNull) => - val dataTypeWithClass = elementType.asInstanceOf[DataTypeWithClass] - val mapFunction: Expression => Expression = element => { - // upcast the array element to the data type the encoder expected. - val et = getType(dataTypeWithClass.cls) - val className = getClassNameFromType(et) - val newTypePath = walkedTypePath.recordArray(className) - deserializerForWithNullSafetyAndUpcast( - element, - dataTypeWithClass.dt, - nullable = dataTypeWithClass.nullable, - newTypePath, - (casted, typePath) => { - deserializerFor(et, casted, typePath, Some(dataTypeWithClass).filter(_.isInstanceOf[ComplexWrapper]).map(_.asInstanceOf[ComplexWrapper])) - }) + UnresolvedCatalystToExternalMap( + path, + p => deserializerFor(keyType, p, newTypePath), + p => deserializerFor(valueType, p, newTypePath), + mirror.runtimeClass(t.typeSymbol.asClass) + ) + } + + case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => { + createDeserializerForTypesSupportValueOf( + createDeserializerForString(path, returnNullable = false), + Class.forName(t.toString), + ) + } + case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => { + val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt(). + getConstructor().newInstance() + val obj = NewInstance( + udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt(), + Nil, + dataType = ObjectType(udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt()) + ) + Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil) + } + + case t if UDTRegistration.exists(getClassNameFromType(t)) => { + val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). + newInstance().asInstanceOf[UserDefinedType[_]] + val obj = NewInstance( + udt.getClass, + Nil, + dataType = ObjectType(udt.getClass) + ) + Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil) + } + + case _ if predefinedDt.isDefined => { + predefinedDt.get match { + + case wrapper: KDataTypeWrapper => { + val structType = wrapper.dt + val cls = wrapper.cls + val arguments = structType + .fields + .map { field => + val dataType = field.dataType.asInstanceOf[DataTypeWithClass] + val nullable = dataType.nullable + val clsName = getClassNameFromType(getType(dataType.cls)) + val newTypePath = walkedTypePath.recordField(clsName, field.name) + + // For tuples, we based grab the inner fields by ordinal instead of name. + val newPath = deserializerFor( + tpe = getType(dataType.cls), + path = addToPath(path, field.name, dataType.dt, newTypePath), + walkedTypePath = newTypePath, + predefinedDt = Some(dataType).filter(_.isInstanceOf[ComplexWrapper]) + ) + expressionWithNullSafety( + newPath, + nullable = nullable, + newTypePath + ) + } + val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) + + org.apache.spark.sql.catalyst.expressions.If( + IsNull(path), + org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), + newInstance + ) + } + + case t: ComplexWrapper => { + + t.dt match { + case MapType(kt, vt, _) => { + val Seq(keyType, valueType) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass].cls) + .map(getType(_)) + val Seq(keyDT, valueDT) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass]) + val classNameForKey = getClassNameFromType(keyType) + val classNameForValue = getClassNameFromType(valueType) + + val newTypePath = walkedTypePath.recordMap(classNameForKey, classNameForValue) + + val keyData = + Invoke( + UnresolvedMapObjects( + p => deserializerFor( + keyType, p, newTypePath, Some(keyDT) + .filter(_.isInstanceOf[ComplexWrapper]) + ), + MapKeys(path) + ), + "array", + ObjectType(classOf[Array[Any]]) + ) + + val valueData = + Invoke( + UnresolvedMapObjects( + p => deserializerFor( + valueType, p, newTypePath, Some(valueDT) + .filter(_.isInstanceOf[ComplexWrapper]) + ), + MapValues(path) + ), + "array", + ObjectType(classOf[Array[Any]]) + ) + + StaticInvoke( + ArrayBasedMapData.getClass, + ObjectType(classOf[java.util.Map[_, _]]), + "toJavaMap", + keyData :: valueData :: Nil, + returnNullable = false + ) + } + + case ArrayType(elementType, containsNull) => { + val dataTypeWithClass = elementType.asInstanceOf[DataTypeWithClass] + val mapFunction: Expression => Expression = element => { + // upcast the array element to the data type the encoder expected. + val et = getType(dataTypeWithClass.cls) + val className = getClassNameFromType(et) + val newTypePath = walkedTypePath.recordArray(className) + deserializerForWithNullSafetyAndUpcast( + element, + dataTypeWithClass.dt, + nullable = dataTypeWithClass.nullable, + newTypePath, + (casted, typePath) => { + deserializerFor( + et, casted, typePath, Some(dataTypeWithClass) + .filter(_.isInstanceOf[ComplexWrapper]) + .map(_.asInstanceOf[ComplexWrapper]) + ) + } + ) + } + + UnresolvedMapObjects(mapFunction, path, customCollectionCls = Some(t.cls)) + } + + case StructType(elementType: Array[StructField]) => { + val cls = t.cls + + val arguments = elementType.map { field => + val dataType = field.dataType.asInstanceOf[DataTypeWithClass] + val nullable = dataType.nullable + val clsName = getClassNameFromType(getType(dataType.cls)) + val newTypePath = walkedTypePath.recordField(clsName, field.name) + + // For tuples, we based grab the inner fields by ordinal instead of name. + val newPath = deserializerFor( + getType(dataType.cls), + addToPath(path, field.name, dataType.dt, newTypePath), + newTypePath, + Some(dataType).filter(_.isInstanceOf[ComplexWrapper]) + ) + expressionWithNullSafety( + newPath, + nullable = nullable, + newTypePath + ) + } + val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) + + org.apache.spark.sql.catalyst.expressions.If( + IsNull(path), + org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), + newInstance + ) + } + + case _ => { + throw new UnsupportedOperationException( + s"No Encoder found for $tpe\n" + walkedTypePath + ) + } + } + } } + } - UnresolvedMapObjects(mapFunction, path, customCollectionCls = Some(t.cls)) - - case StructType(elementType: Array[StructField]) => - val cls = t.cls - - val arguments = elementType.map { field => - val dataType = field.dataType.asInstanceOf[DataTypeWithClass] - val nullable = dataType.nullable - val clsName = getClassNameFromType(getType(dataType.cls)) - val newTypePath = walkedTypePath.recordField(clsName, field.name) - - // For tuples, we based grab the inner fields by ordinal instead of name. - val newPath = deserializerFor( - getType(dataType.cls), - addToPath(path, field.name, dataType.dt, newTypePath), - newTypePath, - Some(dataType).filter(_.isInstanceOf[ComplexWrapper]) - ) - expressionWithNullSafety( - newPath, - nullable = nullable, - newTypePath - ) + case t if definedByConstructorParams(t) => { + val params = getConstructorParameters(t) + + val cls = getClassFromType(tpe) + + val arguments = params.zipWithIndex.map { case ((fieldName, fieldType), i) => + val Schema(dataType, nullable) = schemaFor(fieldType) + val clsName = getClassNameFromType(fieldType) + val newTypePath = walkedTypePath.recordField(clsName, fieldName) + + // For tuples, we based grab the inner fields by ordinal instead of name. + val newPath = if (cls.getName startsWith "scala.Tuple") { + deserializerFor( + fieldType, + addToPathOrdinal(path, i, dataType, newTypePath), + newTypePath + ) + } else { + deserializerFor( + fieldType, + addToPath(path, fieldName, dataType, newTypePath), + newTypePath + ) + } + expressionWithNullSafety( + newPath, + nullable = nullable, + newTypePath + ) } + val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) org.apache.spark.sql.catalyst.expressions.If( - IsNull(path), - org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), - newInstance + IsNull(path), + org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), + newInstance ) + } - - case _ => + case _ => { throw new UnsupportedOperationException( - s"No Encoder found for $tpe\n" + walkedTypePath) + s"No Encoder found for $tpe\n" + walkedTypePath + ) } } + } - case t if definedByConstructorParams(t) => - val params = getConstructorParameters(t) - - val cls = getClassFromType(tpe) - - val arguments = params.zipWithIndex.map { case ((fieldName, fieldType), i) => - val Schema(dataType, nullable) = schemaFor(fieldType) - val clsName = getClassNameFromType(fieldType) - val newTypePath = walkedTypePath.recordField(clsName, fieldName) - - // For tuples, we based grab the inner fields by ordinal instead of name. - val newPath = if (cls.getName startsWith "scala.Tuple") { - deserializerFor( - fieldType, - addToPathOrdinal(path, i, dataType, newTypePath), - newTypePath) - } else { - deserializerFor( - fieldType, - addToPath(path, fieldName, dataType, newTypePath), - newTypePath) - } - expressionWithNullSafety( - newPath, - nullable = nullable, - newTypePath) - } + /** + * Returns an expression for serializing an object of type T to Spark SQL representation. The + * input object is located at ordinal 0 of a row, i.e., `BoundReference(0, _)`. + * + * If the given type is not supported, i.e. there is no encoder can be built for this type, + * an [[UnsupportedOperationException]] will be thrown with detailed error message to explain + * the type path walked so far and which class we are not supporting. + * There are 4 kinds of type path: + * * the root type: `root class: "abc.xyz.MyClass"` + * * the value type of [[Option]]: `option value class: "abc.xyz.MyClass"` + * * the element type of [[Array]] or [[Seq]]: `array element class: "abc.xyz.MyClass"` + * * the field of [[Product]]: `field (class: "abc.xyz.MyClass", name: "myField")` + */ + def serializerForType(tpe: `Type`): Expression = ScalaReflection.cleanUpReflectionObjects { + val clsName = getClassNameFromType(tpe) + val walkedTypePath = WalkedTypePath().recordRoot(clsName) + + // The input object to `ExpressionEncoder` is located at first column of an row. + val isPrimitive = tpe.typeSymbol.asClass.isPrimitive + val inputObject = BoundReference(0, dataTypeFor(tpe), nullable = !isPrimitive) + + serializerFor(inputObject, tpe, walkedTypePath) + } - val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) + def getType[T](clazz: Class[T]): universe.Type = { + val mir = runtimeMirror(clazz.getClassLoader) + mir.classSymbol(clazz).toType + } - org.apache.spark.sql.catalyst.expressions.If( - IsNull(path), - org.apache.spark.sql.catalyst.expressions.Literal.create(null, ObjectType(cls)), - newInstance + def deserializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = { + val tpe = getType(cls) + val clsName = getClassNameFromType(tpe) + val walkedTypePath = WalkedTypePath().recordRoot(clsName) + + // Assumes we are deserializing the first column of a row. + deserializerForWithNullSafetyAndUpcast( + GetColumnByOrdinal(0, dt.dt), + dt.dt, + nullable = dt.nullable, + walkedTypePath, + (casted, typePath) => deserializerFor(tpe, casted, typePath, Some(dt)) ) - - case _ => - throw new UnsupportedOperationException( - s"No Encoder found for $tpe\n" + walkedTypePath) } - } - - /** - * Returns an expression for serializing an object of type T to Spark SQL representation. The - * input object is located at ordinal 0 of a row, i.e., `BoundReference(0, _)`. - * - * If the given type is not supported, i.e. there is no encoder can be built for this type, - * an [[UnsupportedOperationException]] will be thrown with detailed error message to explain - * the type path walked so far and which class we are not supporting. - * There are 4 kinds of type path: - * * the root type: `root class: "abc.xyz.MyClass"` - * * the value type of [[Option]]: `option value class: "abc.xyz.MyClass"` - * * the element type of [[Array]] or [[Seq]]: `array element class: "abc.xyz.MyClass"` - * * the field of [[Product]]: `field (class: "abc.xyz.MyClass", name: "myField")` - */ - def serializerForType(tpe: `Type`): Expression = ScalaReflection.cleanUpReflectionObjects { - val clsName = getClassNameFromType(tpe) - val walkedTypePath = WalkedTypePath().recordRoot(clsName) - - // The input object to `ExpressionEncoder` is located at first column of an row. - val isPrimitive = tpe.typeSymbol.asClass.isPrimitive - val inputObject = BoundReference(0, dataTypeFor(tpe), nullable = !isPrimitive) - - serializerFor(inputObject, tpe, walkedTypePath) - } - - def getType[T](clazz: Class[T]): universe.Type = { - val mir = runtimeMirror(clazz.getClassLoader) - mir.classSymbol(clazz).toType - } - - def deserializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = { - val tpe = getType(cls) - val clsName = getClassNameFromType(tpe) - val walkedTypePath = WalkedTypePath().recordRoot(clsName) - - // Assumes we are deserializing the first column of a row. - deserializerForWithNullSafetyAndUpcast( - GetColumnByOrdinal(0, dt.dt), - dt.dt, - nullable = dt.nullable, - walkedTypePath, - (casted, typePath) => deserializerFor(tpe, casted, typePath, Some(dt)) - ) - } - - - def serializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = { - - val tpe = getType(cls) - val clsName = getClassNameFromType(tpe) - val walkedTypePath = WalkedTypePath().recordRoot(clsName) - val inputObject = BoundReference(0, ObjectType(cls), nullable = true) - serializerFor(inputObject, tpe, walkedTypePath, predefinedDt = Some(dt)) - } - - /** - * Returns an expression for serializing the value of an input expression into Spark SQL - * internal representation. - */ - private def serializerFor( - inputObject: Expression, - tpe: `Type`, - walkedTypePath: WalkedTypePath, - seenTypeSet: Set[`Type`] = Set.empty, - predefinedDt: Option[DataTypeWithClass] = None - ): Expression = cleanUpReflectionObjects { - - def toCatalystArray(input: Expression, elementType: `Type`, predefinedDt: Option[DataTypeWithClass] = None): Expression = { - predefinedDt.map(_.dt).getOrElse(dataTypeFor(elementType)) match { - - case dt@(MapType(_, _, _) | ArrayType(_, _) | StructType(_)) => - val clsName = getClassNameFromType(elementType) - val newPath = walkedTypePath.recordArray(clsName) - createSerializerForMapObjects(input, ObjectType(predefinedDt.get.cls), - serializerFor(_, elementType, newPath, seenTypeSet, predefinedDt)) - - case dt: ObjectType => - val clsName = getClassNameFromType(elementType) - val newPath = walkedTypePath.recordArray(clsName) - createSerializerForMapObjects(input, dt, - serializerFor(_, elementType, newPath, seenTypeSet)) - - case dt@(BooleanType | ByteType | ShortType | IntegerType | LongType | - FloatType | DoubleType) => - val cls = input.dataType.asInstanceOf[ObjectType].cls - if (cls.isArray && cls.getComponentType.isPrimitive) { - createSerializerForPrimitiveArray(input, dt) - } else { - createSerializerForGenericArray(input, dt, nullable = predefinedDt.map(_.nullable).getOrElse(schemaFor(elementType).nullable)) - } - - case _: StringType => - val clsName = getClassNameFromType(typeOf[String]) - val newPath = walkedTypePath.recordArray(clsName) - createSerializerForMapObjects(input, ObjectType(Class.forName(getClassNameFromType(elementType))), - serializerFor(_, elementType, newPath, seenTypeSet)) - - - case dt => - createSerializerForGenericArray(input, dt, nullable = predefinedDt.map(_.nullable).getOrElse(schemaFor(elementType).nullable)) - } + + + def serializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = { + val tpe = getType(cls) + val clsName = getClassNameFromType(tpe) + val walkedTypePath = WalkedTypePath().recordRoot(clsName) + val inputObject = BoundReference(0, ObjectType(cls), nullable = true) + serializerFor(inputObject, tpe, walkedTypePath, predefinedDt = Some(dt)) } - baseType(tpe) match { - - // - case _ if !inputObject.dataType.isInstanceOf[ObjectType] && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => inputObject - - case t if isSubtype(t, localTypeOf[Option[_]]) => - val TypeRef(_, _, Seq(optType)) = t - val className = getClassNameFromType(optType) - val newPath = walkedTypePath.recordOption(className) - val unwrapped = UnwrapOption(dataTypeFor(optType), inputObject) - serializerFor(unwrapped, optType, newPath, seenTypeSet) - - // Since List[_] also belongs to localTypeOf[Product], we put this case before - // "case t if definedByConstructorParams(t)" to make sure it will match to the - // case "localTypeOf[Seq[_]]" - case t if isSubtype(t, localTypeOf[Seq[_]]) => - val TypeRef(_, _, Seq(elementType)) = t - toCatalystArray(inputObject, elementType) - - case t if isSubtype(t, localTypeOf[Array[_]]) && predefinedDt.isEmpty => - val TypeRef(_, _, Seq(elementType)) = t - toCatalystArray(inputObject, elementType) - - case t if isSubtype(t, localTypeOf[Map[_, _]]) => - val TypeRef(_, _, Seq(keyType, valueType)) = t - val keyClsName = getClassNameFromType(keyType) - val valueClsName = getClassNameFromType(valueType) - val keyPath = walkedTypePath.recordKeyForMap(keyClsName) - val valuePath = walkedTypePath.recordValueForMap(valueClsName) - - createSerializerForMap( - inputObject, - MapElementInformation( - dataTypeFor(keyType), - nullable = !keyType.typeSymbol.asClass.isPrimitive, - serializerFor(_, keyType, keyPath, seenTypeSet)), - MapElementInformation( - dataTypeFor(valueType), - nullable = !valueType.typeSymbol.asClass.isPrimitive, - serializerFor(_, valueType, valuePath, seenTypeSet)) - ) + /** + * Returns an expression for serializing the value of an input expression into Spark SQL + * internal representation. + */ + private def serializerFor( + inputObject: Expression, + tpe: `Type`, + walkedTypePath: WalkedTypePath, + seenTypeSet: Set[`Type`] = Set.empty, + predefinedDt: Option[DataTypeWithClass] = None, + ): Expression = cleanUpReflectionObjects { + + def toCatalystArray( + input: Expression, + elementType: `Type`, + predefinedDt: Option[DataTypeWithClass] = None, + ): Expression = { + val dataType = predefinedDt + .map(_.dt) + .getOrElse { + dataTypeFor(elementType) + } - case t if isSubtype(t, localTypeOf[scala.collection.Set[_]]) => - val TypeRef(_, _, Seq(elementType)) = t + dataType match { - // There's no corresponding Catalyst type for `Set`, we serialize a `Set` to Catalyst array. - // Note that the property of `Set` is only kept when manipulating the data as domain object. - val newInput = - Invoke( - inputObject, - "toSeq", - ObjectType(classOf[Seq[_]])) - - toCatalystArray(newInput, elementType) - - case t if isSubtype(t, localTypeOf[String]) => - createSerializerForString(inputObject) - case t if isSubtype(t, localTypeOf[java.time.Instant]) => - createSerializerForJavaInstant(inputObject) - - case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => - createSerializerForSqlTimestamp(inputObject) - - case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => - createSerializerForJavaLocalDate(inputObject) - - case t if isSubtype(t, localTypeOf[java.sql.Date]) => createSerializerForSqlDate(inputObject) - - case t if isSubtype(t, localTypeOf[BigDecimal]) => - createSerializerForScalaBigDecimal(inputObject) - - case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => - createSerializerForJavaBigDecimal(inputObject) - - case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => - createSerializerForJavaBigInteger(inputObject) - - case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => - createSerializerForScalaBigInt(inputObject) - - case t if isSubtype(t, localTypeOf[java.lang.Integer]) => - createSerializerForInteger(inputObject) - case t if isSubtype(t, localTypeOf[Int]) => - createSerializerForInteger(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Long]) => createSerializerForLong(inputObject) - case t if isSubtype(t, localTypeOf[Long]) => createSerializerForLong(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Double]) => createSerializerForDouble(inputObject) - case t if isSubtype(t, localTypeOf[Double]) => createSerializerForDouble(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Float]) => createSerializerForFloat(inputObject) - case t if isSubtype(t, localTypeOf[Float]) => createSerializerForFloat(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Short]) => createSerializerForShort(inputObject) - case t if isSubtype(t, localTypeOf[Short]) => createSerializerForShort(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Byte]) => createSerializerForByte(inputObject) - case t if isSubtype(t, localTypeOf[Byte]) => createSerializerForByte(inputObject) - case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => createSerializerForBoolean(inputObject) - case t if isSubtype(t, localTypeOf[Boolean]) => createSerializerForBoolean(inputObject) - - case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => - createSerializerForString( - Invoke(inputObject, "name", ObjectType(classOf[String]), returnNullable = false)) - - case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => - val udt = getClassFromType(t) - .getAnnotation(classOf[SQLUserDefinedType]).udt().getConstructor().newInstance() - val udtClass = udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt() - createSerializerForUserDefinedType(inputObject, udt, udtClass) - - case t if UDTRegistration.exists(getClassNameFromType(t)) => - val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). - newInstance().asInstanceOf[UserDefinedType[_]] - val udtClass = udt.getClass - createSerializerForUserDefinedType(inputObject, udt, udtClass) - // - - case _ if predefinedDt.isDefined => - predefinedDt.get match { - case dataType: KDataTypeWrapper => - val cls = dataType.cls - val properties = getJavaBeanReadableProperties(cls) - val structFields = dataType.dt.fields.map(_.asInstanceOf[KStructField]) - val fields = structFields.map { structField => - val maybeProp = properties.find(it => it.getReadMethod.getName == structField.getterName) - if (maybeProp.isEmpty) throw new IllegalArgumentException(s"Field ${structField.name} is not found among available props, which are: ${properties.map(_.getName).mkString(", ")}") - val fieldName = structField.name - val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls - val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] - val fieldValue = Invoke( - inputObject, - maybeProp.get.getReadMethod.getName, - inferExternalType(propClass), - returnNullable = structField.nullable - ) - val newPath = walkedTypePath.recordField(propClass.getName, fieldName) - (fieldName, serializerFor(fieldValue, getType(propClass), newPath, seenTypeSet, if (propDt.isInstanceOf[ComplexWrapper]) Some(propDt) else None)) - - } - createSerializerForObject(inputObject, fields) - - case otherTypeWrapper: ComplexWrapper => - otherTypeWrapper.dt match { - case MapType(kt, vt, _) => - val Seq(keyType, valueType) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass].cls).map(getType(_)) - val Seq(keyDT, valueDT) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass]) + case dt @ (MapType(_, _, _) | ArrayType(_, _) | StructType(_)) => { + val clsName = getClassNameFromType(elementType) + val newPath = walkedTypePath.recordArray(clsName) + createSerializerForMapObjects( + input, ObjectType(predefinedDt.get.cls), + serializerFor(_, elementType, newPath, seenTypeSet, predefinedDt) + ) + } + + case dt: ObjectType => { + val clsName = getClassNameFromType(elementType) + val newPath = walkedTypePath.recordArray(clsName) + createSerializerForMapObjects( + input, dt, + serializerFor(_, elementType, newPath, seenTypeSet) + ) + } + + // case dt: ByteType => + // createSerializerForPrimitiveArray(input, dt) + + case dt @ (BooleanType | ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType) => { + val cls = input.dataType.asInstanceOf[ObjectType].cls + if (cls.isArray && cls.getComponentType.isPrimitive) { + createSerializerForPrimitiveArray(input, dt) + } else { + createSerializerForGenericArray( + inputObject = input, + dataType = dt, + nullable = predefinedDt + .map(_.nullable) + .getOrElse( + schemaFor(elementType).nullable + ), + ) + } + } + + case _: StringType => { + val clsName = getClassNameFromType(typeOf[String]) + val newPath = walkedTypePath.recordArray(clsName) + createSerializerForMapObjects( + input, ObjectType(Class.forName(getClassNameFromType(elementType))), + serializerFor(_, elementType, newPath, seenTypeSet) + ) + } + + case dt => { + createSerializerForGenericArray( + inputObject = input, + dataType = dt, + nullable = predefinedDt + .map(_.nullable) + .getOrElse { + schemaFor(elementType).nullable + }, + ) + } + } + } + + baseType(tpe) match { + + // + case _ if !inputObject.dataType.isInstanceOf[ObjectType] + && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => { + inputObject + } + case t if isSubtype(t, localTypeOf[Option[_]]) => { + val TypeRef(_, _, Seq(optType)) = t + val className = getClassNameFromType(optType) + val newPath = walkedTypePath.recordOption(className) + val unwrapped = UnwrapOption(dataTypeFor(optType), inputObject) + serializerFor(unwrapped, optType, newPath, seenTypeSet) + } + + // Since List[_] also belongs to localTypeOf[Product], we put this case before + // "case t if definedByConstructorParams(t)" to make sure it will match to the + // case "localTypeOf[Seq[_]]" + case t if isSubtype(t, localTypeOf[Seq[_]]) => { + val TypeRef(_, _, Seq(elementType)) = t + toCatalystArray(inputObject, elementType) + } + + case t if isSubtype(t, localTypeOf[Array[_]]) && predefinedDt.isEmpty => { + val TypeRef(_, _, Seq(elementType)) = t + toCatalystArray(inputObject, elementType) + } + + case t if isSubtype(t, localTypeOf[Map[_, _]]) => { + val TypeRef(_, _, Seq(keyType, valueType)) = t val keyClsName = getClassNameFromType(keyType) val valueClsName = getClassNameFromType(valueType) val keyPath = walkedTypePath.recordKeyForMap(keyClsName) val valuePath = walkedTypePath.recordValueForMap(valueClsName) createSerializerForMap( - inputObject, - MapElementInformation( - dataTypeFor(keyType), - nullable = !keyType.typeSymbol.asClass.isPrimitive, - serializerFor(_, keyType, keyPath, seenTypeSet, Some(keyDT).filter(_.isInstanceOf[ComplexWrapper]))), - MapElementInformation( - dataTypeFor(valueType), - nullable = !valueType.typeSymbol.asClass.isPrimitive, - serializerFor(_, valueType, valuePath, seenTypeSet, Some(valueDT).filter(_.isInstanceOf[ComplexWrapper]))) + inputObject, + MapElementInformation( + dataTypeFor(keyType), + nullable = !keyType.typeSymbol.asClass.isPrimitive, + serializerFor(_, keyType, keyPath, seenTypeSet) + ), + MapElementInformation( + dataTypeFor(valueType), + nullable = !valueType.typeSymbol.asClass.isPrimitive, + serializerFor(_, valueType, valuePath, seenTypeSet) + ) ) - case ArrayType(elementType, _) => - toCatalystArray(inputObject, getType(elementType.asInstanceOf[DataTypeWithClass].cls), Some(elementType.asInstanceOf[DataTypeWithClass])) + } - case StructType(elementType: Array[StructField]) => - val cls = otherTypeWrapper.cls - val names = elementType.map(_.name) + case t if isSubtype(t, localTypeOf[scala.collection.Set[_]]) => { + val TypeRef(_, _, Seq(elementType)) = t - val beanInfo = Introspector.getBeanInfo(cls) - val methods = beanInfo.getMethodDescriptors.filter(it => names.contains(it.getName)) + // There's no corresponding Catalyst type for `Set`, we serialize a `Set` to Catalyst array. + // Note that the property of `Set` is only kept when manipulating the data as domain object. + val newInput = + Invoke( + inputObject, + "toSeq", + ObjectType(classOf[Seq[_]]) + ) + toCatalystArray(newInput, elementType) + } - val fields = elementType.map { structField => + case t if isSubtype(t, localTypeOf[String]) => { + createSerializerForString(inputObject) + } + case t if isSubtype(t, localTypeOf[java.time.Instant]) => { + createSerializerForJavaInstant(inputObject) + } + case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => { + createSerializerForSqlTimestamp(inputObject) + } + case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) => { + createSerializerForLocalDateTime(inputObject) + } + case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => { + createSerializerForJavaLocalDate(inputObject) + } + case t if isSubtype(t, localTypeOf[java.sql.Date]) => { + createSerializerForSqlDate(inputObject) + } + case t if isSubtype(t, localTypeOf[java.time.Duration]) => { + createSerializerForJavaDuration(inputObject) + } + case t if isSubtype(t, localTypeOf[java.time.Period]) => { + createSerializerForJavaPeriod(inputObject) + } + case t if isSubtype(t, localTypeOf[BigDecimal]) => { + createSerializerForScalaBigDecimal(inputObject) + } + case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => { + createSerializerForJavaBigDecimal(inputObject) + } + case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => { + createSerializerForJavaBigInteger(inputObject) + } + case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => { + createSerializerForScalaBigInt(inputObject) + } - val maybeProp = methods.find(it => it.getName == structField.name) - if (maybeProp.isEmpty) throw new IllegalArgumentException(s"Field ${structField.name} is not found among available props, which are: ${methods.map(_.getName).mkString(", ")}") - val fieldName = structField.name - val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls - val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] - val fieldValue = Invoke( - inputObject, - maybeProp.get.getName, - inferExternalType(propClass), - returnNullable = propDt.nullable - ) - val newPath = walkedTypePath.recordField(propClass.getName, fieldName) - (fieldName, serializerFor(fieldValue, getType(propClass), newPath, seenTypeSet, if (propDt.isInstanceOf[ComplexWrapper]) Some(propDt) else None)) + case t if isSubtype(t, localTypeOf[java.lang.Integer]) => { + createSerializerForInteger(inputObject) + } + case t if isSubtype(t, localTypeOf[Int]) => { + createSerializerForInteger(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Long]) => { + createSerializerForLong(inputObject) + } + case t if isSubtype(t, localTypeOf[Long]) => { + createSerializerForLong(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Double]) => { + createSerializerForDouble(inputObject) + } + case t if isSubtype(t, localTypeOf[Double]) => { + createSerializerForDouble(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Float]) => { + createSerializerForFloat(inputObject) + } + case t if isSubtype(t, localTypeOf[Float]) => { + createSerializerForFloat(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Short]) => { + createSerializerForShort(inputObject) + } + case t if isSubtype(t, localTypeOf[Short]) => { + createSerializerForShort(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Byte]) => { + createSerializerForByte(inputObject) + } + case t if isSubtype(t, localTypeOf[Byte]) => { + createSerializerForByte(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => { + createSerializerForBoolean(inputObject) + } + case t if isSubtype(t, localTypeOf[Boolean]) => { + createSerializerForBoolean(inputObject) + } + case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => { + createSerializerForString( + Invoke(inputObject, "name", ObjectType(classOf[String]), returnNullable = false) + ) + } + case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => { + val udt = getClassFromType(t) + .getAnnotation(classOf[SQLUserDefinedType]).udt().getConstructor().newInstance() + val udtClass = udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt() + createSerializerForUserDefinedType(inputObject, udt, udtClass) + } + + case t if UDTRegistration.exists(getClassNameFromType(t)) => { + val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). + newInstance().asInstanceOf[UserDefinedType[_]] + val udtClass = udt.getClass + createSerializerForUserDefinedType(inputObject, udt, udtClass) + } + // + + case _ if predefinedDt.isDefined => { + predefinedDt.get match { + + case dataType: KDataTypeWrapper => { + val cls = dataType.cls + val properties = getJavaBeanReadableProperties(cls) + val structFields = dataType.dt.fields.map(_.asInstanceOf[KStructField]) + val fields: Array[(String, Expression)] = structFields.map { structField => + val maybeProp = properties.find(it => it.getReadMethod.getName == structField.getterName) + if (maybeProp.isEmpty) throw new IllegalArgumentException(s"Field ${ + structField.name + } is not found among available props, which are: ${properties.map(_.getName).mkString(", ")}" + ) + val fieldName = structField.name + val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls + val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] + val fieldValue = Invoke( + inputObject, + maybeProp.get.getReadMethod.getName, + inferExternalType(propClass), + returnNullable = structField.nullable + ) + val newPath = walkedTypePath.recordField(propClass.getName, fieldName) + + val tpe = + // if (propClass == classOf[Array[Byte]]) localTypeOf[Array[Byte]] + // else + getType(propClass) + + val serializer = serializerFor( + inputObject = fieldValue, + tpe = tpe, + walkedTypePath = newPath, + seenTypeSet = seenTypeSet, + predefinedDt = if (propDt + .isInstanceOf[ComplexWrapper] /*&& propClass != classOf[Array[Byte]]*/ ) Some(propDt) else None + ) + + (fieldName, serializer) + } + createSerializerForObject(inputObject, fields) + } + + case otherTypeWrapper: ComplexWrapper => { + + otherTypeWrapper.dt match { + + case MapType(kt, vt, _) => { + val Seq(keyType, valueType) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass].cls) + .map(getType(_)) + val Seq(keyDT, valueDT) = Seq(kt, vt).map(_.asInstanceOf[DataTypeWithClass]) + val keyClsName = getClassNameFromType(keyType) + val valueClsName = getClassNameFromType(valueType) + val keyPath = walkedTypePath.recordKeyForMap(keyClsName) + val valuePath = walkedTypePath.recordValueForMap(valueClsName) + + createSerializerForMap( + inputObject, + MapElementInformation( + dataTypeFor(keyType), + nullable = !keyType.typeSymbol.asClass.isPrimitive, + serializerFor( + _, keyType, keyPath, seenTypeSet, Some(keyDT) + .filter(_.isInstanceOf[ComplexWrapper]) + ) + ), + MapElementInformation( + dataTypeFor(valueType), + nullable = !valueType.typeSymbol.asClass.isPrimitive, + serializerFor( + _, valueType, valuePath, seenTypeSet, Some(valueDT) + .filter(_.isInstanceOf[ComplexWrapper]) + ) + ) + ) + } + + case ArrayType(elementType, _) => { + toCatalystArray( + inputObject, + getType(elementType.asInstanceOf[DataTypeWithClass].cls + ), Some(elementType.asInstanceOf[DataTypeWithClass]) + ) + } + + case StructType(elementType: Array[StructField]) => { + val cls = otherTypeWrapper.cls + val names = elementType.map(_.name) + + val beanInfo = Introspector.getBeanInfo(cls) + val methods = beanInfo.getMethodDescriptors.filter(it => names.contains(it.getName)) + + + val fields = elementType.map { structField => + + val maybeProp = methods.find(it => it.getName == structField.name) + if (maybeProp.isEmpty) throw new IllegalArgumentException(s"Field ${ + structField.name + } is not found among available props, which are: ${ + methods.map(_.getName).mkString(", ") + }" + ) + val fieldName = structField.name + val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls + val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] + val fieldValue = Invoke( + inputObject, + maybeProp.get.getName, + inferExternalType(propClass), + returnNullable = propDt.nullable + ) + val newPath = walkedTypePath.recordField(propClass.getName, fieldName) + (fieldName, serializerFor( + fieldValue, getType(propClass), newPath, seenTypeSet, if (propDt + .isInstanceOf[ComplexWrapper]) Some(propDt) else None + )) + + } + createSerializerForObject(inputObject, fields) + } + + case _ => { + throw new UnsupportedOperationException( + s"No Encoder found for $tpe\n" + walkedTypePath + ) + } + } + } + } + } + + case t if definedByConstructorParams(t) => { + if (seenTypeSet.contains(t)) { + throw new UnsupportedOperationException( + s"cannot have circular references in class, but got the circular reference of class $t" + ) + } + val params = getConstructorParameters(t) + val fields = params.map { case (fieldName, fieldType) => + if (javaKeywords.contains(fieldName)) { + throw new UnsupportedOperationException(s"`$fieldName` is a reserved keyword and " + + "cannot be used as field name\n" + walkedTypePath + ) + } + + // SPARK-26730 inputObject won't be null with If's guard below. And KnownNotNul + // is necessary here. Because for a nullable nested inputObject with struct data + // type, e.g. StructType(IntegerType, StringType), it will return nullable=true + // for IntegerType without KnownNotNull. And that's what we do not expect to. + val fieldValue = Invoke( + KnownNotNull(inputObject), fieldName, dataTypeFor(fieldType), + returnNullable = !fieldType.typeSymbol.asClass.isPrimitive + ) + val clsName = getClassNameFromType(fieldType) + val newPath = walkedTypePath.recordField(clsName, fieldName) + (fieldName, serializerFor(fieldValue, fieldType, newPath, seenTypeSet + t)) } createSerializerForObject(inputObject, fields) + } - case _ => + case _ => { throw new UnsupportedOperationException( - s"No Encoder found for $tpe\n" + walkedTypePath) - + s"No Encoder found for $tpe\n" + walkedTypePath + ) } } + } - case t if definedByConstructorParams(t) => - if (seenTypeSet.contains(t)) { - throw new UnsupportedOperationException( - s"cannot have circular references in class, but got the circular reference of class $t") - } + def createDeserializerForString(path: Expression, returnNullable: Boolean): Expression = { + Invoke( + path, "toString", ObjectType(classOf[java.lang.String]), + returnNullable = returnNullable + ) + } - val params = getConstructorParameters(t) - val fields = params.map { case (fieldName, fieldType) => - if (javaKeywords.contains(fieldName)) { - throw new UnsupportedOperationException(s"`$fieldName` is a reserved keyword and " + - "cannot be used as field name\n" + walkedTypePath) - } - - // SPARK-26730 inputObject won't be null with If's guard below. And KnownNotNul - // is necessary here. Because for a nullable nested inputObject with struct data - // type, e.g. StructType(IntegerType, StringType), it will return nullable=true - // for IntegerType without KnownNotNull. And that's what we do not expect to. - val fieldValue = Invoke(KnownNotNull(inputObject), fieldName, dataTypeFor(fieldType), - returnNullable = !fieldType.typeSymbol.asClass.isPrimitive) - val clsName = getClassNameFromType(fieldType) - val newPath = walkedTypePath.recordField(clsName, fieldName) - (fieldName, serializerFor(fieldValue, fieldType, newPath, seenTypeSet + t)) - } - createSerializerForObject(inputObject, fields) + def getJavaBeanReadableProperties(beanClass: Class[_]): Array[PropertyDescriptor] = { + val beanInfo = Introspector.getBeanInfo(beanClass) + beanInfo.getPropertyDescriptors.filterNot(_.getName == "class") + .filterNot(_.getName == "declaringClass") + .filter(_.getReadMethod != null) + } - case _ => - throw new UnsupportedOperationException( - s"No Encoder found for $tpe\n" + walkedTypePath) + /* + * Retrieves the runtime class corresponding to the provided type. + */ + def getClassFromType(tpe: Type): Class[_] = mirror.runtimeClass(tpe.dealias.typeSymbol.asClass) + + case class Schema(dataType: DataType, nullable: Boolean) + + /** Returns a catalyst DataType and its nullability for the given Scala Type using reflection. */ + def schemaFor(tpe: `Type`): Schema = cleanUpReflectionObjects { + + baseType(tpe) match { + // this must be the first case, since all objects in scala are instances of Null, therefore + // Null type would wrongly match the first of them, which is Option as of now + case t if isSubtype(t, definitions.NullTpe) => Schema(NullType, nullable = true) + + case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => { + val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt(). + getConstructor().newInstance() + Schema(udt, nullable = true) + } + case t if UDTRegistration.exists(getClassNameFromType(t)) => { + val udt = UDTRegistration + .getUDTFor(getClassNameFromType(t)) + .get + .getConstructor() + .newInstance() + .asInstanceOf[UserDefinedType[_]] + Schema(udt, nullable = true) + } + case t if isSubtype(t, localTypeOf[Option[_]]) => { + val TypeRef(_, _, Seq(optType)) = t + Schema(schemaFor(optType).dataType, nullable = true) + } + case t if isSubtype(t, localTypeOf[Array[Byte]]) => { + Schema(BinaryType, nullable = true) + } + case t if isSubtype(t, localTypeOf[Array[_]]) => { + val TypeRef(_, _, Seq(elementType)) = t + val Schema(dataType, nullable) = schemaFor(elementType) + Schema(ArrayType(dataType, containsNull = nullable), nullable = true) + } + case t if isSubtype(t, localTypeOf[Seq[_]]) => { + val TypeRef(_, _, Seq(elementType)) = t + val Schema(dataType, nullable) = schemaFor(elementType) + Schema(ArrayType(dataType, containsNull = nullable), nullable = true) + } + case t if isSubtype(t, localTypeOf[Map[_, _]]) => { + val TypeRef(_, _, Seq(keyType, valueType)) = t + val Schema(valueDataType, valueNullable) = schemaFor(valueType) + Schema( + MapType( + schemaFor(keyType).dataType, + valueDataType, valueContainsNull = valueNullable + ), nullable = true + ) + } + case t if isSubtype(t, localTypeOf[Set[_]]) => { + val TypeRef(_, _, Seq(elementType)) = t + val Schema(dataType, nullable) = schemaFor(elementType) + Schema(ArrayType(dataType, containsNull = nullable), nullable = true) + } + case t if isSubtype(t, localTypeOf[String]) => { + Schema(StringType, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.time.Instant]) => { + Schema(TimestampType, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => { + Schema(TimestampType, nullable = true) + } + // SPARK-36227: Remove TimestampNTZ type support in Spark 3.2 with minimal code changes. + case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) && Utils.isTesting => { + Schema(TimestampNTZType, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => { + Schema(DateType, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.sql.Date]) => { + Schema(DateType, nullable = true) + } + case t if isSubtype(t, localTypeOf[CalendarInterval]) => { + Schema(CalendarIntervalType, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.time.Duration]) => { + Schema(DayTimeIntervalType(), nullable = true) + } + case t if isSubtype(t, localTypeOf[java.time.Period]) => { + Schema(YearMonthIntervalType(), nullable = true) + } + case t if isSubtype(t, localTypeOf[BigDecimal]) => { + Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => { + Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => { + Schema(DecimalType.BigIntDecimal, nullable = true) + } + case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => { + Schema(DecimalType.BigIntDecimal, nullable = true) + } + case t if isSubtype(t, localTypeOf[Decimal]) => { + Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) + } + case t if isSubtype(t, localTypeOf[java.lang.Integer]) => Schema(IntegerType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Long]) => Schema(LongType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Double]) => Schema(DoubleType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Float]) => Schema(FloatType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Short]) => Schema(ShortType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Byte]) => Schema(ByteType, nullable = true) + case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => Schema(BooleanType, nullable = true) + case t if isSubtype(t, definitions.IntTpe) => Schema(IntegerType, nullable = false) + case t if isSubtype(t, definitions.LongTpe) => Schema(LongType, nullable = false) + case t if isSubtype(t, definitions.DoubleTpe) => Schema(DoubleType, nullable = false) + case t if isSubtype(t, definitions.FloatTpe) => Schema(FloatType, nullable = false) + case t if isSubtype(t, definitions.ShortTpe) => Schema(ShortType, nullable = false) + case t if isSubtype(t, definitions.ByteTpe) => Schema(ByteType, nullable = false) + case t if isSubtype(t, definitions.BooleanTpe) => Schema(BooleanType, nullable = false) + case t if definedByConstructorParams(t) => { + val params = getConstructorParameters(t) + Schema( + StructType( + params.map { case (fieldName, fieldType) => + val Schema(dataType, nullable) = schemaFor(fieldType) + StructField(fieldName, dataType, nullable) + } + ), nullable = true + ) + } + case other => { + throw new UnsupportedOperationException(s"Schema for type $other is not supported") + } + } } - } - - def createDeserializerForString(path: Expression, returnNullable: Boolean): Expression = { - Invoke(path, "toString", ObjectType(classOf[java.lang.String]), - returnNullable = returnNullable) - } - - def getJavaBeanReadableProperties(beanClass: Class[_]): Array[PropertyDescriptor] = { - val beanInfo = Introspector.getBeanInfo(beanClass) - beanInfo.getPropertyDescriptors.filterNot(_.getName == "class") - .filterNot(_.getName == "declaringClass") - .filter(_.getReadMethod != null) - } - - /* - * Retrieves the runtime class corresponding to the provided type. - */ - def getClassFromType(tpe: Type): Class[_] = mirror.runtimeClass(tpe.dealias.typeSymbol.asClass) - - case class Schema(dataType: DataType, nullable: Boolean) - - /** Returns a catalyst DataType and its nullability for the given Scala Type using reflection. */ - def schemaFor(tpe: `Type`): Schema = cleanUpReflectionObjects { - baseType(tpe) match { - // this must be the first case, since all objects in scala are instances of Null, therefore - // Null type would wrongly match the first of them, which is Option as of now - case t if isSubtype(t, definitions.NullTpe) => Schema(NullType, nullable = true) - case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => - val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt(). - getConstructor().newInstance() - Schema(udt, nullable = true) - case t if UDTRegistration.exists(getClassNameFromType(t)) => - val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). - newInstance().asInstanceOf[UserDefinedType[_]] - Schema(udt, nullable = true) - case t if isSubtype(t, localTypeOf[Option[_]]) => - val TypeRef(_, _, Seq(optType)) = t - Schema(schemaFor(optType).dataType, nullable = true) - case t if isSubtype(t, localTypeOf[Array[Byte]]) => Schema(BinaryType, nullable = true) - case t if isSubtype(t, localTypeOf[Array[_]]) => - val TypeRef(_, _, Seq(elementType)) = t - val Schema(dataType, nullable) = schemaFor(elementType) - Schema(ArrayType(dataType, containsNull = nullable), nullable = true) - case t if isSubtype(t, localTypeOf[Seq[_]]) => - val TypeRef(_, _, Seq(elementType)) = t - val Schema(dataType, nullable) = schemaFor(elementType) - Schema(ArrayType(dataType, containsNull = nullable), nullable = true) - case t if isSubtype(t, localTypeOf[Map[_, _]]) => - val TypeRef(_, _, Seq(keyType, valueType)) = t - val Schema(valueDataType, valueNullable) = schemaFor(valueType) - Schema(MapType(schemaFor(keyType).dataType, - valueDataType, valueContainsNull = valueNullable), nullable = true) - case t if isSubtype(t, localTypeOf[Set[_]]) => - val TypeRef(_, _, Seq(elementType)) = t - val Schema(dataType, nullable) = schemaFor(elementType) - Schema(ArrayType(dataType, containsNull = nullable), nullable = true) - case t if isSubtype(t, localTypeOf[String]) => Schema(StringType, nullable = true) - case t if isSubtype(t, localTypeOf[java.time.Instant]) => - Schema(TimestampType, nullable = true) - case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => - Schema(TimestampType, nullable = true) - case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => Schema(DateType, nullable = true) - case t if isSubtype(t, localTypeOf[java.sql.Date]) => Schema(DateType, nullable = true) - case t if isSubtype(t, localTypeOf[BigDecimal]) => - Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) - case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => - Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) - case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => - Schema(DecimalType.BigIntDecimal, nullable = true) - case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => - Schema(DecimalType.BigIntDecimal, nullable = true) - case t if isSubtype(t, localTypeOf[Decimal]) => - Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Integer]) => Schema(IntegerType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Long]) => Schema(LongType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Double]) => Schema(DoubleType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Float]) => Schema(FloatType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Short]) => Schema(ShortType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Byte]) => Schema(ByteType, nullable = true) - case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => Schema(BooleanType, nullable = true) - case t if isSubtype(t, definitions.IntTpe) => Schema(IntegerType, nullable = false) - case t if isSubtype(t, definitions.LongTpe) => Schema(LongType, nullable = false) - case t if isSubtype(t, definitions.DoubleTpe) => Schema(DoubleType, nullable = false) - case t if isSubtype(t, definitions.FloatTpe) => Schema(FloatType, nullable = false) - case t if isSubtype(t, definitions.ShortTpe) => Schema(ShortType, nullable = false) - case t if isSubtype(t, definitions.ByteTpe) => Schema(ByteType, nullable = false) - case t if isSubtype(t, definitions.BooleanTpe) => Schema(BooleanType, nullable = false) - case t if definedByConstructorParams(t) => - val params = getConstructorParameters(t) - Schema(StructType( - params.map { case (fieldName, fieldType) => - val Schema(dataType, nullable) = schemaFor(fieldType) - StructField(fieldName, dataType, nullable) - }), nullable = true) - case other => - throw new UnsupportedOperationException(s"Schema for type $other is not supported") + + /** + * Whether the fields of the given type is defined entirely by its constructor parameters. + */ + def definedByConstructorParams(tpe: Type): Boolean = cleanUpReflectionObjects { + tpe.dealias match { + // `Option` is a `Product`, but we don't wanna treat `Option[Int]` as a struct type. + case t if isSubtype(t, localTypeOf[Option[_]]) => definedByConstructorParams(t.typeArgs.head) + case _ => { + isSubtype(tpe.dealias, localTypeOf[Product]) || + isSubtype(tpe.dealias, localTypeOf[DefinedByConstructorParams]) + } + } } - } - - /** - * Whether the fields of the given type is defined entirely by its constructor parameters. - */ - def definedByConstructorParams(tpe: Type): Boolean = cleanUpReflectionObjects { - tpe.dealias match { - // `Option` is a `Product`, but we don't wanna treat `Option[Int]` as a struct type. - case t if isSubtype(t, localTypeOf[Option[_]]) => definedByConstructorParams(t.typeArgs.head) - case _ => isSubtype(tpe.dealias, localTypeOf[Product]) || - isSubtype(tpe.dealias, localTypeOf[DefinedByConstructorParams]) + + private val javaKeywords = Set( + "abstract", "assert", "boolean", "break", "byte", "case", "catch", + "char", "class", "const", "continue", "default", "do", "double", "else", "extends", "false", + "final", "finally", "float", "for", "goto", "if", "implements", "import", "instanceof", "int", + "interface", "long", "native", "new", "null", "package", "private", "protected", "public", + "return", "short", "static", "strictfp", "super", "switch", "synchronized", "this", "throw", + "throws", "transient", "true", "try", "void", "volatile", "while" + ) + + + @scala.annotation.tailrec + def javaBoxedType(dt: DataType): Class[_] = dt match { + case _: DecimalType => classOf[Decimal] + case _: DayTimeIntervalType => classOf[java.lang.Long] + case _: YearMonthIntervalType => classOf[java.lang.Integer] + case BinaryType => classOf[Array[Byte]] + case StringType => classOf[UTF8String] + case CalendarIntervalType => classOf[CalendarInterval] + case _: StructType => classOf[InternalRow] + case _: ArrayType => classOf[ArrayType] + case _: MapType => classOf[MapType] + case udt: UserDefinedType[_] => javaBoxedType(udt.sqlType) + case ObjectType(cls) => cls + case _ => ScalaReflection.typeBoxedJavaMapping.getOrElse(dt, classOf[java.lang.Object]) } - } - - private val javaKeywords = Set("abstract", "assert", "boolean", "break", "byte", "case", "catch", - "char", "class", "const", "continue", "default", "do", "double", "else", "extends", "false", - "final", "finally", "float", "for", "goto", "if", "implements", "import", "instanceof", "int", - "interface", "long", "native", "new", "null", "package", "private", "protected", "public", - "return", "short", "static", "strictfp", "super", "switch", "synchronized", "this", "throw", - "throws", "transient", "true", "try", "void", "volatile", "while") - - - @scala.annotation.tailrec - def javaBoxedType(dt: DataType): Class[_] = dt match { - case _: DecimalType => classOf[Decimal] - case BinaryType => classOf[Array[Byte]] - case StringType => classOf[UTF8String] - case CalendarIntervalType => classOf[CalendarInterval] - case _: StructType => classOf[InternalRow] - case _: ArrayType => classOf[ArrayType] - case _: MapType => classOf[MapType] - case udt: UserDefinedType[_] => javaBoxedType(udt.sqlType) - case ObjectType(cls) => cls - case _ => ScalaReflection.typeBoxedJavaMapping.getOrElse(dt, classOf[java.lang.Object]) - } } @@ -991,120 +1264,124 @@ object KotlinReflection extends KotlinReflection { * object, this trait able to work in both the runtime and the compile time (macro) universe. */ trait KotlinReflection extends Logging { - /** The universe we work in (runtime or macro) */ - val universe: scala.reflect.api.Universe - - /** The mirror used to access types in the universe */ - def mirror: universe.Mirror - - import universe._ - - // The Predef.Map is scala.collection.immutable.Map. - // Since the map values can be mutable, we explicitly import scala.collection.Map at here. - - /** - * Any codes calling `scala.reflect.api.Types.TypeApi.<:<` should be wrapped by this method to - * clean up the Scala reflection garbage automatically. Otherwise, it will leak some objects to - * `scala.reflect.runtime.JavaUniverse.undoLog`. - * - * @see https://github.com/scala/bug/issues/8302 - */ - def cleanUpReflectionObjects[T](func: => T): T = { - universe.asInstanceOf[scala.reflect.runtime.JavaUniverse].undoLog.undo(func) - } - - /** - * Return the Scala Type for `T` in the current classloader mirror. - * - * Use this method instead of the convenience method `universe.typeOf`, which - * assumes that all types can be found in the classloader that loaded scala-reflect classes. - * That's not necessarily the case when running using Eclipse launchers or even - * Sbt console or test (without `fork := true`). - * - * @see SPARK-5281 - */ - def localTypeOf[T: TypeTag]: `Type` = { - val tag = implicitly[TypeTag[T]] - tag.in(mirror).tpe.dealias - } - - /** - * Returns the full class name for a type. The returned name is the canonical - * Scala name, where each component is separated by a period. It is NOT the - * Java-equivalent runtime name (no dollar signs). - * - * In simple cases, both the Scala and Java names are the same, however when Scala - * generates constructs that do not map to a Java equivalent, such as singleton objects - * or nested classes in package objects, it uses the dollar sign ($) to create - * synthetic classes, emulating behaviour in Java bytecode. - */ - def getClassNameFromType(tpe: `Type`): String = { - tpe.dealias.erasure.typeSymbol.asClass.fullName - } - - /** - * Returns the parameter names and types for the primary constructor of this type. - * - * Note that it only works for scala classes with primary constructor, and currently doesn't - * support inner class. - */ - def getConstructorParameters(tpe: Type): Seq[(String, Type)] = { - val dealiasedTpe = tpe.dealias - val formalTypeArgs = dealiasedTpe.typeSymbol.asClass.typeParams - val TypeRef(_, _, actualTypeArgs) = dealiasedTpe - val params = constructParams(dealiasedTpe) - // if there are type variables to fill in, do the substitution (SomeClass[T] -> SomeClass[Int]) - if (actualTypeArgs.nonEmpty) { - params.map { p => - p.name.decodedName.toString -> - p.typeSignature.substituteTypes(formalTypeArgs, actualTypeArgs) - } - } else { - params.map { p => - p.name.decodedName.toString -> p.typeSignature - } + /** The universe we work in (runtime or macro) */ + val universe: scala.reflect.api.Universe + + /** The mirror used to access types in the universe */ + def mirror: universe.Mirror + + import universe._ + + // The Predef.Map is scala.collection.immutable.Map. + // Since the map values can be mutable, we explicitly import scala.collection.Map at here. + + /** + * Any codes calling `scala.reflect.api.Types.TypeApi.<:<` should be wrapped by this method to + * clean up the Scala reflection garbage automatically. Otherwise, it will leak some objects to + * `scala.reflect.runtime.JavaUniverse.undoLog`. + * + * @see https://github.com/scala/bug/issues/8302 + */ + def cleanUpReflectionObjects[T](func: => T): T = { + universe.asInstanceOf[scala.reflect.runtime.JavaUniverse].undoLog.undo(func) } - } - - /** - * If our type is a Scala trait it may have a companion object that - * only defines a constructor via `apply` method. - */ - private def getCompanionConstructor(tpe: Type): Symbol = { - def throwUnsupportedOperation = { - throw new UnsupportedOperationException(s"Unable to find constructor for $tpe. " + - s"This could happen if $tpe is an interface, or a trait without companion object " + - "constructor.") + + /** + * Return the Scala Type for `T` in the current classloader mirror. + * + * Use this method instead of the convenience method `universe.typeOf`, which + * assumes that all types can be found in the classloader that loaded scala-reflect classes. + * That's not necessarily the case when running using Eclipse launchers or even + * Sbt console or test (without `fork := true`). + * + * @see SPARK-5281 + */ + def localTypeOf[T: TypeTag]: `Type` = { + val tag = implicitly[TypeTag[T]] + tag.in(mirror).tpe.dealias } - tpe.typeSymbol.asClass.companion match { - case NoSymbol => throwUnsupportedOperation - case sym => sym.asTerm.typeSignature.member(universe.TermName("apply")) match { - case NoSymbol => throwUnsupportedOperation - case constructorSym => constructorSym - } + /** + * Returns the full class name for a type. The returned name is the canonical + * Scala name, where each component is separated by a period. It is NOT the + * Java-equivalent runtime name (no dollar signs). + * + * In simple cases, both the Scala and Java names are the same, however when Scala + * generates constructs that do not map to a Java equivalent, such as singleton objects + * or nested classes in package objects, it uses the dollar sign ($) to create + * synthetic classes, emulating behaviour in Java bytecode. + */ + def getClassNameFromType(tpe: `Type`): String = { + tpe.dealias.erasure.typeSymbol.asClass.fullName } - } - protected def constructParams(tpe: Type): Seq[Symbol] = { - val constructorSymbol = tpe.member(termNames.CONSTRUCTOR) match { - case NoSymbol => getCompanionConstructor(tpe) - case sym => sym + /** + * Returns the parameter names and types for the primary constructor of this type. + * + * Note that it only works for scala classes with primary constructor, and currently doesn't + * support inner class. + */ + def getConstructorParameters(tpe: Type): Seq[(String, Type)] = { + val dealiasedTpe = tpe.dealias + val formalTypeArgs = dealiasedTpe.typeSymbol.asClass.typeParams + val TypeRef(_, _, actualTypeArgs) = dealiasedTpe + val params = constructParams(dealiasedTpe) + // if there are type variables to fill in, do the substitution (SomeClass[T] -> SomeClass[Int]) + if (actualTypeArgs.nonEmpty) { + params.map { p => + p.name.decodedName.toString -> + p.typeSignature.substituteTypes(formalTypeArgs, actualTypeArgs) + } + } else { + params.map { p => + p.name.decodedName.toString -> p.typeSignature + } + } } - val params = if (constructorSymbol.isMethod) { - constructorSymbol.asMethod.paramLists - } else { - // Find the primary constructor, and use its parameter ordering. - val primaryConstructorSymbol: Option[Symbol] = constructorSymbol.asTerm.alternatives.find( - s => s.isMethod && s.asMethod.isPrimaryConstructor) - if (primaryConstructorSymbol.isEmpty) { - sys.error("Internal SQL error: Product object did not have a primary constructor.") - } else { - primaryConstructorSymbol.get.asMethod.paramLists - } + + /** + * If our type is a Scala trait it may have a companion object that + * only defines a constructor via `apply` method. + */ + private def getCompanionConstructor(tpe: Type): Symbol = { + def throwUnsupportedOperation = { + throw new UnsupportedOperationException(s"Unable to find constructor for $tpe. " + + s"This could happen if $tpe is an interface, or a trait without companion object " + + "constructor." + ) + } + + tpe.typeSymbol.asClass.companion match { + case NoSymbol => throwUnsupportedOperation + case sym => { + sym.asTerm.typeSignature.member(universe.TermName("apply")) match { + case NoSymbol => throwUnsupportedOperation + case constructorSym => constructorSym + } + } + } + } + + protected def constructParams(tpe: Type): Seq[Symbol] = { + val constructorSymbol = tpe.member(termNames.CONSTRUCTOR) match { + case NoSymbol => getCompanionConstructor(tpe) + case sym => sym + } + val params = if (constructorSymbol.isMethod) { + constructorSymbol.asMethod.paramLists + } else { + // Find the primary constructor, and use its parameter ordering. + val primaryConstructorSymbol: Option[Symbol] = constructorSymbol.asTerm.alternatives.find( + s => s.isMethod && s.asMethod.isPrimaryConstructor + ) + if (primaryConstructorSymbol.isEmpty) { + sys.error("Internal SQL error: Product object did not have a primary constructor.") + } else { + primaryConstructorSymbol.get.asMethod.paramLists + } + } + params.flatten } - params.flatten - } } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 4ef15444..32a2caa9 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -21,7 +21,6 @@ package org.jetbrains.kotlinx.spark.api -import org.apache.hadoop.shaded.org.apache.commons.math3.exception.util.ArgUtils import org.apache.spark.SparkContext import org.apache.spark.api.java.* import org.apache.spark.api.java.function.* @@ -34,14 +33,12 @@ import org.apache.spark.sql.streaming.GroupState import org.apache.spark.sql.streaming.GroupStateTimeout import org.apache.spark.sql.streaming.OutputMode import org.apache.spark.sql.types.* -import org.apache.spark.sql.types.DataTypes.DateType import org.apache.spark.unsafe.types.CalendarInterval import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions import scala.Product import scala.Tuple2 -import scala.concurrent.duration.`Duration$` import scala.reflect.ClassTag -import scala.reflect.api.TypeTags.TypeTag +import scala.reflect.api.StandardDefinitions import java.beans.PropertyDescriptor import java.math.BigDecimal import java.sql.Date @@ -1191,6 +1188,12 @@ inline fun = mapOf()): DataType { + if (type.classifier == ByteArray::class) return KComplexTypeWrapper( + DataTypes.BinaryType, + ByteArray::class.java, + type.isMarkedNullable, + ) + val primitiveSchema = knownDataTypes[type.classifier] if (primitiveSchema != null) return KSimpleTypeWrapper( primitiveSchema, @@ -1216,7 +1219,7 @@ fun schema(type: KType, map: Map = mapOf()): DataType { DoubleArray::class -> typeOf() BooleanArray::class -> typeOf() ShortArray::class -> typeOf() - ByteArray::class -> typeOf() +// ByteArray::class -> typeOf() else -> types.getValue(klass.typeParameters[0].name) } } else types.getValue(klass.typeParameters[0].name) @@ -1319,6 +1322,7 @@ private val knownDataTypes: Map, DataType> = mapOf( Timestamp::class to DataTypes.TimestampType, Instant::class to DataTypes.TimestampType, ByteArray::class to DataTypes.BinaryType, + Decimal::class to DecimalType.SYSTEM_DEFAULT(), CalendarInterval::class to DataTypes.CalendarIntervalType, Nothing::class to DataTypes.NullType, ) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index bcb53bcb..03be4945 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -358,11 +358,11 @@ class ApiTest : ShouldSpec({ dataset.show() } should("handle binary datasets") { // uses encoder - val dataset = dsOf(byteArrayOf(1, 0, 1, 0)) + val dataset = dsOf("Hello there".encodeToByteArray()) dataset.show() } should("be able to serialize binary") { // uses knownDataTypes - val dataset = dsOf(byteArrayOf(1, 0, 1, 0) to 2) + val dataset = dsOf(c(byteArrayOf(1, 0, 12), 1, intArrayOf(1, 2, 3))) dataset.show() } should("be able to serialize CalendarInterval") { // uses knownDataTypes From 1557fa40b9c18263b715376d243bfb19472e43ea Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 23 Feb 2022 13:40:15 +0100 Subject: [PATCH 058/213] serializing binary works! --- .../apache/spark/sql/KotlinReflection.scala | 38 ++++++++++++++----- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 2 +- 2 files changed, 29 insertions(+), 11 deletions(-) diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala index 5c0b3cf7..74cdf290 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala +++ b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala @@ -22,7 +22,7 @@ package org.apache.spark.sql import org.apache.spark.internal.Logging import org.apache.spark.sql.catalyst.DeserializerBuildHelper._ -import org.apache.spark.sql.catalyst.ScalaReflection.{Schema, getClassFromType, isSubtype, javaBoxedType, localTypeOf} +import org.apache.spark.sql.catalyst.ScalaReflection.{Schema, dataTypeFor, getClassFromType, isSubtype, javaBoxedType, localTypeOf} import org.apache.spark.sql.catalyst.SerializerBuildHelper._ import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal import org.apache.spark.sql.catalyst.expressions.objects._ @@ -34,6 +34,7 @@ import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String} import org.apache.spark.util.Utils import java.beans.{Introspector, PropertyDescriptor} +import java.lang.Exception /** @@ -215,6 +216,15 @@ object KotlinReflection extends KotlinReflection { baseType(tpe) match { // + case t if ( + try { + !dataTypeFor(t).isInstanceOf[ObjectType] + } catch { + case _: Throwable => false + }) && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) || tpe == localTypeOf[Array[Byte]] => { + path + } + case t if isSubtype(t, localTypeOf[java.lang.Integer]) => { createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Integer]) } @@ -621,8 +631,14 @@ object KotlinReflection extends KotlinReflection { } def getType[T](clazz: Class[T]): universe.Type = { - val mir = runtimeMirror(clazz.getClassLoader) - mir.classSymbol(clazz).toType + clazz match { + case _ if clazz == classOf[Array[Byte]] => localTypeOf[Array[Byte]] + case _ => { + val mir = runtimeMirror(clazz.getClassLoader) + mir.classSymbol(clazz).toType + } + } + } def deserializerFor(cls: java.lang.Class[_], dt: DataTypeWithClass): Expression = { @@ -737,9 +753,10 @@ object KotlinReflection extends KotlinReflection { baseType(tpe) match { - // - case _ if !inputObject.dataType.isInstanceOf[ObjectType] - && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => { + // // TODO binary should go though objectType + case _ if !inputObject.dataType.isInstanceOf[ObjectType] && (!predefinedDt.exists { + _.isInstanceOf[ComplexWrapper] + } || tpe == localTypeOf[Array[Byte]]) => { inputObject } case t if isSubtype(t, localTypeOf[Option[_]]) => { @@ -915,18 +932,19 @@ object KotlinReflection extends KotlinReflection { val fieldName = structField.name val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] + + val fieldType: Type = getType(propClass) // TODO this must also return the type Array[Byte] + // val fieldValue = Invoke( inputObject, maybeProp.get.getReadMethod.getName, + // dataTypeFor(fieldType), inferExternalType(propClass), returnNullable = structField.nullable ) val newPath = walkedTypePath.recordField(propClass.getName, fieldName) - val tpe = - // if (propClass == classOf[Array[Byte]]) localTypeOf[Array[Byte]] - // else - getType(propClass) + val tpe = getType(propClass) val serializer = serializerFor( inputObject = fieldValue, diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index 03be4945..cc7b68f6 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -362,7 +362,7 @@ class ApiTest : ShouldSpec({ dataset.show() } should("be able to serialize binary") { // uses knownDataTypes - val dataset = dsOf(c(byteArrayOf(1, 0, 12), 1, intArrayOf(1, 2, 3))) + val dataset = dsOf(c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3))) dataset.show() } should("be able to serialize CalendarInterval") { // uses knownDataTypes From 680e5b1e4e55351f78bd57a1ded681ce4b4929ef Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 23 Feb 2022 13:43:11 +0100 Subject: [PATCH 059/213] serializing binary works! --- .../scala/org/apache/spark/sql/KotlinReflection.scala | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala index 74cdf290..7e098d00 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala +++ b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala @@ -309,8 +309,7 @@ object KotlinReflection extends KotlinReflection { } case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => { createDeserializerForScalaBigInt(path) - } // TODO case t if isSubtype(t, localTypeOf[Array[Byte]]) => - // createDeserializerForTypesSupportValueOf(path, classOf[Array[Byte]]) + } case t if isSubtype(t, localTypeOf[Array[_]]) => { var TypeRef(_, _, Seq(elementType)) = t @@ -753,7 +752,7 @@ object KotlinReflection extends KotlinReflection { baseType(tpe) match { - // // TODO binary should go though objectType + // case _ if !inputObject.dataType.isInstanceOf[ObjectType] && (!predefinedDt.exists { _.isInstanceOf[ComplexWrapper] } || tpe == localTypeOf[Array[Byte]]) => { @@ -933,12 +932,9 @@ object KotlinReflection extends KotlinReflection { val propClass = structField.dataType.asInstanceOf[DataTypeWithClass].cls val propDt = structField.dataType.asInstanceOf[DataTypeWithClass] - val fieldType: Type = getType(propClass) // TODO this must also return the type Array[Byte] - // val fieldValue = Invoke( inputObject, maybeProp.get.getReadMethod.getName, - // dataTypeFor(fieldType), inferExternalType(propClass), returnNullable = structField.nullable ) @@ -951,8 +947,7 @@ object KotlinReflection extends KotlinReflection { tpe = tpe, walkedTypePath = newPath, seenTypeSet = seenTypeSet, - predefinedDt = if (propDt - .isInstanceOf[ComplexWrapper] /*&& propClass != classOf[Array[Byte]]*/ ) Some(propDt) else None + predefinedDt = if (propDt.isInstanceOf[ComplexWrapper]) Some(propDt) else None ) (fieldName, serializer) From ba0c452ddc7865bec67208c7f7f24e31d92394e0 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 23 Feb 2022 14:17:52 +0100 Subject: [PATCH 060/213] fixed serializing CalendarInterval, added tests and fixes for Decimal and BigDecimal --- .../apache/spark/sql/KotlinReflection.scala | 9 +++++---- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 9 ++------- .../org/jetbrains/kotlinx/spark/api/ApiTest.kt | 18 ++++++++++++++++++ 3 files changed, 25 insertions(+), 11 deletions(-) diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala index 7e098d00..05ff330b 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala +++ b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala @@ -64,6 +64,8 @@ object KotlinReflection extends KotlinReflection { case c if c == java.lang.Float.TYPE => FloatType case c if c == java.lang.Double.TYPE => DoubleType case c if c == classOf[Array[Byte]] => BinaryType + case c if c == classOf[Decimal] => DecimalType.SYSTEM_DEFAULT + case c if c == classOf[CalendarInterval] => CalendarIntervalType case _ => ObjectType(cls) } @@ -221,7 +223,7 @@ object KotlinReflection extends KotlinReflection { !dataTypeFor(t).isInstanceOf[ObjectType] } catch { case _: Throwable => false - }) && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) || tpe == localTypeOf[Array[Byte]] => { + }) && !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => { path } @@ -753,9 +755,8 @@ object KotlinReflection extends KotlinReflection { baseType(tpe) match { // - case _ if !inputObject.dataType.isInstanceOf[ObjectType] && (!predefinedDt.exists { - _.isInstanceOf[ComplexWrapper] - } || tpe == localTypeOf[Array[Byte]]) => { + case _ if !inputObject.dataType.isInstanceOf[ObjectType] && + !predefinedDt.exists(_.isInstanceOf[ComplexWrapper]) => { inputObject } case t if isSubtype(t, localTypeOf[Option[_]]) => { diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 32a2caa9..4a93c6b8 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -1188,12 +1188,6 @@ inline fun = mapOf()): DataType { - if (type.classifier == ByteArray::class) return KComplexTypeWrapper( - DataTypes.BinaryType, - ByteArray::class.java, - type.isMarkedNullable, - ) - val primitiveSchema = knownDataTypes[type.classifier] if (primitiveSchema != null) return KSimpleTypeWrapper( primitiveSchema, @@ -1219,7 +1213,7 @@ fun schema(type: KType, map: Map = mapOf()): DataType { DoubleArray::class -> typeOf() BooleanArray::class -> typeOf() ShortArray::class -> typeOf() -// ByteArray::class -> typeOf() +// ByteArray::class -> typeOf() handled by BinaryType else -> types.getValue(klass.typeParameters[0].name) } } else types.getValue(klass.typeParameters[0].name) @@ -1323,6 +1317,7 @@ private val knownDataTypes: Map, DataType> = mapOf( Instant::class to DataTypes.TimestampType, ByteArray::class to DataTypes.BinaryType, Decimal::class to DecimalType.SYSTEM_DEFAULT(), + BigDecimal::class to DecimalType.SYSTEM_DEFAULT(), CalendarInterval::class to DataTypes.CalendarIntervalType, Nothing::class to DataTypes.NullType, ) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index cc7b68f6..9e310ec2 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -30,6 +30,7 @@ import org.apache.spark.sql.Dataset import org.apache.spark.sql.functions.* import org.apache.spark.sql.streaming.GroupState import org.apache.spark.sql.streaming.GroupStateTimeout +import org.apache.spark.sql.types.Decimal import org.apache.spark.unsafe.types.CalendarInterval import scala.Product import scala.Tuple1 @@ -37,6 +38,7 @@ import scala.Tuple2 import scala.Tuple3 import scala.collection.Seq import java.io.Serializable +import java.math.BigDecimal import java.sql.Date import java.sql.Timestamp import java.time.Duration @@ -365,6 +367,22 @@ class ApiTest : ShouldSpec({ val dataset = dsOf(c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3))) dataset.show() } + should("handle Decimal datasets") { // uses encoder + val dataset = dsOf(Decimal().set(50)) + dataset.show() + } + should("be able to serialize Decimal") { // uses knownDataTypes + val dataset = dsOf(c(Decimal().set(50), 12)) + dataset.show() + } + should("handle BigDecimal datasets") { // uses encoder + val dataset = dsOf(BigDecimal.TEN) + dataset.show() + } + should("be able to serialize BigDecimal") { // uses knownDataTypes + val dataset = dsOf(c(BigDecimal.TEN, 12)) + dataset.show() + } should("be able to serialize CalendarInterval") { // uses knownDataTypes val dataset = dsOf(CalendarInterval(1, 0, 0L) to 2) dataset.show() From 054d6267d23fdccd24923105f06ad6783fd097c6 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Wed, 23 Feb 2022 17:17:26 +0100 Subject: [PATCH 061/213] updating all tests to shouldBe instead of just show --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 2 +- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 149 ++++++++++++------ 2 files changed, 105 insertions(+), 46 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt index 4a93c6b8..fb1b5340 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt @@ -37,6 +37,7 @@ import org.apache.spark.unsafe.types.CalendarInterval import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions import scala.Product import scala.Tuple2 +import scala.concurrent.duration.`Duration$` import scala.reflect.ClassTag import scala.reflect.api.StandardDefinitions import java.beans.PropertyDescriptor @@ -1319,7 +1320,6 @@ private val knownDataTypes: Map, DataType> = mapOf( Decimal::class to DecimalType.SYSTEM_DEFAULT(), BigDecimal::class to DecimalType.SYSTEM_DEFAULT(), CalendarInterval::class to DataTypes.CalendarIntervalType, - Nothing::class to DataTypes.NullType, ) private fun transitiveMerge(a: Map, b: Map): Map { diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index 9e310ec2..2684a7b8 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -20,6 +20,7 @@ package org.jetbrains.kotlinx.spark.api/*- import ch.tutteli.atrium.api.fluent.en_GB.* import ch.tutteli.atrium.api.verbs.expect import io.kotest.core.spec.style.ShouldSpec +import io.kotest.matchers.should import io.kotest.matchers.shouldBe import org.apache.spark.api.java.JavaDoubleRDD import org.apache.spark.api.java.JavaPairRDD @@ -328,68 +329,97 @@ class ApiTest : ShouldSpec({ cogrouped.count() shouldBe 4 } should("handle LocalDate Datasets") { // uses encoder - val dataset: Dataset = dsOf(LocalDate.now(), LocalDate.now()) - dataset.show() + val dates = listOf(LocalDate.now(), LocalDate.now()) + val dataset: Dataset = dates.toDS() + dataset.collectAsList() shouldBe dates } should("handle Instant Datasets") { // uses encoder - val dataset: Dataset = dsOf(Instant.now(), Instant.now()) - dataset.show() + val instants = listOf(Instant.now(), Instant.now()) + val dataset: Dataset = instants.toDS() + dataset.collectAsList() shouldBe instants } should("Be able to serialize Instant") { // uses knownDataTypes - val dataset = dsOf(Instant.now() to Instant.now()) - dataset.show() + val instantPair = Instant.now() to Instant.now() + val dataset = dsOf(instantPair) + dataset.collectAsList() shouldBe listOf(instantPair) } should("be able to serialize Date") { // uses knownDataTypes - val dataset: Dataset> = dsOf(Date.valueOf("2020-02-10") to 5) - dataset.show() + val datePair = Date.valueOf("2020-02-10") to 5 + val dataset: Dataset> = dsOf(datePair) + dataset.collectAsList() shouldBe listOf(datePair) } should("handle Timestamp Datasets") { // uses encoder - val dataset = dsOf(Timestamp(0L)) - dataset.show() + val timeStamps = listOf(Timestamp(0L), Timestamp(1L)) + val dataset = timeStamps.toDS() + dataset.collectAsList() shouldBe timeStamps } should("be able to serialize Timestamp") { // uses knownDataTypes - val dataset = dsOf(Timestamp(0L) to 2) - dataset.show() + val timestampPair = Timestamp(0L) to 2 + val dataset = dsOf(timestampPair) + dataset.collectAsList() shouldBe listOf(timestampPair) } should("handle Duration Datasets") { // uses encoder val dataset = dsOf(Duration.ZERO) - dataset.show() + dataset.collectAsList() shouldBe listOf(Duration.ZERO) } should("handle Period Datasets") { // uses encoder - val dataset = dsOf(Period.ZERO) - dataset.show() + val periods = listOf(Period.ZERO, Period.ofDays(2)) + val dataset = periods.toDS() + + dataset.show(false) + + dataset.collectAsList().let { + it[0] shouldBe Period.ZERO + + // TODO this is also broken in Scala. It reports a Period of 0 instead of 2 days +// it[1] shouldBe Period.ofDays(2) + it[1] shouldBe Period.ofDays(0) + } + } should("handle binary datasets") { // uses encoder - val dataset = dsOf("Hello there".encodeToByteArray()) - dataset.show() + val byteArray = "Hello there".encodeToByteArray() + val dataset = dsOf(byteArray) + dataset.collectAsList() shouldBe listOf(byteArray) } should("be able to serialize binary") { // uses knownDataTypes - val dataset = dsOf(c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3))) - dataset.show() - } - should("handle Decimal datasets") { // uses encoder - val dataset = dsOf(Decimal().set(50)) - dataset.show() + val byteArrayTriple = c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3)) + val dataset = dsOf(byteArrayTriple) + + val (a, b, c) = dataset.collectAsList().single() + a contentEquals "Hello there".encodeToByteArray() shouldBe true + b shouldBe 1 + c contentEquals intArrayOf(1, 2, 3) shouldBe true } should("be able to serialize Decimal") { // uses knownDataTypes - val dataset = dsOf(c(Decimal().set(50), 12)) - dataset.show() + val decimalPair = c(Decimal().set(50), 12) + val dataset = dsOf(decimalPair) + dataset.collectAsList() shouldBe listOf(decimalPair) } should("handle BigDecimal datasets") { // uses encoder - val dataset = dsOf(BigDecimal.TEN) - dataset.show() + val decimals = listOf(BigDecimal.ONE, BigDecimal.TEN) + val dataset = decimals.toDS() + dataset.collectAsList().let { (one, ten) -> + one.compareTo(BigDecimal.ONE) shouldBe 0 + ten.compareTo(BigDecimal.TEN) shouldBe 0 + } } should("be able to serialize BigDecimal") { // uses knownDataTypes - val dataset = dsOf(c(BigDecimal.TEN, 12)) - dataset.show() + val decimalPair = c(BigDecimal.TEN, 12) + val dataset = dsOf(decimalPair) + val (a, b) = dataset.collectAsList().single() + a.compareTo(BigDecimal.TEN) shouldBe 0 + b shouldBe 12 } should("be able to serialize CalendarInterval") { // uses knownDataTypes - val dataset = dsOf(CalendarInterval(1, 0, 0L) to 2) - dataset.show() + val calendarIntervalPair = CalendarInterval(1, 0, 0L) to 2 + val dataset = dsOf(calendarIntervalPair) + dataset.collectAsList() shouldBe listOf(calendarIntervalPair) } - should("be able to serialize null") { // uses knownDataTypes - val dataset: Dataset> = dsOf(null to 2) - dataset.show() + should("handle nullable datasets") { + val ints = listOf(1, 2, 3, null) + val dataset = ints.toDS() + dataset.collectAsList() shouldBe ints } should("Be able to serialize Scala Tuples including data classes") { val dataset = dsOf( @@ -420,20 +450,20 @@ class ApiTest : ShouldSpec({ val newDS1WithAs: Dataset = dataset.selectTyped( col("a").`as`(), ) - newDS1WithAs.show() + newDS1WithAs.collectAsList() val newDS2: Dataset> = dataset.selectTyped( col(SomeClass::a), // NOTE: this only works on 3.0, returning a data class with an array in it col(SomeClass::b), ) - newDS2.show() + newDS2.collectAsList() val newDS3: Dataset> = dataset.selectTyped( col(SomeClass::a), col(SomeClass::b), col(SomeClass::b), ) - newDS3.show() + newDS3.collectAsList() val newDS4: Dataset> = dataset.selectTyped( col(SomeClass::a), @@ -441,7 +471,7 @@ class ApiTest : ShouldSpec({ col(SomeClass::b), col(SomeClass::b), ) - newDS4.show() + newDS4.collectAsList() val newDS5: Dataset> = dataset.selectTyped( col(SomeClass::a), @@ -450,7 +480,7 @@ class ApiTest : ShouldSpec({ col(SomeClass::b), col(SomeClass::b), ) - newDS5.show() + newDS5.collectAsList() } should("Access columns using invoke on datasets") { val dataset = dsOf( @@ -503,19 +533,18 @@ class ApiTest : ShouldSpec({ dataset(SomeOtherClass::a), col(SomeOtherClass::c), ) - b.show() + b.collectAsList() } should("Handle some where queries using column operator functions") { val dataset = dsOf( SomeOtherClass(intArrayOf(1, 2, 3), 4, true), SomeOtherClass(intArrayOf(4, 3, 2), 1, true), ) - dataset.show() + dataset.collectAsList() val column = col("b").`as`() val b = dataset.where(column gt 3 and col(SomeOtherClass::c)) - b.show() b.count() shouldBe 1 } @@ -524,21 +553,51 @@ class ApiTest : ShouldSpec({ listOf(SomeClass(intArrayOf(1, 2, 3), 4)), listOf(SomeClass(intArrayOf(3, 2, 1), 0)), ) - dataset.show() + + val (first, second) = dataset.collectAsList() + + first.single().let { (a, b) -> + a.contentEquals(intArrayOf(1, 2, 3)) shouldBe true + b shouldBe 4 + } + second.single().let { (a, b) -> + a.contentEquals(intArrayOf(3, 2, 1)) shouldBe true + b shouldBe 0 + } } should("Be able to serialize arrays of data classes") { val dataset = dsOf( arrayOf(SomeClass(intArrayOf(1, 2, 3), 4)), arrayOf(SomeClass(intArrayOf(3, 2, 1), 0)), ) - dataset.show() + + val (first, second) = dataset.collectAsList() + + first.single().let { (a, b) -> + a.contentEquals(intArrayOf(1, 2, 3)) shouldBe true + b shouldBe 4 + } + second.single().let { (a, b) -> + a.contentEquals(intArrayOf(3, 2, 1)) shouldBe true + b shouldBe 0 + } } should("Be able to serialize lists of tuples") { val dataset = dsOf( listOf(Tuple2(intArrayOf(1, 2, 3), 4)), listOf(Tuple2(intArrayOf(3, 2, 1), 0)), ) - dataset.show() + + val (first, second) = dataset.collectAsList() + + first.single().let { + it._1().contentEquals(intArrayOf(1, 2, 3)) shouldBe true + it._2() shouldBe 4 + } + second.single().let { + it._1().contentEquals(intArrayOf(3, 2, 1)) shouldBe true + it._2() shouldBe 0 + } } should("Allow simple forEachPartition in datasets") { val dataset = dsOf( From 66dc40e6fb7bd9f7a766025d7b3724da2291b23b Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 24 Feb 2022 16:27:38 +0100 Subject: [PATCH 062/213] added jira issue --- .../src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt | 1 + 1 file changed, 1 insertion(+) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index 2684a7b8..7d62bc18 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -372,6 +372,7 @@ class ApiTest : ShouldSpec({ it[0] shouldBe Period.ZERO // TODO this is also broken in Scala. It reports a Period of 0 instead of 2 days + // https://issues.apache.org/jira/browse/SPARK-38317 // it[1] shouldBe Period.ofDays(2) it[1] shouldBe Period.ofDays(0) } From 31f56d80c0d3870b8a2ad49b7b220796bcb2c989 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 25 Feb 2022 12:53:06 +0100 Subject: [PATCH 063/213] rebasing on spark 3.2 branch --- .../test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index 7d62bc18..fa320c39 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -371,9 +371,7 @@ class ApiTest : ShouldSpec({ dataset.collectAsList().let { it[0] shouldBe Period.ZERO - // TODO this is also broken in Scala. It reports a Period of 0 instead of 2 days - // https://issues.apache.org/jira/browse/SPARK-38317 -// it[1] shouldBe Period.ofDays(2) + // NOTE Spark truncates java.time.Period to months. it[1] shouldBe Period.ofDays(0) } @@ -804,4 +802,4 @@ data class ComplexEnumDataClass( data class NullFieldAbleDataClass( val optionList: List?, val optionMap: Map?, -) \ No newline at end of file +) From d56b5a4d5631e77b9af24b902ca9710768483762 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 28 Feb 2022 13:36:36 +0100 Subject: [PATCH 064/213] spark 3.2.1 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 2ced5eb7..0df3adac 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ 0.16.0 4.6.0 1.0.1 - 3.2.0 + 3.2.1 2.10.0 From b038490981af2f184fb93e434384bcea174d0279 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 28 Feb 2022 16:39:05 +0100 Subject: [PATCH 065/213] created files and separated out ApiV1 into separate files. Just want to create some docs per file to give an overview of what's happening in each --- .../org/jetbrains/kotlinx/spark/api/ApiV1.kt | 1340 ----------------- .../org/jetbrains/kotlinx/spark/api/Column.kt | 407 +++++ .../jetbrains/kotlinx/spark/api/Dataset.kt | 408 +++++ .../jetbrains/kotlinx/spark/api/Encoding.kt | 276 ++++ .../jetbrains/kotlinx/spark/api/GroupState.kt | 40 + .../spark/api/KeyValueGroupedDataset.kt | 178 +++ .../api/{SparkHelper.kt => SparkSession.kt} | 83 +- 7 files changed, 1380 insertions(+), 1352 deletions(-) delete mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt rename kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/{SparkHelper.kt => SparkSession.kt} (69%) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt deleted file mode 100644 index fb1b5340..00000000 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/ApiV1.kt +++ /dev/null @@ -1,1340 +0,0 @@ -/*- - * =LICENSE= - * Kotlin Spark API - * ---------- - * Copyright (C) 2019 - 2020 JetBrains - * ---------- - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * =LICENSEEND= - */ -@file:Suppress("HasPlatformType", "unused", "FunctionName") - -package org.jetbrains.kotlinx.spark.api - -import org.apache.spark.SparkContext -import org.apache.spark.api.java.* -import org.apache.spark.api.java.function.* -import org.apache.spark.broadcast.Broadcast -import org.apache.spark.rdd.RDD -import org.apache.spark.sql.* -import org.apache.spark.sql.Encoders.* -import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder -import org.apache.spark.sql.streaming.GroupState -import org.apache.spark.sql.streaming.GroupStateTimeout -import org.apache.spark.sql.streaming.OutputMode -import org.apache.spark.sql.types.* -import org.apache.spark.unsafe.types.CalendarInterval -import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions -import scala.Product -import scala.Tuple2 -import scala.concurrent.duration.`Duration$` -import scala.reflect.ClassTag -import scala.reflect.api.StandardDefinitions -import java.beans.PropertyDescriptor -import java.math.BigDecimal -import java.sql.Date -import java.sql.Timestamp -import java.time.Duration -import java.time.Instant -import java.time.LocalDate -import java.time.Period -import java.util.* -import java.util.concurrent.ConcurrentHashMap -import kotlin.Any -import kotlin.Array -import kotlin.Boolean -import kotlin.BooleanArray -import kotlin.Byte -import kotlin.ByteArray -import kotlin.Deprecated -import kotlin.DeprecationLevel -import kotlin.Double -import kotlin.DoubleArray -import kotlin.ExperimentalStdlibApi -import kotlin.Float -import kotlin.FloatArray -import kotlin.IllegalArgumentException -import kotlin.Int -import kotlin.IntArray -import kotlin.Long -import kotlin.LongArray -import kotlin.OptIn -import kotlin.Pair -import kotlin.ReplaceWith -import kotlin.Short -import kotlin.ShortArray -import kotlin.String -import kotlin.Suppress -import kotlin.Triple -import kotlin.Unit -import kotlin.also -import kotlin.apply -import kotlin.invoke -import kotlin.reflect.* -import kotlin.reflect.full.findAnnotation -import kotlin.reflect.full.isSubclassOf -import kotlin.reflect.full.isSubtypeOf -import kotlin.reflect.full.primaryConstructor -import kotlin.to - -@JvmField -val ENCODERS: Map, Encoder<*>> = mapOf( - Boolean::class to BOOLEAN(), - Byte::class to BYTE(), - Short::class to SHORT(), - Int::class to INT(), - Long::class to LONG(), - Float::class to FLOAT(), - Double::class to DOUBLE(), - String::class to STRING(), - BigDecimal::class to DECIMAL(), - Date::class to DATE(), - LocalDate::class to LOCALDATE(), // 3.0+ - Timestamp::class to TIMESTAMP(), - Instant::class to INSTANT(), // 3.0+ - ByteArray::class to BINARY(), - Duration::class to DURATION(), // 3.2+ - Period::class to PERIOD(), // 3.2+ -) - - -/** - * Broadcast a read-only variable to the cluster, returning a - * [org.apache.spark.broadcast.Broadcast] object for reading it in distributed functions. - * The variable will be sent to each cluster only once. - * - * @param value value to broadcast to the Spark nodes - * @return `Broadcast` object, a read-only variable cached on each machine - */ -inline fun SparkSession.broadcast(value: T): Broadcast = try { - sparkContext.broadcast(value, encoder().clsTag()) -} catch (e: ClassNotFoundException) { - JavaSparkContext(sparkContext).broadcast(value) -} - -/** - * Broadcast a read-only variable to the cluster, returning a - * [org.apache.spark.broadcast.Broadcast] object for reading it in distributed functions. - * The variable will be sent to each cluster only once. - * - * @param value value to broadcast to the Spark nodes - * @return `Broadcast` object, a read-only variable cached on each machine - * @see broadcast - */ -@Deprecated( - "You can now use `spark.broadcast()` instead.", - ReplaceWith("spark.broadcast(value)"), - DeprecationLevel.WARNING -) -inline fun SparkContext.broadcast(value: T): Broadcast = try { - broadcast(value, encoder().clsTag()) -} catch (e: ClassNotFoundException) { - JavaSparkContext(this).broadcast(value) -} - -/** - * Utility method to create dataset from list - */ -inline fun SparkSession.toDS(list: List): Dataset = - createDataset(list, encoder()) - -/** - * Utility method to create dataset from list - */ -inline fun SparkSession.dsOf(vararg t: T): Dataset = - createDataset(listOf(*t), encoder()) - -/** - * Utility method to create dataset from list - */ -inline fun List.toDS(spark: SparkSession): Dataset = - spark.createDataset(this, encoder()) - -/** - * Utility method to create dataset from RDD - */ -inline fun RDD.toDS(spark: SparkSession): Dataset = - spark.createDataset(this, encoder()) - -/** - * Utility method to create dataset from JavaRDD - */ -inline fun JavaRDDLike.toDS(spark: SparkSession): Dataset = - spark.createDataset(this.rdd(), encoder()) - -/** - * Main method of API, which gives you seamless integration with Spark: - * It creates encoder for any given supported type T - * - * Supported types are data classes, primitives, and Lists, Maps and Arrays containing them - * - * @param T type, supported by Spark - * @return generated encoder - */ -@OptIn(ExperimentalStdlibApi::class) -inline fun encoder(): Encoder = generateEncoder(typeOf(), T::class) - -/** - * @see encoder - */ -fun generateEncoder(type: KType, cls: KClass<*>): Encoder { - @Suppress("UNCHECKED_CAST") - return when { - isSupportedClass(cls) -> kotlinClassEncoder(memoizedSchema(type), cls) - else -> ENCODERS[cls] as? Encoder? ?: bean(cls.java) - } as Encoder -} - -private fun isSupportedClass(cls: KClass<*>): Boolean = when { - cls == ByteArray::class -> false // uses binary encoder - cls.isData -> true - cls.isSubclassOf(Map::class) -> true - cls.isSubclassOf(Iterable::class) -> true - cls.isSubclassOf(Product::class) -> true - cls.java.isArray -> true - else -> false - } - - -private fun kotlinClassEncoder(schema: DataType, kClass: KClass<*>): Encoder { - return ExpressionEncoder( - if (schema is DataTypeWithClass) KotlinReflection.serializerFor( - kClass.java, - schema - ) else KotlinReflection.serializerForType(KotlinReflection.getType(kClass.java)), - if (schema is DataTypeWithClass) KotlinReflection.deserializerFor( - kClass.java, - schema - ) else KotlinReflection.deserializerForType(KotlinReflection.getType(kClass.java)), - ClassTag.apply(kClass.java) - ) -} - -/** - * (Kotlin-specific) - * Returns a new Dataset that contains the result of applying [func] to each element. - */ -inline fun Dataset.map(noinline func: (T) -> R): Dataset = - map(MapFunction(func), encoder()) - -/** - * (Kotlin-specific) - * Returns a new Dataset by first applying a function to all elements of this Dataset, - * and then flattening the results. - */ -inline fun Dataset.flatMap(noinline func: (T) -> Iterator): Dataset = - flatMap(func, encoder()) - -/** - * (Kotlin-specific) - * Returns a new Dataset by flattening. This means that a Dataset of an iterable such as - * `listOf(listOf(1, 2, 3), listOf(4, 5, 6))` will be flattened to a Dataset of `listOf(1, 2, 3, 4, 5, 6)`. - */ -inline fun > Dataset.flatten(): Dataset = - flatMap(FlatMapFunction { it.iterator() }, encoder()) - -/** - * (Kotlin-specific) - * Returns a [KeyValueGroupedDataset] where the data is grouped by the given key [func]. - */ -inline fun Dataset.groupByKey(noinline func: (T) -> R): KeyValueGroupedDataset = - groupByKey(MapFunction(func), encoder()) - -/** - * (Kotlin-specific) - * Returns a new Dataset that contains the result of applying [func] to each partition. - */ -inline fun Dataset.mapPartitions(noinline func: (Iterator) -> Iterator): Dataset = - mapPartitions(func, encoder()) - -/** - * (Kotlin-specific) - * Filters rows to eliminate [null] values. - */ -@Suppress("UNCHECKED_CAST") -fun Dataset.filterNotNull(): Dataset = filter { it != null } as Dataset - -/** - * Returns a new [KeyValueGroupedDataset] where the given function [func] has been applied - * to the data. The grouping key is unchanged by this. - * - * ```kotlin - * // Create values grouped by key from a Dataset> - * ds.groupByKey { it._1 }.mapValues { it._2 } - * ``` - */ -inline fun KeyValueGroupedDataset.mapValues(noinline func: (VALUE) -> R): KeyValueGroupedDataset = - mapValues(MapFunction(func), encoder()) - -/** - * (Kotlin-specific) - * Applies the given function to each group of data. For each unique group, the function will - * be passed the group key and an iterator that contains all the elements in the group. The - * function can return an element of arbitrary type which will be returned as a new [Dataset]. - * - * This function does not support partial aggregation, and as a result requires shuffling all - * the data in the [Dataset]. If an application intends to perform an aggregation over each - * key, it is best to use the reduce function or an - * [org.apache.spark.sql.expressions.Aggregator]. - * - * Internally, the implementation will spill to disk if any given group is too large to fit into - * memory. However, users must take care to avoid materializing the whole iterator for a group - * (for example, by calling [toList]) unless they are sure that this is possible given the memory - * constraints of their cluster. - */ -inline fun KeyValueGroupedDataset.mapGroups(noinline func: (KEY, Iterator) -> R): Dataset = - mapGroups(MapGroupsFunction(func), encoder()) - -/** - * (Kotlin-specific) - * Reduces the elements of each group of data using the specified binary function. - * The given function must be commutative and associative or the result may be non-deterministic. - * - * Note that you need to use [reduceGroupsK] always instead of the Java- or Scala-specific - * [KeyValueGroupedDataset.reduceGroups] to make the compiler work. - */ -inline fun KeyValueGroupedDataset.reduceGroupsK(noinline func: (VALUE, VALUE) -> VALUE): Dataset> = - reduceGroups(ReduceFunction(func)) - .map { t -> t._1 to t._2 } - -/** - * (Kotlin-specific) - * Reduces the elements of this Dataset using the specified binary function. The given `func` - * must be commutative and associative or the result may be non-deterministic. - */ -inline fun Dataset.reduceK(noinline func: (T, T) -> T): T = - reduce(ReduceFunction(func)) - -/** - * (Kotlin-specific) - * Maps the Dataset to only retain the "keys" or [Tuple2._1] values. - */ -@JvmName("takeKeysTuple2") -inline fun Dataset>.takeKeys(): Dataset = map { it._1() } - -/** - * (Kotlin-specific) - * Maps the Dataset to only retain the "keys" or [Pair.first] values. - */ -inline fun Dataset>.takeKeys(): Dataset = map { it.first } - -/** - * (Kotlin-specific) - * Maps the Dataset to only retain the "keys" or [Arity2._1] values. - */ -@JvmName("takeKeysArity2") -inline fun Dataset>.takeKeys(): Dataset = map { it._1 } - -/** - * (Kotlin-specific) - * Maps the Dataset to only retain the "values" or [Tuple2._2] values. - */ -@JvmName("takeValuesTuple2") -inline fun Dataset>.takeValues(): Dataset = map { it._2() } - -/** - * (Kotlin-specific) - * Maps the Dataset to only retain the "values" or [Pair.second] values. - */ -inline fun Dataset>.takeValues(): Dataset = map { it.second } - -/** - * (Kotlin-specific) - * Maps the Dataset to only retain the "values" or [Arity2._2] values. - */ -@JvmName("takeValuesArity2") -inline fun Dataset>.takeValues(): Dataset = map { it._2 } - -/** - * (Kotlin-specific) - * Applies the given function to each group of data. For each unique group, the function will - * be passed the group key and an iterator that contains all the elements in the group. The - * function can return an iterator containing elements of an arbitrary type which will be returned - * as a new [Dataset]. - * - * This function does not support partial aggregation, and as a result requires shuffling all - * the data in the [Dataset]. If an application intends to perform an aggregation over each - * key, it is best to use the reduce function or an - * [org.apache.spark.sql.expressions.Aggregator]. - * - * Internally, the implementation will spill to disk if any given group is too large to fit into - * memory. However, users must take care to avoid materializing the whole iterator for a group - * (for example, by calling [toList]) unless they are sure that this is possible given the memory - * constraints of their cluster. - */ -inline fun KeyValueGroupedDataset.flatMapGroups( - noinline func: (key: K, values: Iterator) -> Iterator, -): Dataset = flatMapGroups( - FlatMapGroupsFunction(func), - encoder() -) - -/** - * (Kotlin-specific) - * Returns the group state value if it exists, else [null]. - * This is comparable to [GroupState.getOption], but instead utilises Kotlin's nullability features - * to get the same result. - */ -fun GroupState.getOrNull(): S? = if (exists()) get() else null - -/** - * (Kotlin-specific) - * Allows the group state object to be used as a delegate. Will be [null] if it does not exist. - * - * For example: - * ```kotlin - * groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> - * var s by state - * ... - * } - * ``` - */ -operator fun GroupState.getValue(thisRef: Any?, property: KProperty<*>): S? = getOrNull() - -/** - * (Kotlin-specific) - * Allows the group state object to be used as a delegate. Will be [null] if it does not exist. - * - * For example: - * ```kotlin - * groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> - * var s by state - * ... - * } - * ``` - */ -operator fun GroupState.setValue(thisRef: Any?, property: KProperty<*>, value: S?): Unit = update(value) - -/** - * (Kotlin-specific) - * Applies the given function to each group of data, while maintaining a user-defined per-group - * state. The result Dataset will represent the objects returned by the function. - * For a static batch Dataset, the function will be invoked once per group. For a streaming - * Dataset, the function will be invoked for each group repeatedly in every trigger, and - * updates to each group's state will be saved across invocations. - * See [org.apache.spark.sql.streaming.GroupState] for more details. - * - * @param S The type of the user-defined state. Must be encodable to Spark SQL types. - * @param U The type of the output objects. Must be encodable to Spark SQL types. - * @param func Function to be called on every group. - * - * See [Encoder] for more details on what types are encodable to Spark SQL. - */ -inline fun KeyValueGroupedDataset.mapGroupsWithState( - noinline func: (key: K, values: Iterator, state: GroupState) -> U, -): Dataset = mapGroupsWithState( - MapGroupsWithStateFunction(func), - encoder(), - encoder() -) - -/** - * (Kotlin-specific) - * Applies the given function to each group of data, while maintaining a user-defined per-group - * state. The result Dataset will represent the objects returned by the function. - * For a static batch Dataset, the function will be invoked once per group. For a streaming - * Dataset, the function will be invoked for each group repeatedly in every trigger, and - * updates to each group's state will be saved across invocations. - * See [org.apache.spark.sql.streaming.GroupState] for more details. - * - * @param S The type of the user-defined state. Must be encodable to Spark SQL types. - * @param U The type of the output objects. Must be encodable to Spark SQL types. - * @param func Function to be called on every group. - * @param timeoutConf Timeout configuration for groups that do not receive data for a while. - * - * See [Encoder] for more details on what types are encodable to Spark SQL. - */ -inline fun KeyValueGroupedDataset.mapGroupsWithState( - timeoutConf: GroupStateTimeout, - noinline func: (key: K, values: Iterator, state: GroupState) -> U, -): Dataset = mapGroupsWithState( - MapGroupsWithStateFunction(func), - encoder(), - encoder(), - timeoutConf -) - -/** - * (Kotlin-specific) - * Applies the given function to each group of data, while maintaining a user-defined per-group - * state. The result Dataset will represent the objects returned by the function. - * For a static batch Dataset, the function will be invoked once per group. For a streaming - * Dataset, the function will be invoked for each group repeatedly in every trigger, and - * updates to each group's state will be saved across invocations. - * See [GroupState] for more details. - * - * @param S The type of the user-defined state. Must be encodable to Spark SQL types. - * @param U The type of the output objects. Must be encodable to Spark SQL types. - * @param func Function to be called on every group. - * @param outputMode The output mode of the function. - * @param timeoutConf Timeout configuration for groups that do not receive data for a while. - * - * See [Encoder] for more details on what types are encodable to Spark SQL. - */ -inline fun KeyValueGroupedDataset.flatMapGroupsWithState( - outputMode: OutputMode, - timeoutConf: GroupStateTimeout, - noinline func: (key: K, values: Iterator, state: GroupState) -> Iterator, -): Dataset = flatMapGroupsWithState( - FlatMapGroupsWithStateFunction(func), - outputMode, - encoder(), - encoder(), - timeoutConf -) - -/** - * (Kotlin-specific) - * Applies the given function to each cogrouped data. For each unique group, the function will - * be passed the grouping key and 2 iterators containing all elements in the group from - * [Dataset] [this] and [other]. The function can return an iterator containing elements of an - * arbitrary type which will be returned as a new [Dataset]. - */ -inline fun KeyValueGroupedDataset.cogroup( - other: KeyValueGroupedDataset, - noinline func: (key: K, left: Iterator, right: Iterator) -> Iterator, -): Dataset = cogroup( - other, - CoGroupFunction(func), - encoder() -) - -/** DEPRECATED: Use [as] or [to] for this. */ -@Deprecated( - message = "Deprecated, since we already have `as`() and to().", - replaceWith = ReplaceWith("this.to()"), - level = DeprecationLevel.ERROR, -) -inline fun Dataset.downcast(): Dataset = `as`(encoder()) - -/** - * (Kotlin-specific) - * Returns a new Dataset where each record has been mapped on to the specified type. The - * method used to map columns depend on the type of [R]: - * - When [R] is a class, fields for the class will be mapped to columns of the same name - * (case sensitivity is determined by [spark.sql.caseSensitive]). - * - When [R] is a tuple, the columns will be mapped by ordinal (i.e. the first column will - * be assigned to `_1`). - * - When [R] is a primitive type (i.e. [String], [Int], etc.), then the first column of the - * `DataFrame` will be used. - * - * If the schema of the Dataset does not match the desired [R] type, you can use [Dataset.select]/[selectTyped] - * along with [Dataset.alias] or [as]/[to] to rearrange or rename as required. - * - * Note that [as]/[to] only changes the view of the data that is passed into typed operations, - * such as [map], and does not eagerly project away any columns that are not present in - * the specified class. - * - * @see to as alias for [as] - */ -inline fun Dataset<*>.`as`(): Dataset = `as`(encoder()) - -/** - * (Kotlin-specific) - * Returns a new Dataset where each record has been mapped on to the specified type. The - * method used to map columns depend on the type of [R]: - * - When [R] is a class, fields for the class will be mapped to columns of the same name - * (case sensitivity is determined by [spark.sql.caseSensitive]). - * - When [R] is a tuple, the columns will be mapped by ordinal (i.e. the first column will - * be assigned to `_1`). - * - When [R] is a primitive type (i.e. [String], [Int], etc.), then the first column of the - * `DataFrame` will be used. - * - * If the schema of the Dataset does not match the desired [R] type, you can use [Dataset.select]/[selectTyped] - * along with [Dataset.alias] or [as]/[to] to rearrange or rename as required. - * - * Note that [as]/[to] only changes the view of the data that is passed into typed operations, - * such as [map], and does not eagerly project away any columns that are not present in - * the specified class. - * - * @see as as alias for [to] - */ -inline fun Dataset<*>.to(): Dataset = `as`(encoder()) - -/** - * (Kotlin-specific) - * Applies a function [func] to all rows. - */ -inline fun Dataset.forEach(noinline func: (T) -> Unit): Unit = foreach(ForeachFunction(func)) - -/** - * (Kotlin-specific) - * Runs [func] on each partition of this Dataset. - */ -inline fun Dataset.forEachPartition(noinline func: (Iterator) -> Unit): Unit = - foreachPartition(ForeachPartitionFunction(func)) - -/** - * It's hard to call `Dataset.debugCodegen` from kotlin, so here is utility for that - */ -fun Dataset.debugCodegen(): Dataset = also { KSparkExtensions.debugCodegen(it) } - -/** - * Returns the Spark context associated with this Spark session. - */ -val SparkSession.sparkContext: SparkContext - get() = KSparkExtensions.sparkContext(this) - -/** - * It's hard to call `Dataset.debug` from kotlin, so here is utility for that - */ -fun Dataset.debug(): Dataset = also { KSparkExtensions.debug(it) } - -@Suppress("FunctionName") -@Deprecated( - message = "Changed to \"`===`\" to better reflect Scala API.", - replaceWith = ReplaceWith("this `===` c"), - level = DeprecationLevel.ERROR, -) -infix fun Column.`==`(c: Column) = `$eq$eq$eq`(c) - -/** - * Unary minus, i.e. negate the expression. - * ``` - * // Scala: select the amount column and negates all values. - * df.select( -df("amount") ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * df.select( -df("amount") ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * df.select( negate(col("amount") ); - * ``` - */ -operator fun Column.unaryMinus(): Column = `unary_$minus`() - -/** - * Inversion of boolean expression, i.e. NOT. - * ``` - * // Scala: select rows that are not active (isActive === false) - * df.filter( !df("isActive") ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * df.filter( !df("amount") ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * df.filter( not(df.col("isActive")) ); - * ``` - */ -operator fun Column.not(): Column = `unary_$bang`() - -/** - * Equality test. - * ``` - * // Scala: - * df.filter( df("colA") === df("colB") ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * df.filter( df("colA") eq df("colB") ) - * // or - * df.filter( df("colA") `===` df("colB") ) - * - * // Java - * import static org.apache.spark.sql.functions.*; - * df.filter( col("colA").equalTo(col("colB")) ); - * ``` - */ -infix fun Column.eq(other: Any): Column = `$eq$eq$eq`(other) - -/** - * Equality test. - * ``` - * // Scala: - * df.filter( df("colA") === df("colB") ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * df.filter( df("colA") eq df("colB") ) - * // or - * df.filter( df("colA") `===` df("colB") ) - * - * // Java - * import static org.apache.spark.sql.functions.*; - * df.filter( col("colA").equalTo(col("colB")) ); - * ``` - */ -infix fun Column.`===`(other: Any): Column = `$eq$eq$eq`(other) - -/** - * Inequality test. - * ``` - * // Scala: - * df.select( df("colA") =!= df("colB") ) - * df.select( !(df("colA") === df("colB")) ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * df.select( df("colA") neq df("colB") ) - * df.select( !(df("colA") eq df("colB")) ) - * // or - * df.select( df("colA") `=!=` df("colB") ) - * df.select( !(df("colA") `===` df("colB")) ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * df.select( col("colA").notEqual(col("colB")) ); - * ``` - */ -infix fun Column.neq(other: Any): Column = `$eq$bang$eq`(other) - -/** - * Inequality test. - * ``` - * // Scala: - * df.select( df("colA") =!= df("colB") ) - * df.select( !(df("colA") === df("colB")) ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * df.select( df("colA") neq df("colB") ) - * df.select( !(df("colA") eq df("colB")) ) - * // or - * df.select( df("colA") `=!=` df("colB") ) - * df.select( !(df("colA") `===` df("colB")) ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * df.select( col("colA").notEqual(col("colB")) ); - * ``` - */ -infix fun Column.`=!=`(other: Any): Column = `$eq$bang$eq`(other) - -/** - * Greater than. - * ``` - * // Scala: The following selects people older than 21. - * people.select( people("age") > 21 ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * people.select( people("age") gt 21 ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * people.select( people.col("age").gt(21) ); - * ``` - */ -infix fun Column.gt(other: Any): Column = `$greater`(other) - -/** - * Less than. - * ``` - * // Scala: The following selects people younger than 21. - * people.select( people("age") < 21 ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * people.select( people("age") lt 21 ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * people.select( people.col("age").lt(21) ); - * ``` - */ -infix fun Column.lt(other: Any): Column = `$less`(other) - -/** - * Less than or equal to. - * ``` - * // Scala: The following selects people age 21 or younger than 21. - * people.select( people("age") <= 21 ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * people.select( people("age") leq 21 ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * people.select( people.col("age").leq(21) ); - * ``` - */ -infix fun Column.leq(other: Any): Column = `$less$eq`(other) - -/** - * Greater than or equal to an expression. - * ``` - * // Scala: The following selects people age 21 or older than 21. - * people.select( people("age") >= 21 ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * people.select( people("age") geq 21 ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * people.select( people.col("age").geq(21) ); - * ``` - */ -infix fun Column.geq(other: Any): Column = `$greater$eq`(other) - -/** - * True if the current column is in the given [range]. - * ``` - * // Scala: - * df.where( df("colA").between(1, 5) ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * df.where( df("colA") inRangeOf 1..5 ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * df.where( df.col("colA").between(1, 5) ); - * ``` - */ -infix fun Column.inRangeOf(range: ClosedRange<*>): Column = between(range.start, range.endInclusive) - -/** - * Boolean OR. - * ``` - * // Scala: The following selects people that are in school or employed. - * people.filter( people("inSchool") || people("isEmployed") ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * people.filter( people("inSchool") or people("isEmployed") ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * people.filter( people.col("inSchool").or(people.col("isEmployed")) ); - * ``` - */ -infix fun Column.or(other: Any): Column = `$bar$bar`(other) - -/** - * Boolean AND. - * ``` - * // Scala: The following selects people that are in school and employed at the same time. - * people.select( people("inSchool") && people("isEmployed") ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * people.select( people("inSchool") and people("isEmployed") ) - * // or - * people.select( people("inSchool") `&&` people("isEmployed") ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * people.select( people.col("inSchool").and(people.col("isEmployed")) ); - * ``` - */ -infix fun Column.and(other: Any): Column = `$amp$amp`(other) - -/** - * Boolean AND. - * ``` - * // Scala: The following selects people that are in school and employed at the same time. - * people.select( people("inSchool") && people("isEmployed") ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * people.select( people("inSchool") and people("isEmployed") ) - * // or - * people.select( people("inSchool") `&&` people("isEmployed") ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * people.select( people.col("inSchool").and(people.col("isEmployed")) ); - * ``` - */ -infix fun Column.`&&`(other: Any): Column = `$amp$amp`(other) - -/** - * Multiplication of this expression and another expression. - * ``` - * // Scala: The following multiplies a person's height by their weight. - * people.select( people("height") * people("weight") ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * people.select( people("height") * people("weight") ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * people.select( people.col("height").multiply(people.col("weight")) ); - * ``` - */ -operator fun Column.times(other: Any): Column = `$times`(other) - -/** - * Division this expression by another expression. - * ``` - * // Scala: The following divides a person's height by their weight. - * people.select( people("height") / people("weight") ) - * - * // Kotlin - * import org.jetbrains.kotlinx.spark.api.* - * people.select( people("height") / people("weight") ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * people.select( people.col("height").divide(people.col("weight")) ); - * ``` - */ -operator fun Column.div(other: Any): Column = `$div`(other) - -/** - * Modulo (a.k.a. remainder) expression. - * ``` - * // Scala: - * df.where( df("colA") % 2 === 0 ) - * - * // Kotlin: - * import org.jetbrains.kotlinx.spark.api.* - * df.where( df("colA") % 2 eq 0 ) - * - * // Java: - * import static org.apache.spark.sql.functions.*; - * df.where( df.col("colA").mod(2).equalTo(0) ); - * ``` - */ -operator fun Column.rem(other: Any): Column = `$percent`(other) - -/** - * An expression that gets an item at position `ordinal` out of an array, - * or gets a value by key `key` in a `MapType`. - * ``` - * // Scala: - * df.where( df("arrayColumn").getItem(0) === 5 ) - * - * // Kotlin - * import org.jetbrains.kotlinx.spark.api.* - * df.where( df("arrayColumn")[0] eq 5 ) - * - * // Java - * import static org.apache.spark.sql.functions.*; - * df.where( df.col("arrayColumn").getItem(0).equalTo(5) ); - * ``` - */ -operator fun Column.get(key: Any): Column = getItem(key) - -/** - * Creates a [Column] of literal value. - * - * The passed in object is returned directly if it is already a [Column]. - * If the object is a Scala Symbol, it is converted into a [Column] also. - * Otherwise, a new [Column] is created to represent the literal value. - * - * This is just a shortcut to the function from [org.apache.spark.sql.functions]. - * For all the functions, simply add `import org.apache.spark.sql.functions.*` to your file. - */ -fun lit(a: Any): Column = functions.lit(a) - -/** - * Provides a type hint about the expected return value of this column. This information can - * be used by operations such as `select` on a [Dataset] to automatically convert the - * results into the correct JVM types. - * - * ``` - * val df: Dataset = ... - * val typedColumn: Dataset = df.selectTyped( col("a").`as`() ) - * ``` - */ -@Suppress("UNCHECKED_CAST") -inline fun Column.`as`(): TypedColumn = `as`(encoder()) - -/** - * Alias for [Dataset.joinWith] which passes "left" argument - * and respects the fact that in result of left join right relation is nullable - * - * @receiver left dataset - * @param right right dataset - * @param col join condition - * - * @return dataset of pairs where right element is forced nullable - */ -inline fun Dataset.leftJoin(right: Dataset, col: Column): Dataset> { - return joinWith(right, col, "left").map { it._1 to it._2 } -} - -/** - * Alias for [Dataset.joinWith] which passes "right" argument - * and respects the fact that in result of right join left relation is nullable - * - * @receiver left dataset - * @param right right dataset - * @param col join condition - * - * @return dataset of [Pair] where left element is forced nullable - */ -inline fun Dataset.rightJoin(right: Dataset, col: Column): Dataset> { - return joinWith(right, col, "right").map { it._1 to it._2 } -} - -/** - * Alias for [Dataset.joinWith] which passes "inner" argument - * - * @receiver left dataset - * @param right right dataset - * @param col join condition - * - * @return resulting dataset of [Pair] - */ -inline fun Dataset.innerJoin(right: Dataset, col: Column): Dataset> { - return joinWith(right, col, "inner").map { it._1 to it._2 } -} - -/** - * Alias for [Dataset.joinWith] which passes "full" argument - * and respects the fact that in result of join any element of resulting tuple is nullable - * - * @receiver left dataset - * @param right right dataset - * @param col join condition - * - * @return dataset of [Pair] where both elements are forced nullable - */ -inline fun Dataset.fullJoin( - right: Dataset, - col: Column, -): Dataset> { - return joinWith(right, col, "full").map { it._1 to it._2 } -} - -/** - * Alias for [Dataset.sort] which forces user to provide sorted columns from the source dataset - * - * @receiver source [Dataset] - * @param columns producer of sort columns - * @return sorted [Dataset] - */ -inline fun Dataset.sort(columns: (Dataset) -> Array) = sort(*columns(this)) - -/** - * This function creates block, where one can call any further computations on already cached dataset - * Data will be unpersisted automatically at the end of computation - * - * it may be useful in many situations, for example, when one needs to write data to several targets - * ```kotlin - * ds.withCached { - * write() - * .also { it.orc("First destination") } - * .also { it.avro("Second destination") } - * } - * ``` - * - * @param blockingUnpersist if execution should be blocked until everything persisted will be deleted - * @param executeOnCached Block which should be executed on cached dataset. - * @return result of block execution for further usage. It may be anything including source or new dataset - */ -inline fun Dataset.withCached( - blockingUnpersist: Boolean = false, - executeOnCached: Dataset.() -> R, -): R { - val cached = this.cache() - return cached.executeOnCached().also { cached.unpersist(blockingUnpersist) } -} - -/** - * Collects the dataset as list where each item has been mapped to type [T]. - */ -inline fun Dataset<*>.toList(): List = to().collectAsList() as List - -/** - * Collects the dataset as Array where each item has been mapped to type [T]. - */ -inline fun Dataset<*>.toArray(): Array = to().collect() as Array - -/** - * Selects column based on the column name and returns it as a [Column]. - * - * @note The column name can also reference to a nested column like `a.b`. - */ -operator fun Dataset.invoke(colName: String): Column = col(colName) - -/** - * Helper function to quickly get a [TypedColumn] (or [Column]) from a dataset in a refactor-safe manner. - * ```kotlin - * val dataset: Dataset = ... - * val columnA: TypedColumn = dataset.col(YourClass::a) - * ``` - * @see invoke - */ -@Suppress("UNCHECKED_CAST") -inline fun Dataset.col(column: KProperty1): TypedColumn = - col(column.name).`as`() as TypedColumn - -/** - * Returns a [Column] based on the given class attribute, not connected to a dataset. - * ```kotlin - * val dataset: Dataset = ... - * val new: Dataset> = dataset.select( col(YourClass::a), col(YourClass::b) ) - * ``` - */ -@Suppress("UNCHECKED_CAST") -inline fun col(column: KProperty1): TypedColumn = - functions.col(column.name).`as`() as TypedColumn - -/** - * Helper function to quickly get a [TypedColumn] (or [Column]) from a dataset in a refactor-safe manner. - * ```kotlin - * val dataset: Dataset = ... - * val columnA: TypedColumn = dataset(YourClass::a) - * ``` - * @see col - */ -inline operator fun Dataset.invoke(column: KProperty1): TypedColumn = col(column) - -/** - * Allows to sort data class dataset on one or more of the properties of the data class. - * ```kotlin - * val sorted: Dataset = unsorted.sort(YourClass::a) - * val sorted2: Dataset = unsorted.sort(YourClass::a, YourClass::b) - * ``` - */ -fun Dataset.sort(col: KProperty1, vararg cols: KProperty1): Dataset = - sort(col.name, *cols.map { it.name }.toTypedArray()) - -/** - * Alternative to [Dataset.show] which returns source dataset. - * Useful for debug purposes when you need to view content of a dataset as an intermediate operation - */ -fun Dataset.showDS(numRows: Int = 20, truncate: Boolean = true) = apply { show(numRows, truncate) } - -/** - * Returns a new Dataset by computing the given [Column] expressions for each element. - */ -@Suppress("UNCHECKED_CAST") -inline fun Dataset.selectTyped( - c1: TypedColumn, -): Dataset = select(c1 as TypedColumn) - -/** - * Returns a new Dataset by computing the given [Column] expressions for each element. - */ -@Suppress("UNCHECKED_CAST") -inline fun Dataset.selectTyped( - c1: TypedColumn, - c2: TypedColumn, -): Dataset> = - select( - c1 as TypedColumn, - c2 as TypedColumn, - ).map { Pair(it._1(), it._2()) } - -/** - * Returns a new Dataset by computing the given [Column] expressions for each element. - */ -@Suppress("UNCHECKED_CAST") -inline fun Dataset.selectTyped( - c1: TypedColumn, - c2: TypedColumn, - c3: TypedColumn, -): Dataset> = - select( - c1 as TypedColumn, - c2 as TypedColumn, - c3 as TypedColumn, - ).map { Triple(it._1(), it._2(), it._3()) } - -/** - * Returns a new Dataset by computing the given [Column] expressions for each element. - */ -@Suppress("UNCHECKED_CAST") -inline fun Dataset.selectTyped( - c1: TypedColumn, - c2: TypedColumn, - c3: TypedColumn, - c4: TypedColumn, -): Dataset> = - select( - c1 as TypedColumn, - c2 as TypedColumn, - c3 as TypedColumn, - c4 as TypedColumn, - ).map { Arity4(it._1(), it._2(), it._3(), it._4()) } - -/** - * Returns a new Dataset by computing the given [Column] expressions for each element. - */ -@Suppress("UNCHECKED_CAST") -inline fun Dataset.selectTyped( - c1: TypedColumn, - c2: TypedColumn, - c3: TypedColumn, - c4: TypedColumn, - c5: TypedColumn, -): Dataset> = - select( - c1 as TypedColumn, - c2 as TypedColumn, - c3 as TypedColumn, - c4 as TypedColumn, - c5 as TypedColumn, - ).map { Arity5(it._1(), it._2(), it._3(), it._4(), it._5()) } - - -/** - * Not meant to be used by the user explicitly. - * - * This function generates the DataType schema for supported classes, including Kotlin data classes, [Map], - * [Iterable], [Product], [Array], and combinations of those. - * - * It's mainly used by [generateEncoder]/[encoder]. - */ -@OptIn(ExperimentalStdlibApi::class) -fun schema(type: KType, map: Map = mapOf()): DataType { - val primitiveSchema = knownDataTypes[type.classifier] - if (primitiveSchema != null) return KSimpleTypeWrapper( - primitiveSchema, - (type.classifier!! as KClass<*>).java, - type.isMarkedNullable - ) - val klass = type.classifier as? KClass<*> ?: throw IllegalArgumentException("Unsupported type $type") - val args = type.arguments - - val types = transitiveMerge(map, klass.typeParameters.zip(args).map { - it.first.name to it.second.type!! - }.toMap()) - return when { - klass.isSubclassOf(Enum::class) -> { - KSimpleTypeWrapper(DataTypes.StringType, klass.java, type.isMarkedNullable) - } - klass.isSubclassOf(Iterable::class) || klass.java.isArray -> { - val listParam = if (klass.java.isArray) { - when (klass) { - IntArray::class -> typeOf() - LongArray::class -> typeOf() - FloatArray::class -> typeOf() - DoubleArray::class -> typeOf() - BooleanArray::class -> typeOf() - ShortArray::class -> typeOf() -// ByteArray::class -> typeOf() handled by BinaryType - else -> types.getValue(klass.typeParameters[0].name) - } - } else types.getValue(klass.typeParameters[0].name) - KComplexTypeWrapper( - DataTypes.createArrayType(schema(listParam, types), listParam.isMarkedNullable), - klass.java, - type.isMarkedNullable - ) - } - klass.isSubclassOf(Map::class) -> { - val mapKeyParam = types.getValue(klass.typeParameters[0].name) - val mapValueParam = types.getValue(klass.typeParameters[1].name) - KComplexTypeWrapper( - DataTypes.createMapType( - schema(mapKeyParam, types), - schema(mapValueParam, types), - true - ), - klass.java, - type.isMarkedNullable - ) - } - klass.isData -> { - val structType = StructType( - klass - .primaryConstructor!! - .parameters - .filter { it.findAnnotation() == null } - .map { - val projectedType = types[it.type.toString()] ?: it.type - val propertyDescriptor = PropertyDescriptor( - it.name, - klass.java, - "is" + it.name?.replaceFirstChar { if (it.isLowerCase()) it.titlecase(Locale.getDefault()) else it.toString() }, - null - ) - KStructField( - propertyDescriptor.readMethod.name, - StructField( - it.name, - schema(projectedType, types), - projectedType.isMarkedNullable, - Metadata.empty() - ) - ) - } - .toTypedArray() - ) - KDataTypeWrapper(structType, klass.java, true) - } - klass.isSubclassOf(Product::class) -> { - val params = type.arguments.mapIndexed { i, it -> - "_${i + 1}" to it.type!! - } - - val structType = DataTypes.createStructType( - params.map { (fieldName, fieldType) -> - val dataType = schema(fieldType, types) - KStructField( - fieldName, - StructField(fieldName, dataType, fieldType.isMarkedNullable, Metadata.empty()) - ) - }.toTypedArray() - ) - - KComplexTypeWrapper(structType, klass.java, true) - } - else -> throw IllegalArgumentException("$type is unsupported") - } -} - -/** - * The entry point to programming Spark with the Dataset and DataFrame API. - * - * @see org.apache.spark.sql.SparkSession - */ -typealias SparkSession = org.apache.spark.sql.SparkSession - -/** - * Control our logLevel. This overrides any user-defined log settings. - * @param level The desired log level as [SparkLogLevel]. - */ -fun SparkContext.setLogLevel(level: SparkLogLevel): Unit = setLogLevel(level.name) - -enum class SparkLogLevel { - ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN -} - -private val knownDataTypes: Map, DataType> = mapOf( - Byte::class to DataTypes.ByteType, - Short::class to DataTypes.ShortType, - Int::class to DataTypes.IntegerType, - Long::class to DataTypes.LongType, - Boolean::class to DataTypes.BooleanType, - Float::class to DataTypes.FloatType, - Double::class to DataTypes.DoubleType, - String::class to DataTypes.StringType, - LocalDate::class to DataTypes.DateType, - Date::class to DataTypes.DateType, - Timestamp::class to DataTypes.TimestampType, - Instant::class to DataTypes.TimestampType, - ByteArray::class to DataTypes.BinaryType, - Decimal::class to DecimalType.SYSTEM_DEFAULT(), - BigDecimal::class to DecimalType.SYSTEM_DEFAULT(), - CalendarInterval::class to DataTypes.CalendarIntervalType, -) - -private fun transitiveMerge(a: Map, b: Map): Map { - return a + b.mapValues { - a.getOrDefault(it.value.toString(), it.value) - } -} - -class Memoize1(val f: (T) -> R) : (T) -> R { - - private val values = ConcurrentHashMap() - - override fun invoke(x: T): R = values.getOrPut(x) { f(x) } -} - -private fun ((T) -> R).memoize(): (T) -> R = Memoize1(this) - -private val memoizedSchema: (KType) -> DataType = { x: KType -> schema(x) }.memoize() diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt new file mode 100644 index 00000000..922c2868 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt @@ -0,0 +1,407 @@ +package org.jetbrains.kotlinx.spark.api + +import org.apache.spark.sql.Column +import org.apache.spark.sql.Dataset +import org.apache.spark.sql.TypedColumn +import org.apache.spark.sql.functions +import kotlin.reflect.KProperty1 + +/** + * Selects column based on the column name and returns it as a [Column]. + * + * @note The column name can also reference to a nested column like `a.b`. + */ +operator fun Dataset.invoke(colName: String): Column = col(colName) + +/** + * Helper function to quickly get a [TypedColumn] (or [Column]) from a dataset in a refactor-safe manner. + * ```kotlin + * val dataset: Dataset = ... + * val columnA: TypedColumn = dataset.col(YourClass::a) + * ``` + * @see invoke + */ +@Suppress("UNCHECKED_CAST") +inline fun Dataset.col(column: KProperty1): TypedColumn = + col(column.name).`as`() as TypedColumn + + +/** + * Helper function to quickly get a [TypedColumn] (or [Column]) from a dataset in a refactor-safe manner. + * ```kotlin + * val dataset: Dataset = ... + * val columnA: TypedColumn = dataset(YourClass::a) + * ``` + * @see col + */ +inline operator fun Dataset.invoke(column: KProperty1): TypedColumn = col(column) + + +@Suppress("FunctionName") +@Deprecated( + message = "Changed to \"`===`\" to better reflect Scala API.", + replaceWith = ReplaceWith("this `===` c"), + level = DeprecationLevel.ERROR, +) +infix fun Column.`==`(c: Column) = `$eq$eq$eq`(c) + +/** + * Unary minus, i.e. negate the expression. + * ``` + * // Scala: select the amount column and negates all values. + * df.select( -df("amount") ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * df.select( -df("amount") ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * df.select( negate(col("amount") ); + * ``` + */ +operator fun Column.unaryMinus(): Column = `unary_$minus`() + +/** + * Inversion of boolean expression, i.e. NOT. + * ``` + * // Scala: select rows that are not active (isActive === false) + * df.filter( !df("isActive") ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * df.filter( !df("amount") ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * df.filter( not(df.col("isActive")) ); + * ``` + */ +operator fun Column.not(): Column = `unary_$bang`() + +/** + * Equality test. + * ``` + * // Scala: + * df.filter( df("colA") === df("colB") ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * df.filter( df("colA") eq df("colB") ) + * // or + * df.filter( df("colA") `===` df("colB") ) + * + * // Java + * import static org.apache.spark.sql.functions.*; + * df.filter( col("colA").equalTo(col("colB")) ); + * ``` + */ +infix fun Column.eq(other: Any): Column = `$eq$eq$eq`(other) + +/** + * Equality test. + * ``` + * // Scala: + * df.filter( df("colA") === df("colB") ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * df.filter( df("colA") eq df("colB") ) + * // or + * df.filter( df("colA") `===` df("colB") ) + * + * // Java + * import static org.apache.spark.sql.functions.*; + * df.filter( col("colA").equalTo(col("colB")) ); + * ``` + */ +infix fun Column.`===`(other: Any): Column = `$eq$eq$eq`(other) + +/** + * Inequality test. + * ``` + * // Scala: + * df.select( df("colA") =!= df("colB") ) + * df.select( !(df("colA") === df("colB")) ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * df.select( df("colA") neq df("colB") ) + * df.select( !(df("colA") eq df("colB")) ) + * // or + * df.select( df("colA") `=!=` df("colB") ) + * df.select( !(df("colA") `===` df("colB")) ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * df.select( col("colA").notEqual(col("colB")) ); + * ``` + */ +infix fun Column.neq(other: Any): Column = `$eq$bang$eq`(other) + +/** + * Inequality test. + * ``` + * // Scala: + * df.select( df("colA") =!= df("colB") ) + * df.select( !(df("colA") === df("colB")) ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * df.select( df("colA") neq df("colB") ) + * df.select( !(df("colA") eq df("colB")) ) + * // or + * df.select( df("colA") `=!=` df("colB") ) + * df.select( !(df("colA") `===` df("colB")) ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * df.select( col("colA").notEqual(col("colB")) ); + * ``` + */ +infix fun Column.`=!=`(other: Any): Column = `$eq$bang$eq`(other) + +/** + * Greater than. + * ``` + * // Scala: The following selects people older than 21. + * people.select( people("age") > 21 ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * people.select( people("age") gt 21 ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * people.select( people.col("age").gt(21) ); + * ``` + */ +infix fun Column.gt(other: Any): Column = `$greater`(other) + +/** + * Less than. + * ``` + * // Scala: The following selects people younger than 21. + * people.select( people("age") < 21 ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * people.select( people("age") lt 21 ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * people.select( people.col("age").lt(21) ); + * ``` + */ +infix fun Column.lt(other: Any): Column = `$less`(other) + +/** + * Less than or equal to. + * ``` + * // Scala: The following selects people age 21 or younger than 21. + * people.select( people("age") <= 21 ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * people.select( people("age") leq 21 ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * people.select( people.col("age").leq(21) ); + * ``` + */ +infix fun Column.leq(other: Any): Column = `$less$eq`(other) + +/** + * Greater than or equal to an expression. + * ``` + * // Scala: The following selects people age 21 or older than 21. + * people.select( people("age") >= 21 ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * people.select( people("age") geq 21 ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * people.select( people.col("age").geq(21) ); + * ``` + */ +infix fun Column.geq(other: Any): Column = `$greater$eq`(other) + +/** + * True if the current column is in the given [range]. + * ``` + * // Scala: + * df.where( df("colA").between(1, 5) ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * df.where( df("colA") inRangeOf 1..5 ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * df.where( df.col("colA").between(1, 5) ); + * ``` + */ +infix fun Column.inRangeOf(range: ClosedRange<*>): Column = between(range.start, range.endInclusive) + +/** + * Boolean OR. + * ``` + * // Scala: The following selects people that are in school or employed. + * people.filter( people("inSchool") || people("isEmployed") ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * people.filter( people("inSchool") or people("isEmployed") ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * people.filter( people.col("inSchool").or(people.col("isEmployed")) ); + * ``` + */ +infix fun Column.or(other: Any): Column = `$bar$bar`(other) + +/** + * Boolean AND. + * ``` + * // Scala: The following selects people that are in school and employed at the same time. + * people.select( people("inSchool") && people("isEmployed") ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * people.select( people("inSchool") and people("isEmployed") ) + * // or + * people.select( people("inSchool") `&&` people("isEmployed") ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * people.select( people.col("inSchool").and(people.col("isEmployed")) ); + * ``` + */ +infix fun Column.and(other: Any): Column = `$amp$amp`(other) + +/** + * Boolean AND. + * ``` + * // Scala: The following selects people that are in school and employed at the same time. + * people.select( people("inSchool") && people("isEmployed") ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * people.select( people("inSchool") and people("isEmployed") ) + * // or + * people.select( people("inSchool") `&&` people("isEmployed") ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * people.select( people.col("inSchool").and(people.col("isEmployed")) ); + * ``` + */ +infix fun Column.`&&`(other: Any): Column = `$amp$amp`(other) + +/** + * Multiplication of this expression and another expression. + * ``` + * // Scala: The following multiplies a person's height by their weight. + * people.select( people("height") * people("weight") ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * people.select( people("height") * people("weight") ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * people.select( people.col("height").multiply(people.col("weight")) ); + * ``` + */ +operator fun Column.times(other: Any): Column = `$times`(other) + +/** + * Division this expression by another expression. + * ``` + * // Scala: The following divides a person's height by their weight. + * people.select( people("height") / people("weight") ) + * + * // Kotlin + * import org.jetbrains.kotlinx.spark.api.* + * people.select( people("height") / people("weight") ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * people.select( people.col("height").divide(people.col("weight")) ); + * ``` + */ +operator fun Column.div(other: Any): Column = `$div`(other) + +/** + * Modulo (a.k.a. remainder) expression. + * ``` + * // Scala: + * df.where( df("colA") % 2 === 0 ) + * + * // Kotlin: + * import org.jetbrains.kotlinx.spark.api.* + * df.where( df("colA") % 2 eq 0 ) + * + * // Java: + * import static org.apache.spark.sql.functions.*; + * df.where( df.col("colA").mod(2).equalTo(0) ); + * ``` + */ +operator fun Column.rem(other: Any): Column = `$percent`(other) + +/** + * An expression that gets an item at position `ordinal` out of an array, + * or gets a value by key `key` in a `MapType`. + * ``` + * // Scala: + * df.where( df("arrayColumn").getItem(0) === 5 ) + * + * // Kotlin + * import org.jetbrains.kotlinx.spark.api.* + * df.where( df("arrayColumn")[0] eq 5 ) + * + * // Java + * import static org.apache.spark.sql.functions.*; + * df.where( df.col("arrayColumn").getItem(0).equalTo(5) ); + * ``` + */ +operator fun Column.get(key: Any): Column = getItem(key) + +/** + * Provides a type hint about the expected return value of this column. This information can + * be used by operations such as `select` on a [Dataset] to automatically convert the + * results into the correct JVM types. + * + * ``` + * val df: Dataset = ... + * val typedColumn: Dataset = df.selectTyped( col("a").`as`() ) + * ``` + */ +@Suppress("UNCHECKED_CAST") +inline fun Column.`as`(): TypedColumn = `as`(encoder()) + +/** + * Creates a [Column] of literal value. + * + * The passed in object is returned directly if it is already a [Column]. + * If the object is a Scala Symbol, it is converted into a [Column] also. + * Otherwise, a new [Column] is created to represent the literal value. + * + * This is just a shortcut to the function from [org.apache.spark.sql.functions]. + * For all the functions, simply add `import org.apache.spark.sql.functions.*` to your file. + */ +fun lit(a: Any): Column = functions.lit(a) + +/** + * Returns a [Column] based on the given class attribute, not connected to a dataset. + * ```kotlin + * val dataset: Dataset = ... + * val new: Dataset> = dataset.select( col(YourClass::a), col(YourClass::b) ) + * ``` + */ +@Suppress("UNCHECKED_CAST") +inline fun col(column: KProperty1): TypedColumn = + functions.col(column.name).`as`() as TypedColumn diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt new file mode 100644 index 00000000..f5dc4868 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt @@ -0,0 +1,408 @@ +package org.jetbrains.kotlinx.spark.api + +import org.apache.spark.api.java.JavaRDDLike +import org.apache.spark.api.java.function.FlatMapFunction +import org.apache.spark.api.java.function.ForeachFunction +import org.apache.spark.api.java.function.ForeachPartitionFunction +import org.apache.spark.api.java.function.MapFunction +import org.apache.spark.api.java.function.ReduceFunction +import org.apache.spark.rdd.RDD +import org.apache.spark.sql.Column +import org.apache.spark.sql.Dataset +import org.apache.spark.sql.KeyValueGroupedDataset +import org.apache.spark.sql.TypedColumn +import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions +import scala.Tuple2 +import kotlin.reflect.KProperty1 + + +/** + * Utility method to create dataset from list + */ +inline fun SparkSession.toDS(list: List): Dataset = + createDataset(list, encoder()) + +/** + * Utility method to create dataset from list + */ +inline fun SparkSession.dsOf(vararg t: T): Dataset = + createDataset(listOf(*t), encoder()) + +/** + * Utility method to create dataset from list + */ +inline fun List.toDS(spark: SparkSession): Dataset = + spark.createDataset(this, encoder()) + +/** + * Utility method to create dataset from RDD + */ +inline fun RDD.toDS(spark: SparkSession): Dataset = + spark.createDataset(this, encoder()) + +/** + * Utility method to create dataset from JavaRDD + */ +inline fun JavaRDDLike.toDS(spark: SparkSession): Dataset = + spark.createDataset(this.rdd(), encoder()) + +/** + * (Kotlin-specific) + * Returns a new Dataset that contains the result of applying [func] to each element. + */ +inline fun Dataset.map(noinline func: (T) -> R): Dataset = + map(MapFunction(func), encoder()) + +/** + * (Kotlin-specific) + * Returns a new Dataset by first applying a function to all elements of this Dataset, + * and then flattening the results. + */ +inline fun Dataset.flatMap(noinline func: (T) -> Iterator): Dataset = + flatMap(func, encoder()) + +/** + * (Kotlin-specific) + * Returns a new Dataset by flattening. This means that a Dataset of an iterable such as + * `listOf(listOf(1, 2, 3), listOf(4, 5, 6))` will be flattened to a Dataset of `listOf(1, 2, 3, 4, 5, 6)`. + */ +inline fun > Dataset.flatten(): Dataset = + flatMap(FlatMapFunction { it.iterator() }, encoder()) + +/** + * (Kotlin-specific) + * Returns a [KeyValueGroupedDataset] where the data is grouped by the given key [func]. + */ +inline fun Dataset.groupByKey(noinline func: (T) -> R): KeyValueGroupedDataset = + groupByKey(MapFunction(func), encoder()) + +/** + * (Kotlin-specific) + * Returns a new Dataset that contains the result of applying [func] to each partition. + */ +inline fun Dataset.mapPartitions(noinline func: (Iterator) -> Iterator): Dataset = + mapPartitions(func, encoder()) + +/** + * (Kotlin-specific) + * Filters rows to eliminate [null] values. + */ +@Suppress("UNCHECKED_CAST") +fun Dataset.filterNotNull(): Dataset = filter { it != null } as Dataset + + +/** + * (Kotlin-specific) + * Reduces the elements of this Dataset using the specified binary function. The given `func` + * must be commutative and associative or the result may be non-deterministic. + */ +inline fun Dataset.reduceK(noinline func: (T, T) -> T): T = + reduce(ReduceFunction(func)) + +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "keys" or [Tuple2._1] values. + */ +@JvmName("takeKeysTuple2") +inline fun Dataset>.takeKeys(): Dataset = map { it._1() } + +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "keys" or [Pair.first] values. + */ +inline fun Dataset>.takeKeys(): Dataset = map { it.first } + +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "keys" or [Arity2._1] values. + */ +@JvmName("takeKeysArity2") +inline fun Dataset>.takeKeys(): Dataset = map { it._1 } + +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "values" or [Tuple2._2] values. + */ +@JvmName("takeValuesTuple2") +inline fun Dataset>.takeValues(): Dataset = map { it._2() } + +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "values" or [Pair.second] values. + */ +inline fun Dataset>.takeValues(): Dataset = map { it.second } + +/** + * (Kotlin-specific) + * Maps the Dataset to only retain the "values" or [Arity2._2] values. + */ +@JvmName("takeValuesArity2") +inline fun Dataset>.takeValues(): Dataset = map { it._2 } + +/** DEPRECATED: Use [as] or [to] for this. */ +@Deprecated( + message = "Deprecated, since we already have `as`() and to().", + replaceWith = ReplaceWith("this.to()"), + level = DeprecationLevel.ERROR, +) +inline fun Dataset.downcast(): Dataset = `as`(encoder()) + +/** + * (Kotlin-specific) + * Returns a new Dataset where each record has been mapped on to the specified type. The + * method used to map columns depend on the type of [R]: + * - When [R] is a class, fields for the class will be mapped to columns of the same name + * (case sensitivity is determined by [spark.sql.caseSensitive]). + * - When [R] is a tuple, the columns will be mapped by ordinal (i.e. the first column will + * be assigned to `_1`). + * - When [R] is a primitive type (i.e. [String], [Int], etc.), then the first column of the + * `DataFrame` will be used. + * + * If the schema of the Dataset does not match the desired [R] type, you can use [Dataset.select]/[selectTyped] + * along with [Dataset.alias] or [as]/[to] to rearrange or rename as required. + * + * Note that [as]/[to] only changes the view of the data that is passed into typed operations, + * such as [map], and does not eagerly project away any columns that are not present in + * the specified class. + * + * @see to as alias for [as] + */ +inline fun Dataset<*>.`as`(): Dataset = `as`(encoder()) + +/** + * (Kotlin-specific) + * Returns a new Dataset where each record has been mapped on to the specified type. The + * method used to map columns depend on the type of [R]: + * - When [R] is a class, fields for the class will be mapped to columns of the same name + * (case sensitivity is determined by [spark.sql.caseSensitive]). + * - When [R] is a tuple, the columns will be mapped by ordinal (i.e. the first column will + * be assigned to `_1`). + * - When [R] is a primitive type (i.e. [String], [Int], etc.), then the first column of the + * `DataFrame` will be used. + * + * If the schema of the Dataset does not match the desired [R] type, you can use [Dataset.select]/[selectTyped] + * along with [Dataset.alias] or [as]/[to] to rearrange or rename as required. + * + * Note that [as]/[to] only changes the view of the data that is passed into typed operations, + * such as [map], and does not eagerly project away any columns that are not present in + * the specified class. + * + * @see as as alias for [to] + */ +inline fun Dataset<*>.to(): Dataset = `as`(encoder()) + +/** + * (Kotlin-specific) + * Applies a function [func] to all rows. + */ +inline fun Dataset.forEach(noinline func: (T) -> Unit): Unit = foreach(ForeachFunction(func)) + +/** + * (Kotlin-specific) + * Runs [func] on each partition of this Dataset. + */ +inline fun Dataset.forEachPartition(noinline func: (Iterator) -> Unit): Unit = + foreachPartition(ForeachPartitionFunction(func)) + +/** + * It's hard to call `Dataset.debugCodegen` from kotlin, so here is utility for that + */ +fun Dataset.debugCodegen(): Dataset = also { KSparkExtensions.debugCodegen(it) } + +/** + * It's hard to call `Dataset.debug` from kotlin, so here is utility for that + */ +fun Dataset.debug(): Dataset = also { KSparkExtensions.debug(it) } + + +/** + * Alias for [Dataset.joinWith] which passes "left" argument + * and respects the fact that in result of left join right relation is nullable + * + * @receiver left dataset + * @param right right dataset + * @param col join condition + * + * @return dataset of pairs where right element is forced nullable + */ +inline fun Dataset.leftJoin(right: Dataset, col: Column): Dataset> { + return joinWith(right, col, "left").map { it._1 to it._2 } +} + +/** + * Alias for [Dataset.joinWith] which passes "right" argument + * and respects the fact that in result of right join left relation is nullable + * + * @receiver left dataset + * @param right right dataset + * @param col join condition + * + * @return dataset of [Pair] where left element is forced nullable + */ +inline fun Dataset.rightJoin(right: Dataset, col: Column): Dataset> { + return joinWith(right, col, "right").map { it._1 to it._2 } +} + +/** + * Alias for [Dataset.joinWith] which passes "inner" argument + * + * @receiver left dataset + * @param right right dataset + * @param col join condition + * + * @return resulting dataset of [Pair] + */ +inline fun Dataset.innerJoin(right: Dataset, col: Column): Dataset> { + return joinWith(right, col, "inner").map { it._1 to it._2 } +} + +/** + * Alias for [Dataset.joinWith] which passes "full" argument + * and respects the fact that in result of join any element of resulting tuple is nullable + * + * @receiver left dataset + * @param right right dataset + * @param col join condition + * + * @return dataset of [Pair] where both elements are forced nullable + */ +inline fun Dataset.fullJoin( + right: Dataset, + col: Column, +): Dataset> { + return joinWith(right, col, "full").map { it._1 to it._2 } +} + +/** + * Alias for [Dataset.sort] which forces user to provide sorted columns from the source dataset + * + * @receiver source [Dataset] + * @param columns producer of sort columns + * @return sorted [Dataset] + */ +inline fun Dataset.sort(columns: (Dataset) -> Array): Dataset = sort(*columns(this)) + +/** + * This function creates block, where one can call any further computations on already cached dataset + * Data will be unpersisted automatically at the end of computation + * + * it may be useful in many situations, for example, when one needs to write data to several targets + * ```kotlin + * ds.withCached { + * write() + * .also { it.orc("First destination") } + * .also { it.avro("Second destination") } + * } + * ``` + * + * @param blockingUnpersist if execution should be blocked until everything persisted will be deleted + * @param executeOnCached Block which should be executed on cached dataset. + * @return result of block execution for further usage. It may be anything including source or new dataset + */ +inline fun Dataset.withCached( + blockingUnpersist: Boolean = false, + executeOnCached: Dataset.() -> R, +): R { + val cached = this.cache() + return cached.executeOnCached().also { cached.unpersist(blockingUnpersist) } +} + +/** + * Collects the dataset as list where each item has been mapped to type [T]. + */ +inline fun Dataset<*>.toList(): List = to().collectAsList() as List + +/** + * Collects the dataset as Array where each item has been mapped to type [T]. + */ +inline fun Dataset<*>.toArray(): Array = to().collect() as Array + + +/** + * Allows to sort data class dataset on one or more of the properties of the data class. + * ```kotlin + * val sorted: Dataset = unsorted.sort(YourClass::a) + * val sorted2: Dataset = unsorted.sort(YourClass::a, YourClass::b) + * ``` + */ +fun Dataset.sort(col: KProperty1, vararg cols: KProperty1): Dataset = + sort(col.name, *cols.map { it.name }.toTypedArray()) + +/** + * Alternative to [Dataset.show] which returns source dataset. + * Useful for debug purposes when you need to view content of a dataset as an intermediate operation + */ +fun Dataset.showDS(numRows: Int = 20, truncate: Boolean = true): Dataset = apply { show(numRows, truncate) } + +/** + * Returns a new Dataset by computing the given [Column] expressions for each element. + */ +@Suppress("UNCHECKED_CAST") +inline fun Dataset.selectTyped( + c1: TypedColumn, +): Dataset = select(c1 as TypedColumn) + +/** + * Returns a new Dataset by computing the given [Column] expressions for each element. + */ +@Suppress("UNCHECKED_CAST") +inline fun Dataset.selectTyped( + c1: TypedColumn, + c2: TypedColumn, +): Dataset> = + select( + c1 as TypedColumn, + c2 as TypedColumn, + ).map { Pair(it._1(), it._2()) } + +/** + * Returns a new Dataset by computing the given [Column] expressions for each element. + */ +@Suppress("UNCHECKED_CAST") +inline fun Dataset.selectTyped( + c1: TypedColumn, + c2: TypedColumn, + c3: TypedColumn, +): Dataset> = + select( + c1 as TypedColumn, + c2 as TypedColumn, + c3 as TypedColumn, + ).map { Triple(it._1(), it._2(), it._3()) } + +/** + * Returns a new Dataset by computing the given [Column] expressions for each element. + */ +@Suppress("UNCHECKED_CAST") +inline fun Dataset.selectTyped( + c1: TypedColumn, + c2: TypedColumn, + c3: TypedColumn, + c4: TypedColumn, +): Dataset> = + select( + c1 as TypedColumn, + c2 as TypedColumn, + c3 as TypedColumn, + c4 as TypedColumn, + ).map { Arity4(it._1(), it._2(), it._3(), it._4()) } + +/** + * Returns a new Dataset by computing the given [Column] expressions for each element. + */ +@Suppress("UNCHECKED_CAST") +inline fun Dataset.selectTyped( + c1: TypedColumn, + c2: TypedColumn, + c3: TypedColumn, + c4: TypedColumn, + c5: TypedColumn, +): Dataset> = + select( + c1 as TypedColumn, + c2 as TypedColumn, + c3 as TypedColumn, + c4 as TypedColumn, + c5 as TypedColumn, + ).map { Arity5(it._1(), it._2(), it._3(), it._4(), it._5()) } + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt new file mode 100644 index 00000000..c73c2da0 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt @@ -0,0 +1,276 @@ +/*- + * =LICENSE= + * Kotlin Spark API + * ---------- + * Copyright (C) 2019 - 2020 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +@file:Suppress("HasPlatformType", "unused", "FunctionName") + +package org.jetbrains.kotlinx.spark.api + +import org.apache.spark.sql.* +import org.apache.spark.sql.Encoders.* +import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder +import org.apache.spark.sql.types.* +import org.apache.spark.unsafe.types.CalendarInterval +import scala.Product +import scala.reflect.ClassTag +import java.beans.PropertyDescriptor +import java.math.BigDecimal +import java.sql.Date +import java.sql.Timestamp +import java.time.Duration +import java.time.Instant +import java.time.LocalDate +import java.time.Period +import java.util.* +import java.util.concurrent.ConcurrentHashMap +import kotlin.Any +import kotlin.Array +import kotlin.Boolean +import kotlin.BooleanArray +import kotlin.Byte +import kotlin.ByteArray +import kotlin.Double +import kotlin.DoubleArray +import kotlin.ExperimentalStdlibApi +import kotlin.Float +import kotlin.FloatArray +import kotlin.IllegalArgumentException +import kotlin.Int +import kotlin.IntArray +import kotlin.Long +import kotlin.LongArray +import kotlin.OptIn +import kotlin.Short +import kotlin.ShortArray +import kotlin.String +import kotlin.Suppress +import kotlin.reflect.* +import kotlin.reflect.full.findAnnotation +import kotlin.reflect.full.isSubclassOf +import kotlin.reflect.full.primaryConstructor +import kotlin.to + +@JvmField +val ENCODERS: Map, Encoder<*>> = mapOf( + Boolean::class to BOOLEAN(), + Byte::class to BYTE(), + Short::class to SHORT(), + Int::class to INT(), + Long::class to LONG(), + Float::class to FLOAT(), + Double::class to DOUBLE(), + String::class to STRING(), + BigDecimal::class to DECIMAL(), + Date::class to DATE(), + LocalDate::class to LOCALDATE(), // 3.0+ + Timestamp::class to TIMESTAMP(), + Instant::class to INSTANT(), // 3.0+ + ByteArray::class to BINARY(), + Duration::class to DURATION(), // 3.2+ + Period::class to PERIOD(), // 3.2+ +) + +private val knownDataTypes: Map, DataType> = mapOf( + Byte::class to DataTypes.ByteType, + Short::class to DataTypes.ShortType, + Int::class to DataTypes.IntegerType, + Long::class to DataTypes.LongType, + Boolean::class to DataTypes.BooleanType, + Float::class to DataTypes.FloatType, + Double::class to DataTypes.DoubleType, + String::class to DataTypes.StringType, + LocalDate::class to DataTypes.DateType, + Date::class to DataTypes.DateType, + Timestamp::class to DataTypes.TimestampType, + Instant::class to DataTypes.TimestampType, + ByteArray::class to DataTypes.BinaryType, + Decimal::class to DecimalType.SYSTEM_DEFAULT(), + BigDecimal::class to DecimalType.SYSTEM_DEFAULT(), + CalendarInterval::class to DataTypes.CalendarIntervalType, +) + +/** + * Main method of API, which gives you seamless integration with Spark: + * It creates encoder for any given supported type T + * + * Supported types are data classes, primitives, and Lists, Maps and Arrays containing them + * + * @param T type, supported by Spark + * @return generated encoder + */ +@OptIn(ExperimentalStdlibApi::class) +inline fun encoder(): Encoder = generateEncoder(typeOf(), T::class) + +/** + * @see encoder + */ +fun generateEncoder(type: KType, cls: KClass<*>): Encoder { + @Suppress("UNCHECKED_CAST") + return when { + isSupportedByKotlinClassEncoder(cls) -> kotlinClassEncoder(memoizedSchema(type), cls) + else -> ENCODERS[cls] as? Encoder? ?: bean(cls.java) + } as Encoder +} + +private fun isSupportedByKotlinClassEncoder(cls: KClass<*>): Boolean = when { + cls == ByteArray::class -> false // uses binary encoder + cls.isData -> true + cls.isSubclassOf(Map::class) -> true + cls.isSubclassOf(Iterable::class) -> true + cls.isSubclassOf(Product::class) -> true + cls.java.isArray -> true + else -> false +} + + +private fun kotlinClassEncoder(schema: DataType, kClass: KClass<*>): Encoder { + return ExpressionEncoder( + if (schema is DataTypeWithClass) KotlinReflection.serializerFor( + kClass.java, + schema + ) else KotlinReflection.serializerForType(KotlinReflection.getType(kClass.java)), + if (schema is DataTypeWithClass) KotlinReflection.deserializerFor( + kClass.java, + schema + ) else KotlinReflection.deserializerForType(KotlinReflection.getType(kClass.java)), + ClassTag.apply(kClass.java) + ) +} + +/** + * Not meant to be used by the user explicitly. + * + * This function generates the DataType schema for supported classes, including Kotlin data classes, [Map], + * [Iterable], [Product], [Array], and combinations of those. + * + * It's mainly used by [generateEncoder]/[encoder]. + */ +@OptIn(ExperimentalStdlibApi::class) +fun schema(type: KType, map: Map = mapOf()): DataType { + val primitiveSchema = knownDataTypes[type.classifier] + if (primitiveSchema != null) return KSimpleTypeWrapper( + primitiveSchema, + (type.classifier!! as KClass<*>).java, + type.isMarkedNullable + ) + val klass = type.classifier as? KClass<*> ?: throw IllegalArgumentException("Unsupported type $type") + val args = type.arguments + + val types = transitiveMerge(map, klass.typeParameters.zip(args).map { + it.first.name to it.second.type!! + }.toMap()) + return when { + klass.isSubclassOf(Enum::class) -> { + KSimpleTypeWrapper(DataTypes.StringType, klass.java, type.isMarkedNullable) + } + klass.isSubclassOf(Iterable::class) || klass.java.isArray -> { + val listParam = if (klass.java.isArray) { + when (klass) { + IntArray::class -> typeOf() + LongArray::class -> typeOf() + FloatArray::class -> typeOf() + DoubleArray::class -> typeOf() + BooleanArray::class -> typeOf() + ShortArray::class -> typeOf() + // ByteArray handled by BinaryType + else -> types.getValue(klass.typeParameters[0].name) + } + } else types.getValue(klass.typeParameters[0].name) + KComplexTypeWrapper( + DataTypes.createArrayType(schema(listParam, types), listParam.isMarkedNullable), + klass.java, + type.isMarkedNullable + ) + } + klass.isSubclassOf(Map::class) -> { + val mapKeyParam = types.getValue(klass.typeParameters[0].name) + val mapValueParam = types.getValue(klass.typeParameters[1].name) + KComplexTypeWrapper( + DataTypes.createMapType( + schema(mapKeyParam, types), + schema(mapValueParam, types), + true + ), + klass.java, + type.isMarkedNullable + ) + } + klass.isData -> { + val structType = StructType( + klass + .primaryConstructor!! + .parameters + .filter { it.findAnnotation() == null } + .map { + val projectedType = types[it.type.toString()] ?: it.type + val propertyDescriptor = PropertyDescriptor( + it.name, + klass.java, + "is" + it.name?.replaceFirstChar { if (it.isLowerCase()) it.titlecase(Locale.getDefault()) else it.toString() }, + null + ) + KStructField( + propertyDescriptor.readMethod.name, + StructField( + it.name, + schema(projectedType, types), + projectedType.isMarkedNullable, + Metadata.empty() + ) + ) + } + .toTypedArray() + ) + KDataTypeWrapper(structType, klass.java, true) + } + klass.isSubclassOf(Product::class) -> { + val params = type.arguments.mapIndexed { i, it -> + "_${i + 1}" to it.type!! + } + + val structType = DataTypes.createStructType( + params.map { (fieldName, fieldType) -> + val dataType = schema(fieldType, types) + KStructField( + fieldName, + StructField(fieldName, dataType, fieldType.isMarkedNullable, Metadata.empty()) + ) + }.toTypedArray() + ) + + KComplexTypeWrapper(structType, klass.java, true) + } + else -> throw IllegalArgumentException("$type is unsupported") + } +} + +private fun transitiveMerge(a: Map, b: Map): Map = + a + b.mapValues { + a.getOrDefault(it.value.toString(), it.value) + } + +class Memoize1(val f: (T) -> R) : (T) -> R { + + private val values = ConcurrentHashMap() + + override fun invoke(x: T): R = values.getOrPut(x) { f(x) } +} + +private fun ((T) -> R).memoize(): (T) -> R = Memoize1(this) + +private val memoizedSchema: (KType) -> DataType = { x: KType -> schema(x) }.memoize() diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt new file mode 100644 index 00000000..c3a26b3b --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt @@ -0,0 +1,40 @@ +package org.jetbrains.kotlinx.spark.api + +import org.apache.spark.sql.streaming.GroupState +import kotlin.reflect.KProperty + +/** + * (Kotlin-specific) + * Returns the group state value if it exists, else [null]. + * This is comparable to [GroupState.getOption], but instead utilises Kotlin's nullability features + * to get the same result. + */ +fun GroupState.getOrNull(): S? = if (exists()) get() else null + +/** + * (Kotlin-specific) + * Allows the group state object to be used as a delegate. Will be [null] if it does not exist. + * + * For example: + * ```kotlin + * groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> + * var s by state + * ... + * } + * ``` + */ +operator fun GroupState.getValue(thisRef: Any?, property: KProperty<*>): S? = getOrNull() + +/** + * (Kotlin-specific) + * Allows the group state object to be used as a delegate. Will be [null] if it does not exist. + * + * For example: + * ```kotlin + * groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> + * var s by state + * ... + * } + * ``` + */ +operator fun GroupState.setValue(thisRef: Any?, property: KProperty<*>, value: S?): Unit = update(value) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt new file mode 100644 index 00000000..23062799 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt @@ -0,0 +1,178 @@ +package org.jetbrains.kotlinx.spark.api + +import org.apache.spark.api.java.function.CoGroupFunction +import org.apache.spark.api.java.function.FlatMapGroupsFunction +import org.apache.spark.api.java.function.FlatMapGroupsWithStateFunction +import org.apache.spark.api.java.function.MapFunction +import org.apache.spark.api.java.function.MapGroupsFunction +import org.apache.spark.api.java.function.MapGroupsWithStateFunction +import org.apache.spark.api.java.function.ReduceFunction +import org.apache.spark.sql.Dataset +import org.apache.spark.sql.Encoder +import org.apache.spark.sql.KeyValueGroupedDataset +import org.apache.spark.sql.streaming.GroupState +import org.apache.spark.sql.streaming.GroupStateTimeout +import org.apache.spark.sql.streaming.OutputMode + + +/** + * Returns a new [KeyValueGroupedDataset] where the given function [func] has been applied + * to the data. The grouping key is unchanged by this. + * + * ```kotlin + * // Create values grouped by key from a Dataset> + * ds.groupByKey { it._1 }.mapValues { it._2 } + * ``` + */ +inline fun KeyValueGroupedDataset.mapValues(noinline func: (VALUE) -> R): KeyValueGroupedDataset = + mapValues(MapFunction(func), encoder()) + +/** + * (Kotlin-specific) + * Applies the given function to each group of data. For each unique group, the function will + * be passed the group key and an iterator that contains all the elements in the group. The + * function can return an element of arbitrary type which will be returned as a new [Dataset]. + * + * This function does not support partial aggregation, and as a result requires shuffling all + * the data in the [Dataset]. If an application intends to perform an aggregation over each + * key, it is best to use the reduce function or an + * [org.apache.spark.sql.expressions.Aggregator]. + * + * Internally, the implementation will spill to disk if any given group is too large to fit into + * memory. However, users must take care to avoid materializing the whole iterator for a group + * (for example, by calling [toList]) unless they are sure that this is possible given the memory + * constraints of their cluster. + */ +inline fun KeyValueGroupedDataset.mapGroups(noinline func: (KEY, Iterator) -> R): Dataset = + mapGroups(MapGroupsFunction(func), encoder()) + +/** + * (Kotlin-specific) + * Reduces the elements of each group of data using the specified binary function. + * The given function must be commutative and associative or the result may be non-deterministic. + * + * Note that you need to use [reduceGroupsK] always instead of the Java- or Scala-specific + * [KeyValueGroupedDataset.reduceGroups] to make the compiler work. + */ +inline fun KeyValueGroupedDataset.reduceGroupsK(noinline func: (VALUE, VALUE) -> VALUE): Dataset> = + reduceGroups(ReduceFunction(func)) + .map { t -> t._1 to t._2 } + +/** + * (Kotlin-specific) + * Applies the given function to each group of data. For each unique group, the function will + * be passed the group key and an iterator that contains all the elements in the group. The + * function can return an iterator containing elements of an arbitrary type which will be returned + * as a new [Dataset]. + * + * This function does not support partial aggregation, and as a result requires shuffling all + * the data in the [Dataset]. If an application intends to perform an aggregation over each + * key, it is best to use the reduce function or an + * [org.apache.spark.sql.expressions.Aggregator]. + * + * Internally, the implementation will spill to disk if any given group is too large to fit into + * memory. However, users must take care to avoid materializing the whole iterator for a group + * (for example, by calling [toList]) unless they are sure that this is possible given the memory + * constraints of their cluster. + */ +inline fun KeyValueGroupedDataset.flatMapGroups( + noinline func: (key: K, values: Iterator) -> Iterator, +): Dataset = flatMapGroups( + FlatMapGroupsFunction(func), + encoder() +) + + +/** + * (Kotlin-specific) + * Applies the given function to each group of data, while maintaining a user-defined per-group + * state. The result Dataset will represent the objects returned by the function. + * For a static batch Dataset, the function will be invoked once per group. For a streaming + * Dataset, the function will be invoked for each group repeatedly in every trigger, and + * updates to each group's state will be saved across invocations. + * See [org.apache.spark.sql.streaming.GroupState] for more details. + * + * @param S The type of the user-defined state. Must be encodable to Spark SQL types. + * @param U The type of the output objects. Must be encodable to Spark SQL types. + * @param func Function to be called on every group. + * + * See [Encoder] for more details on what types are encodable to Spark SQL. + */ +inline fun KeyValueGroupedDataset.mapGroupsWithState( + noinline func: (key: K, values: Iterator, state: GroupState) -> U, +): Dataset = mapGroupsWithState( + MapGroupsWithStateFunction(func), + encoder(), + encoder() +) + +/** + * (Kotlin-specific) + * Applies the given function to each group of data, while maintaining a user-defined per-group + * state. The result Dataset will represent the objects returned by the function. + * For a static batch Dataset, the function will be invoked once per group. For a streaming + * Dataset, the function will be invoked for each group repeatedly in every trigger, and + * updates to each group's state will be saved across invocations. + * See [org.apache.spark.sql.streaming.GroupState] for more details. + * + * @param S The type of the user-defined state. Must be encodable to Spark SQL types. + * @param U The type of the output objects. Must be encodable to Spark SQL types. + * @param func Function to be called on every group. + * @param timeoutConf Timeout configuration for groups that do not receive data for a while. + * + * See [Encoder] for more details on what types are encodable to Spark SQL. + */ +inline fun KeyValueGroupedDataset.mapGroupsWithState( + timeoutConf: GroupStateTimeout, + noinline func: (key: K, values: Iterator, state: GroupState) -> U, +): Dataset = mapGroupsWithState( + MapGroupsWithStateFunction(func), + encoder(), + encoder(), + timeoutConf +) + +/** + * (Kotlin-specific) + * Applies the given function to each group of data, while maintaining a user-defined per-group + * state. The result Dataset will represent the objects returned by the function. + * For a static batch Dataset, the function will be invoked once per group. For a streaming + * Dataset, the function will be invoked for each group repeatedly in every trigger, and + * updates to each group's state will be saved across invocations. + * See [GroupState] for more details. + * + * @param S The type of the user-defined state. Must be encodable to Spark SQL types. + * @param U The type of the output objects. Must be encodable to Spark SQL types. + * @param func Function to be called on every group. + * @param outputMode The output mode of the function. + * @param timeoutConf Timeout configuration for groups that do not receive data for a while. + * + * See [Encoder] for more details on what types are encodable to Spark SQL. + */ +inline fun KeyValueGroupedDataset.flatMapGroupsWithState( + outputMode: OutputMode, + timeoutConf: GroupStateTimeout, + noinline func: (key: K, values: Iterator, state: GroupState) -> Iterator, +): Dataset = flatMapGroupsWithState( + FlatMapGroupsWithStateFunction(func), + outputMode, + encoder(), + encoder(), + timeoutConf +) + +/** + * (Kotlin-specific) + * Applies the given function to each cogrouped data. For each unique group, the function will + * be passed the grouping key and 2 iterators containing all elements in the group from + * [Dataset] [this] and [other]. The function can return an iterator containing elements of an + * arbitrary type which will be returned as a new [Dataset]. + */ +inline fun KeyValueGroupedDataset.cogroup( + other: KeyValueGroupedDataset, + noinline func: (key: K, left: Iterator, right: Iterator) -> Iterator, +): Dataset = cogroup( + other, + CoGroupFunction(func), + encoder() +) \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt similarity index 69% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt rename to kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 98fdae8d..91df8d7a 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkHelper.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -20,14 +20,53 @@ package org.jetbrains.kotlinx.spark.api import org.apache.spark.SparkConf -import org.apache.spark.api.java.JavaRDD +import org.apache.spark.SparkContext import org.apache.spark.api.java.JavaRDDLike import org.apache.spark.api.java.JavaSparkContext +import org.apache.spark.broadcast.Broadcast import org.apache.spark.rdd.RDD -import org.apache.spark.sql.Dataset import org.apache.spark.sql.SparkSession.Builder import org.apache.spark.sql.UDFRegistration import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR +import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions + +/** + * This wrapper over [SparkSession] which provides several additional methods to create [org.apache.spark.sql.Dataset] + */ +class KSparkSession(val spark: SparkSession) { + + val sc: JavaSparkContext by lazy { JavaSparkContext(spark.sparkContext) } + + inline fun List.toDS() = toDS(spark) + inline fun Array.toDS() = spark.dsOf(*this) + inline fun dsOf(vararg arg: T) = spark.dsOf(*arg) + inline fun RDD.toDS() = toDS(spark) + inline fun JavaRDDLike.toDS() = toDS(spark) + val udf: UDFRegistration get() = spark.udf() +} + +/** + * The entry point to programming Spark with the Dataset and DataFrame API. + * + * @see org.apache.spark.sql.SparkSession + */ +typealias SparkSession = org.apache.spark.sql.SparkSession + +/** + * Control our logLevel. This overrides any user-defined log settings. + * @param level The desired log level as [SparkLogLevel]. + */ +fun SparkContext.setLogLevel(level: SparkLogLevel): Unit = setLogLevel(level.name) + +enum class SparkLogLevel { + ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN +} + +/** + * Returns the Spark context associated with this Spark session. + */ +val SparkSession.sparkContext: SparkContext + get() = KSparkExtensions.sparkContext(this) /** * Wrapper for spark creation which allows setting different spark params. @@ -105,16 +144,36 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func } /** - * This wrapper over [SparkSession] which provides several additional methods to create [org.apache.spark.sql.Dataset] + * Broadcast a read-only variable to the cluster, returning a + * [org.apache.spark.broadcast.Broadcast] object for reading it in distributed functions. + * The variable will be sent to each cluster only once. + * + * @param value value to broadcast to the Spark nodes + * @return `Broadcast` object, a read-only variable cached on each machine */ -class KSparkSession(val spark: SparkSession) { - - val sc: JavaSparkContext by lazy { JavaSparkContext(spark.sparkContext) } +inline fun SparkSession.broadcast(value: T): Broadcast = try { + sparkContext.broadcast(value, encoder().clsTag()) +} catch (e: ClassNotFoundException) { + JavaSparkContext(sparkContext).broadcast(value) +} - inline fun List.toDS() = toDS(spark) - inline fun Array.toDS() = spark.dsOf(*this) - inline fun dsOf(vararg arg: T) = spark.dsOf(*arg) - inline fun RDD.toDS() = toDS(spark) - inline fun JavaRDDLike.toDS() = toDS(spark) - val udf: UDFRegistration get() = spark.udf() +/** + * Broadcast a read-only variable to the cluster, returning a + * [org.apache.spark.broadcast.Broadcast] object for reading it in distributed functions. + * The variable will be sent to each cluster only once. + * + * @param value value to broadcast to the Spark nodes + * @return `Broadcast` object, a read-only variable cached on each machine + * @see broadcast + */ +@Deprecated( + "You can now use `spark.broadcast()` instead.", + ReplaceWith("spark.broadcast(value)"), + DeprecationLevel.WARNING +) +inline fun SparkContext.broadcast(value: T): Broadcast = try { + broadcast(value, encoder().clsTag()) +} catch (e: ClassNotFoundException) { + JavaSparkContext(this).broadcast(value) } + From 635dc9449990bf0dca185f326da8431d4e93c3c6 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 28 Feb 2022 17:51:19 +0100 Subject: [PATCH 066/213] Added docs in a couple of files, added multiple conversions regarding tuples, pairs, arities etc. --- .../spark/api/{VarArities.kt => Arities.kt} | 23 +- .../org/jetbrains/kotlinx/spark/api/Column.kt | 25 ++ .../kotlinx/spark/api/Conversions.kt | 270 ++++++++++++++++++ .../jetbrains/kotlinx/spark/api/Dataset.kt | 27 ++ .../jetbrains/kotlinx/spark/api/GroupState.kt | 19 ++ .../spark/api/KeyValueGroupedDataset.kt | 19 ++ 6 files changed, 382 insertions(+), 1 deletion(-) rename kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/{VarArities.kt => Arities.kt} (99%) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt similarity index 99% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt rename to kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt index af870038..36bd6efb 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/VarArities.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt @@ -17,9 +17,30 @@ * limitations under the License. * =LICENSEEND= */ + /** - * Helper classes and functions to work with unnamed tuples + * Helper classes and functions to work with unnamed tuples we call Arities. + * Arities are easier to work with in Kotlin than Scala Tuples since they are Kotlin data classes. + * This means they can be destructured, copied, etc. + * Finally, the Arities are Serializable, meaning they can be used inside RDDs and they can be broadcast. + * + * Example: + * ```kotlin + * // creation + * val tuple: Arity3 = c(1, "test", 1.0) + * + * // addition + * val newTuple: Arity5 = tuple + c(1, 2) + * + * // destructuring + * val dataset: Dataset> = ... + * dataset.map { (a: Int, b: Double) -> + * (a + b).toString() + * } + * + * ``` */ + package org.jetbrains.kotlinx.spark.api import java.io.Serializable diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt index 922c2868..ffa42ada 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt @@ -1,3 +1,28 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.0+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2021 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ + +/** + * This file contains all Column helper functions. + * This includes easier Column creation and operator functions. + */ + package org.jetbrains.kotlinx.spark.api import org.apache.spark.sql.Column diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index e49ecf84..559e27a7 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -17,12 +17,20 @@ * limitations under the License. * =LICENSEEND= */ + +/** + * This files contains conversions of Iterators, Collections, etc. between the Scala- + * and Kotlin/Java variants. + */ + @file:Suppress("NOTHING_TO_INLINE", "RemoveExplicitTypeArguments", "unused") package org.jetbrains.kotlinx.spark.api +import scala.* import scala.collection.JavaConverters import java.util.* +import java.util.Enumeration import java.util.concurrent.ConcurrentMap import scala.collection.Iterable as ScalaIterable import scala.collection.Iterator as ScalaIterator @@ -158,3 +166,265 @@ fun ScalaMap.asKotlinMap(): Map = JavaConverters.mapAsJavaMap fun ScalaConcurrentMap.asKotlinConcurrentMap(): ConcurrentMap = JavaConverters.mapAsJavaConcurrentMap(this) + +/** + * Returns a new [Tuple2] based on the arguments in the current [Pair]. + */ +fun Pair.toTuple(): Tuple2 = Tuple2(first, second) + +/** + * Returns a new [Arity2] based on the arguments in the current [Pair]. + */ +fun Pair.toArity(): Arity2 = Arity2(first, second) + +/** + * Returns a new [Pair] based on the arguments in the current [Tuple2]. + */ +fun Tuple2.toPair(): Pair = Pair(_1(), _2()) + +/** + * Returns a new [Pair] based on the arguments in the current [Arity2]. + */ +fun Arity2.toPair(): Pair = Pair(_1, _2) + + +/** + * Returns a new [Tuple3] based on the arguments in the current [Triple]. + */ +fun Triple.toTuple(): Tuple3 = Tuple3(first, second, third) + +/** + * Returns a new [Arity3] based on the arguments in the current [Triple]. + */ +fun Triple.toArity(): Arity3 = Arity3(first, second, third) + +/** + * Returns a new [Triple] based on the arguments in the current [Tuple3]. + */ +fun Tuple3.toTriple(): Triple = Triple(_1(), _2(), _3()) + +/** + * Returns a new [Triple] based on the arguments in the current [Arity3]. + */ +fun Arity3.toTriple(): Triple = Triple(_1, _2, _3) + + +/** + * Returns a new Arity1 based on this Tuple1. + **/ +fun Tuple1.toArity(): Arity1 = Arity1(this._1()) + +/** + * Returns a new Arity2 based on this Tuple2. + **/ +fun Tuple2.toArity(): Arity2 = Arity2(this._1(), this._2()) + +/** + * Returns a new Arity3 based on this Tuple3. + **/ +fun Tuple3.toArity(): Arity3 = Arity3(this._1(), this._2(), this._3()) + +/** + * Returns a new Arity4 based on this Tuple4. + **/ +fun Tuple4.toArity(): Arity4 = Arity4(this._1(), this._2(), this._3(), this._4()) + +/** + * Returns a new Arity5 based on this Tuple5. + **/ +fun Tuple5.toArity(): Arity5 = Arity5(this._1(), this._2(), this._3(), this._4(), this._5()) + +/** + * Returns a new Arity6 based on this Tuple6. + **/ +fun Tuple6.toArity(): Arity6 = Arity6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) + +/** + * Returns a new Arity7 based on this Tuple7. + **/ +fun Tuple7.toArity(): Arity7 = Arity7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) + +/** + * Returns a new Arity8 based on this Tuple8. + **/ +fun Tuple8.toArity(): Arity8 = Arity8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) + +/** + * Returns a new Arity9 based on this Tuple9. + **/ +fun Tuple9.toArity(): Arity9 = Arity9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) + +/** + * Returns a new Arity10 based on this Tuple10. + **/ +fun Tuple10.toArity(): Arity10 = Arity10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) + +/** + * Returns a new Arity11 based on this Tuple11. + **/ +fun Tuple11.toArity(): Arity11 = Arity11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) + +/** + * Returns a new Arity12 based on this Tuple12. + **/ +fun Tuple12.toArity(): Arity12 = Arity12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) + +/** + * Returns a new Arity13 based on this Tuple13. + **/ +fun Tuple13.toArity(): Arity13 = Arity13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) + +/** + * Returns a new Arity14 based on this Tuple14. + **/ +fun Tuple14.toArity(): Arity14 = Arity14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) + +/** + * Returns a new Arity15 based on this Tuple15. + **/ +fun Tuple15.toArity(): Arity15 = Arity15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) + +/** + * Returns a new Arity16 based on this Tuple16. + **/ +fun Tuple16.toArity(): Arity16 = Arity16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) + +/** + * Returns a new Arity17 based on this Tuple17. + **/ +fun Tuple17.toArity(): Arity17 = Arity17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) + +/** + * Returns a new Arity18 based on this Tuple18. + **/ +fun Tuple18.toArity(): Arity18 = Arity18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) + +/** + * Returns a new Arity19 based on this Tuple19. + **/ +fun Tuple19.toArity(): Arity19 = Arity19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) + +/** + * Returns a new Arity20 based on this Tuple20. + **/ +fun Tuple20.toArity(): Arity20 = Arity20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) + +/** + * Returns a new Arity21 based on this Tuple21. + **/ +fun Tuple21.toArity(): Arity21 = Arity21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) + +/** + * Returns a new Arity22 based on this Tuple22. + **/ +fun Tuple22.toArity(): Arity22 = Arity22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) + +/** + * Returns a new Tuple1 based on this Arity1. + **/ +fun Arity1.toTuple(): Tuple1 = Tuple1(this._1) + +/** + * Returns a new Tuple2 based on this Arity2. + **/ +fun Arity2.toTuple(): Tuple2 = Tuple2(this._1, this._2) + +/** + * Returns a new Tuple3 based on this Arity3. + **/ +fun Arity3.toTuple(): Tuple3 = Tuple3(this._1, this._2, this._3) + +/** + * Returns a new Tuple4 based on this Arity4. + **/ +fun Arity4.toTuple(): Tuple4 = Tuple4(this._1, this._2, this._3, this._4) + +/** + * Returns a new Tuple5 based on this Arity5. + **/ +fun Arity5.toTuple(): Tuple5 = Tuple5(this._1, this._2, this._3, this._4, this._5) + +/** + * Returns a new Tuple6 based on this Arity6. + **/ +fun Arity6.toTuple(): Tuple6 = Tuple6(this._1, this._2, this._3, this._4, this._5, this._6) + +/** + * Returns a new Tuple7 based on this Arity7. + **/ +fun Arity7.toTuple(): Tuple7 = Tuple7(this._1, this._2, this._3, this._4, this._5, this._6, this._7) + +/** + * Returns a new Tuple8 based on this Arity8. + **/ +fun Arity8.toTuple(): Tuple8 = Tuple8(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8) + +/** + * Returns a new Tuple9 based on this Arity9. + **/ +fun Arity9.toTuple(): Tuple9 = Tuple9(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9) + +/** + * Returns a new Tuple10 based on this Arity10. + **/ +fun Arity10.toTuple(): Tuple10 = Tuple10(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10) + +/** + * Returns a new Tuple11 based on this Arity11. + **/ +fun Arity11.toTuple(): Tuple11 = Tuple11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11) + +/** + * Returns a new Tuple12 based on this Arity12. + **/ +fun Arity12.toTuple(): Tuple12 = Tuple12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12) + +/** + * Returns a new Tuple13 based on this Arity13. + **/ +fun Arity13.toTuple(): Tuple13 = Tuple13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13) + +/** + * Returns a new Tuple14 based on this Arity14. + **/ +fun Arity14.toTuple(): Tuple14 = Tuple14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14) + +/** + * Returns a new Tuple15 based on this Arity15. + **/ +fun Arity15.toTuple(): Tuple15 = Tuple15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15) + +/** + * Returns a new Tuple16 based on this Arity16. + **/ +fun Arity16.toTuple(): Tuple16 = Tuple16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16) + +/** + * Returns a new Tuple17 based on this Arity17. + **/ +fun Arity17.toTuple(): Tuple17 = Tuple17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17) + +/** + * Returns a new Tuple18 based on this Arity18. + **/ +fun Arity18.toTuple(): Tuple18 = Tuple18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18) + +/** + * Returns a new Tuple19 based on this Arity19. + **/ +fun Arity19.toTuple(): Tuple19 = Tuple19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19) + +/** + * Returns a new Tuple20 based on this Arity20. + **/ +fun Arity20.toTuple(): Tuple20 = Tuple20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20) + +/** + * Returns a new Tuple21 based on this Arity21. + **/ +fun Arity21.toTuple(): Tuple21 = Tuple21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21) + +/** + * Returns a new Tuple22 based on this Arity22. + **/ +fun Arity22.toTuple(): Tuple22 = Tuple22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt index f5dc4868..fe4a9b90 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt @@ -1,3 +1,30 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.0+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2021 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ + +/** + * This file contains all Dataset helper functions. + * This includes the creation of Datasets from arrays, lists, and RDDs, + * as well as lots of extension functions which makes working with Datasets from Kotlin + * possible/easier. + */ + package org.jetbrains.kotlinx.spark.api import org.apache.spark.api.java.JavaRDDLike diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt index c3a26b3b..1af3d194 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.0+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2021 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api import org.apache.spark.sql.streaming.GroupState diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt index 23062799..d85fff8a 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.0+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2021 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api import org.apache.spark.api.java.function.CoGroupFunction From eb598783cb6f88eddc9819dbcd526f8cf6551450 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 1 Mar 2022 13:21:53 +0100 Subject: [PATCH 067/213] Adding and updating docs and cleaning up code --- .../jetbrains/kotlinx/spark/api/Arities.kt | 1 - .../kotlinx/spark/api/Conversions.kt | 2 +- .../jetbrains/kotlinx/spark/api/Dataset.kt | 2 +- .../jetbrains/kotlinx/spark/api/Encoding.kt | 189 ++++++++----- .../jetbrains/kotlinx/spark/api/GroupState.kt | 11 +- .../jetbrains/kotlinx/spark/api/Iterators.kt | 41 ++- .../spark/api/KeyValueGroupedDataset.kt | 18 +- .../kotlinx/spark/api/SparkSession.kt | 44 ++- .../kotlinx/spark/api/UDFRegister.kt | 253 +++++++++--------- 9 files changed, 344 insertions(+), 217 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt index 36bd6efb..6dcb1666 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt @@ -37,7 +37,6 @@ * dataset.map { (a: Int, b: Double) -> * (a + b).toString() * } - * * ``` */ diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index 559e27a7..8b67a1bc 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -19,7 +19,7 @@ */ /** - * This files contains conversions of Iterators, Collections, etc. between the Scala- + * This files contains conversions of Iterators, Collections, Tuples, etc. between the Scala- * and Kotlin/Java variants. */ diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt index fe4a9b90..b5070b84 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt @@ -50,7 +50,7 @@ inline fun SparkSession.toDS(list: List): Dataset = createDataset(list, encoder()) /** - * Utility method to create dataset from list + * Utility method to create dataset from *array or vararg arguments */ inline fun SparkSession.dsOf(vararg t: T): Dataset = createDataset(listOf(*t), encoder()) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt index c73c2da0..fb3ac0a4 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt @@ -17,12 +17,21 @@ * limitations under the License. * =LICENSEEND= */ + +/** + * This file contains the encoding logic for the Kotlin Spark API. + * It provides encoders for Spark, based on reflection, for functions that need it. + * Aside from the normal Spark encoders, it also provides encoding for Kotlin data classes, Iterables, + * Products, Arrays, Maps etc. + */ + @file:Suppress("HasPlatformType", "unused", "FunctionName") package org.jetbrains.kotlinx.spark.api import org.apache.spark.sql.* import org.apache.spark.sql.Encoders.* +import org.apache.spark.sql.KotlinReflection.* import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder import org.apache.spark.sql.types.* import org.apache.spark.unsafe.types.CalendarInterval @@ -119,37 +128,35 @@ inline fun encoder(): Encoder = generateEncoder(typeOf(), T::c /** * @see encoder */ -fun generateEncoder(type: KType, cls: KClass<*>): Encoder { - @Suppress("UNCHECKED_CAST") - return when { - isSupportedByKotlinClassEncoder(cls) -> kotlinClassEncoder(memoizedSchema(type), cls) +@Suppress("UNCHECKED_CAST") +fun generateEncoder(type: KType, cls: KClass<*>): Encoder = + when { + isSupportedByKotlinClassEncoder(cls) -> kotlinClassEncoder(schema = memoizedSchema(type), kClass = cls) else -> ENCODERS[cls] as? Encoder? ?: bean(cls.java) } as Encoder -} -private fun isSupportedByKotlinClassEncoder(cls: KClass<*>): Boolean = when { - cls == ByteArray::class -> false // uses binary encoder - cls.isData -> true - cls.isSubclassOf(Map::class) -> true - cls.isSubclassOf(Iterable::class) -> true - cls.isSubclassOf(Product::class) -> true - cls.java.isArray -> true - else -> false -} +private fun isSupportedByKotlinClassEncoder(cls: KClass<*>): Boolean = + when { + cls == ByteArray::class -> false // uses binary encoder + cls.isData -> true + cls.isSubclassOf(Map::class) -> true + cls.isSubclassOf(Iterable::class) -> true + cls.isSubclassOf(Product::class) -> true + cls.java.isArray -> true + else -> false + } private fun kotlinClassEncoder(schema: DataType, kClass: KClass<*>): Encoder { - return ExpressionEncoder( - if (schema is DataTypeWithClass) KotlinReflection.serializerFor( - kClass.java, - schema - ) else KotlinReflection.serializerForType(KotlinReflection.getType(kClass.java)), - if (schema is DataTypeWithClass) KotlinReflection.deserializerFor( - kClass.java, - schema - ) else KotlinReflection.deserializerForType(KotlinReflection.getType(kClass.java)), - ClassTag.apply(kClass.java) - ) + val serializer = + if (schema is DataTypeWithClass) serializerFor(kClass.java, schema) + else serializerForType(getType(kClass.java)) + + val deserializer = + if (schema is DataTypeWithClass) deserializerFor(kClass.java, schema) + else deserializerForType(getType(kClass.java)) + + return ExpressionEncoder(serializer, deserializer, ClassTag.apply(kClass.java)) } /** @@ -163,21 +170,31 @@ private fun kotlinClassEncoder(schema: DataType, kClass: KClass<*>): Encoder @OptIn(ExperimentalStdlibApi::class) fun schema(type: KType, map: Map = mapOf()): DataType { val primitiveSchema = knownDataTypes[type.classifier] - if (primitiveSchema != null) return KSimpleTypeWrapper( - primitiveSchema, - (type.classifier!! as KClass<*>).java, - type.isMarkedNullable - ) + if (primitiveSchema != null) + return KSimpleTypeWrapper( + /* dt = */ primitiveSchema, + /* cls = */ (type.classifier!! as KClass<*>).java, + /* nullable = */ type.isMarkedNullable + ) + val klass = type.classifier as? KClass<*> ?: throw IllegalArgumentException("Unsupported type $type") val args = type.arguments - val types = transitiveMerge(map, klass.typeParameters.zip(args).map { - it.first.name to it.second.type!! - }.toMap()) + val types = transitiveMerge( + map, + klass.typeParameters.zip(args).associate { + it.first.name to it.second.type!! + }, + ) + return when { - klass.isSubclassOf(Enum::class) -> { - KSimpleTypeWrapper(DataTypes.StringType, klass.java, type.isMarkedNullable) - } + klass.isSubclassOf(Enum::class) -> + KSimpleTypeWrapper( + /* dt = */ DataTypes.StringType, + /* cls = */ klass.java, + /* nullable = */ type.isMarkedNullable + ) + klass.isSubclassOf(Iterable::class) || klass.java.isArray -> { val listParam = if (klass.java.isArray) { when (klass) { @@ -187,29 +204,40 @@ fun schema(type: KType, map: Map = mapOf()): DataType { DoubleArray::class -> typeOf() BooleanArray::class -> typeOf() ShortArray::class -> typeOf() - // ByteArray handled by BinaryType + /* ByteArray handled by BinaryType */ else -> types.getValue(klass.typeParameters[0].name) } } else types.getValue(klass.typeParameters[0].name) + + val dataType = DataTypes.createArrayType( + /* elementType = */ schema(listParam, types), + /* containsNull = */ listParam.isMarkedNullable + ) + KComplexTypeWrapper( - DataTypes.createArrayType(schema(listParam, types), listParam.isMarkedNullable), - klass.java, - type.isMarkedNullable + /* dt = */ dataType, + /* cls = */ klass.java, + /* nullable = */ type.isMarkedNullable ) } + klass.isSubclassOf(Map::class) -> { val mapKeyParam = types.getValue(klass.typeParameters[0].name) val mapValueParam = types.getValue(klass.typeParameters[1].name) + + val dataType = DataTypes.createMapType( + /* keyType = */ schema(mapKeyParam, types), + /* valueType = */ schema(mapValueParam, types), + /* valueContainsNull = */ true + ) + KComplexTypeWrapper( - DataTypes.createMapType( - schema(mapKeyParam, types), - schema(mapValueParam, types), - true - ), - klass.java, - type.isMarkedNullable + /* dt = */ dataType, + /* cls = */ klass.java, + /* nullable = */ type.isMarkedNullable ) } + klass.isData -> { val structType = StructType( klass @@ -219,18 +247,22 @@ fun schema(type: KType, map: Map = mapOf()): DataType { .map { val projectedType = types[it.type.toString()] ?: it.type val propertyDescriptor = PropertyDescriptor( - it.name, - klass.java, - "is" + it.name?.replaceFirstChar { if (it.isLowerCase()) it.titlecase(Locale.getDefault()) else it.toString() }, - null + /* propertyName = */ it.name, + /* beanClass = */ klass.java, + /* readMethodName = */ "is" + it.name?.replaceFirstChar { + if (it.isLowerCase()) it.titlecase(Locale.getDefault()) + else it.toString() + }, + /* writeMethodName = */ null ) + KStructField( - propertyDescriptor.readMethod.name, - StructField( - it.name, - schema(projectedType, types), - projectedType.isMarkedNullable, - Metadata.empty() + /* getterName = */ propertyDescriptor.readMethod.name, + /* delegate = */ StructField( + /* name = */ it.name, + /* dataType = */ schema(projectedType, types), + /* nullable = */ projectedType.isMarkedNullable, + /* metadata = */ Metadata.empty() ) ) } @@ -246,31 +278,50 @@ fun schema(type: KType, map: Map = mapOf()): DataType { val structType = DataTypes.createStructType( params.map { (fieldName, fieldType) -> val dataType = schema(fieldType, types) + KStructField( - fieldName, - StructField(fieldName, dataType, fieldType.isMarkedNullable, Metadata.empty()) + /* getterName = */ fieldName, + /* delegate = */ StructField( + /* name = */ fieldName, + /* dataType = */ dataType, + /* nullable = */ fieldType.isMarkedNullable, + /* metadata = */Metadata.empty() + ) ) }.toTypedArray() ) - KComplexTypeWrapper(structType, klass.java, true) + KComplexTypeWrapper( + /* dt = */ structType, + /* cls = */ klass.java, + /* nullable = */ true + ) } + else -> throw IllegalArgumentException("$type is unsupported") } } -private fun transitiveMerge(a: Map, b: Map): Map = - a + b.mapValues { - a.getOrDefault(it.value.toString(), it.value) - } +/** + * Memoized version of [schema]. This ensures the [DataType] of given `type` only + * has to be calculated once. + */ +private val memoizedSchema: (type: KType) -> DataType = memoize { + schema(it) +} -class Memoize1(val f: (T) -> R) : (T) -> R { +private fun transitiveMerge(a: Map, b: Map): Map = + a + b.mapValues { a.getOrDefault(it.value.toString(), it.value) } +/** Wrapper around function with 1 argument to avoid recalculation when a certain argument is queried again. */ +private class Memoize1(private val function: (T) -> R) : (T) -> R { private val values = ConcurrentHashMap() - - override fun invoke(x: T): R = values.getOrPut(x) { f(x) } + override fun invoke(x: T): R = values.getOrPut(x) { function(x) } } -private fun ((T) -> R).memoize(): (T) -> R = Memoize1(this) +/** Wrapper around function to avoid recalculation when a certain argument is queried again. */ +private fun ((T) -> R).memoized(): (T) -> R = Memoize1(this) + +/** Wrapper around function to avoid recalculation when a certain argument is queried again. */ +private fun memoize(function: (T) -> R): (T) -> R = Memoize1(function) -private val memoizedSchema: (KType) -> DataType = { x: KType -> schema(x) }.memoize() diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt index 1af3d194..e2013783 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt @@ -17,6 +17,11 @@ * limitations under the License. * =LICENSEEND= */ + +/** + * This file contains some helper functions to more easily work with [GroupState] from Kotlin. + */ + package org.jetbrains.kotlinx.spark.api import org.apache.spark.sql.streaming.GroupState @@ -24,7 +29,7 @@ import kotlin.reflect.KProperty /** * (Kotlin-specific) - * Returns the group state value if it exists, else [null]. + * Returns the group state value if it exists, else `null`. * This is comparable to [GroupState.getOption], but instead utilises Kotlin's nullability features * to get the same result. */ @@ -32,7 +37,7 @@ fun GroupState.getOrNull(): S? = if (exists()) get() else null /** * (Kotlin-specific) - * Allows the group state object to be used as a delegate. Will be [null] if it does not exist. + * Allows the group state object to be used as a delegate. Will be `null` if it does not exist. * * For example: * ```kotlin @@ -46,7 +51,7 @@ operator fun GroupState.getValue(thisRef: Any?, property: KProperty<*>): /** * (Kotlin-specific) - * Allows the group state object to be used as a delegate. Will be [null] if it does not exist. + * Allows the group state object to be used as a delegate. Will be `null` if it does not exist. * * For example: * ```kotlin diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt index 9f7de351..1be79918 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt @@ -17,35 +17,57 @@ * limitations under the License. * =LICENSEEND= */ + +/** + * This file contains several ways to wrap and modify iterators lazily. + * This includes mapping, filtering, and partitioning. + */ + package org.jetbrains.kotlinx.spark.api +/** Partitions the values of the iterator lazily in groups of [size]. */ class PartitioningIterator( private val source: Iterator, private val size: Int, - private val cutIncomplete: Boolean = false + private val cutIncomplete: Boolean = false, ) : AbstractIterator>() { + override fun computeNext() { if (!source.hasNext()) return done() val interimResult = arrayListOf() repeat(size) { - if (source.hasNext()) interimResult.add(source.next()) - else return if (cutIncomplete) done() else setNext(interimResult) + if (source.hasNext()) + interimResult.add(source.next()) + else + return if (cutIncomplete) + done() + else + setNext(interimResult) } setNext(interimResult) } + } +/** Maps the values of the iterator lazily using [func]. */ class MappingIterator( - private val self: Iterator, - private val func: (T) -> R + private val source: Iterator, + private val func: (T) -> R, ) : AbstractIterator() { - override fun computeNext() = if (self.hasNext()) setNext(func(self.next())) else done() + + override fun computeNext(): Unit = + if (source.hasNext()) + setNext(func(source.next())) + else + done() } +/** Filters the values of the iterator lazily using [predicate]. */ class FilteringIterator( private val source: Iterator, - private val predicate: (T) -> Boolean + private val predicate: (T) -> Boolean, ) : AbstractIterator() { + override fun computeNext() { while (source.hasNext()) { val next = source.next() @@ -56,13 +78,14 @@ class FilteringIterator( } done() } + } /** Maps the values of the iterator lazily using [func]. */ fun Iterator.map(func: (T) -> R): Iterator = MappingIterator(this, func) -/** Filters the values of the iterator lazily using [func]. */ -fun Iterator.filter(func: (T) -> Boolean): Iterator = FilteringIterator(this, func) +/** Filters the values of the iterator lazily using [predicate]. */ +fun Iterator.filter(predicate: (T) -> Boolean): Iterator = FilteringIterator(this, predicate) /** Partitions the values of the iterator lazily in groups of [size]. */ fun Iterator.partition(size: Int): Iterator> = PartitioningIterator(this, size) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt index d85fff8a..81b7c0bf 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt @@ -17,6 +17,12 @@ * limitations under the License. * =LICENSEEND= */ + +/** + * This file contains several extension functions to work with [KeyValueGroupedDataset]s more easily + * from Kotlin. This includes automatically providing the right encoders, as well as mapping to `Arities`. + */ + package org.jetbrains.kotlinx.spark.api import org.apache.spark.api.java.function.CoGroupFunction @@ -75,7 +81,7 @@ inline fun KeyValueGroupedDataset.mapGroups( */ inline fun KeyValueGroupedDataset.reduceGroupsK(noinline func: (VALUE, VALUE) -> VALUE): Dataset> = reduceGroups(ReduceFunction(func)) - .map { t -> t._1 to t._2 } + .map { t -> t._1() to t._2() } /** * (Kotlin-specific) @@ -98,7 +104,7 @@ inline fun KeyValueGroupedDataset.flatMapGroups( noinline func: (key: K, values: Iterator) -> Iterator, ): Dataset = flatMapGroups( FlatMapGroupsFunction(func), - encoder() + encoder(), ) @@ -122,7 +128,7 @@ inline fun KeyValueGroupedDataset.mapGroupsWi ): Dataset = mapGroupsWithState( MapGroupsWithStateFunction(func), encoder(), - encoder() + encoder(), ) /** @@ -148,7 +154,7 @@ inline fun KeyValueGroupedDataset.mapGroupsWi MapGroupsWithStateFunction(func), encoder(), encoder(), - timeoutConf + timeoutConf, ) /** @@ -177,7 +183,7 @@ inline fun KeyValueGroupedDataset.flatMapGrou outputMode, encoder(), encoder(), - timeoutConf + timeoutConf, ) /** @@ -193,5 +199,5 @@ inline fun KeyValueGroupedDataset.cogroup( ): Dataset = cogroup( other, CoGroupFunction(func), - encoder() + encoder(), ) \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 91df8d7a..118abf48 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -17,6 +17,11 @@ * limitations under the License. * =LICENSEEND= */ + +/** + * This file contains the main entry points and wrappers for the Kotlin Spark API. + */ + package org.jetbrains.kotlinx.spark.api import org.apache.spark.SparkConf @@ -25,23 +30,49 @@ import org.apache.spark.api.java.JavaRDDLike import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.broadcast.Broadcast import org.apache.spark.rdd.RDD +import org.apache.spark.sql.Dataset import org.apache.spark.sql.SparkSession.Builder import org.apache.spark.sql.UDFRegistration import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions /** - * This wrapper over [SparkSession] which provides several additional methods to create [org.apache.spark.sql.Dataset] + * This wrapper over [SparkSession] which provides several additional methods to create [org.apache.spark.sql.Dataset]. + * + * @param spark The current [SparkSession] to wrap */ class KSparkSession(val spark: SparkSession) { + /** Lazy instance of [JavaSparkContext] wrapper around [sparkContext]. */ val sc: JavaSparkContext by lazy { JavaSparkContext(spark.sparkContext) } - inline fun List.toDS() = toDS(spark) - inline fun Array.toDS() = spark.dsOf(*this) - inline fun dsOf(vararg arg: T) = spark.dsOf(*arg) - inline fun RDD.toDS() = toDS(spark) - inline fun JavaRDDLike.toDS() = toDS(spark) + /** Utility method to create dataset from list. */ + inline fun List.toDS(): Dataset = toDS(spark) + + /** Utility method to create dataset from [Array]. */ + inline fun Array.toDS(): Dataset = spark.dsOf(*this) + + /** Utility method to create dataset from vararg arguments. */ + inline fun dsOf(vararg arg: T): Dataset = spark.dsOf(*arg) + + /** Utility method to create dataset from Scala [RDD]. */ + inline fun RDD.toDS(): Dataset = toDS(spark) + + /** Utility method to create dataset from [JavaRDDLike]. */ + inline fun JavaRDDLike.toDS(): Dataset = toDS(spark) + + /** + * A collection of methods for registering user-defined functions (UDF). + * + * The following example registers a UDF in Kotlin: + * ```Kotlin + * sparkSession.udf.register("myUDF") { arg1: Int, arg2: String -> arg2 + arg1 } + * ``` + * + * @note The user-defined functions must be deterministic. Due to optimization, + * duplicate invocations may be eliminated or the function may even be invoked more times than + * it is present in the query. + */ val udf: UDFRegistration get() = spark.udf() } @@ -58,6 +89,7 @@ typealias SparkSession = org.apache.spark.sql.SparkSession */ fun SparkContext.setLogLevel(level: SparkLogLevel): Unit = setLogLevel(level.name) +/** Log levels for spark. */ enum class SparkLogLevel { ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt index 6dc19d58..05d72675 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt @@ -17,6 +17,11 @@ * limitations under the License. * =LICENSEEND= */ + +/** + * This file contains functions to register UDFs easily from Kotlin. + */ + @file:Suppress("DuplicatedCode") package org.jetbrains.kotlinx.spark.api @@ -32,28 +37,34 @@ import kotlin.reflect.KClass import kotlin.reflect.full.isSubclassOf import kotlin.reflect.typeOf -fun DataType.unWrapper(): DataType { - return when (this) { +/** Unwraps [DataTypeWithClass]. */ +fun DataType.unWrap(): DataType = + when (this) { is DataTypeWithClass -> DataType.fromJson(dt().json()) else -> this } -} /** - * Checks if [this] is of a valid type for an UDF, otherwise it throws a [TypeOfUDFParameterNotSupportedException] + * Checks if [this] is of a valid type for a UDF, otherwise it throws a [TypeOfUDFParameterNotSupportedException] */ @PublishedApi internal fun KClass<*>.checkForValidType(parameterName: String) { - if (this == String::class || isSubclassOf(WrappedArray::class)) return // Most of the time we need strings or WrappedArrays - if (isSubclassOf(Iterable::class) || java.isArray - || isSubclassOf(Map::class) || isSubclassOf(Array::class) - || isSubclassOf(ByteArray::class) || isSubclassOf(CharArray::class) - || isSubclassOf(ShortArray::class) || isSubclassOf(IntArray::class) - || isSubclassOf(LongArray::class) || isSubclassOf(FloatArray::class) - || isSubclassOf(DoubleArray::class) || isSubclassOf(BooleanArray::class) - ) { - throw TypeOfUDFParameterNotSupportedException(this, parameterName) - } + if (this == String::class || isSubclassOf(WrappedArray::class)) + return // Most of the time we need strings or WrappedArrays + + if (isSubclassOf(Iterable::class) + || java.isArray + || isSubclassOf(Map::class) + || isSubclassOf(Array::class) + || isSubclassOf(ByteArray::class) + || isSubclassOf(CharArray::class) + || isSubclassOf(ShortArray::class) + || isSubclassOf(IntArray::class) + || isSubclassOf(LongArray::class) + || isSubclassOf(FloatArray::class) + || isSubclassOf(DoubleArray::class) + || isSubclassOf(BooleanArray::class) + ) throw TypeOfUDFParameterNotSupportedException(this, parameterName) } /** @@ -64,7 +75,7 @@ class TypeOfUDFParameterNotSupportedException(kClass: KClass<*>, parameterName: ) /** - * A wrapper for an UDF with 0 arguments. + * A wrapper for a UDF with 0 arguments. * @property udfName the name of the UDF */ class UDFWrapper0(private val udfName: String) { @@ -77,16 +88,16 @@ class UDFWrapper0(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register(name: String, noinline func: () -> R): UDFWrapper0 { - register(name, UDF0(func), schema(typeOf()).unWrapper()) + register(name, UDF0(func), schema(typeOf()).unWrap()) return UDFWrapper0(name) } /** - * A wrapper for an UDF with 1 arguments. + * A wrapper for a UDF with 1 arguments. * @property udfName the name of the UDF */ class UDFWrapper1(private val udfName: String) { @@ -99,17 +110,17 @@ class UDFWrapper1(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register(name: String, noinline func: (T0) -> R): UDFWrapper1 { T0::class.checkForValidType("T0") - register(name, UDF1(func), schema(typeOf()).unWrapper()) + register(name, UDF1(func), schema(typeOf()).unWrap()) return UDFWrapper1(name) } /** - * A wrapper for an UDF with 2 arguments. + * A wrapper for a UDF with 2 arguments. * @property udfName the name of the UDF */ class UDFWrapper2(private val udfName: String) { @@ -122,21 +133,21 @@ class UDFWrapper2(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1) -> R + noinline func: (T0, T1) -> R, ): UDFWrapper2 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") - register(name, UDF2(func), schema(typeOf()).unWrapper()) + register(name, UDF2(func), schema(typeOf()).unWrap()) return UDFWrapper2(name) } /** - * A wrapper for an UDF with 3 arguments. + * A wrapper for a UDF with 3 arguments. * @property udfName the name of the UDF */ class UDFWrapper3(private val udfName: String) { @@ -149,22 +160,22 @@ class UDFWrapper3(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2) -> R + noinline func: (T0, T1, T2) -> R, ): UDFWrapper3 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") T2::class.checkForValidType("T2") - register(name, UDF3(func), schema(typeOf()).unWrapper()) + register(name, UDF3(func), schema(typeOf()).unWrap()) return UDFWrapper3(name) } /** - * A wrapper for an UDF with 4 arguments. + * A wrapper for a UDF with 4 arguments. * @property udfName the name of the UDF */ class UDFWrapper4(private val udfName: String) { @@ -177,23 +188,23 @@ class UDFWrapper4(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3) -> R + noinline func: (T0, T1, T2, T3) -> R, ): UDFWrapper4 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") T2::class.checkForValidType("T2") T3::class.checkForValidType("T3") - register(name, UDF4(func), schema(typeOf()).unWrapper()) + register(name, UDF4(func), schema(typeOf()).unWrap()) return UDFWrapper4(name) } /** - * A wrapper for an UDF with 5 arguments. + * A wrapper for a UDF with 5 arguments. * @property udfName the name of the UDF */ class UDFWrapper5(private val udfName: String) { @@ -206,24 +217,24 @@ class UDFWrapper5(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4) -> R + noinline func: (T0, T1, T2, T3, T4) -> R, ): UDFWrapper5 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") T2::class.checkForValidType("T2") T3::class.checkForValidType("T3") T4::class.checkForValidType("T4") - register(name, UDF5(func), schema(typeOf()).unWrapper()) + register(name, UDF5(func), schema(typeOf()).unWrap()) return UDFWrapper5(name) } /** - * A wrapper for an UDF with 6 arguments. + * A wrapper for a UDF with 6 arguments. * @property udfName the name of the UDF */ class UDFWrapper6(private val udfName: String) { @@ -236,19 +247,19 @@ class UDFWrapper6(private val udfName: String) { param2: Column, param3: Column, param4: Column, - param5: Column + param5: Column, ): Column { return functions.callUDF(udfName, param0, param1, param2, param3, param4, param5) } } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5) -> R + noinline func: (T0, T1, T2, T3, T4, T5) -> R, ): UDFWrapper6 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -256,12 +267,12 @@ inline fun ()).unWrapper()) + register(name, UDF6(func), schema(typeOf()).unWrap()) return UDFWrapper6(name) } /** - * A wrapper for an UDF with 7 arguments. + * A wrapper for a UDF with 7 arguments. * @property udfName the name of the UDF */ class UDFWrapper7(private val udfName: String) { @@ -275,19 +286,19 @@ class UDFWrapper7(private val udfName: String) { param3: Column, param4: Column, param5: Column, - param6: Column + param6: Column, ): Column { return functions.callUDF(udfName, param0, param1, param2, param3, param4, param5, param6) } } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6) -> R, ): UDFWrapper7 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -296,12 +307,12 @@ inline fun ()).unWrapper()) + register(name, UDF7(func), schema(typeOf()).unWrap()) return UDFWrapper7(name) } /** - * A wrapper for an UDF with 8 arguments. + * A wrapper for a UDF with 8 arguments. * @property udfName the name of the UDF */ class UDFWrapper8(private val udfName: String) { @@ -316,19 +327,19 @@ class UDFWrapper8(private val udfName: String) { param4: Column, param5: Column, param6: Column, - param7: Column + param7: Column, ): Column { return functions.callUDF(udfName, param0, param1, param2, param3, param4, param5, param6, param7) } } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7) -> R, ): UDFWrapper8 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -338,12 +349,12 @@ inline fun ()).unWrapper()) + register(name, UDF8(func), schema(typeOf()).unWrap()) return UDFWrapper8(name) } /** - * A wrapper for an UDF with 9 arguments. + * A wrapper for a UDF with 9 arguments. * @property udfName the name of the UDF */ class UDFWrapper9(private val udfName: String) { @@ -359,19 +370,19 @@ class UDFWrapper9(private val udfName: String) { param5: Column, param6: Column, param7: Column, - param8: Column + param8: Column, ): Column { return functions.callUDF(udfName, param0, param1, param2, param3, param4, param5, param6, param7, param8) } } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8) -> R, ): UDFWrapper9 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -382,12 +393,12 @@ inline fun ()).unWrapper()) + register(name, UDF9(func), schema(typeOf()).unWrap()) return UDFWrapper9(name) } /** - * A wrapper for an UDF with 10 arguments. + * A wrapper for a UDF with 10 arguments. * @property udfName the name of the UDF */ class UDFWrapper10(private val udfName: String) { @@ -404,7 +415,7 @@ class UDFWrapper10(private val udfName: String) { param6: Column, param7: Column, param8: Column, - param9: Column + param9: Column, ): Column { return functions.callUDF( udfName, @@ -423,12 +434,12 @@ class UDFWrapper10(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9) -> R, ): UDFWrapper10 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -440,12 +451,12 @@ inline fun ()).unWrapper()) + register(name, UDF10(func), schema(typeOf()).unWrap()) return UDFWrapper10(name) } /** - * A wrapper for an UDF with 11 arguments. + * A wrapper for a UDF with 11 arguments. * @property udfName the name of the UDF */ class UDFWrapper11(private val udfName: String) { @@ -463,7 +474,7 @@ class UDFWrapper11(private val udfName: String) { param7: Column, param8: Column, param9: Column, - param10: Column + param10: Column, ): Column { return functions.callUDF( udfName, @@ -483,12 +494,12 @@ class UDFWrapper11(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10) -> R, ): UDFWrapper11 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -501,12 +512,12 @@ inline fun ()).unWrapper()) + register(name, UDF11(func), schema(typeOf()).unWrap()) return UDFWrapper11(name) } /** - * A wrapper for an UDF with 12 arguments. + * A wrapper for a UDF with 12 arguments. * @property udfName the name of the UDF */ class UDFWrapper12(private val udfName: String) { @@ -525,7 +536,7 @@ class UDFWrapper12(private val udfName: String) { param8: Column, param9: Column, param10: Column, - param11: Column + param11: Column, ): Column { return functions.callUDF( udfName, @@ -546,12 +557,12 @@ class UDFWrapper12(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11) -> R, ): UDFWrapper12 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -565,12 +576,12 @@ inline fun ()).unWrapper()) + register(name, UDF12(func), schema(typeOf()).unWrap()) return UDFWrapper12(name) } /** - * A wrapper for an UDF with 13 arguments. + * A wrapper for a UDF with 13 arguments. * @property udfName the name of the UDF */ class UDFWrapper13(private val udfName: String) { @@ -590,7 +601,7 @@ class UDFWrapper13(private val udfName: String) { param9: Column, param10: Column, param11: Column, - param12: Column + param12: Column, ): Column { return functions.callUDF( udfName, @@ -612,12 +623,12 @@ class UDFWrapper13(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12) -> R, ): UDFWrapper13 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -632,12 +643,12 @@ inline fun ()).unWrapper()) + register(name, UDF13(func), schema(typeOf()).unWrap()) return UDFWrapper13(name) } /** - * A wrapper for an UDF with 14 arguments. + * A wrapper for a UDF with 14 arguments. * @property udfName the name of the UDF */ class UDFWrapper14(private val udfName: String) { @@ -658,7 +669,7 @@ class UDFWrapper14(private val udfName: String) { param10: Column, param11: Column, param12: Column, - param13: Column + param13: Column, ): Column { return functions.callUDF( udfName, @@ -681,12 +692,12 @@ class UDFWrapper14(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13) -> R, ): UDFWrapper14 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -702,12 +713,12 @@ inline fun ()).unWrapper()) + register(name, UDF14(func), schema(typeOf()).unWrap()) return UDFWrapper14(name) } /** - * A wrapper for an UDF with 15 arguments. + * A wrapper for a UDF with 15 arguments. * @property udfName the name of the UDF */ class UDFWrapper15(private val udfName: String) { @@ -729,7 +740,7 @@ class UDFWrapper15(private val udfName: String) { param11: Column, param12: Column, param13: Column, - param14: Column + param14: Column, ): Column { return functions.callUDF( udfName, @@ -753,12 +764,12 @@ class UDFWrapper15(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14) -> R, ): UDFWrapper15 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -775,12 +786,12 @@ inline fun ()).unWrapper()) + register(name, UDF15(func), schema(typeOf()).unWrap()) return UDFWrapper15(name) } /** - * A wrapper for an UDF with 16 arguments. + * A wrapper for a UDF with 16 arguments. * @property udfName the name of the UDF */ class UDFWrapper16(private val udfName: String) { @@ -803,7 +814,7 @@ class UDFWrapper16(private val udfName: String) { param12: Column, param13: Column, param14: Column, - param15: Column + param15: Column, ): Column { return functions.callUDF( udfName, @@ -828,12 +839,12 @@ class UDFWrapper16(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15) -> R, ): UDFWrapper16 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -851,12 +862,12 @@ inline fun ()).unWrapper()) + register(name, UDF16(func), schema(typeOf()).unWrap()) return UDFWrapper16(name) } /** - * A wrapper for an UDF with 17 arguments. + * A wrapper for a UDF with 17 arguments. * @property udfName the name of the UDF */ class UDFWrapper17(private val udfName: String) { @@ -880,7 +891,7 @@ class UDFWrapper17(private val udfName: String) { param13: Column, param14: Column, param15: Column, - param16: Column + param16: Column, ): Column { return functions.callUDF( udfName, @@ -906,12 +917,12 @@ class UDFWrapper17(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16) -> R, ): UDFWrapper17 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -930,12 +941,12 @@ inline fun ()).unWrapper()) + register(name, UDF17(func), schema(typeOf()).unWrap()) return UDFWrapper17(name) } /** - * A wrapper for an UDF with 18 arguments. + * A wrapper for a UDF with 18 arguments. * @property udfName the name of the UDF */ class UDFWrapper18(private val udfName: String) { @@ -960,7 +971,7 @@ class UDFWrapper18(private val udfName: String) { param14: Column, param15: Column, param16: Column, - param17: Column + param17: Column, ): Column { return functions.callUDF( udfName, @@ -987,12 +998,12 @@ class UDFWrapper18(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17) -> R, ): UDFWrapper18 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -1012,12 +1023,12 @@ inline fun ()).unWrapper()) + register(name, UDF18(func), schema(typeOf()).unWrap()) return UDFWrapper18(name) } /** - * A wrapper for an UDF with 19 arguments. + * A wrapper for a UDF with 19 arguments. * @property udfName the name of the UDF */ class UDFWrapper19(private val udfName: String) { @@ -1043,7 +1054,7 @@ class UDFWrapper19(private val udfName: String) { param15: Column, param16: Column, param17: Column, - param18: Column + param18: Column, ): Column { return functions.callUDF( udfName, @@ -1071,12 +1082,12 @@ class UDFWrapper19(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18) -> R, ): UDFWrapper19 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -1097,12 +1108,12 @@ inline fun ()).unWrapper()) + register(name, UDF19(func), schema(typeOf()).unWrap()) return UDFWrapper19(name) } /** - * A wrapper for an UDF with 20 arguments. + * A wrapper for a UDF with 20 arguments. * @property udfName the name of the UDF */ class UDFWrapper20(private val udfName: String) { @@ -1129,7 +1140,7 @@ class UDFWrapper20(private val udfName: String) { param16: Column, param17: Column, param18: Column, - param19: Column + param19: Column, ): Column { return functions.callUDF( udfName, @@ -1158,12 +1169,12 @@ class UDFWrapper20(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19) -> R, ): UDFWrapper20 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -1185,12 +1196,12 @@ inline fun ()).unWrapper()) + register(name, UDF20(func), schema(typeOf()).unWrap()) return UDFWrapper20(name) } /** - * A wrapper for an UDF with 21 arguments. + * A wrapper for a UDF with 21 arguments. * @property udfName the name of the UDF */ class UDFWrapper21(private val udfName: String) { @@ -1218,7 +1229,7 @@ class UDFWrapper21(private val udfName: String) { param17: Column, param18: Column, param19: Column, - param20: Column + param20: Column, ): Column { return functions.callUDF( udfName, @@ -1248,12 +1259,12 @@ class UDFWrapper21(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20) -> R, ): UDFWrapper21 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -1276,12 +1287,12 @@ inline fun ()).unWrapper()) + register(name, UDF21(func), schema(typeOf()).unWrap()) return UDFWrapper21(name) } /** - * A wrapper for an UDF with 22 arguments. + * A wrapper for a UDF with 22 arguments. * @property udfName the name of the UDF */ class UDFWrapper22(private val udfName: String) { @@ -1310,7 +1321,7 @@ class UDFWrapper22(private val udfName: String) { param18: Column, param19: Column, param20: Column, - param21: Column + param21: Column, ): Column { return functions.callUDF( udfName, @@ -1341,12 +1352,12 @@ class UDFWrapper22(private val udfName: String) { } /** - * Registers the [func] with its [name] in [this] + * Registers the [func] with its [name] in [this]. */ @OptIn(ExperimentalStdlibApi::class) inline fun UDFRegistration.register( name: String, - noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21) -> R + noinline func: (T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21) -> R, ): UDFWrapper22 { T0::class.checkForValidType("T0") T1::class.checkForValidType("T1") @@ -1370,6 +1381,6 @@ inline fun ()).unWrapper()) + register(name, UDF22(func), schema(typeOf()).unWrap()) return UDFWrapper22(name) } From 41c58fa2371e7d68635616d421ffe1afb0b32835 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 1 Mar 2022 17:06:38 +0100 Subject: [PATCH 068/213] splitting and ordering api tests into multiple files --- .../org/apache/spark/sql/KotlinWrappers.scala | 196 ++--- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 688 +----------------- .../kotlinx/spark/api/DatasetFunctionTests.kt | 408 +++++++++++ .../kotlinx/spark/api/EncodingTests.kt | 429 +++++++++++ .../kotlinx/spark/api/TypeInferenceTest.kt | 131 ++-- 5 files changed, 1010 insertions(+), 842 deletions(-) create mode 100644 kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTests.kt create mode 100644 kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTests.kt diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala index 675110be..9395019c 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala +++ b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala @@ -26,189 +26,201 @@ import org.apache.spark.sql.types.{DataType, Metadata, StructField, StructType} trait DataTypeWithClass { - val dt: DataType - val cls: Class[_] - val nullable: Boolean + val dt: DataType + val cls: Class[ _ ] + val nullable: Boolean } trait ComplexWrapper extends DataTypeWithClass -class KDataTypeWrapper(val dt: StructType - , val cls: Class[_] - , val nullable: Boolean = true) extends StructType with ComplexWrapper { - override def fieldNames: Array[String] = dt.fieldNames +class KDataTypeWrapper( + val dt: StructType, + val cls: Class[ _ ], + val nullable: Boolean = true, +) extends StructType with ComplexWrapper { - override def names: Array[String] = dt.names + override def fieldNames: Array[ String ] = dt.fieldNames - override def equals(that: Any): Boolean = dt.equals(that) + override def names: Array[ String ] = dt.names - override def hashCode(): Int = dt.hashCode() + override def equals(that: Any): Boolean = dt.equals(that) - override def add(field: StructField): StructType = dt.add(field) + override def hashCode(): Int = dt.hashCode() - override def add(name: String, dataType: DataType): StructType = dt.add(name, dataType) + override def add(field: StructField): StructType = dt.add(field) - override def add(name: String, dataType: DataType, nullable: Boolean): StructType = dt.add(name, dataType, nullable) + override def add(name: String, dataType: DataType): StructType = dt.add(name, dataType) - override def add(name: String, dataType: DataType, nullable: Boolean, metadata: Metadata): StructType = dt.add(name, dataType, nullable, metadata) + override def add(name: String, dataType: DataType, nullable: Boolean): StructType = dt.add(name, dataType, nullable) - override def add(name: String, dataType: DataType, nullable: Boolean, comment: String): StructType = dt.add(name, dataType, nullable, comment) + override def add(name: String, dataType: DataType, nullable: Boolean, metadata: Metadata): StructType = dt + .add(name, dataType, nullable, metadata) - override def add(name: String, dataType: String): StructType = dt.add(name, dataType) + override def add(name: String, dataType: DataType, nullable: Boolean, comment: String): StructType = dt + .add(name, dataType, nullable, comment) - override def add(name: String, dataType: String, nullable: Boolean): StructType = dt.add(name, dataType, nullable) + override def add(name: String, dataType: String): StructType = dt.add(name, dataType) - override def add(name: String, dataType: String, nullable: Boolean, metadata: Metadata): StructType = dt.add(name, dataType, nullable, metadata) + override def add(name: String, dataType: String, nullable: Boolean): StructType = dt.add(name, dataType, nullable) - override def add(name: String, dataType: String, nullable: Boolean, comment: String): StructType = dt.add(name, dataType, nullable, comment) + override def add(name: String, dataType: String, nullable: Boolean, metadata: Metadata): StructType = dt + .add(name, dataType, nullable, metadata) - override def apply(name: String): StructField = dt.apply(name) + override def add(name: String, dataType: String, nullable: Boolean, comment: String): StructType = dt + .add(name, dataType, nullable, comment) - override def apply(names: Set[String]): StructType = dt.apply(names) + override def apply(name: String): StructField = dt.apply(name) - override def fieldIndex(name: String): Int = dt.fieldIndex(name) + override def apply(names: Set[ String ]): StructType = dt.apply(names) - override private[sql] def getFieldIndex(name: String) = dt.getFieldIndex(name) + override def fieldIndex(name: String): Int = dt.fieldIndex(name) - private[sql] def findNestedField(fieldNames: Seq[String], includeCollections: Boolean, resolver: Resolver) = dt.findNestedField(fieldNames, includeCollections, resolver) + override private[ sql ] def getFieldIndex(name: String) = dt.getFieldIndex(name) - override private[sql] def buildFormattedString(prefix: String, stringConcat: StringUtils.StringConcat, maxDepth: Int): Unit = dt.buildFormattedString(prefix, stringConcat, maxDepth) + private[ sql ] def findNestedField(fieldNames: Seq[ String ], includeCollections: Boolean, resolver: Resolver) = + dt.findNestedField(fieldNames, includeCollections, resolver) - override protected[sql] def toAttributes: Seq[AttributeReference] = dt.toAttributes + override private[ sql ] def buildFormattedString(prefix: String, stringConcat: StringUtils.StringConcat, maxDepth: Int): Unit = + dt.buildFormattedString(prefix, stringConcat, maxDepth) - override def treeString: String = dt.treeString + override protected[ sql ] def toAttributes: Seq[ AttributeReference ] = dt.toAttributes - override def treeString(maxDepth: Int): String = dt.treeString(maxDepth) + override def treeString: String = dt.treeString - override def printTreeString(): Unit = dt.printTreeString() + override def treeString(maxDepth: Int): String = dt.treeString(maxDepth) - private[sql] override def jsonValue = dt.jsonValue + override def printTreeString(): Unit = dt.printTreeString() - override def apply(fieldIndex: Int): StructField = dt.apply(fieldIndex) + private[ sql ] override def jsonValue = dt.jsonValue - override def length: Int = dt.length + override def apply(fieldIndex: Int): StructField = dt.apply(fieldIndex) - override def iterator: Iterator[StructField] = dt.iterator + override def length: Int = dt.length - override def defaultSize: Int = dt.defaultSize + override def iterator: Iterator[ StructField ] = dt.iterator - override def simpleString: String = dt.simpleString + override def defaultSize: Int = dt.defaultSize - override def catalogString: String = dt.catalogString + override def simpleString: String = dt.simpleString - override def sql: String = dt.sql + override def catalogString: String = dt.catalogString - override def toDDL: String = dt.toDDL + override def sql: String = dt.sql - private[sql] override def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields) + override def toDDL: String = dt.toDDL - override private[sql] def merge(that: StructType) = dt.merge(that) + private[ sql ] override def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields) - private[spark] override def asNullable = dt.asNullable + override private[ sql ] def merge(that: StructType) = dt.merge(that) - private[spark] override def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f) + private[ spark ] override def asNullable = dt.asNullable - override private[sql] lazy val interpretedOrdering = dt.interpretedOrdering + private[ spark ] override def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f) - override def toString = s"KDataTypeWrapper(dt=$dt, cls=$cls, nullable=$nullable)" + override private[ sql ] lazy val interpretedOrdering = dt.interpretedOrdering + + override def toString = s"KDataTypeWrapper(dt=$dt, cls=$cls, nullable=$nullable)" } -case class KComplexTypeWrapper(dt: DataType, cls: Class[_], nullable: Boolean) extends DataType with ComplexWrapper { - override private[sql] def unapply(e: Expression) = dt.unapply(e) +case class KComplexTypeWrapper(dt: DataType, cls: Class[ _ ], nullable: Boolean) extends DataType with ComplexWrapper { + + override private[ sql ] def unapply(e: Expression) = dt.unapply(e) - override def typeName: String = dt.typeName + override def typeName: String = dt.typeName - override private[sql] def jsonValue = dt.jsonValue + override private[ sql ] def jsonValue = dt.jsonValue - override def json: String = dt.json + override def json: String = dt.json - override def prettyJson: String = dt.prettyJson + override def prettyJson: String = dt.prettyJson - override def simpleString: String = dt.simpleString + override def simpleString: String = dt.simpleString - override def catalogString: String = dt.catalogString + override def catalogString: String = dt.catalogString - override private[sql] def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields) + override private[ sql ] def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields) - override def sql: String = dt.sql + override def sql: String = dt.sql - override private[spark] def sameType(other: DataType) = dt.sameType(other) + override private[ spark ] def sameType(other: DataType) = dt.sameType(other) - override private[spark] def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f) + override private[ spark ] def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f) - private[sql] override def defaultConcreteType = dt.defaultConcreteType + private[ sql ] override def defaultConcreteType = dt.defaultConcreteType - private[sql] override def acceptsType(other: DataType) = dt.acceptsType(other) + private[ sql ] override def acceptsType(other: DataType) = dt.acceptsType(other) - override def defaultSize: Int = dt.defaultSize + override def defaultSize: Int = dt.defaultSize - override private[spark] def asNullable = dt.asNullable + override private[ spark ] def asNullable = dt.asNullable } -case class KSimpleTypeWrapper(dt: DataType, cls: Class[_], nullable: Boolean) extends DataType with DataTypeWithClass { - override private[sql] def unapply(e: Expression) = dt.unapply(e) +case class KSimpleTypeWrapper(dt: DataType, cls: Class[ _ ], nullable: Boolean) extends DataType with DataTypeWithClass { + override private[ sql ] def unapply(e: Expression) = dt.unapply(e) - override def typeName: String = dt.typeName + override def typeName: String = dt.typeName - override private[sql] def jsonValue = dt.jsonValue + override private[ sql ] def jsonValue = dt.jsonValue - override def json: String = dt.json + override def json: String = dt.json - override def prettyJson: String = dt.prettyJson + override def prettyJson: String = dt.prettyJson - override def simpleString: String = dt.simpleString + override def simpleString: String = dt.simpleString - override def catalogString: String = dt.catalogString + override def catalogString: String = dt.catalogString - override private[sql] def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields) + override private[ sql ] def simpleString(maxNumberFields: Int) = dt.simpleString(maxNumberFields) - override def sql: String = dt.sql + override def sql: String = dt.sql - override private[spark] def sameType(other: DataType) = dt.sameType(other) + override private[ spark ] def sameType(other: DataType) = dt.sameType(other) - override private[spark] def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f) + override private[ spark ] def existsRecursively(f: DataType => Boolean) = dt.existsRecursively(f) - private[sql] override def defaultConcreteType = dt.defaultConcreteType + private[ sql ] override def defaultConcreteType = dt.defaultConcreteType - private[sql] override def acceptsType(other: DataType) = dt.acceptsType(other) + private[ sql ] override def acceptsType(other: DataType) = dt.acceptsType(other) - override def defaultSize: Int = dt.defaultSize + override def defaultSize: Int = dt.defaultSize - override private[spark] def asNullable = dt.asNullable + override private[ spark ] def asNullable = dt.asNullable } class KStructField(val getterName: String, val delegate: StructField) extends StructField { - override private[sql] def buildFormattedString(prefix: String, stringConcat: StringUtils.StringConcat, maxDepth: Int): Unit = delegate.buildFormattedString(prefix, stringConcat, maxDepth) - override def toString(): String = delegate.toString() + override private[ sql ] def buildFormattedString(prefix: String, stringConcat: StringUtils.StringConcat, maxDepth: Int): Unit = + delegate.buildFormattedString(prefix, stringConcat, maxDepth) + + override def toString(): String = delegate.toString() - override private[sql] def jsonValue = delegate.jsonValue + override private[ sql ] def jsonValue = delegate.jsonValue - override def withComment(comment: String): StructField = delegate.withComment(comment) + override def withComment(comment: String): StructField = delegate.withComment(comment) - override def getComment(): Option[String] = delegate.getComment() + override def getComment(): Option[ String ] = delegate.getComment() - override def toDDL: String = delegate.toDDL + override def toDDL: String = delegate.toDDL - override def productElement(n: Int): Any = delegate.productElement(n) + override def productElement(n: Int): Any = delegate.productElement(n) - override def productArity: Int = delegate.productArity + override def productArity: Int = delegate.productArity - override def productIterator: Iterator[Any] = delegate.productIterator + override def productIterator: Iterator[ Any ] = delegate.productIterator - override def productPrefix: String = delegate.productPrefix + override def productPrefix: String = delegate.productPrefix - override val dataType: DataType = delegate.dataType + override val dataType: DataType = delegate.dataType - override def canEqual(that: Any): Boolean = delegate.canEqual(that) + override def canEqual(that: Any): Boolean = delegate.canEqual(that) - override val metadata: Metadata = delegate.metadata - override val name: String = delegate.name - override val nullable: Boolean = delegate.nullable + override val metadata: Metadata = delegate.metadata + override val name: String = delegate.name + override val nullable: Boolean = delegate.nullable } object helpme { - def listToSeq(i: java.util.List[_]): Seq[_] = Seq(i.toArray: _*) + def listToSeq(i: java.util.List[ _ ]): Seq[ _ ] = Seq(i.toArray: _*) } \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index fa320c39..b4e08216 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -52,139 +52,10 @@ import scala.collection.Map as ScalaMap import scala.collection.mutable.Map as ScalaMutableMap class ApiTest : ShouldSpec({ - context("integration tests") { - withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { - should("collect data classes with doubles correctly") { - val ll1 = LonLat(1.0, 2.0) - val ll2 = LonLat(3.0, 4.0) - val lonlats = dsOf(ll1, ll2).collectAsList() - expect(lonlats).contains.inAnyOrder.only.values(ll1.copy(), ll2.copy()) - } - should("contain all generic primitives with complex schema") { - val primitives = c(1, 1.0, 1.toFloat(), 1.toByte(), LocalDate.now(), true) - val primitives2 = c(2, 2.0, 2.toFloat(), 2.toByte(), LocalDate.now().plusDays(1), false) - val tuples = dsOf(primitives, primitives2).collectAsList() - expect(tuples).contains.inAnyOrder.only.values(primitives, primitives2) - } - should("contain all generic primitives with complex nullable schema") { - val primitives = c(1, 1.0, 1.toFloat(), 1.toByte(), LocalDate.now(), true) - val nulls = c(null, null, null, null, null, null) - val tuples = dsOf(primitives, nulls).collectAsList() - expect(tuples).contains.inAnyOrder.only.values(primitives, nulls) - } - should("handle cached operations") { - val result = dsOf(1, 2, 3, 4, 5) - .map { it to (it + 2) } - .withCached { - expect(collectAsList()).contains.inAnyOrder.only.values( - 1 to 3, - 2 to 4, - 3 to 5, - 4 to 6, - 5 to 7 - ) - - val next = filter { it.first % 2 == 0 } - expect(next.collectAsList()).contains.inAnyOrder.only.values(2 to 4, 4 to 6) - next - } - .map { c(it.first, it.second, (it.first + it.second) * 2) } - .collectAsList() - expect(result).contains.inOrder.only.values(c(2, 4, 12), c(4, 6, 20)) - } - should("handle join operations") { - data class Left(val id: Int, val name: String) - - data class Right(val id: Int, val value: Int) - - val first = dsOf(Left(1, "a"), Left(2, "b")) - val second = dsOf(Right(1, 100), Right(3, 300)) - val result = first - .leftJoin(second, first.col("id").eq(second.col("id"))) - .map { c(it.first.id, it.first.name, it.second?.value) } - .collectAsList() - expect(result).contains.inOrder.only.values(c(1, "a", 100), c(2, "b", null)) - } - should("handle map operations") { - val result = dsOf(listOf(1, 2, 3, 4), listOf(3, 4, 5, 6)) - .flatMap { it.iterator() } - .map { it + 4 } - .filter { it < 10 } - .collectAsList() - expect(result).contains.inAnyOrder.only.values(5, 6, 7, 8, 7, 8, 9) - } - should("handle strings converted to lists") { - data class Movie(val id: Long, val genres: String) - data class MovieExpanded(val id: Long, val genres: List) - - val comedies = listOf(Movie(1, "Comedy|Romance"), Movie(2, "Horror|Action")).toDS() - .map { MovieExpanded(it.id, it.genres.split("|").toList()) } - .filter { it.genres.contains("Comedy") } - .collectAsList() - expect(comedies).contains.inAnyOrder.only.values( - MovieExpanded( - 1, - listOf("Comedy", "Romance") - ) - ) - } - should("handle strings converted to arrays") { - data class Movie(val id: Long, val genres: String) - data class MovieExpanded(val id: Long, val genres: Array) { - override fun equals(other: Any?): Boolean { - if (this === other) return true - if (javaClass != other?.javaClass) return false - other as MovieExpanded - return if (id != other.id) false else genres.contentEquals(other.genres) - } - - override fun hashCode(): Int { - var result = id.hashCode() - result = 31 * result + genres.contentHashCode() - return result - } - } - - val comedies = listOf(Movie(1, "Comedy|Romance"), Movie(2, "Horror|Action")).toDS() - .map { MovieExpanded(it.id, it.genres.split("|").toTypedArray()) } - .filter { it.genres.contains("Comedy") } - .collectAsList() - expect(comedies).contains.inAnyOrder.only.values( - MovieExpanded( - 1, - arrayOf("Comedy", "Romance") - ) - ) - } - should("handle arrays of generics") { - data class Test(val id: Long, val data: Array>) - - val result = listOf(Test(1, arrayOf(5.1 to 6, 6.1 to 7))) - .toDS() - .map { it.id to it.data.firstOrNull { liEl -> liEl.first < 6 } } - .map { it.second } - .collectAsList() - expect(result).contains.inOrder.only.values(5.1 to 6) - } - should("handle lists of generics") { - data class Test(val id: Long, val data: List>) - val result = listOf(Test(1, listOf(5.1 to 6, 6.1 to 7))) - .toDS() - .map { it.id to it.data.firstOrNull { liEl -> liEl.first < 6 } } - .map { it.second } - .collectAsList() - expect(result).contains.inOrder.only.values(5.1 to 6) - } - should("!handle primitive arrays") { - val result = listOf(arrayOf(1, 2, 3, 4)) - .toDS() - .map { it.map { ai -> ai + 1 } } - .collectAsList() - .flatten() - expect(result).contains.inOrder.only.values(2, 3, 4, 5) + context("miscellaneous integration tests") { + withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { - } @OptIn(ExperimentalStdlibApi::class) should("broadcast variables") { val largeList = (1..15).map { SomeClass(a = (it..15).toList().toIntArray(), b = it) } @@ -207,6 +78,7 @@ class ApiTest : ShouldSpec({ expect(result).contains.inOrder.only.values(3.0, 5.0, 7.0, 9.0, 11.0) } + should("Handle JavaConversions in Kotlin") { // Test the iterator conversion val scalaIterator: ScalaIterator = listOf("test1", "test2").iterator().asScalaIterator() @@ -242,564 +114,10 @@ class ApiTest : ShouldSpec({ kotlinList.first() shouldBe "a" kotlinList.last() shouldBe "b" } - should("perform flat map on grouped datasets") { - val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") - .toDS() - .groupByKey { it.first } - - val flatMapped = groupedDataset.flatMapGroups { key, values -> - val collected = values.asSequence().toList() - - if (collected.size > 1) collected.iterator() - else emptyList>().iterator() - } - - flatMapped.count() shouldBe 2 - } - should("perform map group with state and timeout conf on grouped datasets") { - val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") - .toDS() - .groupByKey { it.first } - - val mappedWithStateTimeoutConf = - groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> - var s by state - val collected = values.asSequence().toList() - - s = key - s shouldBe key - - s!! to collected.map { it.second } - } - - mappedWithStateTimeoutConf.count() shouldBe 2 - } - should("perform map group with state on grouped datasets") { - val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") - .toDS() - .groupByKey { it.first } - - val mappedWithState = groupedDataset.mapGroupsWithState { key, values, state: GroupState -> - var s by state - val collected = values.asSequence().toList() - - s = key - s shouldBe key - - s!! to collected.map { it.second } - } - - mappedWithState.count() shouldBe 2 - } - should("perform flat map group with state on grouped datasets") { - val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") - .toDS() - .groupByKey { it.first } - - val flatMappedWithState = groupedDataset.mapGroupsWithState { key, values, state: GroupState -> - var s by state - val collected = values.asSequence().toList() - - s = key - s shouldBe key - - if (collected.size > 1) collected.iterator() - else emptyList>().iterator() - } - - flatMappedWithState.count() shouldBe 2 - } - should("be able to cogroup grouped datasets") { - val groupedDataset1 = listOf(1 to "a", 1 to "b", 2 to "c") - .toDS() - .groupByKey { it.first } - - val groupedDataset2 = listOf(1 to "d", 5 to "e", 3 to "f") - .toDS() - .groupByKey { it.first } - - val cogrouped = groupedDataset1.cogroup(groupedDataset2) { key, left, right -> - listOf( - key to (left.asSequence() + right.asSequence()) - .map { it.second } - .toList() - ).iterator() - } - - cogrouped.count() shouldBe 4 - } - should("handle LocalDate Datasets") { // uses encoder - val dates = listOf(LocalDate.now(), LocalDate.now()) - val dataset: Dataset = dates.toDS() - dataset.collectAsList() shouldBe dates - } - should("handle Instant Datasets") { // uses encoder - val instants = listOf(Instant.now(), Instant.now()) - val dataset: Dataset = instants.toDS() - dataset.collectAsList() shouldBe instants - } - should("Be able to serialize Instant") { // uses knownDataTypes - val instantPair = Instant.now() to Instant.now() - val dataset = dsOf(instantPair) - dataset.collectAsList() shouldBe listOf(instantPair) - } - should("be able to serialize Date") { // uses knownDataTypes - val datePair = Date.valueOf("2020-02-10") to 5 - val dataset: Dataset> = dsOf(datePair) - dataset.collectAsList() shouldBe listOf(datePair) - } - should("handle Timestamp Datasets") { // uses encoder - val timeStamps = listOf(Timestamp(0L), Timestamp(1L)) - val dataset = timeStamps.toDS() - dataset.collectAsList() shouldBe timeStamps - } - should("be able to serialize Timestamp") { // uses knownDataTypes - val timestampPair = Timestamp(0L) to 2 - val dataset = dsOf(timestampPair) - dataset.collectAsList() shouldBe listOf(timestampPair) - } - should("handle Duration Datasets") { // uses encoder - val dataset = dsOf(Duration.ZERO) - dataset.collectAsList() shouldBe listOf(Duration.ZERO) - } - should("handle Period Datasets") { // uses encoder - val periods = listOf(Period.ZERO, Period.ofDays(2)) - val dataset = periods.toDS() - - dataset.show(false) - - dataset.collectAsList().let { - it[0] shouldBe Period.ZERO - - // NOTE Spark truncates java.time.Period to months. - it[1] shouldBe Period.ofDays(0) - } - - } - should("handle binary datasets") { // uses encoder - val byteArray = "Hello there".encodeToByteArray() - val dataset = dsOf(byteArray) - dataset.collectAsList() shouldBe listOf(byteArray) - } - should("be able to serialize binary") { // uses knownDataTypes - val byteArrayTriple = c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3)) - val dataset = dsOf(byteArrayTriple) - - val (a, b, c) = dataset.collectAsList().single() - a contentEquals "Hello there".encodeToByteArray() shouldBe true - b shouldBe 1 - c contentEquals intArrayOf(1, 2, 3) shouldBe true - } - should("be able to serialize Decimal") { // uses knownDataTypes - val decimalPair = c(Decimal().set(50), 12) - val dataset = dsOf(decimalPair) - dataset.collectAsList() shouldBe listOf(decimalPair) - } - should("handle BigDecimal datasets") { // uses encoder - val decimals = listOf(BigDecimal.ONE, BigDecimal.TEN) - val dataset = decimals.toDS() - dataset.collectAsList().let { (one, ten) -> - one.compareTo(BigDecimal.ONE) shouldBe 0 - ten.compareTo(BigDecimal.TEN) shouldBe 0 - } - } - should("be able to serialize BigDecimal") { // uses knownDataTypes - val decimalPair = c(BigDecimal.TEN, 12) - val dataset = dsOf(decimalPair) - val (a, b) = dataset.collectAsList().single() - a.compareTo(BigDecimal.TEN) shouldBe 0 - b shouldBe 12 - } - should("be able to serialize CalendarInterval") { // uses knownDataTypes - val calendarIntervalPair = CalendarInterval(1, 0, 0L) to 2 - val dataset = dsOf(calendarIntervalPair) - dataset.collectAsList() shouldBe listOf(calendarIntervalPair) - } - should("handle nullable datasets") { - val ints = listOf(1, 2, 3, null) - val dataset = ints.toDS() - dataset.collectAsList() shouldBe ints - } - should("Be able to serialize Scala Tuples including data classes") { - val dataset = dsOf( - Tuple2("a", Tuple3("a", 1, LonLat(1.0, 1.0))), - Tuple2("b", Tuple3("b", 2, LonLat(1.0, 2.0))), - ) - dataset.show() - val asList = dataset.takeAsList(2) - asList.first() shouldBe Tuple2("a", Tuple3("a", 1, LonLat(1.0, 1.0))) - } - should("Be able to serialize data classes with tuples") { - val dataset = dsOf( - DataClassWithTuple(Tuple3(5L, "test", Tuple1(""))), - DataClassWithTuple(Tuple3(6L, "tessst", Tuple1(""))), - ) - - dataset.show() - val asList = dataset.takeAsList(2) - asList.first().tuple shouldBe Tuple3(5L, "test", Tuple1("")) - } - @Suppress("UNCHECKED_CAST") - should("support dataset select") { - val dataset = dsOf( - SomeClass(intArrayOf(1, 2, 3), 3), - SomeClass(intArrayOf(1, 2, 4), 5), - ) - - val newDS1WithAs: Dataset = dataset.selectTyped( - col("a").`as`(), - ) - newDS1WithAs.collectAsList() - - val newDS2: Dataset> = dataset.selectTyped( - col(SomeClass::a), // NOTE: this only works on 3.0, returning a data class with an array in it - col(SomeClass::b), - ) - newDS2.collectAsList() - - val newDS3: Dataset> = dataset.selectTyped( - col(SomeClass::a), - col(SomeClass::b), - col(SomeClass::b), - ) - newDS3.collectAsList() - - val newDS4: Dataset> = dataset.selectTyped( - col(SomeClass::a), - col(SomeClass::b), - col(SomeClass::b), - col(SomeClass::b), - ) - newDS4.collectAsList() - - val newDS5: Dataset> = dataset.selectTyped( - col(SomeClass::a), - col(SomeClass::b), - col(SomeClass::b), - col(SomeClass::b), - col(SomeClass::b), - ) - newDS5.collectAsList() - } - should("Access columns using invoke on datasets") { - val dataset = dsOf( - SomeClass(intArrayOf(1, 2, 3), 4), - SomeClass(intArrayOf(4, 3, 2), 1), - ) - - dataset.col("a") shouldBe dataset("a") - } - should("Use infix- and operator funs on columns") { - val dataset = dsOf( - SomeOtherClass(intArrayOf(1, 2, 3), 4, true), - SomeOtherClass(intArrayOf(4, 3, 2), 1, true), - ) - - (dataset("a") == dataset("a")) shouldBe dataset("a").equals(dataset("a")) - (dataset("a") != dataset("a")) shouldBe !dataset("a").equals(dataset("a")) - (dataset("a") eq dataset("a")) shouldBe dataset("a").equalTo(dataset("a")) - dataset("a").equalTo(dataset("a")) shouldBe (dataset("a") `===` dataset("a")) - (dataset("a") neq dataset("a")) shouldBe dataset("a").notEqual(dataset("a")) - dataset("a").notEqual(dataset("a")) shouldBe (dataset("a") `=!=` dataset("a")) - !(dataset("a") eq dataset("a")) shouldBe dataset("a").notEqual(dataset("a")) - dataset("a").notEqual(dataset("a")) shouldBe (!(dataset("a") `===` dataset("a"))) - -dataset("b") shouldBe negate(dataset("b")) - !dataset("c") shouldBe not(dataset("c")) - dataset("b") gt 3 shouldBe dataset("b").gt(3) - dataset("b") lt 3 shouldBe dataset("b").lt(3) - dataset("b") leq 3 shouldBe dataset("b").leq(3) - dataset("b") geq 3 shouldBe dataset("b").geq(3) - dataset("b") inRangeOf 0..2 shouldBe dataset("b").between(0, 2) - dataset("c") or dataset("c") shouldBe dataset("c").or(dataset("c")) - dataset("c") and dataset("c") shouldBe dataset("c").and(dataset("c")) - dataset("c").and(dataset("c")) shouldBe (dataset("c") `&&` dataset("c")) - dataset("b") + dataset("b") shouldBe dataset("b").plus(dataset("b")) - dataset("b") - dataset("b") shouldBe dataset("b").minus(dataset("b")) - dataset("b") * dataset("b") shouldBe dataset("b").multiply(dataset("b")) - dataset("b") / dataset("b") shouldBe dataset("b").divide(dataset("b")) - dataset("b") % dataset("b") shouldBe dataset("b").mod(dataset("b")) - dataset("b")[0] shouldBe dataset("b").getItem(0) - } - should("Handle TypedColumns") { - val dataset = dsOf( - SomeOtherClass(intArrayOf(1, 2, 3), 4, true), - SomeOtherClass(intArrayOf(4, 3, 2), 1, true), - ) - - // walking over all column creation methods - val b: Dataset> = dataset.select( - dataset.col(SomeOtherClass::b), - dataset(SomeOtherClass::a), - col(SomeOtherClass::c), - ) - b.collectAsList() - } - should("Handle some where queries using column operator functions") { - val dataset = dsOf( - SomeOtherClass(intArrayOf(1, 2, 3), 4, true), - SomeOtherClass(intArrayOf(4, 3, 2), 1, true), - ) - dataset.collectAsList() - - val column = col("b").`as`() - - val b = dataset.where(column gt 3 and col(SomeOtherClass::c)) - - b.count() shouldBe 1 - } - should("Be able to serialize lists of data classes") { - val dataset = dsOf( - listOf(SomeClass(intArrayOf(1, 2, 3), 4)), - listOf(SomeClass(intArrayOf(3, 2, 1), 0)), - ) - - val (first, second) = dataset.collectAsList() - - first.single().let { (a, b) -> - a.contentEquals(intArrayOf(1, 2, 3)) shouldBe true - b shouldBe 4 - } - second.single().let { (a, b) -> - a.contentEquals(intArrayOf(3, 2, 1)) shouldBe true - b shouldBe 0 - } - } - should("Be able to serialize arrays of data classes") { - val dataset = dsOf( - arrayOf(SomeClass(intArrayOf(1, 2, 3), 4)), - arrayOf(SomeClass(intArrayOf(3, 2, 1), 0)), - ) - - val (first, second) = dataset.collectAsList() - - first.single().let { (a, b) -> - a.contentEquals(intArrayOf(1, 2, 3)) shouldBe true - b shouldBe 4 - } - second.single().let { (a, b) -> - a.contentEquals(intArrayOf(3, 2, 1)) shouldBe true - b shouldBe 0 - } - } - should("Be able to serialize lists of tuples") { - val dataset = dsOf( - listOf(Tuple2(intArrayOf(1, 2, 3), 4)), - listOf(Tuple2(intArrayOf(3, 2, 1), 0)), - ) - - val (first, second) = dataset.collectAsList() - - first.single().let { - it._1().contentEquals(intArrayOf(1, 2, 3)) shouldBe true - it._2() shouldBe 4 - } - second.single().let { - it._1().contentEquals(intArrayOf(3, 2, 1)) shouldBe true - it._2() shouldBe 0 - } - } - should("Allow simple forEachPartition in datasets") { - val dataset = dsOf( - SomeClass(intArrayOf(1, 2, 3), 1), - SomeClass(intArrayOf(4, 3, 2), 1), - ) - dataset.forEachPartition { - it.forEach { - it.b shouldBe 1 - } - } - } - should("Have easier access to keys and values for key/value datasets") { - val dataset: Dataset = dsOf( - SomeClass(intArrayOf(1, 2, 3), 1), - SomeClass(intArrayOf(4, 3, 2), 1), - ) - .groupByKey { it.b } - .reduceGroupsK { a, b -> SomeClass(a.a + b.a, a.b) } - .takeValues() - - dataset.count() shouldBe 1 - } - should("Be able to sort datasets with property reference") { - val dataset: Dataset = dsOf( - SomeClass(intArrayOf(1, 2, 3), 2), - SomeClass(intArrayOf(4, 3, 2), 1), - ) - dataset.sort(SomeClass::b) - dataset.takeAsList(1).first().b shouldBe 2 - - dataset.sort(SomeClass::a, SomeClass::b) - dataset.takeAsList(1).first().b shouldBe 2 - } - should("Have Kotlin ready functions in place of overload ambiguity") { - val dataset: Pair = dsOf( - SomeClass(intArrayOf(1, 2, 3), 1), - SomeClass(intArrayOf(4, 3, 2), 1), - ) - .groupByKey { it: SomeClass -> it.b } - .reduceGroupsK { v1: SomeClass, v2: SomeClass -> v1 } - .filter { it: Pair -> true } // not sure why this does work, but reduce doesn't - .reduceK { v1: Pair, v2: Pair -> v1 } - - dataset.second.a shouldBe intArrayOf(1, 2, 3) - } - should("Generate encoder correctly with complex enum data class") { - val dataset: Dataset = - dsOf( - ComplexEnumDataClass( - 1, - "string", - listOf("1", "2"), - SomeEnum.A, - SomeOtherEnum.C, - listOf(SomeEnum.A, SomeEnum.B), - listOf(SomeOtherEnum.C, SomeOtherEnum.D), - arrayOf(SomeEnum.A, SomeEnum.B), - arrayOf(SomeOtherEnum.C, SomeOtherEnum.D), - mapOf(SomeEnum.A to SomeOtherEnum.C) - ) - ) - - dataset.show(false) - val first = dataset.takeAsList(1).first() - - first.int shouldBe 1 - first.string shouldBe "string" - first.strings shouldBe listOf("1", "2") - first.someEnum shouldBe SomeEnum.A - first.someOtherEnum shouldBe SomeOtherEnum.C - first.someEnums shouldBe listOf(SomeEnum.A, SomeEnum.B) - first.someOtherEnums shouldBe listOf(SomeOtherEnum.C, SomeOtherEnum.D) - first.someEnumArray shouldBe arrayOf(SomeEnum.A, SomeEnum.B) - first.someOtherArray shouldBe arrayOf(SomeOtherEnum.C, SomeOtherEnum.D) - first.enumMap shouldBe mapOf(SomeEnum.A to SomeOtherEnum.C) - } - should("work with lists of maps") { - val result = dsOf( - listOf(mapOf("a" to "b", "x" to "y")), - listOf(mapOf("a" to "b", "x" to "y")), - listOf(mapOf("a" to "b", "x" to "y")) - ) - .showDS() - .map { it.last() } - .map { it["x"] } - .filterNotNull() - .distinct() - .collectAsList() - expect(result).contains.inOrder.only.value("y") - } - should("work with lists of lists") { - val result = dsOf( - listOf(listOf(1, 2, 3)), - listOf(listOf(1, 2, 3)), - listOf(listOf(1, 2, 3)) - ) - .map { it.last() } - .map { it.first() } - .reduceK { a, b -> a + b } - expect(result).toBe(3) - } - should("Generate schema correctly with nullalble list and map") { - val schema = encoder().schema() - schema.fields().forEach { - it.nullable() shouldBe true - } - } - should("Convert Scala RDD to Dataset") { - val rdd0: RDD = sc.parallelize( - listOf(1, 2, 3, 4, 5, 6) - ).rdd() - val dataset0: Dataset = rdd0.toDS() - - dataset0.toList() shouldBe listOf(1, 2, 3, 4, 5, 6) - } - - should("Convert a JavaRDD to a Dataset") { - val rdd1: JavaRDD = sc.parallelize( - listOf(1, 2, 3, 4, 5, 6) - ) - val dataset1: Dataset = rdd1.toDS() - - dataset1.toList() shouldBe listOf(1, 2, 3, 4, 5, 6) - } - should("Convert JavaDoubleRDD to Dataset") { - - // JavaDoubleRDD - val rdd2: JavaDoubleRDD = sc.parallelizeDoubles( - listOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0) - ) - val dataset2: Dataset = rdd2.toDS() - - dataset2.toList() shouldBe listOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0) - } - should("Convert JavaPairRDD to Dataset") { - val rdd3: JavaPairRDD = sc.parallelizePairs( - listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) - ) - val dataset3: Dataset> = rdd3.toDS() - - dataset3.toList>() shouldBe listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) - } - should("Convert Kotlin Serializable data class RDD to Dataset") { - val rdd4 = sc.parallelize( - listOf(SomeClass(intArrayOf(1, 2), 0)) - ) - val dataset4 = rdd4.toDS() - - dataset4.toList().first().let { (a, b) -> - a contentEquals intArrayOf(1, 2) shouldBe true - b shouldBe 0 - } - } - should("Convert Arity RDD to Dataset") { - val rdd5 = sc.parallelize( - listOf(c(1.0, 4)) - ) - val dataset5 = rdd5.toDS() - - dataset5.toList>() shouldBe listOf(c(1.0, 4)) - } - should("Convert List RDD to Dataset") { - val rdd6 = sc.parallelize( - listOf(listOf(1, 2, 3), listOf(4, 5, 6)) - ) - val dataset6 = rdd6.toDS() - - dataset6.toList>() shouldBe listOf(listOf(1, 2, 3), listOf(4, 5, 6)) - } } } }) -data class DataClassWithTuple(val tuple: T) - -data class LonLat(val lon: Double, val lat: Double) - // (data) class must be Serializable to be broadcast data class SomeClass(val a: IntArray, val b: Int) : Serializable - -data class SomeOtherClass(val a: IntArray, val b: Int, val c: Boolean) : Serializable - - -enum class SomeEnum { A, B } - -enum class SomeOtherEnum(val value: Int) { C(1), D(2) } - -data class ComplexEnumDataClass( - val int: Int, - val string: String, - val strings: List, - val someEnum: SomeEnum, - val someOtherEnum: SomeOtherEnum, - val someEnums: List, - val someOtherEnums: List, - val someEnumArray: Array, - val someOtherArray: Array, - val enumMap: Map, -) - -data class NullFieldAbleDataClass( - val optionList: List?, - val optionMap: Map?, -) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTests.kt new file mode 100644 index 00000000..83645c3a --- /dev/null +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTests.kt @@ -0,0 +1,408 @@ +package org.jetbrains.kotlinx.spark.api + +import ch.tutteli.atrium.api.fluent.en_GB.* +import ch.tutteli.atrium.api.verbs.expect +import io.kotest.core.spec.style.ShouldSpec +import io.kotest.matchers.shouldBe +import org.apache.spark.api.java.JavaDoubleRDD +import org.apache.spark.api.java.JavaPairRDD +import org.apache.spark.api.java.JavaRDD +import org.apache.spark.rdd.RDD +import org.apache.spark.sql.Dataset +import org.apache.spark.sql.functions +import org.apache.spark.sql.streaming.GroupState +import org.apache.spark.sql.streaming.GroupStateTimeout +import scala.Tuple2 +import scala.Tuple3 +import java.io.Serializable + +class DatasetFunctionTests : ShouldSpec({ + + context("dataset extensions") { + withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { + + should("handle cached operations") { + val result = dsOf(1, 2, 3, 4, 5) + .map { it to (it + 2) } + .withCached { + expect(collectAsList()).contains.inAnyOrder.only.values( + 1 to 3, + 2 to 4, + 3 to 5, + 4 to 6, + 5 to 7 + ) + + val next = filter { it.first % 2 == 0 } + expect(next.collectAsList()).contains.inAnyOrder.only.values(2 to 4, 4 to 6) + next + } + .map { c(it.first, it.second, (it.first + it.second) * 2) } + .collectAsList() + expect(result).contains.inOrder.only.values(c(2, 4, 12), c(4, 6, 20)) + } + + should("handle join operations") { + data class Left(val id: Int, val name: String) + + data class Right(val id: Int, val value: Int) + + val first = dsOf(Left(1, "a"), Left(2, "b")) + val second = dsOf(Right(1, 100), Right(3, 300)) + val result = first + .leftJoin(second, first.col("id").eq(second.col("id"))) + .map { c(it.first.id, it.first.name, it.second?.value) } + .collectAsList() + expect(result).contains.inOrder.only.values(c(1, "a", 100), c(2, "b", null)) + } + + should("handle map operations") { + val result = dsOf(listOf(1, 2, 3, 4), listOf(3, 4, 5, 6)) + .flatMap { it.iterator() } + .map { it + 4 } + .filter { it < 10 } + .collectAsList() + expect(result).contains.inAnyOrder.only.values(5, 6, 7, 8, 7, 8, 9) + } + + should("Allow simple forEachPartition in datasets") { + val dataset = dsOf( + SomeClass(intArrayOf(1, 2, 3), 1), + SomeClass(intArrayOf(4, 3, 2), 1), + ) + dataset.forEachPartition { + it.forEach { + it.b shouldBe 1 + } + } + } + + should("Have easier access to keys and values for key/value datasets") { + val dataset: Dataset = dsOf( + SomeClass(intArrayOf(1, 2, 3), 1), + SomeClass(intArrayOf(4, 3, 2), 1), + ) + .groupByKey { it.b } + .reduceGroupsK { a, b -> SomeClass(a.a + b.a, a.b) } + .takeValues() + + dataset.count() shouldBe 1 + } + + should("Be able to sort datasets with property reference") { + val dataset: Dataset = dsOf( + SomeClass(intArrayOf(1, 2, 3), 2), + SomeClass(intArrayOf(4, 3, 2), 1), + ) + dataset.sort(SomeClass::b) + dataset.takeAsList(1).first().b shouldBe 2 + + dataset.sort(SomeClass::a, SomeClass::b) + dataset.takeAsList(1).first().b shouldBe 2 + } + + should("Have Kotlin ready functions in place of overload ambiguity") { + val dataset: Pair = dsOf( + SomeClass(intArrayOf(1, 2, 3), 1), + SomeClass(intArrayOf(4, 3, 2), 1), + ) + .groupByKey { it: SomeClass -> it.b } + .reduceGroupsK { v1: SomeClass, v2: SomeClass -> v1 } + .filter { it: Pair -> true } // not sure why this does work, but reduce doesn't + .reduceK { v1: Pair, v2: Pair -> v1 } + + dataset.second.a shouldBe intArrayOf(1, 2, 3) + } + } + } + + context("grouped dataset extensions") { + withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { + + should("perform flat map on grouped datasets") { + val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") + .toDS() + .groupByKey { it.first } + + val flatMapped = groupedDataset.flatMapGroups { key, values -> + val collected = values.asSequence().toList() + + if (collected.size > 1) collected.iterator() + else emptyList>().iterator() + } + + flatMapped.count() shouldBe 2 + } + + should("perform map group with state and timeout conf on grouped datasets") { + val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") + .toDS() + .groupByKey { it.first } + + val mappedWithStateTimeoutConf = + groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> + var s by state + val collected = values.asSequence().toList() + + s = key + s shouldBe key + + s!! to collected.map { it.second } + } + + mappedWithStateTimeoutConf.count() shouldBe 2 + } + + should("perform map group with state on grouped datasets") { + val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") + .toDS() + .groupByKey { it.first } + + val mappedWithState = groupedDataset.mapGroupsWithState { key, values, state: GroupState -> + var s by state + val collected = values.asSequence().toList() + + s = key + s shouldBe key + + s!! to collected.map { it.second } + } + + mappedWithState.count() shouldBe 2 + } + + should("perform flat map group with state on grouped datasets") { + val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") + .toDS() + .groupByKey { it.first } + + val flatMappedWithState = groupedDataset.mapGroupsWithState { key, values, state: GroupState -> + var s by state + val collected = values.asSequence().toList() + + s = key + s shouldBe key + + if (collected.size > 1) collected.iterator() + else emptyList>().iterator() + } + + flatMappedWithState.count() shouldBe 2 + } + + should("be able to cogroup grouped datasets") { + val groupedDataset1 = listOf(1 to "a", 1 to "b", 2 to "c") + .toDS() + .groupByKey { it.first } + + val groupedDataset2 = listOf(1 to "d", 5 to "e", 3 to "f") + .toDS() + .groupByKey { it.first } + + val cogrouped = groupedDataset1.cogroup(groupedDataset2) { key, left, right -> + listOf( + key to (left.asSequence() + right.asSequence()) + .map { it.second } + .toList() + ).iterator() + } + + cogrouped.count() shouldBe 4 + } + } + } + + context("RDD conversions") { + withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { + + should("Convert Scala RDD to Dataset") { + val rdd0: RDD = sc.parallelize( + listOf(1, 2, 3, 4, 5, 6) + ).rdd() + val dataset0: Dataset = rdd0.toDS() + + dataset0.toList() shouldBe listOf(1, 2, 3, 4, 5, 6) + } + + should("Convert a JavaRDD to a Dataset") { + val rdd1: JavaRDD = sc.parallelize( + listOf(1, 2, 3, 4, 5, 6) + ) + val dataset1: Dataset = rdd1.toDS() + + dataset1.toList() shouldBe listOf(1, 2, 3, 4, 5, 6) + } + + should("Convert JavaDoubleRDD to Dataset") { + + // JavaDoubleRDD + val rdd2: JavaDoubleRDD = sc.parallelizeDoubles( + listOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0) + ) + val dataset2: Dataset = rdd2.toDS() + + dataset2.toList() shouldBe listOf(1.0, 2.0, 3.0, 4.0, 5.0, 6.0) + } + + should("Convert JavaPairRDD to Dataset") { + val rdd3: JavaPairRDD = sc.parallelizePairs( + listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) + ) + val dataset3: Dataset> = rdd3.toDS() + + dataset3.toList>() shouldBe listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) + } + + should("Convert Kotlin Serializable data class RDD to Dataset") { + val rdd4 = sc.parallelize( + listOf(SomeClass(intArrayOf(1, 2), 0)) + ) + val dataset4 = rdd4.toDS() + + dataset4.toList().first().let { (a, b) -> + a contentEquals intArrayOf(1, 2) shouldBe true + b shouldBe 0 + } + } + + should("Convert Arity RDD to Dataset") { + val rdd5 = sc.parallelize( + listOf(c(1.0, 4)) + ) + val dataset5 = rdd5.toDS() + + dataset5.toList>() shouldBe listOf(c(1.0, 4)) + } + + should("Convert List RDD to Dataset") { + val rdd6 = sc.parallelize( + listOf(listOf(1, 2, 3), listOf(4, 5, 6)) + ) + val dataset6 = rdd6.toDS() + + dataset6.toList>() shouldBe listOf(listOf(1, 2, 3), listOf(4, 5, 6)) + } + } + } + + context("Column functions") { + withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { + + @Suppress("UNCHECKED_CAST") + should("support dataset select") { + val dataset = dsOf( + SomeClass(intArrayOf(1, 2, 3), 3), + SomeClass(intArrayOf(1, 2, 4), 5), + ) + + val newDS1WithAs: Dataset = dataset.selectTyped( + functions.col("a").`as`(), + ) + newDS1WithAs.collectAsList() + + val newDS2: Dataset> = dataset.selectTyped( + col(SomeClass::a), // NOTE: this only works on 3.0, returning a data class with an array in it + col(SomeClass::b), + ) + newDS2.collectAsList() + + val newDS3: Dataset> = dataset.selectTyped( + col(SomeClass::a), + col(SomeClass::b), + col(SomeClass::b), + ) + newDS3.collectAsList() + + val newDS4: Dataset> = dataset.selectTyped( + col(SomeClass::a), + col(SomeClass::b), + col(SomeClass::b), + col(SomeClass::b), + ) + newDS4.collectAsList() + + val newDS5: Dataset> = dataset.selectTyped( + col(SomeClass::a), + col(SomeClass::b), + col(SomeClass::b), + col(SomeClass::b), + col(SomeClass::b), + ) + newDS5.collectAsList() + } + + should("Access columns using invoke on datasets") { + val dataset = dsOf( + SomeClass(intArrayOf(1, 2, 3), 4), + SomeClass(intArrayOf(4, 3, 2), 1), + ) + + dataset.col("a") shouldBe dataset("a") + } + + should("Use infix- and operator funs on columns") { + val dataset = dsOf( + SomeOtherClass(intArrayOf(1, 2, 3), 4, true), + SomeOtherClass(intArrayOf(4, 3, 2), 1, true), + ) + + (dataset("a") == dataset("a")) shouldBe dataset("a").equals(dataset("a")) + (dataset("a") != dataset("a")) shouldBe !dataset("a").equals(dataset("a")) + (dataset("a") eq dataset("a")) shouldBe dataset("a").equalTo(dataset("a")) + dataset("a").equalTo(dataset("a")) shouldBe (dataset("a") `===` dataset("a")) + (dataset("a") neq dataset("a")) shouldBe dataset("a").notEqual(dataset("a")) + dataset("a").notEqual(dataset("a")) shouldBe (dataset("a") `=!=` dataset("a")) + !(dataset("a") eq dataset("a")) shouldBe dataset("a").notEqual(dataset("a")) + dataset("a").notEqual(dataset("a")) shouldBe (!(dataset("a") `===` dataset("a"))) + -dataset("b") shouldBe functions.negate(dataset("b")) + !dataset("c") shouldBe functions.not(dataset("c")) + dataset("b") gt 3 shouldBe dataset("b").gt(3) + dataset("b") lt 3 shouldBe dataset("b").lt(3) + dataset("b") leq 3 shouldBe dataset("b").leq(3) + dataset("b") geq 3 shouldBe dataset("b").geq(3) + dataset("b") inRangeOf 0..2 shouldBe dataset("b").between(0, 2) + dataset("c") or dataset("c") shouldBe dataset("c").or(dataset("c")) + dataset("c") and dataset("c") shouldBe dataset("c").and(dataset("c")) + dataset("c").and(dataset("c")) shouldBe (dataset("c") `&&` dataset("c")) + dataset("b") + dataset("b") shouldBe dataset("b").plus(dataset("b")) + dataset("b") - dataset("b") shouldBe dataset("b").minus(dataset("b")) + dataset("b") * dataset("b") shouldBe dataset("b").multiply(dataset("b")) + dataset("b") / dataset("b") shouldBe dataset("b").divide(dataset("b")) + dataset("b") % dataset("b") shouldBe dataset("b").mod(dataset("b")) + dataset("b")[0] shouldBe dataset("b").getItem(0) + } + + should("Handle TypedColumns") { + val dataset = dsOf( + SomeOtherClass(intArrayOf(1, 2, 3), 4, true), + SomeOtherClass(intArrayOf(4, 3, 2), 1, true), + ) + + // walking over all column creation methods + val b: Dataset> = dataset.select( + dataset.col(SomeOtherClass::b), + dataset(SomeOtherClass::a), + col(SomeOtherClass::c), + ) + b.collectAsList() + } + + should("Handle some where queries using column operator functions") { + val dataset = dsOf( + SomeOtherClass(intArrayOf(1, 2, 3), 4, true), + SomeOtherClass(intArrayOf(4, 3, 2), 1, true), + ) + dataset.collectAsList() + + val column = functions.col("b").`as`() + + val b = dataset.where(column gt 3 and col(SomeOtherClass::c)) + + b.count() shouldBe 1 + } + + } + } +}) + +data class SomeOtherClass(val a: IntArray, val b: Int, val c: Boolean) : Serializable diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTests.kt new file mode 100644 index 00000000..2c3fc768 --- /dev/null +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTests.kt @@ -0,0 +1,429 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api + +import ch.tutteli.atrium.api.fluent.en_GB.* +import ch.tutteli.atrium.api.verbs.expect +import io.kotest.core.spec.style.ShouldSpec +import io.kotest.matchers.shouldBe +import org.apache.spark.sql.Dataset +import org.apache.spark.sql.types.Decimal +import org.apache.spark.unsafe.types.CalendarInterval +import scala.Product +import scala.Tuple1 +import scala.Tuple2 +import scala.Tuple3 +import java.math.BigDecimal +import java.sql.Date +import java.sql.Timestamp +import java.time.Duration +import java.time.Instant +import java.time.LocalDate +import java.time.Period + +class EncodingTests : ShouldSpec({ + + context("encoders") { + withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { + + should("handle LocalDate Datasets") { + val dates = listOf(LocalDate.now(), LocalDate.now()) + val dataset: Dataset = dates.toDS() + dataset.collectAsList() shouldBe dates + } + + should("handle Instant Datasets") { + val instants = listOf(Instant.now(), Instant.now()) + val dataset: Dataset = instants.toDS() + dataset.collectAsList() shouldBe instants + } + + should("handle Timestamp Datasets") { + val timeStamps = listOf(Timestamp(0L), Timestamp(1L)) + val dataset = timeStamps.toDS() + dataset.collectAsList() shouldBe timeStamps + } + + should("handle Duration Datasets") { + val dataset = dsOf(Duration.ZERO) + dataset.collectAsList() shouldBe listOf(Duration.ZERO) + } + + should("handle Period Datasets") { + val periods = listOf(Period.ZERO, Period.ofDays(2)) + val dataset = periods.toDS() + + dataset.show(false) + + dataset.collectAsList().let { + it[0] shouldBe Period.ZERO + + // NOTE Spark truncates java.time.Period to months. + it[1] shouldBe Period.ofDays(0) + } + } + + should("handle binary datasets") { + val byteArray = "Hello there".encodeToByteArray() + val dataset = dsOf(byteArray) + dataset.collectAsList() shouldBe listOf(byteArray) + } + + should("handle BigDecimal datasets") { + val decimals = listOf(BigDecimal.ONE, BigDecimal.TEN) + val dataset = decimals.toDS() + dataset.collectAsList().let { (one, ten) -> + one.compareTo(BigDecimal.ONE) shouldBe 0 + ten.compareTo(BigDecimal.TEN) shouldBe 0 + } + } + + should("handle nullable datasets") { + val ints = listOf(1, 2, 3, null) + val dataset = ints.toDS() + dataset.collectAsList() shouldBe ints + } + } + } + context("known dataTypes") { + withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { + + should("be able to serialize Instant") { + val instantPair = Instant.now() to Instant.now() + val dataset = dsOf(instantPair) + dataset.collectAsList() shouldBe listOf(instantPair) + } + + should("be able to serialize Date") { + val datePair = Date.valueOf("2020-02-10") to 5 + val dataset: Dataset> = dsOf(datePair) + dataset.collectAsList() shouldBe listOf(datePair) + } + + should("be able to serialize Timestamp") { + val timestampPair = Timestamp(0L) to 2 + val dataset = dsOf(timestampPair) + dataset.collectAsList() shouldBe listOf(timestampPair) + } + + should("be able to serialize binary") { + val byteArrayTriple = c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3)) + val dataset = dsOf(byteArrayTriple) + + val (a, b, c) = dataset.collectAsList().single() + a contentEquals "Hello there".encodeToByteArray() shouldBe true + b shouldBe 1 + c contentEquals intArrayOf(1, 2, 3) shouldBe true + } + + should("be able to serialize Decimal") { + val decimalPair = c(Decimal().set(50), 12) + val dataset = dsOf(decimalPair) + dataset.collectAsList() shouldBe listOf(decimalPair) + } + + should("be able to serialize BigDecimal") { + val decimalPair = c(BigDecimal.TEN, 12) + val dataset = dsOf(decimalPair) + val (a, b) = dataset.collectAsList().single() + a.compareTo(BigDecimal.TEN) shouldBe 0 + b shouldBe 12 + } + + should("be able to serialize CalendarInterval") { + val calendarIntervalPair = CalendarInterval(1, 0, 0L) to 2 + val dataset = dsOf(calendarIntervalPair) + dataset.collectAsList() shouldBe listOf(calendarIntervalPair) + } + + should("Be able to serialize Scala Tuples including data classes") { + val dataset = dsOf( + Tuple2("a", Tuple3("a", 1, LonLat(1.0, 1.0))), + Tuple2("b", Tuple3("b", 2, LonLat(1.0, 2.0))), + ) + dataset.show() + val asList = dataset.takeAsList(2) + asList.first() shouldBe Tuple2("a", Tuple3("a", 1, LonLat(1.0, 1.0))) + } + + should("Be able to serialize data classes with tuples") { + val dataset = dsOf( + DataClassWithTuple(Tuple3(5L, "test", Tuple1(""))), + DataClassWithTuple(Tuple3(6L, "tessst", Tuple1(""))), + ) + + dataset.show() + val asList = dataset.takeAsList(2) + asList.first().tuple shouldBe Tuple3(5L, "test", Tuple1("")) + } + } + } + + context("schema") { + withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { + + should("collect data classes with doubles correctly") { + val ll1 = LonLat(1.0, 2.0) + val ll2 = LonLat(3.0, 4.0) + val lonlats = dsOf(ll1, ll2).collectAsList() + expect(lonlats).contains.inAnyOrder.only.values(ll1.copy(), ll2.copy()) + } + + should("contain all generic primitives with complex schema") { + val primitives = c(1, 1.0, 1.toFloat(), 1.toByte(), LocalDate.now(), true) + val primitives2 = c(2, 2.0, 2.toFloat(), 2.toByte(), LocalDate.now().plusDays(1), false) + val tuples = dsOf(primitives, primitives2).collectAsList() + expect(tuples).contains.inAnyOrder.only.values(primitives, primitives2) + } + + should("contain all generic primitives with complex nullable schema") { + val primitives = c(1, 1.0, 1.toFloat(), 1.toByte(), LocalDate.now(), true) + val nulls = c(null, null, null, null, null, null) + val tuples = dsOf(primitives, nulls).collectAsList() + expect(tuples).contains.inAnyOrder.only.values(primitives, nulls) + } + + should("Be able to serialize lists of data classes") { + val dataset = dsOf( + listOf(SomeClass(intArrayOf(1, 2, 3), 4)), + listOf(SomeClass(intArrayOf(3, 2, 1), 0)), + ) + + val (first, second) = dataset.collectAsList() + + first.single().let { (a, b) -> + a.contentEquals(intArrayOf(1, 2, 3)) shouldBe true + b shouldBe 4 + } + second.single().let { (a, b) -> + a.contentEquals(intArrayOf(3, 2, 1)) shouldBe true + b shouldBe 0 + } + } + + should("Be able to serialize arrays of data classes") { + val dataset = dsOf( + arrayOf(SomeClass(intArrayOf(1, 2, 3), 4)), + arrayOf(SomeClass(intArrayOf(3, 2, 1), 0)), + ) + + val (first, second) = dataset.collectAsList() + + first.single().let { (a, b) -> + a.contentEquals(intArrayOf(1, 2, 3)) shouldBe true + b shouldBe 4 + } + second.single().let { (a, b) -> + a.contentEquals(intArrayOf(3, 2, 1)) shouldBe true + b shouldBe 0 + } + } + + should("Be able to serialize lists of tuples") { + val dataset = dsOf( + listOf(Tuple2(intArrayOf(1, 2, 3), 4)), + listOf(Tuple2(intArrayOf(3, 2, 1), 0)), + ) + + val (first, second) = dataset.collectAsList() + + first.single().let { + it._1().contentEquals(intArrayOf(1, 2, 3)) shouldBe true + it._2() shouldBe 4 + } + second.single().let { + it._1().contentEquals(intArrayOf(3, 2, 1)) shouldBe true + it._2() shouldBe 0 + } + } + + should("Generate encoder correctly with complex enum data class") { + val dataset: Dataset = + dsOf( + ComplexEnumDataClass( + int = 1, + string = "string", + strings = listOf("1", "2"), + someEnum = SomeEnum.A, + someOtherEnum = SomeOtherEnum.C, + someEnums = listOf(SomeEnum.A, SomeEnum.B), + someOtherEnums = listOf(SomeOtherEnum.C, SomeOtherEnum.D), + someEnumArray = arrayOf(SomeEnum.A, SomeEnum.B), + someOtherArray = arrayOf(SomeOtherEnum.C, SomeOtherEnum.D), + enumMap = mapOf(SomeEnum.A to SomeOtherEnum.C), + ) + ) + + dataset.show(false) + val first = dataset.takeAsList(1).first() + + first.int shouldBe 1 + first.string shouldBe "string" + first.strings shouldBe listOf("1", "2") + first.someEnum shouldBe SomeEnum.A + first.someOtherEnum shouldBe SomeOtherEnum.C + first.someEnums shouldBe listOf(SomeEnum.A, SomeEnum.B) + first.someOtherEnums shouldBe listOf(SomeOtherEnum.C, SomeOtherEnum.D) + first.someEnumArray shouldBe arrayOf(SomeEnum.A, SomeEnum.B) + first.someOtherArray shouldBe arrayOf(SomeOtherEnum.C, SomeOtherEnum.D) + first.enumMap shouldBe mapOf(SomeEnum.A to SomeOtherEnum.C) + } + + should("work with lists of maps") { + val result = dsOf( + listOf(mapOf("a" to "b", "x" to "y")), + listOf(mapOf("a" to "b", "x" to "y")), + listOf(mapOf("a" to "b", "x" to "y")) + ) + .showDS() + .map { it.last() } + .map { it["x"] } + .filterNotNull() + .distinct() + .collectAsList() + expect(result).contains.inOrder.only.value("y") + } + + should("work with lists of lists") { + val result = dsOf( + listOf(listOf(1, 2, 3)), + listOf(listOf(1, 2, 3)), + listOf(listOf(1, 2, 3)) + ) + .map { it.last() } + .map { it.first() } + .reduceK { a, b -> a + b } + expect(result).toBe(3) + } + + should("Generate schema correctly with nullalble list and map") { + val schema = encoder().schema() + schema.fields().forEach { + it.nullable() shouldBe true + } + } + + should("handle strings converted to lists") { + data class Movie(val id: Long, val genres: String) + data class MovieExpanded(val id: Long, val genres: List) + + val comedies = listOf(Movie(1, "Comedy|Romance"), Movie(2, "Horror|Action")).toDS() + .map { MovieExpanded(it.id, it.genres.split("|").toList()) } + .filter { it.genres.contains("Comedy") } + .collectAsList() + expect(comedies).contains.inAnyOrder.only.values( + MovieExpanded( + 1, + listOf("Comedy", "Romance") + ) + ) + } + + should("handle strings converted to arrays") { + + data class Movie(val id: Long, val genres: String) + + data class MovieExpanded(val id: Long, val genres: Array) { + override fun equals(other: Any?): Boolean { + if (this === other) return true + if (javaClass != other?.javaClass) return false + other as MovieExpanded + return if (id != other.id) false else genres.contentEquals(other.genres) + } + + override fun hashCode(): Int { + var result = id.hashCode() + result = 31 * result + genres.contentHashCode() + return result + } + } + + val comedies = listOf(Movie(1, "Comedy|Romance"), Movie(2, "Horror|Action")).toDS() + .map { MovieExpanded(it.id, it.genres.split("|").toTypedArray()) } + .filter { it.genres.contains("Comedy") } + .collectAsList() + + expect(comedies).contains.inAnyOrder.only.values( + MovieExpanded( + 1, + arrayOf("Comedy", "Romance") + ) + ) + } + + should("handle arrays of generics") { + data class Test(val id: Long, val data: Array>) + + val result = listOf(Test(1, arrayOf(5.1 to 6, 6.1 to 7))) + .toDS() + .map { it.id to it.data.firstOrNull { liEl -> liEl.first < 6 } } + .map { it.second } + .collectAsList() + expect(result).contains.inOrder.only.values(5.1 to 6) + } + + should("handle lists of generics") { + data class Test(val id: Long, val data: List>) + + val result = listOf(Test(1, listOf(5.1 to 6, 6.1 to 7))) + .toDS() + .map { it.id to it.data.firstOrNull { liEl -> liEl.first < 6 } } + .map { it.second } + .collectAsList() + expect(result).contains.inOrder.only.values(5.1 to 6) + } + + should("!handle primitive arrays") { + val result = listOf(arrayOf(1, 2, 3, 4)) + .toDS() + .map { it.map { ai -> ai + 1 } } + .collectAsList() + .flatten() + expect(result).contains.inOrder.only.values(2, 3, 4, 5) + } + } + } +}) + +data class DataClassWithTuple(val tuple: T) + +data class LonLat(val lon: Double, val lat: Double) + +enum class SomeEnum { A, B } + +enum class SomeOtherEnum(val value: Int) { C(1), D(2) } + +data class ComplexEnumDataClass( + val int: Int, + val string: String, + val strings: List, + val someEnum: SomeEnum, + val someOtherEnum: SomeOtherEnum, + val someEnums: List, + val someOtherEnums: List, + val someEnumArray: Array, + val someOtherArray: Array, + val enumMap: Map, +) + +data class NullFieldAbleDataClass( + val optionList: List?, + val optionMap: Map?, +) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt index 8950ec80..2d481e79 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt @@ -41,21 +41,21 @@ class TypeInferenceTest : ShouldSpec({ val struct = Struct.fromJson(schema(typeOf>>()).prettyJson())!! should("contain correct typings") { expect(struct.fields).notToBeNull().contains.inAnyOrder.only.entries( - hasField("first", "string"), - hasStruct("second", - hasField("vala", "integer"), - hasStruct("tripl1", - hasField("first", "integer"), - hasStruct("second", - hasField("vala2", "long"), - hasStruct("para2", - hasField("first", "long"), - hasField("second", "string") - ) - ), - hasField("third", "integer") + hasField("first", "string"), + hasStruct("second", + hasField("vala", "integer"), + hasStruct("tripl1", + hasField("first", "integer"), + hasStruct("second", + hasField("vala2", "long"), + hasStruct("para2", + hasField("first", "long"), + hasField("second", "string") ) + ), + hasField("third", "integer") ) + ) ) } } @@ -67,23 +67,23 @@ class TypeInferenceTest : ShouldSpec({ val struct = Struct.fromJson(schema(typeOf>>()).prettyJson())!! should("contain correct typings") { expect(struct.fields).notToBeNull().contains.inAnyOrder.only.entries( - hasField("first", "string"), - hasStruct("second", - hasField("vala", "integer"), - hasStruct("tripl1", - hasField("first", "integer"), - hasStruct("second", - hasField("vala2", "long"), - hasStruct("para2", - hasField("first", "long"), - hasStruct("second", - hasField("vala3", "double") - ) - ) - ), - hasField("third", "integer") + hasField("first", "string"), + hasStruct("second", + hasField("vala", "integer"), + hasStruct("tripl1", + hasField("first", "integer"), + hasStruct("second", + hasField("vala2", "long"), + hasStruct("para2", + hasField("first", "long"), + hasStruct("second", + hasField("vala3", "double") + ) ) + ), + hasField("third", "integer") ) + ) ) } } @@ -93,9 +93,9 @@ class TypeInferenceTest : ShouldSpec({ val struct = Struct.fromJson(schema(typeOf()).prettyJson())!! should("return correct types too") { expect(struct.fields).notToBeNull().contains.inAnyOrder.only.entries( - hasField("a", "string"), - hasField("b", "integer"), - hasField("c", "double") + hasField("a", "string"), + hasField("b", "integer"), + hasField("c", "double") ) } } @@ -115,8 +115,8 @@ class TypeInferenceTest : ShouldSpec({ isOfType("array") feature { f(it::elementType) }.notToBeNull().isA { feature { f(it.value::fields) }.notToBeNull().contains.inAnyOrder.only.entries( - hasField("first", "integer"), - hasField("second", "long") + hasField("first", "integer"), + hasField("second", "long") ) } } @@ -131,7 +131,7 @@ class TypeInferenceTest : ShouldSpec({ isOfType("array") feature { f(it::elementType) }.notToBeNull().isA { feature { f(it.value::fields) }.notToBeNull().contains.inAnyOrder.only.entries( - hasField("e", "string") + hasField("e", "string") ) } } @@ -174,8 +174,8 @@ class TypeInferenceTest : ShouldSpec({ val struct = Struct.fromJson(schema(typeOf()).prettyJson())!! should("Not change order of fields") { expect(struct.fields).notToBeNull().containsExactly( - hasField("lon", "double"), - hasField("lat", "double") + hasField("lon", "double"), + hasField("lat", "double") ) } } @@ -186,38 +186,39 @@ class TypeInferenceTest : ShouldSpec({ should("show that list is nullable and element is not") { expect(struct) - .feature("some", { fields }) { - notToBeNull().contains.inOrder.only.entry { - this - .feature("field name", { name }) { toBe("optionList") } - .feature("optionList is nullable", { nullable }) { toBe(true) } - .feature("optionList", { type }) { - this - .isA() - .feature("element type of optionList", { value.elementType }) { toBe(SimpleElement("integer")) } - .feature("optionList contains null", { value.containsNull }) { toBe(false) } - .feature("optionList type", { value }) { isOfType("array") } - } - } + .feature("some", { fields }) { + notToBeNull().contains.inOrder.only.entry { + this + .feature("field name", { name }) { toBe("optionList") } + .feature("optionList is nullable", { nullable }) { toBe(true) } + .feature("optionList", { type }) { + this + .isA() + .feature("element type of optionList", + { value.elementType }) { toBe(SimpleElement("integer")) } + .feature("optionList contains null", { value.containsNull }) { toBe(false) } + .feature("optionList type", { value }) { isOfType("array") } + } } + } } should("generate valid serializer schema") { expect(encoder().schema()) { this - .feature("data type", { this.fields()?.toList() }) { - this.notToBeNull().contains.inOrder.only.entry { - this - .feature("element name", { name() }) { toBe("optionList") } - .feature("field type", { dataType() }, { - this - .isA() - .feature("element type", { elementType() }) { isA() } - .feature("element nullable", { containsNull() }) { toBe(expected = false) } - }) - .feature("optionList nullable", { nullable() }) { toBe(true) } - } + .feature("data type", { this.fields()?.toList() }) { + this.notToBeNull().contains.inOrder.only.entry { + this + .feature("element name", { name() }) { toBe("optionList") } + .feature("field type", { dataType() }, { + this + .isA() + .feature("element type", { elementType() }) { isA() } + .feature("element nullable", { containsNull() }) { toBe(expected = false) } + }) + .feature("optionList nullable", { nullable() }) { toBe(true) } } + } } } } @@ -229,15 +230,15 @@ private fun Expect.isOfType(type: String) { } private fun hasStruct( - name: String, - expectedField: Expect.() -> Unit, - vararg expectedFields: Expect.() -> Unit, + name: String, + expectedField: Expect.() -> Unit, + vararg expectedFields: Expect.() -> Unit, ): Expect.() -> Unit { return { feature { f(it::name) }.toBe(name) feature { f(it::type) }.isA { feature { f(it.value::fields) }.notToBeNull().contains.inAnyOrder.only.entries(expectedField, - *expectedFields) + *expectedFields) } } } From f4fee40c823d35ed66ce39b80c5cc4ce9e5550ea Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 1 Mar 2022 17:08:23 +0100 Subject: [PATCH 069/213] splitting and ordering api tests into multiple files --- .../kotlinx/spark/examples/Broadcasting.kt | 12 ++++---- .../spark/examples/CachedOperations.kt | 14 ++++----- .../spark/examples/MapAndListOperations.kt | 21 +++++++------ .../kotlinx/spark/examples/WordCount.kt | 30 +++++++++---------- 4 files changed, 40 insertions(+), 37 deletions(-) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Broadcasting.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Broadcasting.kt index 6ef97b63..2e5914e3 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Broadcasting.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Broadcasting.kt @@ -31,12 +31,12 @@ data class SomeClass(val a: IntArray, val b: Int) : Serializable fun main() = withSpark { val broadcastVariable = spark.broadcast(SomeClass(a = intArrayOf(5, 6), b = 3)) val result = listOf(1, 2, 3, 4, 5) - .toDS() - .map { - val receivedBroadcast = broadcastVariable.value - it + receivedBroadcast.a.first() - } - .collectAsList() + .toDS() + .map { + val receivedBroadcast = broadcastVariable.value + it + receivedBroadcast.a.first() + } + .collectAsList() println(result) } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt index 8dad442a..e8e4dbf1 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt @@ -24,13 +24,13 @@ import org.jetbrains.kotlinx.spark.api.* fun main() { withSpark { dsOf(1, 2, 3, 4, 5) - .map { it to (it + 2) } - .withCached { - showDS() + .map { it to (it + 2) } + .withCached { + showDS() - filter { it.first % 2 == 0 }.showDS() - } - .map { c(it.first, it.second, (it.first + it.second) * 2) } - .show() + filter { it.first % 2 == 0 }.showDS() + } + .map { c(it.first, it.second, (it.first + it.second) * 2) } + .show() } } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt index 89bfdacd..c67504cf 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt @@ -23,15 +23,18 @@ import org.jetbrains.kotlinx.spark.api.* fun main() { withSpark(props = mapOf("spark.sql.codegen.wholeStage" to true)) { - dsOf(mapOf(1 to c(1, 2, 3), 2 to c(1, 2, 3)), mapOf(3 to c(1, 2, 3), 4 to c(1, 2, 3))) - .flatMap { it.toList().map { p -> listOf(p.first, p.second._1, p.second._2, p.second._3) }.iterator() } - .flatten() - .map { c(it) } - .also { it.printSchema() } - .distinct() - .sort("_1") - .debugCodegen() - .show() + dsOf( + mapOf(1 to c(1, 2, 3), 2 to c(1, 2, 3)), + mapOf(3 to c(1, 2, 3), 4 to c(1, 2, 3)), + ) + .flatMap { it.toList().map { p -> listOf(p.first, p.second._1, p.second._2, p.second._3) }.iterator() } + .flatten() + .map { c(it) } + .also { it.printSchema() } + .distinct() + .sort("_1") + .debugCodegen() + .show() } } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt index 88f43a62..996e36be 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt @@ -27,22 +27,22 @@ const val MEANINGFUL_WORD_LENGTH = 4 fun main() { withSpark { spark - .read() - .textFile(this::class.java.classLoader.getResource("the-catcher-in-the-rye.txt")?.path) - .map { it.split(Regex("\\s")) } - .flatten() - .cleanup() - .groupByKey { it } - .mapGroups { k, iter -> k to iter.asSequence().count() } - .sort { arrayOf(it.col("second").desc()) } - .limit(20) - .map { it.second to it.first } - .show(false) + .read() + .textFile(this::class.java.classLoader.getResource("the-catcher-in-the-rye.txt")?.path) + .map { it.split(Regex("\\s")) } + .flatten() + .cleanup() + .groupByKey { it } + .mapGroups { k, iter -> k to iter.asSequence().count() } + .sort { arrayOf(it.col("second").desc()) } + .limit(20) + .map { it.second to it.first } + .show(false) } } fun Dataset.cleanup() = - filter { it.isNotBlank() } - .map { it.trim(',', ' ', '\n', ':', '.', ';', '?', '!', '"', '\'', '\t', ' ') } - .filter { !it.endsWith("n’t") } - .filter { it.length >= MEANINGFUL_WORD_LENGTH } + filter { it.isNotBlank() } + .map { it.trim(',', ' ', '\n', ':', '.', ';', '?', '!', '"', '\'', '\t', ' ') } + .filter { !it.endsWith("n’t") } + .filter { it.length >= MEANINGFUL_WORD_LENGTH } From 04ae82a545d0cfa68496d09a81d3edc418afc97a Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 1 Mar 2022 17:09:29 +0100 Subject: [PATCH 070/213] splitting and ordering api tests into multiple files --- .../api/{DatasetFunctionTests.kt => DatasetFunctionTest.kt} | 2 +- .../kotlinx/spark/api/{EncodingTests.kt => EncodingTest.kt} | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) rename kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/{DatasetFunctionTests.kt => DatasetFunctionTest.kt} (99%) rename kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/{EncodingTests.kt => EncodingTest.kt} (99%) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt similarity index 99% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTests.kt rename to kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt index 83645c3a..f352ff68 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt @@ -16,7 +16,7 @@ import scala.Tuple2 import scala.Tuple3 import java.io.Serializable -class DatasetFunctionTests : ShouldSpec({ +class DatasetFunctionTest : ShouldSpec({ context("dataset extensions") { withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt similarity index 99% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTests.kt rename to kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt index 2c3fc768..9d37194a 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt @@ -38,7 +38,7 @@ import java.time.Instant import java.time.LocalDate import java.time.Period -class EncodingTests : ShouldSpec({ +class EncodingTest : ShouldSpec({ context("encoders") { withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { From f94c3d84d786d636866fc5e87b8b6473afcfa6dc Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 2 Mar 2022 17:45:11 +0100 Subject: [PATCH 071/213] testing jupyter --- kotlin-spark-api/3.2/pom_2.12.xml | 4 + .../kotlinx/spark/api/jupyter/Integration.kt | 96 +++++++++++++++++++ .../kotlin-jupyter-libraries/libraries.json | 8 ++ .../kotlinx/spark/api/DatasetFunctionTest.kt | 19 ++++ pom.xml | 14 +++ 5 files changed, 141 insertions(+) create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt create mode 100644 kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 826547d2..85374860 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -27,6 +27,10 @@ org.jetbrains.kotlinx.spark core-3.2_${scala.compat.version} + + org.jetbrains.kotlinx + kotlin-jupyter-api + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt new file mode 100644 index 00000000..4819e1de --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -0,0 +1,96 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api.jupyter + +import org.apache.spark.SparkConf +import org.jetbrains.kotlinx.jupyter.api.VariableDeclaration +import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration +import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.setLogLevel +import kotlin.reflect.* +import org.jetbrains.kotlinx.spark.api.sparkContext + +@OptIn(ExperimentalStdlibApi::class) +internal class Integration : JupyterIntegration() { + + private val kotlinVersion = "1.5.30" + private val scalaCompatVersion = "2.12" + private val scalaVersion = "2.12.15" + private val spark3Version = "3.2.1" + + override fun Builder.onLoaded() { + + + dependencies( + "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion", + "org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion", + "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-mllib_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-repl_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-graphx_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-launcher_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-catalyst_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-core_$scalaCompatVersion:$spark3Version", + "org.scala-lang:scala-library:$scalaVersion", + "org.scala-lang.modules:scala-xml_$scalaCompatVersion:2.0.1", + "org.scala-lang:scala-reflect:$scalaVersion", + "org.scala-lang:scala-compiler:$scalaVersion", + "commons-io:commons-io:2.11.0", + ) + + import("org.jetbrains.kotlinx.spark.api.*") + import("org.apache.spark.sql.functions.*") + import("org.apache.spark.*") + import("org.apache.spark.sql.*") + import("org.apache.spark.sql.SparkSession.Builder") + import("scala.collection.Seq") + + // starting spark and unwrapping KSparkContext functions + onLoaded { + execute("""println("Running!!")""") + execute( + """|val spark = org.apache.spark.sql.SparkSession + | .builder() + | .master(SparkConf().get("spark.master", "local[*]")) + | .appName("Jupyter") + | .getOrCreate()""".trimMargin() + ) +// execute("""spark.sparkContext.setLogLevel(SparkLogLevel.ERROR)""") +// execute("""val sc: JavaSparkContext by lazy { JavaSparkContext(spark.sparkContext) }""") +// val spark = org.apache.spark.sql.SparkSession +// .builder() +// .master(SparkConf().get("spark.master", "local[*]")) +// .appName("Jupyter") +// .getOrCreate() +// spark.sparkContext.setLogLevel(SparkLogLevel.ERROR) + +// declare( +// listOf(VariableDeclaration( +// name = "spark", +// value = spark, +// type = typeOf(), +// )) +// ) + } + } +} diff --git a/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json b/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json new file mode 100644 index 00000000..2041fce8 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json @@ -0,0 +1,8 @@ +{ + "definitions": [], + "producers": [ + { + "fqn": "org.jetbrains.kotlinx.spark.api.jupyter.Integration" + } + ] +} diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt index f352ff68..b9667e8a 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api import ch.tutteli.atrium.api.fluent.en_GB.* diff --git a/pom.xml b/pom.xml index 0df3adac..36a98486 100644 --- a/pom.xml +++ b/pom.xml @@ -16,6 +16,7 @@ 4.6.0 1.0.1 3.2.1 + 0.11.0-61 2.10.0 @@ -51,6 +52,11 @@ kotlin-reflect ${kotlin.version} + + org.jetbrains.kotlinx + kotlin-jupyter-api + ${kotlin-jupyter-api.version} + @@ -164,6 +170,9 @@ src/main/ src/test/ + + **/*.json + apache_v2 =LICENSE= =LICENSE END= @@ -175,6 +184,11 @@ update-file-header + + + **/*.json + + process-sources From 4f68d0607a3a3a2d01f3a99d3d187909edaaf9cf Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 3 Mar 2022 11:52:57 +0100 Subject: [PATCH 072/213] fixed instant tests --- .../org/jetbrains/kotlinx/spark/api/EncodingTest.kt | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt index 9d37194a..0be1050c 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt @@ -52,7 +52,11 @@ class EncodingTest : ShouldSpec({ should("handle Instant Datasets") { val instants = listOf(Instant.now(), Instant.now()) val dataset: Dataset = instants.toDS() - dataset.collectAsList() shouldBe instants + dataset.collectAsList().let { (first, second) -> + val (a, b) = instants + a.compareTo(first) shouldBe 0 + b.compareTo(second) shouldBe 0 + } } should("handle Timestamp Datasets") { @@ -108,7 +112,11 @@ class EncodingTest : ShouldSpec({ should("be able to serialize Instant") { val instantPair = Instant.now() to Instant.now() val dataset = dsOf(instantPair) - dataset.collectAsList() shouldBe listOf(instantPair) + dataset.collectAsList().single().let { (first, second) -> + val (a, b) = instantPair + a.compareTo(first) shouldBe 0 + b.compareTo(second) shouldBe 0 + } } should("be able to serialize Date") { From 8d038a5c342a3585390949087ee72b94d81c8b28 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 3 Mar 2022 12:38:11 +0100 Subject: [PATCH 073/213] attempting to make it work --- core/3.2/pom_2.12.xml | 6 +++ .../kotlinx/spark/api/jupyter/Integration.kt | 52 +++++++++++-------- 2 files changed, 35 insertions(+), 23 deletions(-) diff --git a/core/3.2/pom_2.12.xml b/core/3.2/pom_2.12.xml index 8ed1eb4f..33b5698c 100644 --- a/core/3.2/pom_2.12.xml +++ b/core/3.2/pom_2.12.xml @@ -29,6 +29,12 @@ ${spark3.version} provided + + org.apache.spark + spark-core_${scala.compat.version} + ${spark3.version} + provided + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 4819e1de..6ed54924 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -20,7 +20,9 @@ package org.jetbrains.kotlinx.spark.api.jupyter import org.apache.spark.SparkConf +import org.apache.spark.api.java.JavaSparkContext import org.jetbrains.kotlinx.jupyter.api.VariableDeclaration +import org.jetbrains.kotlinx.jupyter.api.declare import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration import org.jetbrains.kotlinx.spark.api.* import org.jetbrains.kotlinx.spark.api.setLogLevel @@ -67,30 +69,34 @@ internal class Integration : JupyterIntegration() { // starting spark and unwrapping KSparkContext functions onLoaded { - execute("""println("Running!!")""") - execute( - """|val spark = org.apache.spark.sql.SparkSession - | .builder() - | .master(SparkConf().get("spark.master", "local[*]")) - | .appName("Jupyter") - | .getOrCreate()""".trimMargin() + println("Running!!") + + System.setProperty( + "spark.jars", + listOf( + "~/.m2/repository/org/apache/spark/spark-core_2.12/3.2.1/spark-core_2.12-3.2.1.jar", + ).joinToString(",") + ) + + val spark = org.apache.spark.sql.SparkSession + .builder() + .master(SparkConf().get("spark.master", "local[*]")) + .appName("Jupyter") + .getOrCreate() + + // execute("%dumpClassesForSpark") + + spark.sparkContext.setLogLevel(SparkLogLevel.ERROR) + + + val sc = JavaSparkContext(spark.sparkContext) + + declare( + "spark" to spark, + "sc" to sc, ) -// execute("""spark.sparkContext.setLogLevel(SparkLogLevel.ERROR)""") -// execute("""val sc: JavaSparkContext by lazy { JavaSparkContext(spark.sparkContext) }""") -// val spark = org.apache.spark.sql.SparkSession -// .builder() -// .master(SparkConf().get("spark.master", "local[*]")) -// .appName("Jupyter") -// .getOrCreate() -// spark.sparkContext.setLogLevel(SparkLogLevel.ERROR) - -// declare( -// listOf(VariableDeclaration( -// name = "spark", -// value = spark, -// type = typeOf(), -// )) -// ) + + } } } From d5c0b32dc635ece4dffd5ebb89469e6592843373 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 3 Mar 2022 13:46:31 +0100 Subject: [PATCH 074/213] basics work! --- core/3.2/pom_2.12.xml | 13 +++--- kotlin-spark-api/3.2/pom_2.12.xml | 4 +- .../kotlinx/spark/api/jupyter/Integration.kt | 40 +++++-------------- 3 files changed, 18 insertions(+), 39 deletions(-) diff --git a/core/3.2/pom_2.12.xml b/core/3.2/pom_2.12.xml index 33b5698c..0752c43f 100644 --- a/core/3.2/pom_2.12.xml +++ b/core/3.2/pom_2.12.xml @@ -27,14 +27,13 @@ org.apache.spark spark-sql_${scala.compat.version} ${spark3.version} - provided - - - org.apache.spark - spark-core_${scala.compat.version} - ${spark3.version} - provided + + + + + + diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 85374860..dbdbbdfe 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -38,13 +38,13 @@ org.apache.spark spark-sql_${scala.compat.version} ${spark3.version} - provided + org.apache.spark spark-streaming_${scala.compat.version} ${spark3.version} - provided + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 6ed54924..3c5a7740 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -19,15 +19,7 @@ */ package org.jetbrains.kotlinx.spark.api.jupyter -import org.apache.spark.SparkConf -import org.apache.spark.api.java.JavaSparkContext -import org.jetbrains.kotlinx.jupyter.api.VariableDeclaration -import org.jetbrains.kotlinx.jupyter.api.declare import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration -import org.jetbrains.kotlinx.spark.api.* -import org.jetbrains.kotlinx.spark.api.setLogLevel -import kotlin.reflect.* -import org.jetbrains.kotlinx.spark.api.sparkContext @OptIn(ExperimentalStdlibApi::class) internal class Integration : JupyterIntegration() { @@ -64,6 +56,7 @@ internal class Integration : JupyterIntegration() { import("org.apache.spark.sql.functions.*") import("org.apache.spark.*") import("org.apache.spark.sql.*") + import("org.apache.spark.api.java.*") import("org.apache.spark.sql.SparkSession.Builder") import("scala.collection.Seq") @@ -71,30 +64,17 @@ internal class Integration : JupyterIntegration() { onLoaded { println("Running!!") - System.setProperty( - "spark.jars", - listOf( - "~/.m2/repository/org/apache/spark/spark-core_2.12/3.2.1/spark-core_2.12-3.2.1.jar", - ).joinToString(",") + execute( + """|val spark = org.jetbrains.kotlinx.spark.api.SparkSession + | .builder() + | .master(SparkConf().get("spark.master", "local[*]")) + | .appName("Jupyter") + | .getOrCreate()""".trimMargin() ) - val spark = org.apache.spark.sql.SparkSession - .builder() - .master(SparkConf().get("spark.master", "local[*]")) - .appName("Jupyter") - .getOrCreate() - - // execute("%dumpClassesForSpark") - - spark.sparkContext.setLogLevel(SparkLogLevel.ERROR) - - - val sc = JavaSparkContext(spark.sparkContext) - - declare( - "spark" to spark, - "sc" to sc, - ) + execute("""spark.sparkContext.setLogLevel(SparkLogLevel.ERROR)""") + execute("""val sc = org.apache.spark.api.java.JavaSparkContext(spark.sparkContext)""") +// execute("""fun udf(): org.apache.spark.sql.UDFRegistration { return spark.udf() }""") } From e9f4b359900f3cde85aa4e7d251001b320013e02 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 3 Mar 2022 15:25:44 +0100 Subject: [PATCH 075/213] updating readme --- README.md | 37 +++++++++++++++++++++++++++++++++++-- kotlin-spark-api.json | 11 +++++++++++ 2 files changed, 46 insertions(+), 2 deletions(-) create mode 100644 kotlin-spark-api.json diff --git a/README.md b/README.md index 498334d2..ed1b1c51 100644 --- a/README.md +++ b/README.md @@ -71,6 +71,36 @@ Once you have configured the dependency, you only need to add the following impo import org.jetbrains.kotlinx.spark.api.* ``` +### Jupyter + +The Kotlin Spark API also supports Kotlin Jupyter notebooks. +To it, simply add + +```jupyterpython +%use kotlin-spark-api +``` +to the top of your notebook. This will get the latest version of the API, together with the latest version of Spark. +To define a certain version of Spark or the API itself, simply add it like this: +```jupyterpython +%use kotlin-spark-api(spark=3.2, version=1.0.4) +``` + +Inside the notebook a Spark session will be initiated automatically. This can be accessed via the `spark` value. +`sc: JavaSparkContext` can also be accessed directly. + +One limitation of the notebooks is that the `SparkSession` context cannot be applied +implicitly to function calls. This means that instead of writing: +```kotlin +val ds = listOf(...).toDS() +``` +you'll need to write: +```kotlin +val ds = listOf(...).toDS(spark) +``` + +Other than that, the API operates pretty similarly. + + ## Kotlin for Apache Spark features ### Creating a SparkSession in Kotlin @@ -79,12 +109,13 @@ val spark = SparkSession .builder() .master("local[2]") .appName("Simple Application").orCreate - ``` +This is not needed when running the Kotlin Spark API from a Jupyter notebook. + ### Creating a Dataset in Kotlin ```kotlin -spark.toDS("a" to 1, "b" to 2) +spark.dsOf("a" to 1, "b" to 2) ``` The example above produces `Dataset>`. @@ -100,6 +131,8 @@ We provide you with useful function `withSpark`, which accepts everything that m After work block ends, `spark.stop()` is called automatically. +Do not use this when running the Kotlin Spark API from a Jupyter notebook. + ```kotlin withSpark { dsOf(1, 2) diff --git a/kotlin-spark-api.json b/kotlin-spark-api.json new file mode 100644 index 00000000..1809fbb5 --- /dev/null +++ b/kotlin-spark-api.json @@ -0,0 +1,11 @@ +{ + "description": "Kotlin for Apache® Spark™", + "properties": { + "spark": "3.2", + "version": "1.0.4" + }, + "link": "https://github.com/JetBrains/kotlin-spark-api", + "dependencies": [ + "org.jetbrains.kotlinx.spark:kotlin-spark-api-$spark:$version" + ] +} \ No newline at end of file From 34a0cbdeaf33343d19b87581f01f3ac0f80dcad1 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 7 Mar 2022 14:33:53 +0100 Subject: [PATCH 076/213] added dataset html renderer, trying to fix library issues --- kotlin-spark-api/3.2/pom_2.12.xml | 318 ++++++++++-------- .../kotlinx/spark/api/jupyter/Integration.kt | 44 ++- pom.xml | 1 + 3 files changed, 230 insertions(+), 133 deletions(-) diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index dbdbbdfe..05f3a638 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -1,141 +1,195 @@ - + - 4.0.0 + 4.0.0 - Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) - kotlin-spark-api-3.2 - Kotlin API compatible with spark 3.2.0 Kotlin for Apache Spark - - org.jetbrains.kotlinx.spark - kotlin-spark-api-parent_2.12 - 1.0.4-SNAPSHOT - ../../pom_2.12.xml - - jar + Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + kotlin-spark-api-3.2 + Kotlin API compatible with spark 3.2.0 Kotlin for Apache Spark + + org.jetbrains.kotlinx.spark + kotlin-spark-api-parent_2.12 + 1.0.4-SNAPSHOT + ../../pom_2.12.xml + + jar - - - org.jetbrains.kotlin - kotlin-stdlib-jdk8 - - - org.jetbrains.kotlin - kotlin-reflect - - - org.jetbrains.kotlinx.spark - core-3.2_${scala.compat.version} - - - org.jetbrains.kotlinx - kotlin-jupyter-api - + + + kotlinx-html + kotlinx-html + https://maven.pkg.jetbrains.space/public/p/kotlinx-html/maven + + + + + org.jetbrains.kotlinx + kotlinx-html-jvm + ${kotlinx.html.version} + + + org.jetbrains.kotlin + kotlin-stdlib-jdk8 + + + org.jetbrains.kotlin + kotlin-reflect + + + org.jetbrains.kotlinx.spark + core-3.2_${scala.compat.version} + + + org.jetbrains.kotlinx + kotlin-jupyter-api + - - - org.apache.spark - spark-sql_${scala.compat.version} - ${spark3.version} - - - - org.apache.spark - spark-streaming_${scala.compat.version} - ${spark3.version} - - - - - io.kotest - kotest-runner-junit5-jvm - ${kotest.version} - test - - - io.kotest.extensions - kotest-extensions-allure - ${kotest-extension-allure.version} - test - - - com.beust - klaxon - ${klaxon.version} - test - - - ch.tutteli.atrium - atrium-fluent-en_GB - ${atrium.version} - test - - + + + org.apache.spark + spark-sql_${scala.compat.version} + ${spark3.version} + + + + org.apache.spark + spark-streaming_${scala.compat.version} + ${spark3.version} + + + org.apache.spark + spark-catalyst_${scala.compat.version} + ${spark3.version} + + + org.apache.spark + spark-mllib_${scala.compat.version} + ${spark3.version} + + + org.apache.spark + spark-repl_${scala.compat.version} + ${spark3.version} + + + org.apache.spark + spark-graphx_${scala.compat.version} + ${spark3.version} + + + org.apache.spark + spark-launcher_${scala.compat.version} + ${spark3.version} + - - src/main/kotlin - src/test/kotlin - target/${scala.compat.version} - - - org.jetbrains.kotlin - kotlin-maven-plugin - - - compile - - compile - - - - test-compile - - test-compile - - - - - - org.apache.maven.plugins - maven-surefire-plugin - - - org.jetbrains.dokka - dokka-maven-plugin - ${dokka.version} - - 8 - - - - dokka - - dokka - - pre-site - - - javadocjar - - javadocJar - - pre-integration-test - - - - - io.qameta.allure - allure-maven - - ${project.basedir}/allure-results/${scala.compat.version} - - - - org.jacoco - jacoco-maven-plugin - - - + + org.scala-lang + scala-library + ${scala.version} + + + org.scala-lang + scala-reflect + ${scala.version} + + + org.scala-lang + scala-compiler + ${scala.version} + + + + + io.kotest + kotest-runner-junit5-jvm + ${kotest.version} + test + + + io.kotest.extensions + kotest-extensions-allure + ${kotest-extension-allure.version} + test + + + com.beust + klaxon + ${klaxon.version} + test + + + ch.tutteli.atrium + atrium-fluent-en_GB + ${atrium.version} + test + + + + + src/main/kotlin + src/test/kotlin + target/${scala.compat.version} + + + org.jetbrains.kotlin + kotlin-maven-plugin + + + compile + + compile + + + + test-compile + + test-compile + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + org.jetbrains.dokka + dokka-maven-plugin + ${dokka.version} + + 8 + + + + dokka + + dokka + + pre-site + + + javadocjar + + javadocJar + + pre-integration-test + + + + + io.qameta.allure + allure-maven + + ${project.basedir}/allure-results/${scala.compat.version} + + + + org.jacoco + jacoco-maven-plugin + + + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 3c5a7740..4606b6c3 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -19,7 +19,14 @@ */ package org.jetbrains.kotlinx.spark.api.jupyter +import com.sun.xml.internal.fastinfoset.alphabet.BuiltInRestrictedAlphabets.table +import kotlinx.html.* +import kotlinx.html.stream.appendHTML +import org.apache.spark.sql.functions.* +import org.apache.spark.sql.Dataset +import org.jetbrains.kotlinx.jupyter.api.HTML import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration +import org.jetbrains.kotlinx.spark.api.forEach @OptIn(ExperimentalStdlibApi::class) internal class Integration : JupyterIntegration() { @@ -31,7 +38,6 @@ internal class Integration : JupyterIntegration() { override fun Builder.onLoaded() { - dependencies( "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion", "org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion", @@ -78,5 +84,41 @@ internal class Integration : JupyterIntegration() { } + + // Render Dataset + render> { + HTML(it.toHtml()) + } } } + + +private fun Dataset.toHtml(limit: Int = 20, truncate: Int = 20): String = buildString { + appendHTML().table { + tr { + for (header in columns()) th { + +header.let { + if (truncate > 0 && it.length > truncate) { + // do not show ellipses for strings shorter than 4 characters. + if (truncate < 4) it.substring(0, truncate) + else it.substring(0, truncate - 3) + "..." + } else { + it + } + } + } + } + + val data = select(col("*")) + .takeAsList(limit) + .toList() + + for (row in data) tr { + for (i in 0 until row.size()) td { + +row.get(i).toString() + } + } + } + + +} diff --git a/pom.xml b/pom.xml index 36a98486..7ba65dee 100644 --- a/pom.xml +++ b/pom.xml @@ -17,6 +17,7 @@ 1.0.1 3.2.1 0.11.0-61 + 0.7.3 2.10.0 From 6c1b0d9ec3447e754fb5eebd3fc542a1b00eb9e7 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 21 Feb 2022 17:11:49 +0100 Subject: [PATCH 077/213] added withSparkStreaming function and example. Let's see if something like this could form the basis of easy streaming support :) --- .../kotlinx/spark/examples/Streaming.kt | 48 +++++++++++++++ .../kotlinx/spark/api/SparkSession.kt | 58 ++++++++++++++++++- 2 files changed, 105 insertions(+), 1 deletion(-) create mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt new file mode 100644 index 00000000..bc9284d5 --- /dev/null +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt @@ -0,0 +1,48 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.examples + +import org.apache.spark.SparkConf +import org.apache.spark.sql.Dataset +import org.apache.spark.streaming.Duration +import org.apache.spark.streaming.Durations +import org.apache.spark.streaming.api.java.JavaStreamingContext +import org.jetbrains.kotlinx.spark.api.* + +data class TestRow( + val word: String, +) + +fun main() = withSparkStreaming(Durations.seconds(1)) { + + val lines = ssc.socketTextStream("localhost", 9999) + val words = lines.flatMap { it.split(" ").iterator() } + + words.foreachRDD { rdd, time -> + val dataframe: Dataset = rdd.map { TestRow(it) }.toDS() + + dataframe + .groupByKey { it.word } + .count() + .show() + + } + +} \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 118abf48..b65fc6b1 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -33,6 +33,8 @@ import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset import org.apache.spark.sql.SparkSession.Builder import org.apache.spark.sql.UDFRegistration +import org.apache.spark.streaming.Duration +import org.apache.spark.streaming.api.java.JavaStreamingContext import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions @@ -41,7 +43,7 @@ import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions * * @param spark The current [SparkSession] to wrap */ -class KSparkSession(val spark: SparkSession) { +open class KSparkSession(val spark: SparkSession) { /** Lazy instance of [JavaSparkContext] wrapper around [sparkContext]. */ val sc: JavaSparkContext by lazy { JavaSparkContext(spark.sparkContext) } @@ -76,6 +78,13 @@ class KSparkSession(val spark: SparkSession) { val udf: UDFRegistration get() = spark.udf() } +/** + * This wrapper over [SparkSession] and [JavaStreamingContext] provides several additional methods to create [org.apache.spark.sql.Dataset] + */ +class KSparkStreamingSession(spark: SparkSession, val ssc: JavaStreamingContext) : KSparkSession(spark) + + + /** * The entry point to programming Spark with the Dataset and DataFrame API. * @@ -175,6 +184,53 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func ) } + +/** + * Wrapper for spark streaming creation. `spark: SparkSession` and `ssc: JavaStreamingContext` are provided, started, + * awaited, and stopped automatically. + * + * @param batchDuration The time interval at which streaming data will be divided into batches + * @param props spark options, value types are runtime-checked for type-correctness + * @param master Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]" to + * run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster. By default, it + * tries to get the system value "spark.master", otherwise it uses "local[*]" + * @param appName Sets a name for the application, which will be shown in the Spark web UI. + * If no application name is set, a randomly generated name will be used. + * @param logLevel Control our logLevel. This overrides any user-defined log settings. + * @param func function which will be executed in context of [KSparkStreamingSession] (it means that `this` inside block will point to [KSparkStreamingSession]) + * todo: provide alternatives with path instead of batchDuration etc + */ +@JvmOverloads +inline fun withSparkStreaming( + batchDuration: Duration, + props: Map = emptyMap(), + master: String = SparkConf().get("spark.master", "local[*]"), + appName: String = "Kotlin Spark Sample", + logLevel: SparkLogLevel = SparkLogLevel.ERROR, + func: KSparkStreamingSession.() -> Unit, +) { + val conf = SparkConf() + .setMaster(master) + .setAppName(appName) + .apply { + props.forEach { + set(it.key, it.toString()) + } + } + + val ssc = JavaStreamingContext(conf, batchDuration) + val spark = SparkSession.builder().config(conf).getOrCreate() + + KSparkStreamingSession(spark, ssc).apply { + spark.sparkContext.setLogLevel(logLevel) + func() + ssc.start() + ssc.awaitTermination() + sc.stop() + spark.stop() + } +} + /** * Broadcast a read-only variable to the cluster, returning a * [org.apache.spark.broadcast.Broadcast] object for reading it in distributed functions. From 95a95630913decc5d4be4fedfdaa08d09ed0f67b Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 21 Feb 2022 17:47:53 +0100 Subject: [PATCH 078/213] makes withSparkStreaming reuse the normal withSpark --- .../kotlinx/spark/api/SparkSession.kt | 69 +++++++------------ 1 file changed, 24 insertions(+), 45 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index b65fc6b1..bb0c4299 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -209,59 +209,38 @@ inline fun withSparkStreaming( logLevel: SparkLogLevel = SparkLogLevel.ERROR, func: KSparkStreamingSession.() -> Unit, ) { - val conf = SparkConf() - .setMaster(master) - .setAppName(appName) - .apply { - props.forEach { - set(it.key, it.toString()) - } + withSpark( + props = props, + master = master, + appName = appName, + logLevel = logLevel, + ) { + val ssc = JavaStreamingContext(sc, batchDuration) + KSparkStreamingSession(session = this, ssc = ssc).apply { + func() + ssc.start() + ssc.awaitTermination() } - - val ssc = JavaStreamingContext(conf, batchDuration) - val spark = SparkSession.builder().config(conf).getOrCreate() - - KSparkStreamingSession(spark, ssc).apply { - spark.sparkContext.setLogLevel(logLevel) - func() - ssc.start() - ssc.awaitTermination() - sc.stop() - spark.stop() } } /** - * Broadcast a read-only variable to the cluster, returning a - * [org.apache.spark.broadcast.Broadcast] object for reading it in distributed functions. - * The variable will be sent to each cluster only once. - * - * @param value value to broadcast to the Spark nodes - * @return `Broadcast` object, a read-only variable cached on each machine + * This wrapper over [SparkSession] provides several additional methods to create [org.apache.spark.sql.Dataset] */ -inline fun SparkSession.broadcast(value: T): Broadcast = try { - sparkContext.broadcast(value, encoder().clsTag()) -} catch (e: ClassNotFoundException) { - JavaSparkContext(sparkContext).broadcast(value) +open class KSparkSession(val spark: SparkSession) { + + val sc: JavaSparkContext = JavaSparkContext(spark.sparkContext) + + inline fun List.toDS() = toDS(spark) + inline fun Array.toDS() = spark.dsOf(*this) + inline fun dsOf(vararg arg: T) = spark.dsOf(*arg) + inline fun RDD.toDS() = toDS(spark) + inline fun JavaRDDLike.toDS() = toDS(spark) + val udf: UDFRegistration get() = spark.udf() } /** - * Broadcast a read-only variable to the cluster, returning a - * [org.apache.spark.broadcast.Broadcast] object for reading it in distributed functions. - * The variable will be sent to each cluster only once. - * - * @param value value to broadcast to the Spark nodes - * @return `Broadcast` object, a read-only variable cached on each machine - * @see broadcast + * This wrapper over [SparkSession] and [JavaStreamingContext] provides several additional methods to create [org.apache.spark.sql.Dataset] */ -@Deprecated( - "You can now use `spark.broadcast()` instead.", - ReplaceWith("spark.broadcast(value)"), - DeprecationLevel.WARNING -) -inline fun SparkContext.broadcast(value: T): Broadcast = try { - broadcast(value, encoder().clsTag()) -} catch (e: ClassNotFoundException) { - JavaSparkContext(this).broadcast(value) -} +class KSparkStreamingSession(spark: SparkSession, val ssc: JavaStreamingContext) : KSparkSession(spark) From 9d1450bbe2bcfc74363416aac0cb5c920b16d442 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Thu, 24 Feb 2022 15:16:06 +0100 Subject: [PATCH 079/213] removed sc.stop() --- .../kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index bb0c4299..9b33ae49 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -155,6 +155,7 @@ inline fun withSpark( * @param logLevel Control our logLevel. This overrides any user-defined log settings. * @param func function which will be executed in context of [KSparkSession] (it means that `this` inside block will point to [KSparkSession]) */ +@Suppress("UsePropertyAccessSyntax") @JvmOverloads inline fun withSpark(builder: Builder, logLevel: SparkLogLevel = ERROR, func: KSparkSession.() -> Unit) { builder @@ -229,7 +230,7 @@ inline fun withSparkStreaming( */ open class KSparkSession(val spark: SparkSession) { - val sc: JavaSparkContext = JavaSparkContext(spark.sparkContext) + val sc: JavaSparkContext by lazy { JavaSparkContext(spark.sparkContext) } inline fun List.toDS() = toDS(spark) inline fun Array.toDS() = spark.dsOf(*this) From f2ce00096c7f66018010b97dc5905b84ee5f2f74 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 7 Mar 2022 16:32:49 +0100 Subject: [PATCH 080/213] fixed merge --- .../kotlinx/spark/api/SparkSession.kt | 43 ++++++++++++------- .../kotlinx/spark/api/DatasetFunctionTest.kt | 19 ++++++++ 2 files changed, 47 insertions(+), 15 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 9b33ae49..d81898c3 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -217,7 +217,7 @@ inline fun withSparkStreaming( logLevel = logLevel, ) { val ssc = JavaStreamingContext(sc, batchDuration) - KSparkStreamingSession(session = this, ssc = ssc).apply { + KSparkStreamingSession(spark = spark, ssc = ssc).apply { func() ssc.start() ssc.awaitTermination() @@ -226,22 +226,35 @@ inline fun withSparkStreaming( } /** - * This wrapper over [SparkSession] provides several additional methods to create [org.apache.spark.sql.Dataset] + * Broadcast a read-only variable to the cluster, returning a + * [org.apache.spark.broadcast.Broadcast] object for reading it in distributed functions. + * The variable will be sent to each cluster only once. + * + * @param value value to broadcast to the Spark nodes + * @return `Broadcast` object, a read-only variable cached on each machine */ -open class KSparkSession(val spark: SparkSession) { - - val sc: JavaSparkContext by lazy { JavaSparkContext(spark.sparkContext) } - - inline fun List.toDS() = toDS(spark) - inline fun Array.toDS() = spark.dsOf(*this) - inline fun dsOf(vararg arg: T) = spark.dsOf(*arg) - inline fun RDD.toDS() = toDS(spark) - inline fun JavaRDDLike.toDS() = toDS(spark) - val udf: UDFRegistration get() = spark.udf() +inline fun SparkSession.broadcast(value: T): Broadcast = try { + sparkContext.broadcast(value, encoder().clsTag()) +} catch (e: ClassNotFoundException) { + JavaSparkContext(sparkContext).broadcast(value) } /** - * This wrapper over [SparkSession] and [JavaStreamingContext] provides several additional methods to create [org.apache.spark.sql.Dataset] + * Broadcast a read-only variable to the cluster, returning a + * [org.apache.spark.broadcast.Broadcast] object for reading it in distributed functions. + * The variable will be sent to each cluster only once. + * + * @param value value to broadcast to the Spark nodes + * @return `Broadcast` object, a read-only variable cached on each machine + * @see broadcast */ -class KSparkStreamingSession(spark: SparkSession, val ssc: JavaStreamingContext) : KSparkSession(spark) - +@Deprecated( + "You can now use `spark.broadcast()` instead.", + ReplaceWith("spark.broadcast(value)"), + DeprecationLevel.WARNING +) +inline fun SparkContext.broadcast(value: T): Broadcast = try { + broadcast(value, encoder().clsTag()) +} catch (e: ClassNotFoundException) { + JavaSparkContext(this).broadcast(value) +} \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt index f352ff68..b9667e8a 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api import ch.tutteli.atrium.api.fluent.en_GB.* From cdf7296283eb8fb14cb147515e84e84b166c4284 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 7 Mar 2022 17:53:58 +0100 Subject: [PATCH 081/213] working on tests --- .../kotlinx/spark/examples/Streaming.kt | 3 ++ .../kotlinx/spark/api/SparkSession.kt | 9 +++- .../kotlinx/spark/api/StreamingTest.kt | 42 +++++++++++++++++++ 3 files changed, 52 insertions(+), 2 deletions(-) create mode 100644 kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt index bc9284d5..d66ef3e3 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt @@ -35,6 +35,9 @@ fun main() = withSparkStreaming(Durations.seconds(1)) { val lines = ssc.socketTextStream("localhost", 9999) val words = lines.flatMap { it.split(" ").iterator() } + lines. + + words.foreachRDD { rdd, time -> val dataframe: Dataset = rdd.map { TestRow(it) }.toDS() diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index d81898c3..6cbdf29e 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -81,7 +81,11 @@ open class KSparkSession(val spark: SparkSession) { /** * This wrapper over [SparkSession] and [JavaStreamingContext] provides several additional methods to create [org.apache.spark.sql.Dataset] */ -class KSparkStreamingSession(spark: SparkSession, val ssc: JavaStreamingContext) : KSparkSession(spark) +class KSparkStreamingSession(spark: SparkSession, val ssc: JavaStreamingContext) : KSparkSession(spark) { + + /** Can be overwritten to be run after the streaming session has started and before it's terminated. */ + var runAfterStart: KSparkStreamingSession.() -> Unit = {} +} @@ -198,7 +202,7 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func * @param appName Sets a name for the application, which will be shown in the Spark web UI. * If no application name is set, a randomly generated name will be used. * @param logLevel Control our logLevel. This overrides any user-defined log settings. - * @param func function which will be executed in context of [KSparkStreamingSession] (it means that `this` inside block will point to [KSparkStreamingSession]) + * @param beforeStart function which will be executed in context of [KSparkStreamingSession] (it means that `this` inside block will point to [KSparkStreamingSession]) * todo: provide alternatives with path instead of batchDuration etc */ @JvmOverloads @@ -220,6 +224,7 @@ inline fun withSparkStreaming( KSparkStreamingSession(spark = spark, ssc = ssc).apply { func() ssc.start() + runAfterStart() ssc.awaitTermination() } } diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt new file mode 100644 index 00000000..50bbce76 --- /dev/null +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -0,0 +1,42 @@ +package org.jetbrains.kotlinx.spark.api + +import io.kotest.core.spec.style.ShouldSpec +import io.kotest.matchers.shouldBe +import org.apache.spark.sql.execution.streaming.MemoryStream +import org.apache.spark.sql.streaming.OutputMode +import org.apache.spark.streaming.Duration + +class StreamingTest : ShouldSpec({ + context("streaming") { + should("stream") { + + withSpark/*Streaming(Duration(1))*/ { + // WIP this doesn't use ssc at all? + + val events = MemoryStream(100, spark.sqlContext(), null, encoder()) + val sessions = events.toDS() + sessions.isStreaming shouldBe true + + val transformedSessions = sessions.map { (it * 2).toString() } + + val streamingQuery = transformedSessions + .writeStream() + .format("memory") + .queryName("test") + .outputMode(OutputMode.Append()) + .start() + + val currentOffset = events.addData(listOf(1, 2, 3).asScalaIterable()) + streamingQuery.processAllAvailable() + events.commit(currentOffset) + + spark.table("test") + .show(false) + + + } + + + } + } +}) \ No newline at end of file From 1104a649dc8cb724d68027c71be2a205a0da6b67 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 7 Mar 2022 19:08:33 +0100 Subject: [PATCH 082/213] fixed import --- .../org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt | 2 -- 1 file changed, 2 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 4606b6c3..669d87d2 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -19,14 +19,12 @@ */ package org.jetbrains.kotlinx.spark.api.jupyter -import com.sun.xml.internal.fastinfoset.alphabet.BuiltInRestrictedAlphabets.table import kotlinx.html.* import kotlinx.html.stream.appendHTML import org.apache.spark.sql.functions.* import org.apache.spark.sql.Dataset import org.jetbrains.kotlinx.jupyter.api.HTML import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration -import org.jetbrains.kotlinx.spark.api.forEach @OptIn(ExperimentalStdlibApi::class) internal class Integration : JupyterIntegration() { From 330536c6c9b471a18bbe2671e8f9d78309a12a62 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 8 Mar 2022 11:24:28 +0100 Subject: [PATCH 083/213] added timeout and working streaming test --- .../kotlinx/spark/examples/Streaming.kt | 4 +- .../kotlinx/spark/api/SparkSession.kt | 7 ++- .../kotlinx/spark/api/StreamingTest.kt | 44 +++++++++---------- 3 files changed, 28 insertions(+), 27 deletions(-) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt index d66ef3e3..de77c96f 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt @@ -30,13 +30,11 @@ data class TestRow( val word: String, ) -fun main() = withSparkStreaming(Durations.seconds(1)) { +fun main() = withSparkStreaming(Durations.seconds(1), timeout = 10_000) { val lines = ssc.socketTextStream("localhost", 9999) val words = lines.flatMap { it.split(" ").iterator() } - lines. - words.foreachRDD { rdd, time -> val dataframe: Dataset = rdd.map { TestRow(it) }.toDS() diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 6cbdf29e..96f753c7 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -202,7 +202,8 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func * @param appName Sets a name for the application, which will be shown in the Spark web UI. * If no application name is set, a randomly generated name will be used. * @param logLevel Control our logLevel. This overrides any user-defined log settings. - * @param beforeStart function which will be executed in context of [KSparkStreamingSession] (it means that `this` inside block will point to [KSparkStreamingSession]) + * @param timeout The time in milliseconds to wait for the stream to terminate without input. -1 by default, this means no timeout. + * @param func function which will be executed in context of [KSparkStreamingSession] (it means that `this` inside block will point to [KSparkStreamingSession]) * todo: provide alternatives with path instead of batchDuration etc */ @JvmOverloads @@ -212,6 +213,7 @@ inline fun withSparkStreaming( master: String = SparkConf().get("spark.master", "local[*]"), appName: String = "Kotlin Spark Sample", logLevel: SparkLogLevel = SparkLogLevel.ERROR, + timeout: Long = -1L, func: KSparkStreamingSession.() -> Unit, ) { withSpark( @@ -225,7 +227,8 @@ inline fun withSparkStreaming( func() ssc.start() runAfterStart() - ssc.awaitTermination() + ssc.awaitTerminationOrTimeout(timeout) + ssc.stop() } } } diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 50bbce76..672832b5 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -1,41 +1,41 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.core.spec.style.ShouldSpec +import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.shouldBe -import org.apache.spark.sql.execution.streaming.MemoryStream -import org.apache.spark.sql.streaming.OutputMode import org.apache.spark.streaming.Duration +import java.io.Serializable +import org.jetbrains.kotlinx.spark.api.* +import java.util.LinkedList + class StreamingTest : ShouldSpec({ context("streaming") { should("stream") { - withSpark/*Streaming(Duration(1))*/ { - // WIP this doesn't use ssc at all? - - val events = MemoryStream(100, spark.sqlContext(), null, encoder()) - val sessions = events.toDS() - sessions.isStreaming shouldBe true - - val transformedSessions = sessions.map { (it * 2).toString() } + val input = listOf("aaa", "bbb", "aaa", "ccc") - val streamingQuery = transformedSessions - .writeStream() - .format("memory") - .queryName("test") - .outputMode(OutputMode.Append()) - .start() + val results = object : Serializable { + @Volatile + var counter = 0 + } - val currentOffset = events.addData(listOf(1, 2, 3).asScalaIterable()) - streamingQuery.processAllAvailable() - events.commit(currentOffset) + withSparkStreaming(Duration(10), timeout = 1000) { + val resultsBroadcast = spark.broadcast(results) - spark.table("test") - .show(false) + val rdd = sc.parallelize(input) + val queue = LinkedList(listOf(rdd)) + val inputStream = ssc.queueStream(queue) + inputStream.foreachRDD { rdd, _ -> + rdd.foreach { + it shouldBeIn input + resultsBroadcast.value.counter++ + } + } } - + results.counter shouldBe input.size } } From d2e792a3144f383f904228400298a06d77e69485 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 8 Mar 2022 11:37:30 +0100 Subject: [PATCH 084/213] added timeout and working streaming test --- .../kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 672832b5..e0139b9c 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -29,7 +29,7 @@ class StreamingTest : ShouldSpec({ val inputStream = ssc.queueStream(queue) inputStream.foreachRDD { rdd, _ -> - rdd.foreach { + rdd.toDS().forEach { it shouldBeIn input resultsBroadcast.value.counter++ } From 8dc0b0f79add94fe346dd9b68eb7d4edadfe3cf8 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 9 Mar 2022 13:35:23 +0100 Subject: [PATCH 085/213] added sorting functions and tests --- .../jetbrains/kotlinx/spark/api/Dataset.kt | 24 ++++++++++++ .../kotlinx/spark/api/DatasetFunctionTest.kt | 37 +++++++++++++++++++ 2 files changed, 61 insertions(+) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt index b5070b84..31227762 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt @@ -309,6 +309,30 @@ inline fun Dataset.fullJoin( */ inline fun Dataset.sort(columns: (Dataset) -> Array): Dataset = sort(*columns(this)) +/** Returns a dataset sorted by the first (`_1`) value of each [Tuple2] inside. */ +@JvmName("sortByTuple2Key") +fun Dataset>.sortByKey(): Dataset> = sort("_1") + +/** Returns a dataset sorted by the second (`_2`) value of each [Tuple2] inside. */ +@JvmName("sortByTuple2Value") +fun Dataset>.sortByValue(): Dataset> = sort("_2") + +/** Returns a dataset sorted by the first (`_1`) value of each [Arity2] inside. */ +@JvmName("sortByArity2Key") +fun Dataset>.sortByKey(): Dataset> = sort("_1") + +/** Returns a dataset sorted by the second (`_2`) value of each [Arity2] inside. */ +@JvmName("sortByArity2Value") +fun Dataset>.sortByValue(): Dataset> = sort("_2") + +/** Returns a dataset sorted by the first (`first`) value of each [Pair] inside. */ +@JvmName("sortByPairKey") +fun Dataset>.sortByKey(): Dataset> = sort("first") + +/** Returns a dataset sorted by the second (`second`) value of each [Pair] inside. */ +@JvmName("sortByPairValue") +fun Dataset>.sortByValue(): Dataset> = sort("second") + /** * This function creates block, where one can call any further computations on already cached dataset * Data will be unpersisted automatically at the end of computation diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt index f352ff68..495948e3 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt @@ -282,6 +282,42 @@ class DatasetFunctionTest : ShouldSpec({ dataset6.toList>() shouldBe listOf(listOf(1, 2, 3), listOf(4, 5, 6)) } + + should("Sort Arity2 Dataset") { + val list = listOf( + c(1, 6), + c(2, 5), + c(3, 4), + ) + val dataset = list.toDS() + + dataset.sortByKey().collectAsList() shouldBe list.sortedBy { it._1 } + dataset.sortByValue().collectAsList() shouldBe list.sortedBy { it._2 } + } + + should("Sort Tuple2 Dataset") { + val list = listOf( + Tuple2(1, 6), + Tuple2(2, 5), + Tuple2(3, 4), + ) + val dataset = list.toDS() + + dataset.sortByKey().collectAsList() shouldBe list.sortedBy { it._1 } + dataset.sortByValue().collectAsList() shouldBe list.sortedBy { it._2 } + } + + should("Sort Pair Dataset") { + val list = listOf( + Pair(1, 6), + Pair(2, 5), + Pair(3, 4), + ) + val dataset = list.toDS() + + dataset.sortByKey().collectAsList() shouldBe list.sortedBy { it.first } + dataset.sortByValue().collectAsList() shouldBe list.sortedBy { it.second } + } } } @@ -401,6 +437,7 @@ class DatasetFunctionTest : ShouldSpec({ b.count() shouldBe 1 } + } } }) From b90721334cfbb8ee22f169dd9425f8b51e3a5b85 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 10 Mar 2022 18:02:03 +0100 Subject: [PATCH 086/213] added dataset renderer with nice css, jupyter test still broken --- kotlin-spark-api/3.2/pom_2.12.xml | 58 +++---- .../kotlinx/spark/api/jupyter/Integration.kt | 69 ++++++--- .../3.2/src/main/resources/table.css | 146 ++++++++++++++++++ .../kotlinx/spark/api/JupyterTests.kt | 51 ++++++ pom.xml | 17 +- 5 files changed, 280 insertions(+), 61 deletions(-) create mode 100644 kotlin-spark-api/3.2/src/main/resources/table.css create mode 100644 kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 05f3a638..6959e659 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -21,6 +21,11 @@ kotlinx-html https://maven.pkg.jetbrains.space/public/p/kotlinx-html/maven + + kotlin + kotlin + https://maven.pkg.jetbrains.space/kotlin/p/kotlin/dev + @@ -47,6 +52,7 @@ + org.apache.spark @@ -59,49 +65,19 @@ spark-streaming_${scala.compat.version} ${spark3.version} - - org.apache.spark - spark-catalyst_${scala.compat.version} - ${spark3.version} - - - org.apache.spark - spark-mllib_${scala.compat.version} - ${spark3.version} - - - org.apache.spark - spark-repl_${scala.compat.version} - ${spark3.version} - - - org.apache.spark - spark-graphx_${scala.compat.version} - ${spark3.version} - - - org.apache.spark - spark-launcher_${scala.compat.version} - ${spark3.version} - + + - org.scala-lang - scala-library - ${scala.version} - - - org.scala-lang - scala-reflect - ${scala.version} + org.jetbrains.kotlinx + kotlin-jupyter-test-kit + test - org.scala-lang - scala-compiler - ${scala.version} + org.junit.jupiter + junit-jupiter + test - - io.kotest kotest-runner-junit5-jvm @@ -126,6 +102,12 @@ ${atrium.version} test + + org.testng + testng + RELEASE + test + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 669d87d2..dbeca5b4 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -23,8 +23,13 @@ import kotlinx.html.* import kotlinx.html.stream.appendHTML import org.apache.spark.sql.functions.* import org.apache.spark.sql.Dataset +import org.apache.spark.unsafe.array.ByteArrayMethods +import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.api.HTML import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration +import org.jetbrains.kotlinx.spark.api.KSparkSession +import org.jetbrains.kotlinx.spark.api.asKotlinList +import java.io.InputStreamReader @OptIn(ExperimentalStdlibApi::class) internal class Integration : JupyterIntegration() { @@ -68,21 +73,27 @@ internal class Integration : JupyterIntegration() { onLoaded { println("Running!!") - execute( - """|val spark = org.jetbrains.kotlinx.spark.api.SparkSession - | .builder() - | .master(SparkConf().get("spark.master", "local[*]")) - | .appName("Jupyter") - | .getOrCreate()""".trimMargin() + @Language("kts") + val spark = execute( + """ + val spark = org.jetbrains.kotlinx.spark.api.SparkSession + .builder() + .master(SparkConf().get("spark.master", "local[*]")) + .appName("Jupyter") + .getOrCreate() + """.trimIndent() ) - execute("""spark.sparkContext.setLogLevel(SparkLogLevel.ERROR)""") - execute("""val sc = org.apache.spark.api.java.JavaSparkContext(spark.sparkContext)""") -// execute("""fun udf(): org.apache.spark.sql.UDFRegistration { return spark.udf() }""") + @Language("kts") + val logLevel = execute("""spark.sparkContext.setLogLevel(SparkLogLevel.ERROR)""") + + @Language("kts") + val sc = execute("""val sc = org.apache.spark.api.java.JavaSparkContext(spark.sparkContext)""") } + // Render Dataset render> { HTML(it.toHtml()) @@ -91,10 +102,29 @@ internal class Integration : JupyterIntegration() { } -private fun Dataset.toHtml(limit: Int = 20, truncate: Int = 20): String = buildString { - appendHTML().table { +private fun Dataset.toHtml(limit: Int = 20, truncate: Int = 30): String = buildString { + appendHTML().head { + style("text/css") { + unsafe { + val resource = "/table.css" + val res = Integration::class.java + .getResourceAsStream(resource) ?: error("Resource '$resource' not found") + val readRes = InputStreamReader(res).readText() + raw("\n" + readRes) + } + } + } + + appendHTML().table("dataset") { + val numRows = limit.coerceIn(0 until ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) + val tmpRows = getRows(numRows, truncate).asKotlinList().map { it.asKotlinList() } + + val hasMoreData = tmpRows.size - 1 > numRows + val rows = tmpRows.take(numRows + 1) + + tr { - for (header in columns()) th { + for (header in rows.first()) th { +header.let { if (truncate > 0 && it.length > truncate) { // do not show ellipses for strings shorter than 4 characters. @@ -107,16 +137,15 @@ private fun Dataset.toHtml(limit: Int = 20, truncate: Int = 20): String = } } - val data = select(col("*")) - .takeAsList(limit) - .toList() - - for (row in data) tr { - for (i in 0 until row.size()) td { - +row.get(i).toString() + for (row in rows.drop(1)) tr { + for (item in row) td { + +item } } - } + if (hasMoreData) tr { + +"only showing top $numRows ${if (numRows == 1) "row" else "rows"}" + } + } } diff --git a/kotlin-spark-api/3.2/src/main/resources/table.css b/kotlin-spark-api/3.2/src/main/resources/table.css new file mode 100644 index 00000000..f656add9 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/resources/table.css @@ -0,0 +1,146 @@ +:root { + --background: #fff; + --background-odd: #f5f5f5; + --background-hover: #d9edfd; + --header-text-color: #474747; + --text-color: #848484; + --text-color-dark: #000; + --text-color-medium: #737373; + --text-color-pale: #b3b3b3; + --inner-border-color: #aaa; + --bold-border-color: #000; + --link-color: #296eaa; + --link-color-pale: #296eaa; + --link-hover: #1a466c; +} + +:root[theme="dark"], :root [data-jp-theme-light="false"]{ + --background: #303030; + --background-odd: #3c3c3c; + --background-hover: #464646; + --header-text-color: #dddddd; + --text-color: #b3b3b3; + --text-color-dark: #dddddd; + --text-color-medium: #b2b2b2; + --text-color-pale: #737373; + --inner-border-color: #707070; + --bold-border-color: #777777; + --link-color: #008dc0; + --link-color-pale: #97e1fb; + --link-hover: #00688e; +} + +table.dataset { + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + font-size: 12px; + background-color: var(--background); + color: var(--text-color); + border: none; + border-collapse: collapse; +} + +table.dataset th, td { + padding: 6px; + border: 1px solid transparent; + text-align: left; +} + +table.dataset th { + background-color: var(--background); + color: var(--header-text-color); +} + +table.dataset td { + vertical-align: top; +} + +table.dataset th.bottomBorder { + border-bottom-color: var(--bold-border-color); +} + +table.dataset tbody > tr:nth-child(odd) { + background: var(--background-odd); +} + +table.dataset tbody > tr:nth-child(even) { + background: var(--background); +} + +table.dataset tbody > tr:hover { + background: var(--background-hover); +} + +table.dataset a { + cursor: pointer; + color: var(--link-color); + text-decoration: none; +} + +table.dataset tr:hover > td a { + color: var(--link-color-pale); +} + +table.dataset a:hover { + color: var(--link-hover); + text-decoration: underline; +} + +table.dataset img { + max-width: fit-content; +} + +table.dataset th.complex { + background-color: var(--background); + border: 1px solid var(--background); +} + +table.dataset .leftBorder { + border-left-color: var(--inner-border-color); +} + +table.dataset .rightBorder { + border-right-color: var(--inner-border-color); +} + +table.dataset .rightAlign { + text-align: right; +} + +table.dataset .expanderSvg { + width: 8px; + height: 8px; + margin-right: 3px; +} + +table.dataset .expander { + display: flex; + align-items: center; +} + +/* formatting */ + +table.dataset .null { + color: var(--text-color-pale); +} + +table.dataset .structural { + color: var(--text-color-medium); + font-weight: bold; +} + +table.dataset .datasetCaption { + font-weight: bold; +} + +table.dataset .numbers { + color: var(--text-color-dark); +} + +table.dataset td:hover .formatted .structural, .null { + color: var(--text-color-dark); +} + +table.dataset tr:hover .formatted .structural, .null { + color: var(--text-color-dark); +} + diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt new file mode 100644 index 00000000..496e6bec --- /dev/null +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -0,0 +1,51 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api + +import io.kotest.core.spec.style.shouldSpec +import io.kotest.matchers.string.shouldContain +import org.intellij.lang.annotations.Language +import org.jetbrains.kotlinx.jupyter.testkit.JupyterReplTestCase +import org.junit.jupiter.api.Test + +class JupyterTests : JupyterReplTestCase() { + + @Test + fun `Do the tests`() { + + @Language("kts") + val html = execHtml( + """ + val ds = listOf(1, 2, 3).toDS(spark) + ds + """.trimIndent() + ) + + println(html) + + html shouldContain "value" + html shouldContain "1" + html shouldContain "2" + html shouldContain "3" + + + } + +} diff --git a/pom.xml b/pom.xml index 7ba65dee..07eea0da 100644 --- a/pom.xml +++ b/pom.xml @@ -14,6 +14,7 @@ 1.6.10 0.16.0 4.6.0 + 5.8.2 1.0.1 3.2.1 0.11.0-61 @@ -58,6 +59,18 @@ kotlin-jupyter-api ${kotlin-jupyter-api.version} + + org.jetbrains.kotlinx + kotlin-jupyter-test-kit + ${kotlin-jupyter-api.version} + + + org.junit + junit-bom + ${junit.version} + pom + import + @@ -171,9 +184,6 @@ src/main/ src/test/ - - **/*.json - apache_v2 =LICENSE= =LICENSE END= @@ -188,6 +198,7 @@ **/*.json + **/*.css process-sources From f026f579e98a5a6854e3a52579bf78f4d7b34d1a Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 11 Mar 2022 12:55:20 +0100 Subject: [PATCH 087/213] attempting to make jupyter test work --- kotlin-spark-api/3.2/pom_2.12.xml | 8 +++ .../kotlinx/spark/api/JupyterTests.kt | 49 ++++++++++--------- 2 files changed, 35 insertions(+), 22 deletions(-) diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 6959e659..63d460ca 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -84,6 +84,14 @@ ${kotest.version} test + + + + + + + + io.kotest.extensions kotest-extensions-allure diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index 496e6bec..e29f4ed3 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -19,33 +19,38 @@ */ package org.jetbrains.kotlinx.spark.api -import io.kotest.core.spec.style.shouldSpec +import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.string.shouldContain import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.testkit.JupyterReplTestCase -import org.junit.jupiter.api.Test -class JupyterTests : JupyterReplTestCase() { - - @Test - fun `Do the tests`() { - - @Language("kts") - val html = execHtml( - """ - val ds = listOf(1, 2, 3).toDS(spark) - ds - """.trimIndent() - ) - - println(html) +class JupyterTests : ShouldSpec(object : (ShouldSpec) -> Unit, JupyterReplTestCase() { + + override fun invoke(it: ShouldSpec) = it.run() + + fun ShouldSpec.run() { + context("Jupyter") { +// @Language("kts") +// val html = execHtml( +// """ +// val ds = listOf(1, 2, 3).toDS(spark) +// ds +// """.trimIndent() +// ) +// +// println(html) +// +// html shouldContain "value" +// html shouldContain "1" +// html shouldContain "2" +// html shouldContain "3" + + + } + } - html shouldContain "value" - html shouldContain "1" - html shouldContain "2" - html shouldContain "3" +// val jupyter = object : JupyterReplTestCase() {} - } -} +}) From 8071758ff9daa29af967074c72e82d5aa71cc092 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 11 Mar 2022 17:52:52 +0100 Subject: [PATCH 088/213] added simple RDD rendering --- .../kotlinx/spark/api/jupyter/Integration.kt | 118 ++++++++++++++---- 1 file changed, 91 insertions(+), 27 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index dbeca5b4..34e61adc 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -21,14 +21,17 @@ package org.jetbrains.kotlinx.spark.api.jupyter import kotlinx.html.* import kotlinx.html.stream.appendHTML +import org.apache.spark.api.java.JavaRDDLike +import org.apache.spark.api.java.function.MapGroupsFunction +import org.apache.spark.rdd.RDD import org.apache.spark.sql.functions.* import org.apache.spark.sql.Dataset +import org.apache.spark.sql.KeyValueGroupedDataset import org.apache.spark.unsafe.array.ByteArrayMethods import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.api.HTML import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration -import org.jetbrains.kotlinx.spark.api.KSparkSession -import org.jetbrains.kotlinx.spark.api.asKotlinList +import org.jetbrains.kotlinx.spark.api.* import java.io.InputStreamReader @OptIn(ExperimentalStdlibApi::class) @@ -69,28 +72,30 @@ internal class Integration : JupyterIntegration() { import("org.apache.spark.sql.SparkSession.Builder") import("scala.collection.Seq") + var spark: SparkSession? = null + // starting spark and unwrapping KSparkContext functions onLoaded { println("Running!!") @Language("kts") - val spark = execute( + val sparkField = execute( """ val spark = org.jetbrains.kotlinx.spark.api.SparkSession .builder() .master(SparkConf().get("spark.master", "local[*]")) .appName("Jupyter") .getOrCreate() + spark """.trimIndent() ) + spark = sparkField.value as SparkSession @Language("kts") val logLevel = execute("""spark.sparkContext.setLogLevel(SparkLogLevel.ERROR)""") @Language("kts") val sc = execute("""val sc = org.apache.spark.api.java.JavaSparkContext(spark.sparkContext)""") - - } @@ -98,11 +103,22 @@ internal class Integration : JupyterIntegration() { render> { HTML(it.toHtml()) } + + render> { + HTML(it.toJavaRDD().toHtml()) + } + + render> { + HTML(it.toHtml()) + } + +// render> { +// HTML(it.toHtml(spark!!)) +// } } } - -private fun Dataset.toHtml(limit: Int = 20, truncate: Int = 30): String = buildString { +private fun createHtmlTable(fillTable: TABLE.() -> Unit): String = buildString { appendHTML().head { style("text/css") { unsafe { @@ -115,37 +131,85 @@ private fun Dataset.toHtml(limit: Int = 20, truncate: Int = 30): String = } } - appendHTML().table("dataset") { - val numRows = limit.coerceIn(0 until ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) - val tmpRows = getRows(numRows, truncate).asKotlinList().map { it.asKotlinList() } + appendHTML().table("dataset", fillTable) +} + + +private fun JavaRDDLike.toHtml(limit: Int = 20, truncate: Int = 30): String = createHtmlTable { + val numRows = limit.coerceIn(0 until ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) + val tmpRows = take(numRows).toList() - val hasMoreData = tmpRows.size - 1 > numRows - val rows = tmpRows.take(numRows + 1) + val hasMoreData = tmpRows.size - 1 > numRows + val rows = tmpRows.take(numRows) + tr { th { +"Values" } } - tr { - for (header in rows.first()) th { - +header.let { - if (truncate > 0 && it.length > truncate) { - // do not show ellipses for strings shorter than 4 characters. - if (truncate < 4) it.substring(0, truncate) - else it.substring(0, truncate - 3) + "..." - } else { - it - } + for (row in rows) tr { + td { + val string = when (row) { + is ByteArray -> row.joinToString(prefix = "[", postfix = "]") { "%02X".format(it) } + + is CharArray -> row.iterator().asSequence().toList().toString() + is ShortArray -> row.iterator().asSequence().toList().toString() + is IntArray -> row.iterator().asSequence().toList().toString() + is LongArray -> row.iterator().asSequence().toList().toString() + is FloatArray -> row.iterator().asSequence().toList().toString() + is DoubleArray -> row.iterator().asSequence().toList().toString() + is BooleanArray -> row.iterator().asSequence().toList().toString() + is Array<*> -> row.iterator().asSequence().toList().toString() + is Iterable<*> -> row.iterator().asSequence().toList().toString() + is Iterator<*> -> row.asSequence().toList().toString() + + // TODO maybe others? + + else -> row.toString() + } + + +string.let { + if (truncate > 0 && it.length > truncate) { + // do not show ellipses for strings shorter than 4 characters. + if (truncate < 4) it.substring(0, truncate) + else it.substring(0, truncate - 3) + "..." + } else { + it } } } + } - for (row in rows.drop(1)) tr { - for (item in row) td { - +item + if (hasMoreData) tr { + +"only showing top $numRows ${if (numRows == 1) "row" else "rows"}" + } +} + +private fun Dataset.toHtml(limit: Int = 20, truncate: Int = 30): String = createHtmlTable { + val numRows = limit.coerceIn(0 until ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) + val tmpRows = getRows(numRows, truncate).asKotlinList().map { it.asKotlinList() } + + val hasMoreData = tmpRows.size - 1 > numRows + val rows = tmpRows.take(numRows + 1) + + tr { + for (header in rows.first()) th { + +header.let { + if (truncate > 0 && it.length > truncate) { + // do not show ellipses for strings shorter than 4 characters. + if (truncate < 4) it.substring(0, truncate) + else it.substring(0, truncate - 3) + "..." + } else { + it + } } } + } - if (hasMoreData) tr { - +"only showing top $numRows ${if (numRows == 1) "row" else "rows"}" + for (row in rows.drop(1)) tr { + for (item in row) td { + +item } } + if (hasMoreData) tr { + +"only showing top $numRows ${if (numRows == 1) "row" else "rows"}" + } } From ac9b6d1250a6230a88f21b2ed19783ce1109fef6 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 14 Mar 2022 13:23:30 +0100 Subject: [PATCH 089/213] jupyter tests work when targeting jdk 11 --- kotlin-spark-api/3.2/pom_2.12.xml | 7 -- .../kotlinx/spark/api/JupyterTests.kt | 82 +++++++++++++------ pom.xml | 2 +- 3 files changed, 59 insertions(+), 32 deletions(-) diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 63d460ca..a1f799e3 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -84,13 +84,6 @@ ${kotest.version} test - - - - - - - io.kotest.extensions diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index e29f4ed3..9ace75db 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -20,37 +20,71 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.core.spec.style.ShouldSpec +import io.kotest.matchers.nulls.shouldNotBeNull import io.kotest.matchers.string.shouldContain +import io.kotest.matchers.types.shouldBeInstanceOf +import jupyter.kotlin.DependsOn import org.intellij.lang.annotations.Language -import org.jetbrains.kotlinx.jupyter.testkit.JupyterReplTestCase - -class JupyterTests : ShouldSpec(object : (ShouldSpec) -> Unit, JupyterReplTestCase() { - - override fun invoke(it: ShouldSpec) = it.run() - - fun ShouldSpec.run() { - context("Jupyter") { -// @Language("kts") -// val html = execHtml( -// """ -// val ds = listOf(1, 2, 3).toDS(spark) -// ds -// """.trimIndent() -// ) -// -// println(html) -// -// html shouldContain "value" -// html shouldContain "1" -// html shouldContain "2" -// html shouldContain "3" +import org.jetbrains.kotlinx.jupyter.EvalRequestData +import org.jetbrains.kotlinx.jupyter.ReplForJupyter +import org.jetbrains.kotlinx.jupyter.api.Code +import org.jetbrains.kotlinx.jupyter.api.MimeTypedResult +import org.jetbrains.kotlinx.jupyter.repl.EvalResultEx +import org.jetbrains.kotlinx.jupyter.testkit.ReplProvider +import kotlin.script.experimental.jvm.util.classpathFromClassloader +class JupyterTests : ShouldSpec({ + val replProvider: ReplProvider = ReplProvider.withoutLibraryResolution + val currentClassLoader = DependsOn::class.java.classLoader + val scriptClasspath = classpathFromClassloader(currentClassLoader).orEmpty() + + fun createRepl() = replProvider(scriptClasspath) + fun withRepl(action: ReplForJupyter.() -> Unit) = createRepl().action() + + context("DF rendering") { + should("render DFs") { + withRepl { + @Language("kts") + val html = execHtml( + """ + val ds = listOf(1, 2, 3).toDS(spark) + ds + """.trimIndent() + ) + println(html) + + html shouldContain "value" + html shouldContain "1" + html shouldContain "2" + html shouldContain "3" + } } } +}) +fun ReplForJupyter.execEx(code: Code): EvalResultEx { + return evalEx(EvalRequestData(code)) +} -// val jupyter = object : JupyterReplTestCase() {} +fun ReplForJupyter.exec(code: Code): Any? { + return execEx(code).renderedValue +} +fun ReplForJupyter.execRaw(code: Code): Any? { + return execEx(code).rawValue +} -}) +@JvmName("execTyped") +inline fun ReplForJupyter.exec(code: Code): T { + val res = exec(code) + res.shouldBeInstanceOf() + return res +} + +fun ReplForJupyter.execHtml(code: Code): String { + val res = exec(code) + val html = res["text/html"] + html.shouldNotBeNull() + return html +} \ No newline at end of file diff --git a/pom.xml b/pom.xml index 07eea0da..fa31ff5a 100644 --- a/pom.xml +++ b/pom.xml @@ -17,7 +17,7 @@ 5.8.2 1.0.1 3.2.1 - 0.11.0-61 + 0.11.0-62 0.7.3 From a6336f459a1aaa1273a9195c85509e682b5c1c7f Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 14 Mar 2022 14:17:40 +0100 Subject: [PATCH 090/213] more tests and trying to please qodana --- .../org/jetbrains/kotlinx/spark/api/Column.kt | 2 +- .../kotlinx/spark/api/jupyter/Integration.kt | 5 +- .../kotlinx/spark/api/JupyterTests.kt | 139 ++++++++++++++++-- 3 files changed, 128 insertions(+), 18 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt index ffa42ada..72e8a9b7 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt @@ -68,7 +68,7 @@ inline operator fun Dataset.invoke(column: KProperty1< replaceWith = ReplaceWith("this `===` c"), level = DeprecationLevel.ERROR, ) -infix fun Column.`==`(c: Column) = `$eq$eq$eq`(c) +infix fun Column.`==`(c: Column): Column = `$eq$eq$eq`(c) /** * Unary minus, i.e. negate the expression. diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 34e61adc..88df58f5 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -70,7 +70,8 @@ internal class Integration : JupyterIntegration() { import("org.apache.spark.sql.*") import("org.apache.spark.api.java.*") import("org.apache.spark.sql.SparkSession.Builder") - import("scala.collection.Seq") + import("scala.collection.*") + import("org.apache.spark.rdd.*") var spark: SparkSession? = null @@ -89,7 +90,7 @@ internal class Integration : JupyterIntegration() { spark """.trimIndent() ) - spark = sparkField.value as SparkSession + spark = sparkField.value!! as SparkSession @Language("kts") val logLevel = execute("""spark.sparkContext.setLogLevel(SparkLogLevel.ERROR)""") diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index 9ace75db..f308fe79 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -21,9 +21,11 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.nulls.shouldNotBeNull +import io.kotest.matchers.shouldNotBe import io.kotest.matchers.string.shouldContain import io.kotest.matchers.types.shouldBeInstanceOf import jupyter.kotlin.DependsOn +import org.apache.spark.api.java.JavaSparkContext import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.EvalRequestData import org.jetbrains.kotlinx.jupyter.ReplForJupyter @@ -38,11 +40,29 @@ class JupyterTests : ShouldSpec({ val currentClassLoader = DependsOn::class.java.classLoader val scriptClasspath = classpathFromClassloader(currentClassLoader).orEmpty() - fun createRepl() = replProvider(scriptClasspath) - fun withRepl(action: ReplForJupyter.() -> Unit) = createRepl().action() + fun createRepl(): ReplForJupyter = replProvider(scriptClasspath) + fun withRepl(action: ReplForJupyter.() -> Unit): Unit = createRepl().action() - context("DF rendering") { - should("render DFs") { + context("Jupyter") { + should("Have spark instance") { + withRepl { + @Language("kts") + val spark = exec("""spark""") + + spark as? SparkSession shouldNotBe null + } + } + + should("Have JavaSparkContext instance") { + withRepl { + @Language("kts") + val sc = exec("""sc""") + + sc as? JavaSparkContext shouldNotBe null + } + } + + should("render Datasets") { withRepl { @Language("kts") val html = execHtml( @@ -58,31 +78,120 @@ class JupyterTests : ShouldSpec({ html shouldContain "2" html shouldContain "3" } + } + + should("render JavaRDDs") { + withRepl { + @Language("kts") + val html = execHtml( + """ + val rdd: JavaRDD> = sc.parallelize(listOf( + listOf(1, 2, 3), + listOf(4, 5, 6), + )) + rdd + """.trimIndent() + ) + println(html) + + html shouldContain "[1, 2, 3]" + html shouldContain "[4, 5, 6]" + } + } + + should("render JavaRDDs with Arrays") { + withRepl { + @Language("kts") + val html = execHtml( + """ + val rdd: JavaRDD = sc.parallelize(listOf( + intArrayOf(1, 2, 3), + intArrayOf(4, 5, 6), + )) + rdd + """.trimIndent() + ) + println(html) + html shouldContain "[1, 2, 3]" + html shouldContain "[4, 5, 6]" + } + } + + should("render JavaPairRDDs") { + withRepl { + @Language("kts") + val html = execHtml( + """ + val rdd: JavaPairRDD = sc.parallelizePairs(listOf( + c(1, 2).toTuple(), + c(3, 4).toTuple(), + )) + rdd + """.trimIndent() + ) + println(html) + + html shouldContain "(1,2)" + html shouldContain "(3,4)" + + } + } + + should("render JavaDoubleRDD") { + withRepl { + @Language("kts") + val html = execHtml( + """ + val rdd: JavaDoubleRDD = sc.parallelizeDoubles(listOf(1.0, 2.0, 3.0, 4.0,)) + rdd + """.trimIndent() + ) + println(html) + + html shouldContain "1.0" + html shouldContain "2.0" + html shouldContain "3.0" + html shouldContain "4.0" + + } + } + + should("render Scala RDD") { + withRepl { + @Language("kts") + val html = execHtml( + """ + val rdd: RDD> = sc.parallelize(listOf( + listOf(1, 2, 3), + listOf(4, 5, 6), + )).rdd() + rdd + """.trimIndent() + ) + println(html) + + html shouldContain "[1, 2, 3]" + html shouldContain "[4, 5, 6]" + } } } }) -fun ReplForJupyter.execEx(code: Code): EvalResultEx { - return evalEx(EvalRequestData(code)) -} +private fun ReplForJupyter.execEx(code: Code): EvalResultEx = evalEx(EvalRequestData(code)) -fun ReplForJupyter.exec(code: Code): Any? { - return execEx(code).renderedValue -} +private fun ReplForJupyter.exec(code: Code): Any? = execEx(code).renderedValue -fun ReplForJupyter.execRaw(code: Code): Any? { - return execEx(code).rawValue -} +private fun ReplForJupyter.execRaw(code: Code): Any? = execEx(code).rawValue @JvmName("execTyped") -inline fun ReplForJupyter.exec(code: Code): T { +private inline fun ReplForJupyter.exec(code: Code): T { val res = exec(code) res.shouldBeInstanceOf() return res } -fun ReplForJupyter.execHtml(code: Code): String { +private fun ReplForJupyter.execHtml(code: Code): String { val res = exec(code) val html = res["text/html"] html.shouldNotBeNull() From a91d35bbea22f71d3088626f6740f6bd96482f56 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 14 Mar 2022 14:48:07 +0100 Subject: [PATCH 091/213] more tests and trying to please qodana --- .../org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 88df58f5..c08de5bf 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -89,8 +89,8 @@ internal class Integration : JupyterIntegration() { .getOrCreate() spark """.trimIndent() - ) - spark = sparkField.value!! as SparkSession + ).value!! as SparkSession + spark = sparkField @Language("kts") val logLevel = execute("""spark.sparkContext.setLogLevel(SparkLogLevel.ERROR)""") From f346c3b3eea8c8554b9973642f56fe1bf33deb8a Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 14 Mar 2022 16:37:42 +0100 Subject: [PATCH 092/213] added more tests and catches for RDDs that cannot be rendered --- .../kotlinx/spark/api/jupyter/Integration.kt | 92 +++++++++++-------- .../kotlinx/spark/api/JupyterTests.kt | 25 +++++ 2 files changed, 81 insertions(+), 36 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index c08de5bf..7083182d 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -34,6 +34,18 @@ import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration import org.jetbrains.kotlinx.spark.api.* import java.io.InputStreamReader + +import org.jetbrains.kotlinx.spark.api.* +import org.apache.spark.sql.functions.* +import org.apache.spark.* +import org.apache.spark.sql.* +import org.apache.spark.api.java.* +import org.apache.spark.sql.SparkSession.Builder +import scala.collection.* +import org.apache.spark.rdd.* +import org.jetbrains.kotlinx.spark.api.SparkSession +import java.io.Serializable + @OptIn(ExperimentalStdlibApi::class) internal class Integration : JupyterIntegration() { @@ -70,14 +82,16 @@ internal class Integration : JupyterIntegration() { import("org.apache.spark.sql.*") import("org.apache.spark.api.java.*") import("org.apache.spark.sql.SparkSession.Builder") - import("scala.collection.*") + import("scala.collection.Seq") import("org.apache.spark.rdd.*") + import("java.io.Serializable") var spark: SparkSession? = null + val a: Map = mapOf() + // starting spark and unwrapping KSparkContext functions onLoaded { - println("Running!!") @Language("kts") val sparkField = execute( @@ -136,51 +150,57 @@ private fun createHtmlTable(fillTable: TABLE.() -> Unit): String = buildString { } -private fun JavaRDDLike.toHtml(limit: Int = 20, truncate: Int = 30): String = createHtmlTable { - val numRows = limit.coerceIn(0 until ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) - val tmpRows = take(numRows).toList() - - val hasMoreData = tmpRows.size - 1 > numRows - val rows = tmpRows.take(numRows) +private fun JavaRDDLike.toHtml(limit: Int = 20, truncate: Int = 30): String = try { + createHtmlTable { + val numRows = limit.coerceIn(0 until ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) + val tmpRows = take(numRows).toList() - tr { th { +"Values" } } + val hasMoreData = tmpRows.size - 1 > numRows + val rows = tmpRows.take(numRows) - for (row in rows) tr { - td { - val string = when (row) { - is ByteArray -> row.joinToString(prefix = "[", postfix = "]") { "%02X".format(it) } + tr { th { +"Values" } } - is CharArray -> row.iterator().asSequence().toList().toString() - is ShortArray -> row.iterator().asSequence().toList().toString() - is IntArray -> row.iterator().asSequence().toList().toString() - is LongArray -> row.iterator().asSequence().toList().toString() - is FloatArray -> row.iterator().asSequence().toList().toString() - is DoubleArray -> row.iterator().asSequence().toList().toString() - is BooleanArray -> row.iterator().asSequence().toList().toString() - is Array<*> -> row.iterator().asSequence().toList().toString() - is Iterable<*> -> row.iterator().asSequence().toList().toString() - is Iterator<*> -> row.asSequence().toList().toString() + for (row in rows) tr { + td { + val string = when (row) { + is ByteArray -> row.joinToString(prefix = "[", postfix = "]") { "%02X".format(it) } - // TODO maybe others? + is CharArray -> row.iterator().asSequence().toList().toString() + is ShortArray -> row.iterator().asSequence().toList().toString() + is IntArray -> row.iterator().asSequence().toList().toString() + is LongArray -> row.iterator().asSequence().toList().toString() + is FloatArray -> row.iterator().asSequence().toList().toString() + is DoubleArray -> row.iterator().asSequence().toList().toString() + is BooleanArray -> row.iterator().asSequence().toList().toString() + is Array<*> -> row.iterator().asSequence().toList().toString() + is Iterable<*> -> row.iterator().asSequence().toList().toString() + is Iterator<*> -> row.asSequence().toList().toString() + is Serializable -> row.toString() + // maybe others? - else -> row.toString() - } + else -> row.toString() + } - +string.let { - if (truncate > 0 && it.length > truncate) { - // do not show ellipses for strings shorter than 4 characters. - if (truncate < 4) it.substring(0, truncate) - else it.substring(0, truncate - 3) + "..." - } else { - it + +string.let { + if (truncate > 0 && it.length > truncate) { + // do not show ellipses for strings shorter than 4 characters. + if (truncate < 4) it.substring(0, truncate) + else it.substring(0, truncate - 3) + "..." + } else { + it + } } } } - } - if (hasMoreData) tr { - +"only showing top $numRows ${if (numRows == 1) "row" else "rows"}" + if (hasMoreData) tr { + +"only showing top $numRows ${if (numRows == 1) "row" else "rows"}" + } } +} catch (e: SparkException) { + // Whenever toString() on the contents doesn't work, since the class might be unknown... + """${toString()} + |Cannot render this RDD of this class.""".trimMargin() } private fun Dataset.toHtml(limit: Int = 20, truncate: Int = 30): String = createHtmlTable { diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index f308fe79..e37028c2 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -118,6 +118,31 @@ class JupyterTests : ShouldSpec({ } } + should("not render JavaRDDs with custom class") { + withRepl { + @Language("kts") + val html = execHtml( + """ + data class Test( + val longFirstName: String, + val second: LongArray, + val somethingSpecial: Map, + ): Serializable + + val rdd = sc.parallelize( + listOf( + Test("aaaaaaaaa", longArrayOf(1L, 100000L, 24L), mapOf(1 to "one", 2 to "two")), + Test("aaaaaaaaa", longArrayOf(1L, 100000L, 24L), mapOf(1 to "one", 2 to "two")), + ) + ) + rdd + """.trimIndent() + ) + html shouldContain "Cannot render this RDD of this class." + + } + } + should("render JavaPairRDDs") { withRepl { @Language("kts") From fd1940836813163a8af4595da7e12312d251c6bb Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 14 Mar 2022 17:11:49 +0100 Subject: [PATCH 093/213] updated readme --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index ed1b1c51..5f7226e0 100644 --- a/README.md +++ b/README.md @@ -100,6 +100,8 @@ val ds = listOf(...).toDS(spark) Other than that, the API operates pretty similarly. +There is also support for HTML rendering of Datasets and simple (Java)RDDs. + ## Kotlin for Apache Spark features From 4222a0380ee537fbd6610dc8fdc14df3afcf6eb7 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 17 Mar 2022 19:18:44 +0100 Subject: [PATCH 094/213] still exploring, just pushing to keep my work safe :) --- .../GeneratePairStreamingFunctions.kt | 87 +++ .../JavaRecoverableNetworkWordCount.kt | 215 +++++++ .../examples/KotlinDirectKafkaWordCount.kt | 113 ++++ kotlin-spark-api/3.2/pom_2.12.xml | 16 + .../kotlinx/spark/api/Conversions.kt | 14 + .../kotlinx/spark/api/SparkSession.kt | 69 +- .../jetbrains/kotlinx/spark/api/Streaming.kt | 603 ++++++++++++++++++ .../kotlinx/spark/api/StreamingTest.kt | 21 +- 8 files changed, 1124 insertions(+), 14 deletions(-) create mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/GeneratePairStreamingFunctions.kt create mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt create mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/GeneratePairStreamingFunctions.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/GeneratePairStreamingFunctions.kt new file mode 100644 index 00000000..740774e2 --- /dev/null +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/GeneratePairStreamingFunctions.kt @@ -0,0 +1,87 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.examples + +import org.apache.spark.streaming.dstream.PairDStreamFunctions +import org.intellij.lang.annotations.Language +import kotlin.reflect.KFunction +import kotlin.reflect.full.functions + + +object GeneratePairStreamingFunctions { + +// fun JavaDStream>.reduceByKey(func: (V, V) -> V): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKey(func) +// .map { it.toArity() } + + @JvmStatic + fun main(args: Array) { + + val klass = PairDStreamFunctions::class + + val functions = klass.functions + + for (function: KFunction<*> in functions) with(function) { + + val types = (typeParameters.map { it.name }.toSet() + "K" + "V").joinToString() + + val parameterString = parameters.drop(1).joinToString { + "${it.name}: ${it.type}" + } + val parameterStringNoType = parameters.drop(1).joinToString { it.name!! } + + @Language("kt") + val new = """ + fun <$types> JavaDStream>.$name($parameterString) + + """.trimIndent() + +// +// val new = +// if (returnType.toString().contains("org.apache.spark.streaming.api.java.JavaPairDStream")) { +// val newReturnType = returnType.toString() +// .replaceFirst("JavaPairDStream<", "JavaDStream", ">") +// +// """ +// fun <$types> JavaDStream>.$name($parameterString): $newReturnType = +// mapToPair { it.toTuple() } +// .$name($parameterStringNoType) +// .map { it.toArity() } +// +// """.trimIndent() +// } else { +// """ +// fun <$types> JavaDStream>.$name($parameterString): $returnType = +// mapToPair { it.toTuple() } +// .$name($parameterStringNoType) +// +// """.trimIndent() +// } +// .replace("!", "") +// .replace("(Mutable)", "") +// +// if ("\$" !in new) println(new) + } + + + } +} diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt new file mode 100644 index 00000000..ef2fc398 --- /dev/null +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt @@ -0,0 +1,215 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.examples + +import com.google.common.io.Files +import org.apache.spark.api.java.JavaPairRDD +import org.apache.spark.api.java.JavaSparkContext +import org.apache.spark.broadcast.Broadcast +import org.apache.spark.streaming.Durations +import org.apache.spark.streaming.Time +import org.apache.spark.util.LongAccumulator +import org.jetbrains.kotlinx.spark.api.* +import scala.Tuple2 +import java.io.File +import java.nio.charset.Charset +import java.util.regex.Pattern +import kotlin.system.exitProcess + + +/** + * Use this singleton to get or register a Broadcast variable. + */ +internal object JavaWordExcludeList { + + @Volatile + private var instance: Broadcast>? = null + + fun getInstance(sc: JavaSparkContext): Broadcast> { + if (instance == null) synchronized(JavaWordExcludeList::class.java) { + if (instance == null) { + val wordExcludeList = listOf("a", "b", "c") + instance = sc.broadcast(wordExcludeList) + } + } + return instance!! + } +} + +/** + * Use this singleton to get or register an Accumulator. + */ +internal object JavaDroppedWordsCounter { + + @Volatile + private var instance: LongAccumulator? = null + + fun getInstance(sc: JavaSparkContext): LongAccumulator { + if (instance == null) synchronized(JavaDroppedWordsCounter::class.java) { + if (instance == null) + instance = sc.sc().longAccumulator("DroppedWordsCounter") + } + return instance!! + } +} + +/** + * Counts words in text encoded with UTF8 received from the network every second. This example also + * shows how to use lazily instantiated singleton instances for Accumulator and Broadcast so that + * they can be registered on driver failures. + * + * Usage: JavaRecoverableNetworkWordCount + * and describe the TCP server that Spark Streaming would connect to receive + * data. directory to HDFS-compatible file system which checkpoint data + * file to which the word counts will be appended + * + * and must be absolute paths + * + * To run this on your local machine, you need to first run a Netcat server + * + * `$ nc -lk 9999` + * + * and run the example as + * + * `$ ./bin/run-example org.apache.spark.examples.streaming.JavaRecoverableNetworkWordCount \ + * localhost 9999 ~/checkpoint/ ~/out` + * + * If the directory ~/checkpoint/ does not exist (e.g. running for the first time), it will create + * a new StreamingContext (will print "Creating new context" to the console). Otherwise, if + * checkpoint data exists in ~/checkpoint/, then it will create StreamingContext from + * the checkpoint data. + * + * Refer to the online documentation for more details. + */ +object JavaRecoverableNetworkWordCount { + + private val SPACE = Pattern.compile(" ") + + private const val DEFAULT_IP = "localhost" + private const val DEFAULT_PORT = "9999" + private const val DEFAULT_CHECKPOINT_DIRECTORY = "~/checkpoint/" + private const val DEFAULT_OUTPUT_PATH = "~/out" + + @Throws(Exception::class) + @JvmStatic + fun main(args: Array) { + if (args.size != 4 && args.isNotEmpty()) { + System.err.println("You arguments were " + listOf(*args)) + System.err.println( + """Usage: JavaRecoverableNetworkWordCount + . and describe the TCP server that Spark + Streaming would connect to receive data. directory to + HDFS-compatible file system which checkpoint data file to which + the word counts will be appended + + In local mode, should be 'local[n]' with n > 1 + Both and must be absolute paths""".trimIndent() + ) + exitProcess(1) + } + val ip = args.getOrElse(0) { DEFAULT_IP } + val port = args.getOrElse(1) { DEFAULT_PORT }.toInt() + val checkpointDirectory = args.getOrElse(2) { DEFAULT_CHECKPOINT_DIRECTORY } + val outputPath = args.getOrElse(3) { DEFAULT_OUTPUT_PATH } + + // (used to detect the new context) + // Create the context with a 1 second batch size or load from checkpointDirectory + withSparkStreaming( +// checkpointPath = checkpointDirectory, TODO + batchDuration = Durations.seconds(1), + appName = "JavaRecoverableNetworkWordCount", + ) { + createContext( + ip = ip, + port = port, + outputPath = outputPath, + ) + } + } + + @Suppress("UnstableApiUsage") + private fun KSparkStreamingSession.createContext( + ip: String, + port: Int, + outputPath: String, + ) { + // If you do not see this printed, that means the StreamingContext has been loaded + // from the new checkpoint + println("Creating new context") + val outputFile = File(outputPath) + if (outputFile.exists()) outputFile.delete() + + // Create a socket stream on target ip:port and count the + // words in input stream of \n delimited text (e.g. generated by 'nc') + val lines = ssc.socketTextStream(ip, port) + + val words = lines.flatMap { it.split(SPACE).iterator() } + + val wordCounts = words + .mapToPair { c(it, 1).toTuple() } + .reduceByKey { a: Int, b: Int -> a + b } + +// val wordCounts = words +// .mapToPair { Tuple2(it, 1) } +// .reduceByKey { a: Int, b: Int -> a + b } + +// val wordCounts = words +// .map { it to 1 } +// .reduceByKey { a: Int, b: Int -> a + b } +// +// val wordCounts = words +// .map { c(it, 1) } +// .reduceByKey { a: Int, b: Int -> a + b } + + + wordCounts.foreachRDD { rdd, time: Time -> + + // Get or register the excludeList Broadcast + val excludeList = JavaWordExcludeList.getInstance(JavaSparkContext(rdd.context())) + + // Get or register the droppedWordsCounter Accumulator + val droppedWordsCounter = JavaDroppedWordsCounter.getInstance(JavaSparkContext(rdd.context())) + + // Use excludeList to drop words and use droppedWordsCounter to count them + val counts = rdd.filter { wordCount -> + if (excludeList.value().contains(wordCount._1)) { + droppedWordsCounter.add(wordCount._2.toLong()) + false + } else { + true + } + }.collect().toString() + val output = "Counts at time $time $counts" + println(output) + println("Dropped ${droppedWordsCounter.value()} word(s) totally") + println("Appending to " + outputFile.absolutePath) + Files.append( + """ + $output + + """.trimIndent(), + outputFile, + Charset.defaultCharset(), + ) + } + } + + +} diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt new file mode 100644 index 00000000..eea40720 --- /dev/null +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt @@ -0,0 +1,113 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.examples + +import org.apache.kafka.clients.consumer.ConsumerConfig.* +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.common.serialization.StringDeserializer +import org.apache.spark.streaming.Durations +import org.apache.spark.streaming.api.java.JavaDStream +import org.apache.spark.streaming.api.java.JavaInputDStream +import org.apache.spark.streaming.api.java.JavaPairDStream +import org.apache.spark.streaming.kafka010.ConsumerStrategies +import org.apache.spark.streaming.kafka010.KafkaUtils +import org.apache.spark.streaming.kafka010.LocationStrategies +import org.jetbrains.kotlinx.spark.api.c +import org.jetbrains.kotlinx.spark.api.toTuple +import org.jetbrains.kotlinx.spark.api.withSparkStreaming +import java.io.Serializable +import java.util.regex.Pattern +import kotlin.system.exitProcess + + +/** + * Src: https://github.com/apache/spark/blob/master/examples/src/main/java/org/apache/spark/examples/streaming/JavaDirectKafkaWordCount.java + * + * Consumes messages from one or more topics in Kafka and does wordcount. + * Usage: JavaDirectKafkaWordCount + * is a list of one or more Kafka brokers + * is a consumer group name to consume from topics + * is a list of one or more kafka topics to consume from + * + * Example: + * + * First make sure you have a Kafka producer running. For instance, when running locally: + * $ kafka-console-producer.sh --topic quickstart-events --bootstrap-server localhost:9092 + * + * Then start the program normally or like this: + * $ bin/run-example streaming.JavaDirectKafkaWordCount broker1-host:port,broker2-host:port \ + * consumer-group topic1,topic2 + */ +object KotlinDirectKafkaWordCount { + private val SPACE = Pattern.compile(" ") + + private const val DEFAULT_BROKER = "localhost:9092" + private const val DEFAULT_GROUP_ID = "consumer-group" + private const val DEFAULT_TOPIC = "quickstart-events" + + @JvmStatic + fun main(args: Array) { + if (args.size < 3 && args.isNotEmpty()) { + System.err.println( + """Usage: JavaDirectKafkaWordCount + is a list of one or more Kafka brokers + is a consumer group name to consume from topics + is a list of one or more kafka topics to consume from + """.trimIndent() + ) + exitProcess(1) + } + + val brokers: String = args.getOrElse(0) { DEFAULT_BROKER } + val groupId: String = args.getOrElse(1) { DEFAULT_GROUP_ID } + val topics: String = args.getOrElse(2) { DEFAULT_TOPIC } + + // Create context with a 2 seconds batch interval + withSparkStreaming(batchDuration = Durations.seconds(2), appName = "JavaDirectKafkaWordCount") { + + val topicsSet: Set = topics.split(',').toSet() + + val kafkaParams: Map = mapOf( + BOOTSTRAP_SERVERS_CONFIG to brokers, + GROUP_ID_CONFIG to groupId, + KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, + VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, + ) + + // Create direct kafka stream with brokers and topics + val messages: JavaInputDStream> = KafkaUtils.createDirectStream( + ssc, + LocationStrategies.PreferConsistent(), + ConsumerStrategies.Subscribe(topicsSet, kafkaParams), + ) + + // Get the lines, split them into words, count the words and print + val lines: JavaDStream = messages.map { it.value() } + val words: JavaDStream = lines.flatMap { it.split(SPACE).iterator() } + + val wordCounts: JavaPairDStream = words + .mapToPair { c(it, 1).toTuple() } + .reduceByKey { a: Int, b: Int -> a + b } + + wordCounts.print() + + } + } +} diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 826547d2..9a33982a 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -28,6 +28,22 @@ core-3.2_${scala.compat.version} + + org.apache.spark + spark-sql-kafka-0-10_${scala.compat.version} + ${spark3.version} + + + org.apache.spark + spark-streaming-kafka-0-10_${scala.compat.version} + ${spark3.version} + + + org.apache.kafka + kafka-streams-test-utils + 3.1.0 + test + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index 8b67a1bc..2863ad08 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -27,6 +27,7 @@ package org.jetbrains.kotlinx.spark.api +import org.apache.spark.api.java.Optional import scala.* import scala.collection.JavaConverters import java.util.* @@ -43,6 +44,19 @@ import scala.collection.mutable.Map as ScalaMutableMap import scala.collection.mutable.Seq as ScalaMutableSeq import scala.collection.mutable.Set as ScalaMutableSet + +/** Converts Scala [Option] to Kotlin nullable. */ +fun Option.toNullable(): T? = getOrElse { null } + +/** Converts nullable value to Scala [Option]. */ +fun T?.toOption(): Option = Option.apply(this) + +/** Converts [Optional] to Kotlin nullable. */ +fun Optional.toNullable(): T? = orElse(null) + +/** Converts nullable value to [Optional]. */ +fun T?.toOptional(): Optional = Optional.ofNullable(this) + /** * @see JavaConverters.asScalaIterator for more information. */ diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 96f753c7..d4481f1d 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -34,6 +34,7 @@ import org.apache.spark.sql.Dataset import org.apache.spark.sql.SparkSession.Builder import org.apache.spark.sql.UDFRegistration import org.apache.spark.streaming.Duration +import org.apache.spark.streaming.Durations import org.apache.spark.streaming.api.java.JavaStreamingContext import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions @@ -193,8 +194,15 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func /** * Wrapper for spark streaming creation. `spark: SparkSession` and `ssc: JavaStreamingContext` are provided, started, * awaited, and stopped automatically. + * The use of a checkpoint directory is optional. + * If checkpoint data exists in the provided `checkpointPath`, then StreamingContext will be + * recreated from the checkpoint data. If the data does not exist, then the provided factory + * will be used to create a JavaStreamingContext. * - * @param batchDuration The time interval at which streaming data will be divided into batches + * @param batchDuration The time interval at which streaming data will be divided into batches. Defaults to 1 second. + * @param checkpointPath If checkpoint data exists in the provided `checkpointPath`, then StreamingContext will be + * recreated from the checkpoint data. If the data does not exist (or `null` is provided), then the streaming context will be built using + * the other provided parameters. * @param props spark options, value types are runtime-checked for type-correctness * @param master Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]" to * run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster. By default, it @@ -208,7 +216,8 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func */ @JvmOverloads inline fun withSparkStreaming( - batchDuration: Duration, + batchDuration: Duration = Durations.seconds(1L), + checkpointPath: String? = null, props: Map = emptyMap(), master: String = SparkConf().get("spark.master", "local[*]"), appName: String = "Kotlin Spark Sample", @@ -216,19 +225,53 @@ inline fun withSparkStreaming( timeout: Long = -1L, func: KSparkStreamingSession.() -> Unit, ) { - withSpark( - props = props, - master = master, - appName = appName, - logLevel = logLevel, - ) { - val ssc = JavaStreamingContext(sc, batchDuration) - KSparkStreamingSession(spark = spark, ssc = ssc).apply { - func() - ssc.start() - runAfterStart() + if (checkpointPath != null) { + TODO() +// var kSparkStreamingSession: KSparkStreamingSession? = null +// val ssc = JavaStreamingContext.getOrCreate(checkpointPath) { +// val jssc = JavaStreamingContext( +// SparkConf() +// .setAppName(appName) +// .setMaster(master) +// .setAll(props.map { (key, value) -> +// c(key, value.toString()).toTuple() +// }.asScalaIterable()), +// batchDuration, +// ) +// jssc.sparkContext().sc().setLogLevel(logLevel) +// jssc.checkpoint(checkpointPath) +// kSparkStreamingSession = KSparkStreamingSession( +// spark = SparkSession +// .builder() +// .sparkContext(jssc.sparkContext().sc()) +// .getOrCreate(), +// ssc = jssc, +// ).apply { func() } +// +// jssc +// } +// ssc.start() +// kSparkStreamingSession?.apply { runAfterStart() } +// ssc.awaitTerminationOrTimeout(timeout) +// ssc.stop() + } else { + + withSpark( + props = props, + master = master, + appName = appName, + logLevel = logLevel, + ) { + val ssc = JavaStreamingContext(sc, batchDuration) + KSparkStreamingSession(spark = spark, ssc = ssc).apply { + func() + ssc.start() + runAfterStart() + } + ssc.awaitTerminationOrTimeout(timeout) ssc.stop() + } } } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt new file mode 100644 index 00000000..7fc71530 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt @@ -0,0 +1,603 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api + +import org.apache.spark.Partitioner +import org.apache.spark.api.java.JavaRDD +import org.apache.spark.api.java.Optional +import org.apache.spark.streaming.Duration +import org.apache.spark.streaming.StateSpec +import org.apache.spark.streaming.api.java.JavaDStream +import org.apache.spark.streaming.api.java.JavaDStreamLike +import org.apache.spark.streaming.api.java.JavaMapWithStateDStream +import org.apache.spark.streaming.api.java.JavaPairDStream +import scala.Tuple2 +import scala.Tuple3 + +//fun JavaDStreamLike, *, *>.reduceByKey(func: (V, V) -> V): JavaDStream> = +// mapToPair(Arity2::toTuple) +// .reduceByKey(func) +// .map(Tuple2::toArity) + + +@JvmName("tuple2ToPairDStream") +fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = + mapToPair { it } + +@JvmName("arity2ToPairDStream") +fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = + mapToPair(Arity2::toTuple) + +@JvmName("pairToPairDStream") +fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = + mapToPair(Pair::toTuple) + +/** + * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to + * generate the RDDs with Spark's default number of partitions. + */ +fun JavaDStreamLike, *, *>.groupByKey(): JavaDStream>> = + mapToPair { it.toTuple() } + .groupByKey() + .map { it.toArity() } + +/** + * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to + * generate the RDDs with `numPartitions` partitions. + */ +fun JavaDStreamLike, *, *>.groupByKey(numPartitions: Int): JavaDStream>> = + mapToPair { it.toTuple() } + .groupByKey(numPartitions) + .map { it.toArity() } + +/** + * Return a new DStream by applying `groupByKey` on each RDD. The supplied + * org.apache.spark.Partitioner is used to control the partitioning of each RDD. + */ +fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = + mapToPair { it.toTuple() } + .groupByKey(partitioner) + .map { it.toArity() } + +/** + * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are + * merged using the associative and commutative reduce function. Hash partitioning is used to + * generate the RDDs with Spark's default number of partitions. + */ +fun JavaDStreamLike, *, *>.reduceByKey(reduceFunc: (V, V) -> V): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKey(reduceFunc) + .map { it.toArity() } + +/** + * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are + * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs + * with `numPartitions` partitions. + */ +fun JavaDStreamLike, *, *>.reduceByKey( + reduceFunc: (V, V) -> V, + numPartitions: Int, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKey(reduceFunc, numPartitions) + .map { it.toArity() } + +/** + * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are + * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +fun JavaDStreamLike, *, *>.reduceByKey( + reduceFunc: (V, V) -> V, + partitioner: Partitioner, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKey(reduceFunc, partitioner) + .map { it.toArity() } + +/** + * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the + * combineByKey for RDDs. Please refer to combineByKey in + * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. + */ +fun JavaDStreamLike, *, *>.combineByKey( + createCombiner: (V) -> C, + mergeValue: (C, V) -> C, + mergeCombiner: (C, C) -> C, + partitioner: Partitioner, + mapSideCombine: Boolean = true, +): JavaDStream> = + mapToPair { it.toTuple() } + .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) + .map { it.toArity() } + +/** + * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. + * Similar to `DStream.groupByKey()`, but applies it over a sliding window. + * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param numPartitions number of partitions of each RDD in the new DStream; if not specified + * then Spark's default number of partitions will be used + */ +fun JavaDStreamLike, *, *>.groupByKeyAndWindow( + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) + .map { it.toArity() } + +/** + * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. + * Similar to `DStream.groupByKey()`, but applies it over a sliding window. + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param partitioner partitioner for controlling the partitioning of each RDD in the new + * DStream. + */ +fun JavaDStreamLike, *, *>.groupByKeyAndWindow( + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it.toTuple() } + .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) + .map { it.toArity() } + +/** + * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to + * `DStream.reduceByKey()` but applies it over a sliding window. Hash partitioning is used to + * generate the RDDs with `numPartitions` partitions. + * @param reduceFunc associative and commutative reduce function + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param numPartitions number of partitions of each RDD in the new DStream. + */ +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + reduceFunc: (V, V) -> V, + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) + .map { it.toArity() } + +/** + * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to + * `DStream.reduceByKey()`, but applies it over a sliding window. + * @param reduceFunc associative and commutative reduce function + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param partitioner partitioner for controlling the partitioning of each RDD + * in the new DStream. + */ +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + reduceFunc: (V, V) -> V, + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + partitioner: Partitioner, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) + .map { it.toArity() } + +/** + * Return a new DStream by applying incremental `reduceByKey` over a sliding window. + * The reduced value of over a new window is calculated using the old window's reduced value : + * 1. reduce the new values that entered the window (e.g., adding new counts) + * + * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) + * + * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. + * However, it is applicable to only "invertible reduce functions". + * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. + * @param reduceFunc associative and commutative reduce function + * @param invReduceFunc inverse reduce function; such that for all y, invertible x: + * `invReduceFunc(reduceFunc(x, y), x) = y` + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param filterFunc Optional function to filter expired key-value pairs; + * only pairs that satisfy the function are retained + */ +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + reduceFunc: (V, V) -> V, + invReduceFunc: (V, V) -> V, + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + filterFunc: ((Arity2) -> Boolean)? = null, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKeyAndWindow( + reduceFunc, + invReduceFunc, + windowDuration, + slideDuration, + numPartitions, + filterFunc?.let { + { tuple -> + filterFunc(tuple.toArity()) + } + } + ) + .map { it.toArity() } + +/** + * Return a new DStream by applying incremental `reduceByKey` over a sliding window. + * The reduced value of over a new window is calculated using the old window's reduced value : + * 1. reduce the new values that entered the window (e.g., adding new counts) + * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) + * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. + * However, it is applicable to only "invertible reduce functions". + * @param reduceFunc associative and commutative reduce function + * @param invReduceFunc inverse reduce function + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param partitioner partitioner for controlling the partitioning of each RDD in the new + * DStream. + * @param filterFunc Optional function to filter expired key-value pairs; + * only pairs that satisfy the function are retained + */ +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + reduceFunc: (V, V) -> V, + invReduceFunc: (V, V) -> V, + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + partitioner: Partitioner, + filterFunc: ((Arity2) -> Boolean)? = null, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKeyAndWindow( + reduceFunc, + invReduceFunc, + windowDuration, + slideDuration, + partitioner, + filterFunc?.let { + { tuple -> + filterFunc(tuple.toArity()) + } + } + ) + .map { it.toArity() } + +/** + * Return a [MapWithStateDStream] by applying a function to every key-value element of + * `this` stream, while maintaining some state data for each unique key. The mapping function + * and other specification (e.g. partitioners, timeouts, initial state data, etc.) of this + * transformation can be specified using `StateSpec` class. The state data is accessible in + * as a parameter of type `State` in the mapping function. + * + * Example of using `mapWithState`: + * {{{ + * // A mapping function that maintains an integer state and return a String + * def mappingFunction(key: String, value: Option[Int], state: State[Int]): Option[String] = { + * // Use state.exists(), state.get(), state.update() and state.remove() + * // to manage state, and return the necessary string + * } + * + * val spec = StateSpec.function(mappingFunction).numPartitions(10) + * + * val mapWithStateDStream = keyValueDStream.mapWithState[StateType, MappedType](spec) + * }}} + * + * @param spec Specification of this transformation + * @tparam StateType Class type of the state data + * @tparam MappedType Class type of the mapped data + */ +fun JavaDStreamLike, *, *>.mapWithState( + spec: StateSpec, +): JavaMapWithStateDStream = + mapToPair { it.toTuple() } + .mapWithState(spec) + +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of each key. + * In every batch the updateFunc will be called for each state even if there are no new values. + * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. + * @param updateFunc State update function. If `this` function returns None, then + * corresponding state key-value pair will be eliminated. + * @tparam S State type + */ +fun JavaDStreamLike, *, *>.updateStateByKey( + updateFunc: (List, S?) -> S?, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream> = + mapToPair { it.toTuple() } + .updateStateByKey( + { list: List, s: Optional -> + updateFunc(list, s.toNullable()).toOptional() + }, + numPartitions, + ) + .map { it.toArity() } + +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of each key. + * In every batch the updateFunc will be called for each state even if there are no new values. + * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. + * @param updateFunc State update function. Note, that this function may generate a different + * tuple with a different key than the input key. Therefore keys may be removed + * or added in this way. It is up to the developer to decide whether to + * remember the partitioner despite the key being changed. + * @param partitioner Partitioner for controlling the partitioning of each RDD in the new + * DStream + * @tparam S State type + */ +fun JavaDStreamLike, *, *>.updateStateByKey( + updateFunc: (List, S?) -> S?, + partitioner: Partitioner, +): JavaDStream> = + mapToPair { it.toTuple() } + .updateStateByKey( + { list: List, s: Optional -> + updateFunc(list, s.toNullable()).toOptional() + }, + partitioner, + ) + .map { it.toArity() } + +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of the key. + * org.apache.spark.Partitioner is used to control the partitioning of each RDD. + * @param updateFunc State update function. If `this` function returns None, then + * corresponding state key-value pair will be eliminated. + * @param partitioner Partitioner for controlling the partitioning of each RDD in the new + * DStream. + * @param initialRDD initial state value of each key. + * @tparam S State type + */ +fun JavaDStreamLike, *, *>.updateStateByKey( + updateFunc: (List, S?) -> S?, + partitioner: Partitioner, + initialRDD: JavaRDD>, +): JavaDStream> = + mapToPair { it.toTuple() } + .updateStateByKey( + { list: List, s: Optional -> + updateFunc(list, s.toNullable()).toOptional() + }, + partitioner, + initialRDD.mapToPair { it.toTuple() }, + ) + .map { it.toArity() } + +/** + * Return a new DStream by applying a map function to the value of each key-value pairs in + * 'this' DStream without changing the key. + */ +fun JavaDStreamLike, *, *>.mapValues( + mapValuesFunc: (V) -> U, +): JavaDStream> = + mapToPair { it.toTuple() } + .mapValues(mapValuesFunc) + .map { it.toArity() } + +/** + * Return a new DStream by applying a flatmap function to the value of each key-value pairs in + * 'this' DStream without changing the key. + */ +fun JavaDStreamLike, *, *>.flatMapValues( + flatMapValuesFunc: (V) -> Iterator, +): JavaDStream> = + mapToPair { it.toTuple() } + .flatMapValues(flatMapValuesFunc) + .map { it.toArity() } + +/** + * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. + * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. + */ +fun JavaDStreamLike, *, *>.cogroup( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream, Iterable>>> = + mapToPair { it.toTuple() } + .cogroup( + other.mapToPair { it.toTuple() }, + numPartitions, + ) + .map { + c(it._1, it._2.toArity()) + } + +/** + * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. + * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. + */ +fun JavaDStreamLike, *, *>.cogroup( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream, Iterable>>> = + mapToPair { it.toTuple() } + .cogroup( + other.mapToPair { it.toTuple() }, + partitioner, + ) + .map { + c(it._1, it._2.toArity()) + } + +/** + * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. + * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. + */ +fun JavaDStreamLike, *, *>.join( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .join( + other.mapToPair { it.toTuple() }, + numPartitions, + ) + .map { + c(it._1, it._2.toArity()) + } + +/** + * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. + * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. + */ +fun JavaDStreamLike, *, *>.join( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it.toTuple() } + .join( + other.mapToPair { it.toTuple() }, + partitioner, + ) + .map { + c(it._1, it._2.toArity()) + } + +/** + * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and + * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` + * partitions. + */ +fun JavaDStreamLike, *, *>.leftOuterJoin( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .leftOuterJoin( + other.mapToPair { it.toTuple() }, + numPartitions, + ) + .map { + c(it._1, c(it._2._1, it._2._2.toNullable())) + } + +/** + * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and + * `other` DStream. The supplied org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +fun JavaDStreamLike, *, *>.leftOuterJoin( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it.toTuple() } + .leftOuterJoin( + other.mapToPair { it.toTuple() }, + partitioner, + ) + .map { + c(it._1, c(it._2._1, it._2._2.toNullable())) + } + +/** + * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and + * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` + * partitions. + */ +fun JavaDStreamLike, *, *>.rightOuterJoin( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .rightOuterJoin( + other.mapToPair { it.toTuple() }, + numPartitions, + ) + .map { + c(it._1, c(it._2._1.toNullable(), it._2._2)) + } + +/** + * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and + * `other` DStream. The supplied org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +fun JavaDStreamLike, *, *>.rightOuterJoin( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it.toTuple() } + .rightOuterJoin( + other.mapToPair { it.toTuple() }, + partitioner, + ) + .map { + c(it._1, c(it._2._1.toNullable(), it._2._2)) + } + +/** + * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and + * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` + * partitions. + */ +fun JavaDStreamLike, *, *>.fullOuterJoin( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .fullOuterJoin( + other.mapToPair { it.toTuple() }, + numPartitions, + ) + .map { + c(it._1, c(it._2._1.toNullable(), it._2._2.toNullable())) + } + +/** + * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and + * `other` DStream. The supplied org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +fun JavaDStreamLike, *, *>.fullOuterJoin( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it.toTuple() } + .fullOuterJoin( + other.mapToPair { it.toTuple() }, + partitioner, + ) + .map { + c(it._1, c(it._2._1.toNullable(), it._2._2.toNullable())) + } + diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index e0139b9c..d93c7d23 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api import io.kotest.core.spec.style.ShouldSpec @@ -39,4 +58,4 @@ class StreamingTest : ShouldSpec({ } } -}) \ No newline at end of file +}) From 5769e925f705edd74c659d798c2c865d8d44eb54 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 17 Mar 2022 19:41:32 +0100 Subject: [PATCH 095/213] still exploring, just pushing to keep my work safe :) --- .../jetbrains/kotlinx/spark/api/Streaming.kt | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt index 7fc71530..b551b081 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt @@ -19,6 +19,7 @@ */ package org.jetbrains.kotlinx.spark.api +import com.sun.org.apache.xml.internal.serialize.OutputFormat import org.apache.spark.Partitioner import org.apache.spark.api.java.JavaRDD import org.apache.spark.api.java.Optional @@ -601,3 +602,22 @@ fun JavaDStreamLike, *, *>.fullOuterJoin( c(it._1, c(it._2._1.toNullable(), it._2._2.toNullable())) } +/** + * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is + * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". + */ +fun JavaDStreamLike, *, *>.saveAsHadoopFiles( + prefix: String, suffix: String, +): Unit = + mapToPair { it.toTuple() } + .saveAsHadoopFiles(prefix, suffix) + +/** + * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is + * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". + */ +fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( + prefix: String, suffix: String, +): Unit = + mapToPair { it.toTuple() } + .saveAsNewAPIHadoopFiles(prefix, suffix) \ No newline at end of file From 14278ffb5c1eb6691d475642e9efd036fe225f16 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 17 Mar 2022 20:07:14 +0100 Subject: [PATCH 096/213] pairs are recognized too --- .../JavaRecoverableNetworkWordCount.kt | 17 +- .../jetbrains/kotlinx/spark/api/Streaming.kt | 657 +++++++++++++++++- 2 files changed, 631 insertions(+), 43 deletions(-) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt index ef2fc398..f89e09f3 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt @@ -163,21 +163,12 @@ object JavaRecoverableNetworkWordCount { val words = lines.flatMap { it.split(SPACE).iterator() } val wordCounts = words - .mapToPair { c(it, 1).toTuple() } + .map { c(it, 1) } .reduceByKey { a: Int, b: Int -> a + b } -// val wordCounts = words -// .mapToPair { Tuple2(it, 1) } -// .reduceByKey { a: Int, b: Int -> a + b } - -// val wordCounts = words -// .map { it to 1 } -// .reduceByKey { a: Int, b: Int -> a + b } -// -// val wordCounts = words -// .map { c(it, 1) } -// .reduceByKey { a: Int, b: Int -> a + b } - + val wordCounts2 = words + .map { it to 1 } + .reduceByKey { a: Int, b: Int -> a + b } wordCounts.foreachRDD { rdd, time: Time -> diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt index b551b081..f0152f83 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt @@ -50,20 +50,14 @@ fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream< fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = mapToPair(Pair::toTuple) -/** - * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to - * generate the RDDs with Spark's default number of partitions. - */ -fun JavaDStreamLike, *, *>.groupByKey(): JavaDStream>> = - mapToPair { it.toTuple() } - .groupByKey() - .map { it.toArity() } - /** * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to * generate the RDDs with `numPartitions` partitions. */ -fun JavaDStreamLike, *, *>.groupByKey(numPartitions: Int): JavaDStream>> = +@JvmName("groupByKeyArity2") +fun JavaDStreamLike, *, *>.groupByKey( + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = mapToPair { it.toTuple() } .groupByKey(numPartitions) .map { it.toArity() } @@ -72,29 +66,21 @@ fun JavaDStreamLike, *, *>.groupByKey(numPartitions: Int): J * Return a new DStream by applying `groupByKey` on each RDD. The supplied * org.apache.spark.Partitioner is used to control the partitioning of each RDD. */ +@JvmName("groupByKeyArity2") fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = mapToPair { it.toTuple() } .groupByKey(partitioner) .map { it.toArity() } -/** - * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are - * merged using the associative and commutative reduce function. Hash partitioning is used to - * generate the RDDs with Spark's default number of partitions. - */ -fun JavaDStreamLike, *, *>.reduceByKey(reduceFunc: (V, V) -> V): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKey(reduceFunc) - .map { it.toArity() } - /** * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs * with `numPartitions` partitions. */ +@JvmName("reduceByKeyArity2") fun JavaDStreamLike, *, *>.reduceByKey( + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), reduceFunc: (V, V) -> V, - numPartitions: Int, ): JavaDStream> = mapToPair { it.toTuple() } .reduceByKey(reduceFunc, numPartitions) @@ -105,9 +91,10 @@ fun JavaDStreamLike, *, *>.reduceByKey( * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control * the partitioning of each RDD. */ +@JvmName("reduceByKeyArity2") fun JavaDStreamLike, *, *>.reduceByKey( - reduceFunc: (V, V) -> V, partitioner: Partitioner, + reduceFunc: (V, V) -> V, ): JavaDStream> = mapToPair { it.toTuple() } .reduceByKey(reduceFunc, partitioner) @@ -118,6 +105,7 @@ fun JavaDStreamLike, *, *>.reduceByKey( * combineByKey for RDDs. Please refer to combineByKey in * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. */ +@JvmName("combineByKeyArity2") fun JavaDStreamLike, *, *>.combineByKey( createCombiner: (V) -> C, mergeValue: (C, V) -> C, @@ -141,6 +129,7 @@ fun JavaDStreamLike, *, *>.combineByKey( * @param numPartitions number of partitions of each RDD in the new DStream; if not specified * then Spark's default number of partitions will be used */ +@JvmName("groupByKeyAndWindowArity2") fun JavaDStreamLike, *, *>.groupByKeyAndWindow( windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), @@ -161,6 +150,7 @@ fun JavaDStreamLike, *, *>.groupByKeyAndWindow( * @param partitioner partitioner for controlling the partitioning of each RDD in the new * DStream. */ +@JvmName("groupByKeyAndWindowArity2") fun JavaDStreamLike, *, *>.groupByKeyAndWindow( windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), @@ -182,11 +172,12 @@ fun JavaDStreamLike, *, *>.groupByKeyAndWindow( * DStream's batching interval * @param numPartitions number of partitions of each RDD in the new DStream. */ +@JvmName("reduceByKeyAndWindowArity2") fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - reduceFunc: (V, V) -> V, windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + reduceFunc: (V, V) -> V, ): JavaDStream> = mapToPair { it.toTuple() } .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) @@ -204,11 +195,12 @@ fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( * @param partitioner partitioner for controlling the partitioning of each RDD * in the new DStream. */ +@JvmName("reduceByKeyAndWindowArity2") fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - reduceFunc: (V, V) -> V, windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), partitioner: Partitioner, + reduceFunc: (V, V) -> V, ): JavaDStream> = mapToPair { it.toTuple() } .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) @@ -235,13 +227,14 @@ fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( * @param filterFunc Optional function to filter expired key-value pairs; * only pairs that satisfy the function are retained */ +@JvmName("reduceByKeyAndWindowArity2") fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - reduceFunc: (V, V) -> V, invReduceFunc: (V, V) -> V, windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), numPartitions: Int = dstream().ssc().sc().defaultParallelism(), filterFunc: ((Arity2) -> Boolean)? = null, + reduceFunc: (V, V) -> V, ): JavaDStream> = mapToPair { it.toTuple() } .reduceByKeyAndWindow( @@ -277,13 +270,14 @@ fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( * @param filterFunc Optional function to filter expired key-value pairs; * only pairs that satisfy the function are retained */ +@JvmName("reduceByKeyAndWindowArity2") fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - reduceFunc: (V, V) -> V, invReduceFunc: (V, V) -> V, windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), partitioner: Partitioner, filterFunc: ((Arity2) -> Boolean)? = null, + reduceFunc: (V, V) -> V, ): JavaDStream> = mapToPair { it.toTuple() } .reduceByKeyAndWindow( @@ -324,6 +318,7 @@ fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( * @tparam StateType Class type of the state data * @tparam MappedType Class type of the mapped data */ +@JvmName("mapWithStateArity2") fun JavaDStreamLike, *, *>.mapWithState( spec: StateSpec, ): JavaMapWithStateDStream = @@ -339,9 +334,10 @@ fun JavaDStreamLike, *, *>.mapWithSta * corresponding state key-value pair will be eliminated. * @tparam S State type */ +@JvmName("updateStateByKeyArity2") fun JavaDStreamLike, *, *>.updateStateByKey( - updateFunc: (List, S?) -> S?, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + updateFunc: (List, S?) -> S?, ): JavaDStream> = mapToPair { it.toTuple() } .updateStateByKey( @@ -365,9 +361,10 @@ fun JavaDStreamLike, *, *>.updateStateByKey( * DStream * @tparam S State type */ +@JvmName("updateStateByKeyArity2") fun JavaDStreamLike, *, *>.updateStateByKey( - updateFunc: (List, S?) -> S?, partitioner: Partitioner, + updateFunc: (List, S?) -> S?, ): JavaDStream> = mapToPair { it.toTuple() } .updateStateByKey( @@ -389,10 +386,11 @@ fun JavaDStreamLike, *, *>.updateStateByKey( * @param initialRDD initial state value of each key. * @tparam S State type */ +@JvmName("updateStateByKeyArity2") fun JavaDStreamLike, *, *>.updateStateByKey( - updateFunc: (List, S?) -> S?, partitioner: Partitioner, initialRDD: JavaRDD>, + updateFunc: (List, S?) -> S?, ): JavaDStream> = mapToPair { it.toTuple() } .updateStateByKey( @@ -408,6 +406,7 @@ fun JavaDStreamLike, *, *>.updateStateByKey( * Return a new DStream by applying a map function to the value of each key-value pairs in * 'this' DStream without changing the key. */ +@JvmName("mapValuesArity2") fun JavaDStreamLike, *, *>.mapValues( mapValuesFunc: (V) -> U, ): JavaDStream> = @@ -419,6 +418,7 @@ fun JavaDStreamLike, *, *>.mapValues( * Return a new DStream by applying a flatmap function to the value of each key-value pairs in * 'this' DStream without changing the key. */ +@JvmName("flatMapValuesArity2") fun JavaDStreamLike, *, *>.flatMapValues( flatMapValuesFunc: (V) -> Iterator, ): JavaDStream> = @@ -430,6 +430,7 @@ fun JavaDStreamLike, *, *>.flatMapValues( * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. */ +@JvmName("cogroupArity2") fun JavaDStreamLike, *, *>.cogroup( other: JavaDStreamLike, *, *>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), @@ -447,6 +448,7 @@ fun JavaDStreamLike, *, *>.cogroup( * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. */ +@JvmName("cogroupArity2") fun JavaDStreamLike, *, *>.cogroup( other: JavaDStreamLike, *, *>, partitioner: Partitioner, @@ -464,6 +466,7 @@ fun JavaDStreamLike, *, *>.cogroup( * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. */ +@JvmName("joinArity2") fun JavaDStreamLike, *, *>.join( other: JavaDStreamLike, *, *>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), @@ -481,6 +484,7 @@ fun JavaDStreamLike, *, *>.join( * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. */ +@JvmName("joinArity2") fun JavaDStreamLike, *, *>.join( other: JavaDStreamLike, *, *>, partitioner: Partitioner, @@ -499,6 +503,7 @@ fun JavaDStreamLike, *, *>.join( * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` * partitions. */ +@JvmName("leftOuterJoinArity2") fun JavaDStreamLike, *, *>.leftOuterJoin( other: JavaDStreamLike, *, *>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), @@ -517,6 +522,7 @@ fun JavaDStreamLike, *, *>.leftOuterJoin( * `other` DStream. The supplied org.apache.spark.Partitioner is used to control * the partitioning of each RDD. */ +@JvmName("leftOuterJoinArity2") fun JavaDStreamLike, *, *>.leftOuterJoin( other: JavaDStreamLike, *, *>, partitioner: Partitioner, @@ -535,6 +541,7 @@ fun JavaDStreamLike, *, *>.leftOuterJoin( * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` * partitions. */ +@JvmName("rightOuterJoinArity2") fun JavaDStreamLike, *, *>.rightOuterJoin( other: JavaDStreamLike, *, *>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), @@ -553,6 +560,7 @@ fun JavaDStreamLike, *, *>.rightOuterJoin( * `other` DStream. The supplied org.apache.spark.Partitioner is used to control * the partitioning of each RDD. */ +@JvmName("rightOuterJoinArity2") fun JavaDStreamLike, *, *>.rightOuterJoin( other: JavaDStreamLike, *, *>, partitioner: Partitioner, @@ -571,6 +579,7 @@ fun JavaDStreamLike, *, *>.rightOuterJoin( * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` * partitions. */ +@JvmName("fullOuterJoinArity2") fun JavaDStreamLike, *, *>.fullOuterJoin( other: JavaDStreamLike, *, *>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), @@ -589,6 +598,7 @@ fun JavaDStreamLike, *, *>.fullOuterJoin( * `other` DStream. The supplied org.apache.spark.Partitioner is used to control * the partitioning of each RDD. */ +@JvmName("fullOuterJoinArity2") fun JavaDStreamLike, *, *>.fullOuterJoin( other: JavaDStreamLike, *, *>, partitioner: Partitioner, @@ -606,6 +616,7 @@ fun JavaDStreamLike, *, *>.fullOuterJoin( * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". */ +@JvmName("saveAsHadoopFilesArity2") fun JavaDStreamLike, *, *>.saveAsHadoopFiles( prefix: String, suffix: String, ): Unit = @@ -616,8 +627,594 @@ fun JavaDStreamLike, *, *>.saveAsHadoopFiles( * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". */ +@JvmName("saveAsNewAPIHadoopFilesArity2") fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( prefix: String, suffix: String, ): Unit = mapToPair { it.toTuple() } - .saveAsNewAPIHadoopFiles(prefix, suffix) \ No newline at end of file + .saveAsNewAPIHadoopFiles(prefix, suffix) + +/** + * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to + * generate the RDDs with `numPartitions` partitions. + */ +@JvmName("groupByKeyPair") +fun JavaDStreamLike, *, *>.groupByKey( + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .groupByKey(numPartitions) + .map { it.toPair() } + +/** + * Return a new DStream by applying `groupByKey` on each RDD. The supplied + * org.apache.spark.Partitioner is used to control the partitioning of each RDD. + */ +@JvmName("groupByKeyPair") +fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = + mapToPair { it.toTuple() } + .groupByKey(partitioner) + .map { it.toPair() } + +/** + * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are + * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs + * with `numPartitions` partitions. + */ +@JvmName("reduceByKeyPair") +fun JavaDStreamLike, *, *>.reduceByKey( + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKey(reduceFunc, numPartitions) + .map { it.toPair() } + +/** + * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are + * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +@JvmName("reduceByKeyPair") +fun JavaDStreamLike, *, *>.reduceByKey( + partitioner: Partitioner, + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKey(reduceFunc, partitioner) + .map { it.toPair() } + +/** + * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the + * combineByKey for RDDs. Please refer to combineByKey in + * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. + */ +@JvmName("combineByKeyPair") +fun JavaDStreamLike, *, *>.combineByKey( + createCombiner: (V) -> C, + mergeValue: (C, V) -> C, + mergeCombiner: (C, C) -> C, + partitioner: Partitioner, + mapSideCombine: Boolean = true, +): JavaDStream> = + mapToPair { it.toTuple() } + .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) + .map { it.toPair() } + +/** + * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. + * Similar to `DStream.groupByKey()`, but applies it over a sliding window. + * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param numPartitions number of partitions of each RDD in the new DStream; if not specified + * then Spark's default number of partitions will be used + */ +@JvmName("groupByKeyAndWindowPair") +fun JavaDStreamLike, *, *>.groupByKeyAndWindow( + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) + .map { it.toPair() } + +/** + * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. + * Similar to `DStream.groupByKey()`, but applies it over a sliding window. + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param partitioner partitioner for controlling the partitioning of each RDD in the new + * DStream. + */ +@JvmName("groupByKeyAndWindowPair") +fun JavaDStreamLike, *, *>.groupByKeyAndWindow( + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it.toTuple() } + .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) + .map { it.toPair() } + +/** + * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to + * `DStream.reduceByKey()` but applies it over a sliding window. Hash partitioning is used to + * generate the RDDs with `numPartitions` partitions. + * @param reduceFunc associative and commutative reduce function + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param numPartitions number of partitions of each RDD in the new DStream. + */ +@JvmName("reduceByKeyAndWindowPair") +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) + .map { it.toPair() } + +/** + * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to + * `DStream.reduceByKey()`, but applies it over a sliding window. + * @param reduceFunc associative and commutative reduce function + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param partitioner partitioner for controlling the partitioning of each RDD + * in the new DStream. + */ +@JvmName("reduceByKeyAndWindowPair") +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + partitioner: Partitioner, + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) + .map { it.toPair() } + +/** + * Return a new DStream by applying incremental `reduceByKey` over a sliding window. + * The reduced value of over a new window is calculated using the old window's reduced value : + * 1. reduce the new values that entered the window (e.g., adding new counts) + * + * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) + * + * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. + * However, it is applicable to only "invertible reduce functions". + * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. + * @param reduceFunc associative and commutative reduce function + * @param invReduceFunc inverse reduce function; such that for all y, invertible x: + * `invReduceFunc(reduceFunc(x, y), x) = y` + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param filterFunc Optional function to filter expired key-value pairs; + * only pairs that satisfy the function are retained + */ +@JvmName("reduceByKeyAndWindowPair") +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + invReduceFunc: (V, V) -> V, + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + filterFunc: ((Pair) -> Boolean)? = null, + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKeyAndWindow( + reduceFunc, + invReduceFunc, + windowDuration, + slideDuration, + numPartitions, + filterFunc?.let { + { tuple -> + filterFunc(tuple.toPair()) + } + } + ) + .map { it.toPair() } + +/** + * Return a new DStream by applying incremental `reduceByKey` over a sliding window. + * The reduced value of over a new window is calculated using the old window's reduced value : + * 1. reduce the new values that entered the window (e.g., adding new counts) + * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) + * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. + * However, it is applicable to only "invertible reduce functions". + * @param reduceFunc associative and commutative reduce function + * @param invReduceFunc inverse reduce function + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param partitioner partitioner for controlling the partitioning of each RDD in the new + * DStream. + * @param filterFunc Optional function to filter expired key-value pairs; + * only pairs that satisfy the function are retained + */ +@JvmName("reduceByKeyAndWindowPair") +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + invReduceFunc: (V, V) -> V, + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + partitioner: Partitioner, + filterFunc: ((Pair) -> Boolean)? = null, + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it.toTuple() } + .reduceByKeyAndWindow( + reduceFunc, + invReduceFunc, + windowDuration, + slideDuration, + partitioner, + filterFunc?.let { + { tuple -> + filterFunc(tuple.toPair()) + } + } + ) + .map { it.toPair() } + +/** + * Return a [MapWithStateDStream] by applying a function to every key-value element of + * `this` stream, while maintaining some state data for each unique key. The mapping function + * and other specification (e.g. partitioners, timeouts, initial state data, etc.) of this + * transformation can be specified using `StateSpec` class. The state data is accessible in + * as a parameter of type `State` in the mapping function. + * + * Example of using `mapWithState`: + * {{{ + * // A mapping function that maintains an integer state and return a String + * def mappingFunction(key: String, value: Option[Int], state: State[Int]): Option[String] = { + * // Use state.exists(), state.get(), state.update() and state.remove() + * // to manage state, and return the necessary string + * } + * + * val spec = StateSpec.function(mappingFunction).numPartitions(10) + * + * val mapWithStateDStream = keyValueDStream.mapWithState[StateType, MappedType](spec) + * }}} + * + * @param spec Specification of this transformation + * @tparam StateType Class type of the state data + * @tparam MappedType Class type of the mapped data + */ +@JvmName("mapWithStatePair") +fun JavaDStreamLike, *, *>.mapWithState( + spec: StateSpec, +): JavaMapWithStateDStream = + mapToPair { it.toTuple() } + .mapWithState(spec) + +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of each key. + * In every batch the updateFunc will be called for each state even if there are no new values. + * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. + * @param updateFunc State update function. If `this` function returns None, then + * corresponding state key-value pair will be eliminated. + * @tparam S State type + */ +@JvmName("updateStateByKeyPair") +fun JavaDStreamLike, *, *>.updateStateByKey( + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + updateFunc: (List, S?) -> S?, +): JavaDStream> = + mapToPair { it.toTuple() } + .updateStateByKey( + { list: List, s: Optional -> + updateFunc(list, s.toNullable()).toOptional() + }, + numPartitions, + ) + .map { it.toPair() } + +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of each key. + * In every batch the updateFunc will be called for each state even if there are no new values. + * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. + * @param updateFunc State update function. Note, that this function may generate a different + * tuple with a different key than the input key. Therefore keys may be removed + * or added in this way. It is up to the developer to decide whether to + * remember the partitioner despite the key being changed. + * @param partitioner Partitioner for controlling the partitioning of each RDD in the new + * DStream + * @tparam S State type + */ +@JvmName("updateStateByKeyPair") +fun JavaDStreamLike, *, *>.updateStateByKey( + partitioner: Partitioner, + updateFunc: (List, S?) -> S?, +): JavaDStream> = + mapToPair { it.toTuple() } + .updateStateByKey( + { list: List, s: Optional -> + updateFunc(list, s.toNullable()).toOptional() + }, + partitioner, + ) + .map { it.toPair() } + +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of the key. + * org.apache.spark.Partitioner is used to control the partitioning of each RDD. + * @param updateFunc State update function. If `this` function returns None, then + * corresponding state key-value pair will be eliminated. + * @param partitioner Partitioner for controlling the partitioning of each RDD in the new + * DStream. + * @param initialRDD initial state value of each key. + * @tparam S State type + */ +@JvmName("updateStateByKeyPair") +fun JavaDStreamLike, *, *>.updateStateByKey( + partitioner: Partitioner, + initialRDD: JavaRDD>, + updateFunc: (List, S?) -> S?, +): JavaDStream> = + mapToPair { it.toTuple() } + .updateStateByKey( + { list: List, s: Optional -> + updateFunc(list, s.toNullable()).toOptional() + }, + partitioner, + initialRDD.mapToPair { it.toTuple() }, + ) + .map { it.toPair() } + +/** + * Return a new DStream by applying a map function to the value of each key-value pairs in + * 'this' DStream without changing the key. + */ +@JvmName("mapValuesPair") +fun JavaDStreamLike, *, *>.mapValues( + mapValuesFunc: (V) -> U, +): JavaDStream> = + mapToPair { it.toTuple() } + .mapValues(mapValuesFunc) + .map { it.toPair() } + +/** + * Return a new DStream by applying a flatmap function to the value of each key-value pairs in + * 'this' DStream without changing the key. + */ +@JvmName("flatMapValuesPair") +fun JavaDStreamLike, *, *>.flatMapValues( + flatMapValuesFunc: (V) -> Iterator, +): JavaDStream> = + mapToPair { it.toTuple() } + .flatMapValues(flatMapValuesFunc) + .map { it.toPair() } + +/** + * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. + * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. + */ +@JvmName("cogroupPair") +fun JavaDStreamLike, *, *>.cogroup( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream, Iterable>>> = + mapToPair { it.toTuple() } + .cogroup( + other.mapToPair { it.toTuple() }, + numPartitions, + ) + .map { + Pair(it._1, it._2.toPair()) + } + +/** + * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. + * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. + */ +@JvmName("cogroupPair") +fun JavaDStreamLike, *, *>.cogroup( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream, Iterable>>> = + mapToPair { it.toTuple() } + .cogroup( + other.mapToPair { it.toTuple() }, + partitioner, + ) + .map { + Pair(it._1, it._2.toPair()) + } + +/** + * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. + * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. + */ +@JvmName("joinPair") +fun JavaDStreamLike, *, *>.join( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .join( + other.mapToPair { it.toTuple() }, + numPartitions, + ) + .map { + Pair(it._1, it._2.toPair()) + } + +/** + * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. + * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. + */ +@JvmName("joinPair") +fun JavaDStreamLike, *, *>.join( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it.toTuple() } + .join( + other.mapToPair { it.toTuple() }, + partitioner, + ) + .map { + Pair(it._1, it._2.toPair()) + } + +/** + * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and + * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` + * partitions. + */ +@JvmName("leftOuterJoinPair") +fun JavaDStreamLike, *, *>.leftOuterJoin( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .leftOuterJoin( + other.mapToPair { it.toTuple() }, + numPartitions, + ) + .map { + Pair(it._1, Pair(it._2._1, it._2._2.toNullable())) + } + +/** + * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and + * `other` DStream. The supplied org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +@JvmName("leftOuterJoinPair") +fun JavaDStreamLike, *, *>.leftOuterJoin( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it.toTuple() } + .leftOuterJoin( + other.mapToPair { it.toTuple() }, + partitioner, + ) + .map { + Pair(it._1, Pair(it._2._1, it._2._2.toNullable())) + } + +/** + * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and + * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` + * partitions. + */ +@JvmName("rightOuterJoinPair") +fun JavaDStreamLike, *, *>.rightOuterJoin( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .rightOuterJoin( + other.mapToPair { it.toTuple() }, + numPartitions, + ) + .map { + Pair(it._1, Pair(it._2._1.toNullable(), it._2._2)) + } + +/** + * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and + * `other` DStream. The supplied org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +@JvmName("rightOuterJoinPair") +fun JavaDStreamLike, *, *>.rightOuterJoin( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it.toTuple() } + .rightOuterJoin( + other.mapToPair { it.toTuple() }, + partitioner, + ) + .map { + Pair(it._1, Pair(it._2._1.toNullable(), it._2._2)) + } + +/** + * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and + * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` + * partitions. + */ +@JvmName("fullOuterJoinPair") +fun JavaDStreamLike, *, *>.fullOuterJoin( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it.toTuple() } + .fullOuterJoin( + other.mapToPair { it.toTuple() }, + numPartitions, + ) + .map { + Pair(it._1, Pair(it._2._1.toNullable(), it._2._2.toNullable())) + } + +/** + * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and + * `other` DStream. The supplied org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +@JvmName("fullOuterJoinPair") +fun JavaDStreamLike, *, *>.fullOuterJoin( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it.toTuple() } + .fullOuterJoin( + other.mapToPair { it.toTuple() }, + partitioner, + ) + .map { + Pair(it._1, Pair(it._2._1.toNullable(), it._2._2.toNullable())) + } + +/** + * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is + * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". + */ +@JvmName("saveAsHadoopFilesPair") +fun JavaDStreamLike, *, *>.saveAsHadoopFiles( + prefix: String, suffix: String, +): Unit = + mapToPair { it.toTuple() } + .saveAsHadoopFiles(prefix, suffix) + +/** + * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is + * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". + */ +@JvmName("saveAsNewAPIHadoopFilesPair") +fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( + prefix: String, suffix: String, +): Unit = + mapToPair { it.toTuple() } + .saveAsNewAPIHadoopFiles(prefix, suffix) + From eec03cbb80b80b5e2a4b2ead8c64c221f54d9134 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 17 Mar 2022 20:11:12 +0100 Subject: [PATCH 097/213] and tuples cause why not --- .../JavaRecoverableNetworkWordCount.kt | 4 + .../jetbrains/kotlinx/spark/api/Streaming.kt | 591 ++++++++++++++++++ 2 files changed, 595 insertions(+) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt index f89e09f3..4f06dd75 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt @@ -170,6 +170,10 @@ object JavaRecoverableNetworkWordCount { .map { it to 1 } .reduceByKey { a: Int, b: Int -> a + b } + val wordCounts3 = words + .map { Tuple2(it, 1) } + .reduceByKey { a: Int, b: Int -> a + b } + wordCounts.foreachRDD { rdd, time: Time -> // Get or register the excludeList Broadcast diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt index f0152f83..bdad23e1 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt @@ -634,6 +634,10 @@ fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( mapToPair { it.toTuple() } .saveAsNewAPIHadoopFiles(prefix, suffix) + + + + /** * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to * generate the RDDs with `numPartitions` partitions. @@ -1218,3 +1222,590 @@ fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( mapToPair { it.toTuple() } .saveAsNewAPIHadoopFiles(prefix, suffix) + + + + +/** + * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to + * generate the RDDs with `numPartitions` partitions. + */ +@JvmName("groupByKeyTuple2") +fun JavaDStreamLike, *, *>.groupByKey( + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it } + .groupByKey(numPartitions) + .map { it } + +/** + * Return a new DStream by applying `groupByKey` on each RDD. The supplied + * org.apache.spark.Partitioner is used to control the partitioning of each RDD. + */ +@JvmName("groupByKeyTuple2") +fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = + mapToPair { it } + .groupByKey(partitioner) + .map { it } + +/** + * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are + * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs + * with `numPartitions` partitions. + */ +@JvmName("reduceByKeyTuple2") +fun JavaDStreamLike, *, *>.reduceByKey( + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it } + .reduceByKey(reduceFunc, numPartitions) + .map { it } + +/** + * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are + * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +@JvmName("reduceByKeyTuple2") +fun JavaDStreamLike, *, *>.reduceByKey( + partitioner: Partitioner, + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it } + .reduceByKey(reduceFunc, partitioner) + .map { it } + +/** + * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the + * combineByKey for RDDs. Please refer to combineByKey in + * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. + */ +@JvmName("combineByKeyTuple2") +fun JavaDStreamLike, *, *>.combineByKey( + createCombiner: (V) -> C, + mergeValue: (C, V) -> C, + mergeCombiner: (C, C) -> C, + partitioner: Partitioner, + mapSideCombine: Boolean = true, +): JavaDStream> = + mapToPair { it } + .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) + .map { it } + +/** + * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. + * Similar to `DStream.groupByKey()`, but applies it over a sliding window. + * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param numPartitions number of partitions of each RDD in the new DStream; if not specified + * then Spark's default number of partitions will be used + */ +@JvmName("groupByKeyAndWindowTuple2") +fun JavaDStreamLike, *, *>.groupByKeyAndWindow( + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it } + .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) + .map { it } + +/** + * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. + * Similar to `DStream.groupByKey()`, but applies it over a sliding window. + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param partitioner partitioner for controlling the partitioning of each RDD in the new + * DStream. + */ +@JvmName("groupByKeyAndWindowTuple2") +fun JavaDStreamLike, *, *>.groupByKeyAndWindow( + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it } + .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) + .map { it } + +/** + * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to + * `DStream.reduceByKey()` but applies it over a sliding window. Hash partitioning is used to + * generate the RDDs with `numPartitions` partitions. + * @param reduceFunc associative and commutative reduce function + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param numPartitions number of partitions of each RDD in the new DStream. + */ +@JvmName("reduceByKeyAndWindowTuple2") +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it } + .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) + .map { it } + +/** + * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to + * `DStream.reduceByKey()`, but applies it over a sliding window. + * @param reduceFunc associative and commutative reduce function + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param partitioner partitioner for controlling the partitioning of each RDD + * in the new DStream. + */ +@JvmName("reduceByKeyAndWindowTuple2") +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + partitioner: Partitioner, + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it } + .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) + .map { it } + +/** + * Return a new DStream by applying incremental `reduceByKey` over a sliding window. + * The reduced value of over a new window is calculated using the old window's reduced value : + * 1. reduce the new values that entered the window (e.g., adding new counts) + * + * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) + * + * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. + * However, it is applicable to only "invertible reduce functions". + * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. + * @param reduceFunc associative and commutative reduce function + * @param invReduceFunc inverse reduce function; such that for all y, invertible x: + * `invReduceFunc(reduceFunc(x, y), x) = y` + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param filterFunc Optional function to filter expired key-value pairs; + * only pairs that satisfy the function are retained + */ +@JvmName("reduceByKeyAndWindowTuple2") +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + invReduceFunc: (V, V) -> V, + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + filterFunc: ((Tuple2) -> Boolean)? = null, + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it } + .reduceByKeyAndWindow( + reduceFunc, + invReduceFunc, + windowDuration, + slideDuration, + numPartitions, + filterFunc?.let { + { tuple -> + filterFunc(tuple) + } + } + ) + .map { it } + +/** + * Return a new DStream by applying incremental `reduceByKey` over a sliding window. + * The reduced value of over a new window is calculated using the old window's reduced value : + * 1. reduce the new values that entered the window (e.g., adding new counts) + * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) + * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. + * However, it is applicable to only "invertible reduce functions". + * @param reduceFunc associative and commutative reduce function + * @param invReduceFunc inverse reduce function + * @param windowDuration width of the window; must be a multiple of this DStream's + * batching interval + * @param slideDuration sliding interval of the window (i.e., the interval after which + * the new DStream will generate RDDs); must be a multiple of this + * DStream's batching interval + * @param partitioner partitioner for controlling the partitioning of each RDD in the new + * DStream. + * @param filterFunc Optional function to filter expired key-value pairs; + * only pairs that satisfy the function are retained + */ +@JvmName("reduceByKeyAndWindowTuple2") +fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( + invReduceFunc: (V, V) -> V, + windowDuration: Duration, + slideDuration: Duration = dstream().slideDuration(), + partitioner: Partitioner, + filterFunc: ((Tuple2) -> Boolean)? = null, + reduceFunc: (V, V) -> V, +): JavaDStream> = + mapToPair { it } + .reduceByKeyAndWindow( + reduceFunc, + invReduceFunc, + windowDuration, + slideDuration, + partitioner, + filterFunc?.let { + { tuple -> + filterFunc(tuple) + } + } + ) + .map { it } + +/** + * Return a [MapWithStateDStream] by applying a function to every key-value element of + * `this` stream, while maintaining some state data for each unique key. The mapping function + * and other specification (e.g. partitioners, timeouts, initial state data, etc.) of this + * transformation can be specified using `StateSpec` class. The state data is accessible in + * as a parameter of type `State` in the mapping function. + * + * Example of using `mapWithState`: + * {{{ + * // A mapping function that maintains an integer state and return a String + * def mappingFunction(key: String, value: Option[Int], state: State[Int]): Option[String] = { + * // Use state.exists(), state.get(), state.update() and state.remove() + * // to manage state, and return the necessary string + * } + * + * val spec = StateSpec.function(mappingFunction).numPartitions(10) + * + * val mapWithStateDStream = keyValueDStream.mapWithState[StateType, MappedType](spec) + * }}} + * + * @param spec Specification of this transformation + * @tparam StateType Class type of the state data + * @tparam MappedType Class type of the mapped data + */ +@JvmName("mapWithStateTuple2") +fun JavaDStreamLike, *, *>.mapWithState( + spec: StateSpec, +): JavaMapWithStateDStream = + mapToPair { it } + .mapWithState(spec) + +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of each key. + * In every batch the updateFunc will be called for each state even if there are no new values. + * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. + * @param updateFunc State update function. If `this` function returns None, then + * corresponding state key-value pair will be eliminated. + * @tparam S State type + */ +@JvmName("updateStateByKeyTuple2") +fun JavaDStreamLike, *, *>.updateStateByKey( + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + updateFunc: (List, S?) -> S?, +): JavaDStream> = + mapToPair { it } + .updateStateByKey( + { list: List, s: Optional -> + updateFunc(list, s.toNullable()).toOptional() + }, + numPartitions, + ) + .map { it } + +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of each key. + * In every batch the updateFunc will be called for each state even if there are no new values. + * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. + * @param updateFunc State update function. Note, that this function may generate a different + * tuple with a different key than the input key. Therefore keys may be removed + * or added in this way. It is up to the developer to decide whether to + * remember the partitioner despite the key being changed. + * @param partitioner Partitioner for controlling the partitioning of each RDD in the new + * DStream + * @tparam S State type + */ +@JvmName("updateStateByKeyTuple2") +fun JavaDStreamLike, *, *>.updateStateByKey( + partitioner: Partitioner, + updateFunc: (List, S?) -> S?, +): JavaDStream> = + mapToPair { it } + .updateStateByKey( + { list: List, s: Optional -> + updateFunc(list, s.toNullable()).toOptional() + }, + partitioner, + ) + .map { it } + +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of the key. + * org.apache.spark.Partitioner is used to control the partitioning of each RDD. + * @param updateFunc State update function. If `this` function returns None, then + * corresponding state key-value pair will be eliminated. + * @param partitioner Partitioner for controlling the partitioning of each RDD in the new + * DStream. + * @param initialRDD initial state value of each key. + * @tparam S State type + */ +@JvmName("updateStateByKeyTuple2") +fun JavaDStreamLike, *, *>.updateStateByKey( + partitioner: Partitioner, + initialRDD: JavaRDD>, + updateFunc: (List, S?) -> S?, +): JavaDStream> = + mapToPair { it } + .updateStateByKey( + { list: List, s: Optional -> + updateFunc(list, s.toNullable()).toOptional() + }, + partitioner, + initialRDD.mapToPair { it }, + ) + .map { it } + +/** + * Return a new DStream by applying a map function to the value of each key-value pairs in + * 'this' DStream without changing the key. + */ +@JvmName("mapValuesTuple2") +fun JavaDStreamLike, *, *>.mapValues( + mapValuesFunc: (V) -> U, +): JavaDStream> = + mapToPair { it } + .mapValues(mapValuesFunc) + .map { it } + +/** + * Return a new DStream by applying a flatmap function to the value of each key-value pairs in + * 'this' DStream without changing the key. + */ +@JvmName("flatMapValuesTuple2") +fun JavaDStreamLike, *, *>.flatMapValues( + flatMapValuesFunc: (V) -> Iterator, +): JavaDStream> = + mapToPair { it } + .flatMapValues(flatMapValuesFunc) + .map { it } + +/** + * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. + * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. + */ +@JvmName("cogroupTuple2") +fun JavaDStreamLike, *, *>.cogroup( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream, Iterable>>> = + mapToPair { it } + .cogroup( + other.mapToPair { it }, + numPartitions, + ) + .map { + Tuple2(it._1, it._2) + } + +/** + * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. + * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. + */ +@JvmName("cogroupTuple2") +fun JavaDStreamLike, *, *>.cogroup( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream, Iterable>>> = + mapToPair { it } + .cogroup( + other.mapToPair { it }, + partitioner, + ) + .map { + Tuple2(it._1, it._2) + } + +/** + * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. + * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. + */ +@JvmName("joinTuple2") +fun JavaDStreamLike, *, *>.join( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it } + .join( + other.mapToPair { it }, + numPartitions, + ) + .map { + Tuple2(it._1, it._2) + } + +/** + * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. + * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. + */ +@JvmName("joinTuple2") +fun JavaDStreamLike, *, *>.join( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it } + .join( + other.mapToPair { it }, + partitioner, + ) + .map { + Tuple2(it._1, it._2) + } + +/** + * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and + * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` + * partitions. + */ +@JvmName("leftOuterJoinTuple2") +fun JavaDStreamLike, *, *>.leftOuterJoin( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it } + .leftOuterJoin( + other.mapToPair { it }, + numPartitions, + ) + .map { + Tuple2(it._1, Tuple2(it._2._1, it._2._2.toNullable())) + } + +/** + * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and + * `other` DStream. The supplied org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +@JvmName("leftOuterJoinTuple2") +fun JavaDStreamLike, *, *>.leftOuterJoin( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it } + .leftOuterJoin( + other.mapToPair { it }, + partitioner, + ) + .map { + Tuple2(it._1, Tuple2(it._2._1, it._2._2.toNullable())) + } + +/** + * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and + * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` + * partitions. + */ +@JvmName("rightOuterJoinTuple2") +fun JavaDStreamLike, *, *>.rightOuterJoin( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it } + .rightOuterJoin( + other.mapToPair { it }, + numPartitions, + ) + .map { + Tuple2(it._1, Tuple2(it._2._1.toNullable(), it._2._2)) + } + +/** + * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and + * `other` DStream. The supplied org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +@JvmName("rightOuterJoinTuple2") +fun JavaDStreamLike, *, *>.rightOuterJoin( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it } + .rightOuterJoin( + other.mapToPair { it }, + partitioner, + ) + .map { + Tuple2(it._1, Tuple2(it._2._1.toNullable(), it._2._2)) + } + +/** + * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and + * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` + * partitions. + */ +@JvmName("fullOuterJoinTuple2") +fun JavaDStreamLike, *, *>.fullOuterJoin( + other: JavaDStreamLike, *, *>, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +): JavaDStream>> = + mapToPair { it } + .fullOuterJoin( + other.mapToPair { it }, + numPartitions, + ) + .map { + Tuple2(it._1, Tuple2(it._2._1.toNullable(), it._2._2.toNullable())) + } + +/** + * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and + * `other` DStream. The supplied org.apache.spark.Partitioner is used to control + * the partitioning of each RDD. + */ +@JvmName("fullOuterJoinTuple2") +fun JavaDStreamLike, *, *>.fullOuterJoin( + other: JavaDStreamLike, *, *>, + partitioner: Partitioner, +): JavaDStream>> = + mapToPair { it } + .fullOuterJoin( + other.mapToPair { it }, + partitioner, + ) + .map { + Tuple2(it._1, Tuple2(it._2._1.toNullable(), it._2._2.toNullable())) + } + +/** + * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is + * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". + */ +@JvmName("saveAsHadoopFilesTuple2") +fun JavaDStreamLike, *, *>.saveAsHadoopFiles( + prefix: String, suffix: String, +): Unit = + mapToPair { it } + .saveAsHadoopFiles(prefix, suffix) + +/** + * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is + * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". + */ +@JvmName("saveAsNewAPIHadoopFilesTuple2") +fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( + prefix: String, suffix: String, +): Unit = + mapToPair { it } + .saveAsNewAPIHadoopFiles(prefix, suffix) From bc9fd3ea62b87105fd4a8fa96760d6e1632848ad Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 21 Mar 2022 18:25:11 +0100 Subject: [PATCH 098/213] added Option(al) converters, testing fakeClassTag(). Working with streaming pair-functions, decided to go tuple-first instead of arities --- .../GeneratePairStreamingFunctions.kt | 87 - .../JavaRecoverableNetworkWordCount.kt | 91 +- .../kotlinx/spark/api/Conversions.kt | 608 +++- .../jetbrains/kotlinx/spark/api/Streaming.kt | 2658 ++++++++--------- 4 files changed, 1952 insertions(+), 1492 deletions(-) delete mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/GeneratePairStreamingFunctions.kt diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/GeneratePairStreamingFunctions.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/GeneratePairStreamingFunctions.kt deleted file mode 100644 index 740774e2..00000000 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/GeneratePairStreamingFunctions.kt +++ /dev/null @@ -1,87 +0,0 @@ -/*- - * =LICENSE= - * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) - * ---------- - * Copyright (C) 2019 - 2022 JetBrains - * ---------- - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * =LICENSEEND= - */ -package org.jetbrains.kotlinx.spark.examples - -import org.apache.spark.streaming.dstream.PairDStreamFunctions -import org.intellij.lang.annotations.Language -import kotlin.reflect.KFunction -import kotlin.reflect.full.functions - - -object GeneratePairStreamingFunctions { - -// fun JavaDStream>.reduceByKey(func: (V, V) -> V): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKey(func) -// .map { it.toArity() } - - @JvmStatic - fun main(args: Array) { - - val klass = PairDStreamFunctions::class - - val functions = klass.functions - - for (function: KFunction<*> in functions) with(function) { - - val types = (typeParameters.map { it.name }.toSet() + "K" + "V").joinToString() - - val parameterString = parameters.drop(1).joinToString { - "${it.name}: ${it.type}" - } - val parameterStringNoType = parameters.drop(1).joinToString { it.name!! } - - @Language("kt") - val new = """ - fun <$types> JavaDStream>.$name($parameterString) - - """.trimIndent() - -// -// val new = -// if (returnType.toString().contains("org.apache.spark.streaming.api.java.JavaPairDStream")) { -// val newReturnType = returnType.toString() -// .replaceFirst("JavaPairDStream<", "JavaDStream", ">") -// -// """ -// fun <$types> JavaDStream>.$name($parameterString): $newReturnType = -// mapToPair { it.toTuple() } -// .$name($parameterStringNoType) -// .map { it.toArity() } -// -// """.trimIndent() -// } else { -// """ -// fun <$types> JavaDStream>.$name($parameterString): $returnType = -// mapToPair { it.toTuple() } -// .$name($parameterStringNoType) -// -// """.trimIndent() -// } -// .replace("!", "") -// .replace("(Mutable)", "") -// -// if ("\$" !in new) println(new) - } - - - } -} diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt index 4f06dd75..9b64149e 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt @@ -17,21 +17,33 @@ * limitations under the License. * =LICENSEEND= */ +@file:OptIn(ExperimentalTime::class) + package org.jetbrains.kotlinx.spark.examples import com.google.common.io.Files import org.apache.spark.api.java.JavaPairRDD import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.broadcast.Broadcast +import org.apache.spark.streaming.Duration import org.apache.spark.streaming.Durations import org.apache.spark.streaming.Time +import org.apache.spark.streaming.api.java.JavaDStream +import org.apache.spark.streaming.api.java.JavaPairDStream +import org.apache.spark.streaming.dstream.DStream +import org.apache.spark.streaming.dstream.PairDStreamFunctions import org.apache.spark.util.LongAccumulator import org.jetbrains.kotlinx.spark.api.* +import scala.Tuple1 import scala.Tuple2 +import scala.reflect.ClassTag import java.io.File import java.nio.charset.Charset import java.util.regex.Pattern +import kotlin.experimental.ExperimentalTypeInference import kotlin.system.exitProcess +import kotlin.time.ExperimentalTime +import kotlin.time.measureTimedValue /** @@ -144,6 +156,8 @@ object JavaRecoverableNetworkWordCount { } } + + @OptIn(ExperimentalTypeInference::class) @Suppress("UnstableApiUsage") private fun KSparkStreamingSession.createContext( ip: String, @@ -162,36 +176,75 @@ object JavaRecoverableNetworkWordCount { val words = lines.flatMap { it.split(SPACE).iterator() } - val wordCounts = words - .map { c(it, 1) } - .reduceByKey { a: Int, b: Int -> a + b } +// val wordCounts = words +// .map { c(it, 1) } +// .reduceByKey { a, b -> a + b } +// .reduceByKey { a, b -> a + b } +// .reduceByKey { a, b -> a + b } +// .reduceByKey { a, b -> a + b } +// .reduceByKey { a, b -> a + b } +// .reduceByKey { a, b -> a + b } +// .reduceByKey { a, b -> a + b } +// .reduceByKey { a, b -> a + b } + + val wordCounts4 = words + .mapToPair { Tuple2(it, 1) } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + - val wordCounts2 = words - .map { it to 1 } - .reduceByKey { a: Int, b: Int -> a + b } +// val wordCounts2 = words +// .map { it to 1 } +// .reduceByKey { a, b -> a + b } val wordCounts3 = words .map { Tuple2(it, 1) } - .reduceByKey { a: Int, b: Int -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } + .reduceByKey { a, b -> a + b } - wordCounts.foreachRDD { rdd, time: Time -> +// val wordCounts5 = words +// .dstream() +// .map({ Tuple2(it, 1) }, fakeClassTag()) +// .let { DStream.toPairDStreamFunctions(it, fakeClassTag(), fakeClassTag(), null) } +// .reduceByKey { a, b -> a + b } +// .let { JavaDStream(it, fakeClassTag()) } + + wordCounts3.foreachRDD { rdd, time: Time -> + val sc = JavaSparkContext(rdd.context()) // Get or register the excludeList Broadcast - val excludeList = JavaWordExcludeList.getInstance(JavaSparkContext(rdd.context())) + val excludeList = JavaWordExcludeList.getInstance(sc) // Get or register the droppedWordsCounter Accumulator - val droppedWordsCounter = JavaDroppedWordsCounter.getInstance(JavaSparkContext(rdd.context())) + val droppedWordsCounter = JavaDroppedWordsCounter.getInstance(sc) // Use excludeList to drop words and use droppedWordsCounter to count them - val counts = rdd.filter { wordCount -> - if (excludeList.value().contains(wordCount._1)) { - droppedWordsCounter.add(wordCount._2.toLong()) - false - } else { - true - } - }.collect().toString() - val output = "Counts at time $time $counts" + val (counts, duration) = measureTimedValue { + rdd.filter { wordCount -> + if (excludeList.value().contains(wordCount._1)) { + droppedWordsCounter.add(wordCount._2.toLong()) + false + } else { + true + } + }.collect() + } + + println("Debug: ${rdd.toDebugString()}") + + val output = "Counts at time $time $counts\n$duration" println(output) println("Dropped ${droppedWordsCounter.value()} word(s) totally") println("Appending to " + outputFile.absolutePath) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index 2863ad08..f644dfbf 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -30,6 +30,7 @@ package org.jetbrains.kotlinx.spark.api import org.apache.spark.api.java.Optional import scala.* import scala.collection.JavaConverters +import scala.reflect.ClassTag import java.util.* import java.util.Enumeration import java.util.concurrent.ConcurrentMap @@ -46,16 +47,30 @@ import scala.collection.mutable.Set as ScalaMutableSet /** Converts Scala [Option] to Kotlin nullable. */ -fun Option.toNullable(): T? = getOrElse { null } +fun Option.getOrNull(): T? = getOrElse(null) /** Converts nullable value to Scala [Option]. */ -fun T?.toOption(): Option = Option.apply(this) +fun T?.asOption(): Option = Option.apply(this) /** Converts [Optional] to Kotlin nullable. */ -fun Optional.toNullable(): T? = orElse(null) +fun Optional.getOrNull(): T? = orNull() /** Converts nullable value to [Optional]. */ -fun T?.toOptional(): Optional = Optional.ofNullable(this) +fun T?.asOptional(): Optional = Optional.ofNullable(this) + +/** + * TODO test + * Produces a ClassTag[T], which is actually just a casted ClassTag[AnyRef]. + * + * This method is used to keep ClassTags out of the external Java API, as the Java compiler + * cannot produce them automatically. While this ClassTag-faking does please the compiler, + * it can cause problems at runtime if the Scala API relies on ClassTags for correctness. + * + * Often, though, a ClassTag[AnyRef] will not lead to incorrect behavior, just worse performance + * or security issues. For instance, an Array[AnyRef] can hold any type T, but may lose primitive + * specialization. + */ +fun fakeClassTag(): ClassTag = ClassTag.AnyRef() as ClassTag /** * @see JavaConverters.asScalaIterator for more information. @@ -241,97 +256,356 @@ fun Tuple3.toArity(): Arity3 = Arity3 Tuple4.toArity(): Arity4 = Arity4(this._1(), this._2(), this._3(), this._4()) +fun Tuple4.toArity(): Arity4 = + Arity4(this._1(), this._2(), this._3(), this._4()) /** * Returns a new Arity5 based on this Tuple5. **/ -fun Tuple5.toArity(): Arity5 = Arity5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple5.toArity(): Arity5 = + Arity5(this._1(), this._2(), this._3(), this._4(), this._5()) /** * Returns a new Arity6 based on this Tuple6. **/ -fun Tuple6.toArity(): Arity6 = Arity6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple6.toArity(): Arity6 = + Arity6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) /** * Returns a new Arity7 based on this Tuple7. **/ -fun Tuple7.toArity(): Arity7 = Arity7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple7.toArity(): Arity7 = + Arity7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) /** * Returns a new Arity8 based on this Tuple8. **/ -fun Tuple8.toArity(): Arity8 = Arity8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8.toArity(): Arity8 = + Arity8( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8() + ) /** * Returns a new Arity9 based on this Tuple9. **/ -fun Tuple9.toArity(): Arity9 = Arity9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9.toArity(): Arity9 = + Arity9( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9() + ) /** * Returns a new Arity10 based on this Tuple10. **/ -fun Tuple10.toArity(): Arity10 = Arity10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10.toArity(): Arity10 = + Arity10( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10() + ) /** * Returns a new Arity11 based on this Tuple11. **/ -fun Tuple11.toArity(): Arity11 = Arity11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11.toArity(): Arity11 = + Arity11( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11() + ) /** * Returns a new Arity12 based on this Tuple12. **/ -fun Tuple12.toArity(): Arity12 = Arity12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12.toArity(): Arity12 = + Arity12( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12() + ) /** * Returns a new Arity13 based on this Tuple13. **/ -fun Tuple13.toArity(): Arity13 = Arity13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13.toArity(): Arity13 = + Arity13( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12(), + this._13() + ) /** * Returns a new Arity14 based on this Tuple14. **/ -fun Tuple14.toArity(): Arity14 = Arity14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14.toArity(): Arity14 = + Arity14( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12(), + this._13(), + this._14() + ) /** * Returns a new Arity15 based on this Tuple15. **/ -fun Tuple15.toArity(): Arity15 = Arity15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15.toArity(): Arity15 = + Arity15( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12(), + this._13(), + this._14(), + this._15() + ) /** * Returns a new Arity16 based on this Tuple16. **/ -fun Tuple16.toArity(): Arity16 = Arity16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16.toArity(): Arity16 = + Arity16( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12(), + this._13(), + this._14(), + this._15(), + this._16() + ) /** * Returns a new Arity17 based on this Tuple17. **/ -fun Tuple17.toArity(): Arity17 = Arity17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17.toArity(): Arity17 = + Arity17( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12(), + this._13(), + this._14(), + this._15(), + this._16(), + this._17() + ) /** * Returns a new Arity18 based on this Tuple18. **/ -fun Tuple18.toArity(): Arity18 = Arity18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18.toArity(): Arity18 = + Arity18( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12(), + this._13(), + this._14(), + this._15(), + this._16(), + this._17(), + this._18() + ) /** * Returns a new Arity19 based on this Tuple19. **/ -fun Tuple19.toArity(): Arity19 = Arity19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19.toArity(): Arity19 = + Arity19( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12(), + this._13(), + this._14(), + this._15(), + this._16(), + this._17(), + this._18(), + this._19() + ) /** * Returns a new Arity20 based on this Tuple20. **/ -fun Tuple20.toArity(): Arity20 = Arity20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20.toArity(): Arity20 = + Arity20( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12(), + this._13(), + this._14(), + this._15(), + this._16(), + this._17(), + this._18(), + this._19(), + this._20() + ) /** * Returns a new Arity21 based on this Tuple21. **/ -fun Tuple21.toArity(): Arity21 = Arity21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21.toArity(): Arity21 = + Arity21( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12(), + this._13(), + this._14(), + this._15(), + this._16(), + this._17(), + this._18(), + this._19(), + this._20(), + this._21() + ) /** * Returns a new Arity22 based on this Tuple22. **/ -fun Tuple22.toArity(): Arity22 = Arity22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22.toArity(): Arity22 = + Arity22( + this._1(), + this._2(), + this._3(), + this._4(), + this._5(), + this._6(), + this._7(), + this._8(), + this._9(), + this._10(), + this._11(), + this._12(), + this._13(), + this._14(), + this._15(), + this._16(), + this._17(), + this._18(), + this._19(), + this._20(), + this._21(), + this._22() + ) /** * Returns a new Tuple1 based on this Arity1. @@ -351,94 +625,344 @@ fun Arity3.toTuple(): Tuple3 = Tuple3 Arity4.toTuple(): Tuple4 = Tuple4(this._1, this._2, this._3, this._4) +fun Arity4.toTuple(): Tuple4 = + Tuple4(this._1, this._2, this._3, this._4) /** * Returns a new Tuple5 based on this Arity5. **/ -fun Arity5.toTuple(): Tuple5 = Tuple5(this._1, this._2, this._3, this._4, this._5) +fun Arity5.toTuple(): Tuple5 = + Tuple5(this._1, this._2, this._3, this._4, this._5) /** * Returns a new Tuple6 based on this Arity6. **/ -fun Arity6.toTuple(): Tuple6 = Tuple6(this._1, this._2, this._3, this._4, this._5, this._6) +fun Arity6.toTuple(): Tuple6 = + Tuple6(this._1, this._2, this._3, this._4, this._5, this._6) /** * Returns a new Tuple7 based on this Arity7. **/ -fun Arity7.toTuple(): Tuple7 = Tuple7(this._1, this._2, this._3, this._4, this._5, this._6, this._7) +fun Arity7.toTuple(): Tuple7 = + Tuple7(this._1, this._2, this._3, this._4, this._5, this._6, this._7) /** * Returns a new Tuple8 based on this Arity8. **/ -fun Arity8.toTuple(): Tuple8 = Tuple8(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8) +fun Arity8.toTuple(): Tuple8 = + Tuple8(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8) /** * Returns a new Tuple9 based on this Arity9. **/ -fun Arity9.toTuple(): Tuple9 = Tuple9(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9) +fun Arity9.toTuple(): Tuple9 = + Tuple9( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9 + ) /** * Returns a new Tuple10 based on this Arity10. **/ -fun Arity10.toTuple(): Tuple10 = Tuple10(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10) +fun Arity10.toTuple(): Tuple10 = + Tuple10( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10 + ) /** * Returns a new Tuple11 based on this Arity11. **/ -fun Arity11.toTuple(): Tuple11 = Tuple11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11) +fun Arity11.toTuple(): Tuple11 = + Tuple11( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11 + ) /** * Returns a new Tuple12 based on this Arity12. **/ -fun Arity12.toTuple(): Tuple12 = Tuple12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12) +fun Arity12.toTuple(): Tuple12 = + Tuple12( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12 + ) /** * Returns a new Tuple13 based on this Arity13. **/ -fun Arity13.toTuple(): Tuple13 = Tuple13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13) +fun Arity13.toTuple(): Tuple13 = + Tuple13( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12, + this._13 + ) /** * Returns a new Tuple14 based on this Arity14. **/ -fun Arity14.toTuple(): Tuple14 = Tuple14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14) +fun Arity14.toTuple(): Tuple14 = + Tuple14( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12, + this._13, + this._14 + ) /** * Returns a new Tuple15 based on this Arity15. **/ -fun Arity15.toTuple(): Tuple15 = Tuple15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15) +fun Arity15.toTuple(): Tuple15 = + Tuple15( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12, + this._13, + this._14, + this._15 + ) /** * Returns a new Tuple16 based on this Arity16. **/ -fun Arity16.toTuple(): Tuple16 = Tuple16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16) +fun Arity16.toTuple(): Tuple16 = + Tuple16( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12, + this._13, + this._14, + this._15, + this._16 + ) /** * Returns a new Tuple17 based on this Arity17. **/ -fun Arity17.toTuple(): Tuple17 = Tuple17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17) +fun Arity17.toTuple(): Tuple17 = + Tuple17( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12, + this._13, + this._14, + this._15, + this._16, + this._17 + ) /** * Returns a new Tuple18 based on this Arity18. **/ -fun Arity18.toTuple(): Tuple18 = Tuple18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18) +fun Arity18.toTuple(): Tuple18 = + Tuple18( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12, + this._13, + this._14, + this._15, + this._16, + this._17, + this._18 + ) /** * Returns a new Tuple19 based on this Arity19. **/ -fun Arity19.toTuple(): Tuple19 = Tuple19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19) +fun Arity19.toTuple(): Tuple19 = + Tuple19( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12, + this._13, + this._14, + this._15, + this._16, + this._17, + this._18, + this._19 + ) /** * Returns a new Tuple20 based on this Arity20. **/ -fun Arity20.toTuple(): Tuple20 = Tuple20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20) +fun Arity20.toTuple(): Tuple20 = + Tuple20( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12, + this._13, + this._14, + this._15, + this._16, + this._17, + this._18, + this._19, + this._20 + ) /** * Returns a new Tuple21 based on this Arity21. **/ -fun Arity21.toTuple(): Tuple21 = Tuple21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21) +fun Arity21.toTuple(): Tuple21 = + Tuple21( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12, + this._13, + this._14, + this._15, + this._16, + this._17, + this._18, + this._19, + this._20, + this._21 + ) /** * Returns a new Tuple22 based on this Arity22. **/ -fun Arity22.toTuple(): Tuple22 = Tuple22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22) +fun Arity22.toTuple(): Tuple22 = + Tuple22( + this._1, + this._2, + this._3, + this._4, + this._5, + this._6, + this._7, + this._8, + this._9, + this._10, + this._11, + this._12, + this._13, + this._14, + this._15, + this._16, + this._17, + this._18, + this._19, + this._20, + this._21, + this._22 + ) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt index bdad23e1..74aaa520 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt @@ -19,8 +19,8 @@ */ package org.jetbrains.kotlinx.spark.api -import com.sun.org.apache.xml.internal.serialize.OutputFormat import org.apache.spark.Partitioner +import org.apache.spark.api.java.JavaPairRDD import org.apache.spark.api.java.JavaRDD import org.apache.spark.api.java.Optional import org.apache.spark.streaming.Duration @@ -29,1201 +29,1192 @@ import org.apache.spark.streaming.api.java.JavaDStream import org.apache.spark.streaming.api.java.JavaDStreamLike import org.apache.spark.streaming.api.java.JavaMapWithStateDStream import org.apache.spark.streaming.api.java.JavaPairDStream +import org.apache.spark.streaming.dstream.DStream import scala.Tuple2 -import scala.Tuple3 -//fun JavaDStreamLike, *, *>.reduceByKey(func: (V, V) -> V): JavaDStream> = -// mapToPair(Arity2::toTuple) -// .reduceByKey(func) -// .map(Tuple2::toArity) +@JvmName("tuple2ToPairDStream") +fun JavaDStream>.toPairDStream(): JavaPairDStream = + JavaPairDStream.fromJavaDStream(this) -@JvmName("tuple2ToPairDStream") -fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = - mapToPair { it } - -@JvmName("arity2ToPairDStream") -fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = - mapToPair(Arity2::toTuple) - -@JvmName("pairToPairDStream") -fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = - mapToPair(Pair::toTuple) - -/** - * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to - * generate the RDDs with `numPartitions` partitions. - */ -@JvmName("groupByKeyArity2") -fun JavaDStreamLike, *, *>.groupByKey( - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .groupByKey(numPartitions) - .map { it.toArity() } - -/** - * Return a new DStream by applying `groupByKey` on each RDD. The supplied - * org.apache.spark.Partitioner is used to control the partitioning of each RDD. - */ -@JvmName("groupByKeyArity2") -fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = - mapToPair { it.toTuple() } - .groupByKey(partitioner) - .map { it.toArity() } - -/** - * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are - * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs - * with `numPartitions` partitions. - */ -@JvmName("reduceByKeyArity2") -fun JavaDStreamLike, *, *>.reduceByKey( - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKey(reduceFunc, numPartitions) - .map { it.toArity() } - -/** - * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are - * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control - * the partitioning of each RDD. - */ -@JvmName("reduceByKeyArity2") -fun JavaDStreamLike, *, *>.reduceByKey( - partitioner: Partitioner, - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKey(reduceFunc, partitioner) - .map { it.toArity() } - -/** - * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the - * combineByKey for RDDs. Please refer to combineByKey in - * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. - */ -@JvmName("combineByKeyArity2") -fun JavaDStreamLike, *, *>.combineByKey( - createCombiner: (V) -> C, - mergeValue: (C, V) -> C, - mergeCombiner: (C, C) -> C, - partitioner: Partitioner, - mapSideCombine: Boolean = true, -): JavaDStream> = - mapToPair { it.toTuple() } - .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) - .map { it.toArity() } - -/** - * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. - * Similar to `DStream.groupByKey()`, but applies it over a sliding window. - * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param numPartitions number of partitions of each RDD in the new DStream; if not specified - * then Spark's default number of partitions will be used - */ -@JvmName("groupByKeyAndWindowArity2") -fun JavaDStreamLike, *, *>.groupByKeyAndWindow( - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) - .map { it.toArity() } - -/** - * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. - * Similar to `DStream.groupByKey()`, but applies it over a sliding window. - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param partitioner partitioner for controlling the partitioning of each RDD in the new - * DStream. - */ -@JvmName("groupByKeyAndWindowArity2") -fun JavaDStreamLike, *, *>.groupByKeyAndWindow( - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it.toTuple() } - .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) - .map { it.toArity() } - -/** - * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to - * `DStream.reduceByKey()` but applies it over a sliding window. Hash partitioning is used to - * generate the RDDs with `numPartitions` partitions. - * @param reduceFunc associative and commutative reduce function - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param numPartitions number of partitions of each RDD in the new DStream. - */ -@JvmName("reduceByKeyAndWindowArity2") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) - .map { it.toArity() } - -/** - * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to - * `DStream.reduceByKey()`, but applies it over a sliding window. - * @param reduceFunc associative and commutative reduce function - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param partitioner partitioner for controlling the partitioning of each RDD - * in the new DStream. - */ -@JvmName("reduceByKeyAndWindowArity2") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - partitioner: Partitioner, - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) - .map { it.toArity() } - -/** - * Return a new DStream by applying incremental `reduceByKey` over a sliding window. - * The reduced value of over a new window is calculated using the old window's reduced value : - * 1. reduce the new values that entered the window (e.g., adding new counts) - * - * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) - * - * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. - * However, it is applicable to only "invertible reduce functions". - * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. - * @param reduceFunc associative and commutative reduce function - * @param invReduceFunc inverse reduce function; such that for all y, invertible x: - * `invReduceFunc(reduceFunc(x, y), x) = y` - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param filterFunc Optional function to filter expired key-value pairs; - * only pairs that satisfy the function are retained - */ -@JvmName("reduceByKeyAndWindowArity2") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - invReduceFunc: (V, V) -> V, - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), - filterFunc: ((Arity2) -> Boolean)? = null, - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKeyAndWindow( - reduceFunc, - invReduceFunc, - windowDuration, - slideDuration, - numPartitions, - filterFunc?.let { - { tuple -> - filterFunc(tuple.toArity()) - } - } - ) - .map { it.toArity() } - -/** - * Return a new DStream by applying incremental `reduceByKey` over a sliding window. - * The reduced value of over a new window is calculated using the old window's reduced value : - * 1. reduce the new values that entered the window (e.g., adding new counts) - * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) - * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. - * However, it is applicable to only "invertible reduce functions". - * @param reduceFunc associative and commutative reduce function - * @param invReduceFunc inverse reduce function - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param partitioner partitioner for controlling the partitioning of each RDD in the new - * DStream. - * @param filterFunc Optional function to filter expired key-value pairs; - * only pairs that satisfy the function are retained - */ -@JvmName("reduceByKeyAndWindowArity2") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - invReduceFunc: (V, V) -> V, - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - partitioner: Partitioner, - filterFunc: ((Arity2) -> Boolean)? = null, - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKeyAndWindow( - reduceFunc, - invReduceFunc, - windowDuration, - slideDuration, - partitioner, - filterFunc?.let { - { tuple -> - filterFunc(tuple.toArity()) - } - } - ) - .map { it.toArity() } - -/** - * Return a [MapWithStateDStream] by applying a function to every key-value element of - * `this` stream, while maintaining some state data for each unique key. The mapping function - * and other specification (e.g. partitioners, timeouts, initial state data, etc.) of this - * transformation can be specified using `StateSpec` class. The state data is accessible in - * as a parameter of type `State` in the mapping function. - * - * Example of using `mapWithState`: - * {{{ - * // A mapping function that maintains an integer state and return a String - * def mappingFunction(key: String, value: Option[Int], state: State[Int]): Option[String] = { - * // Use state.exists(), state.get(), state.update() and state.remove() - * // to manage state, and return the necessary string - * } - * - * val spec = StateSpec.function(mappingFunction).numPartitions(10) - * - * val mapWithStateDStream = keyValueDStream.mapWithState[StateType, MappedType](spec) - * }}} - * - * @param spec Specification of this transformation - * @tparam StateType Class type of the state data - * @tparam MappedType Class type of the mapped data - */ -@JvmName("mapWithStateArity2") -fun JavaDStreamLike, *, *>.mapWithState( - spec: StateSpec, -): JavaMapWithStateDStream = - mapToPair { it.toTuple() } - .mapWithState(spec) - -/** - * Return a new "state" DStream where the state for each key is updated by applying - * the given function on the previous state of the key and the new values of each key. - * In every batch the updateFunc will be called for each state even if there are no new values. - * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. - * @param updateFunc State update function. If `this` function returns None, then - * corresponding state key-value pair will be eliminated. - * @tparam S State type - */ -@JvmName("updateStateByKeyArity2") -fun JavaDStreamLike, *, *>.updateStateByKey( - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), - updateFunc: (List, S?) -> S?, -): JavaDStream> = - mapToPair { it.toTuple() } - .updateStateByKey( - { list: List, s: Optional -> - updateFunc(list, s.toNullable()).toOptional() - }, - numPartitions, - ) - .map { it.toArity() } - -/** - * Return a new "state" DStream where the state for each key is updated by applying - * the given function on the previous state of the key and the new values of each key. - * In every batch the updateFunc will be called for each state even if there are no new values. - * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. - * @param updateFunc State update function. Note, that this function may generate a different - * tuple with a different key than the input key. Therefore keys may be removed - * or added in this way. It is up to the developer to decide whether to - * remember the partitioner despite the key being changed. - * @param partitioner Partitioner for controlling the partitioning of each RDD in the new - * DStream - * @tparam S State type - */ -@JvmName("updateStateByKeyArity2") -fun JavaDStreamLike, *, *>.updateStateByKey( - partitioner: Partitioner, - updateFunc: (List, S?) -> S?, -): JavaDStream> = - mapToPair { it.toTuple() } - .updateStateByKey( - { list: List, s: Optional -> - updateFunc(list, s.toNullable()).toOptional() - }, - partitioner, - ) - .map { it.toArity() } - -/** - * Return a new "state" DStream where the state for each key is updated by applying - * the given function on the previous state of the key and the new values of the key. - * org.apache.spark.Partitioner is used to control the partitioning of each RDD. - * @param updateFunc State update function. If `this` function returns None, then - * corresponding state key-value pair will be eliminated. - * @param partitioner Partitioner for controlling the partitioning of each RDD in the new - * DStream. - * @param initialRDD initial state value of each key. - * @tparam S State type - */ -@JvmName("updateStateByKeyArity2") -fun JavaDStreamLike, *, *>.updateStateByKey( - partitioner: Partitioner, - initialRDD: JavaRDD>, - updateFunc: (List, S?) -> S?, -): JavaDStream> = - mapToPair { it.toTuple() } - .updateStateByKey( - { list: List, s: Optional -> - updateFunc(list, s.toNullable()).toOptional() - }, - partitioner, - initialRDD.mapToPair { it.toTuple() }, - ) - .map { it.toArity() } - -/** - * Return a new DStream by applying a map function to the value of each key-value pairs in - * 'this' DStream without changing the key. - */ -@JvmName("mapValuesArity2") -fun JavaDStreamLike, *, *>.mapValues( - mapValuesFunc: (V) -> U, -): JavaDStream> = - mapToPair { it.toTuple() } - .mapValues(mapValuesFunc) - .map { it.toArity() } - -/** - * Return a new DStream by applying a flatmap function to the value of each key-value pairs in - * 'this' DStream without changing the key. - */ -@JvmName("flatMapValuesArity2") -fun JavaDStreamLike, *, *>.flatMapValues( - flatMapValuesFunc: (V) -> Iterator, -): JavaDStream> = - mapToPair { it.toTuple() } - .flatMapValues(flatMapValuesFunc) - .map { it.toArity() } - -/** - * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. - * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. - */ -@JvmName("cogroupArity2") -fun JavaDStreamLike, *, *>.cogroup( - other: JavaDStreamLike, *, *>, - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream, Iterable>>> = - mapToPair { it.toTuple() } - .cogroup( - other.mapToPair { it.toTuple() }, - numPartitions, - ) - .map { - c(it._1, it._2.toArity()) - } - -/** - * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. - * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. - */ -@JvmName("cogroupArity2") -fun JavaDStreamLike, *, *>.cogroup( - other: JavaDStreamLike, *, *>, - partitioner: Partitioner, -): JavaDStream, Iterable>>> = - mapToPair { it.toTuple() } - .cogroup( - other.mapToPair { it.toTuple() }, - partitioner, - ) - .map { - c(it._1, it._2.toArity()) - } - -/** - * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. - * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. - */ -@JvmName("joinArity2") -fun JavaDStreamLike, *, *>.join( - other: JavaDStreamLike, *, *>, - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .join( - other.mapToPair { it.toTuple() }, - numPartitions, - ) - .map { - c(it._1, it._2.toArity()) - } - -/** - * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. - * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. - */ -@JvmName("joinArity2") -fun JavaDStreamLike, *, *>.join( - other: JavaDStreamLike, *, *>, - partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it.toTuple() } - .join( - other.mapToPair { it.toTuple() }, - partitioner, - ) - .map { - c(it._1, it._2.toArity()) - } - -/** - * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and - * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` - * partitions. - */ -@JvmName("leftOuterJoinArity2") -fun JavaDStreamLike, *, *>.leftOuterJoin( - other: JavaDStreamLike, *, *>, - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .leftOuterJoin( - other.mapToPair { it.toTuple() }, - numPartitions, - ) - .map { - c(it._1, c(it._2._1, it._2._2.toNullable())) - } - -/** - * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and - * `other` DStream. The supplied org.apache.spark.Partitioner is used to control - * the partitioning of each RDD. - */ -@JvmName("leftOuterJoinArity2") -fun JavaDStreamLike, *, *>.leftOuterJoin( - other: JavaDStreamLike, *, *>, - partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it.toTuple() } - .leftOuterJoin( - other.mapToPair { it.toTuple() }, - partitioner, - ) - .map { - c(it._1, c(it._2._1, it._2._2.toNullable())) - } - -/** - * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and - * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` - * partitions. - */ -@JvmName("rightOuterJoinArity2") -fun JavaDStreamLike, *, *>.rightOuterJoin( - other: JavaDStreamLike, *, *>, - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .rightOuterJoin( - other.mapToPair { it.toTuple() }, - numPartitions, - ) - .map { - c(it._1, c(it._2._1.toNullable(), it._2._2)) - } - -/** - * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and - * `other` DStream. The supplied org.apache.spark.Partitioner is used to control - * the partitioning of each RDD. - */ -@JvmName("rightOuterJoinArity2") -fun JavaDStreamLike, *, *>.rightOuterJoin( - other: JavaDStreamLike, *, *>, - partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it.toTuple() } - .rightOuterJoin( - other.mapToPair { it.toTuple() }, - partitioner, - ) - .map { - c(it._1, c(it._2._1.toNullable(), it._2._2)) - } - -/** - * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and - * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` - * partitions. - */ -@JvmName("fullOuterJoinArity2") -fun JavaDStreamLike, *, *>.fullOuterJoin( - other: JavaDStreamLike, *, *>, - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .fullOuterJoin( - other.mapToPair { it.toTuple() }, - numPartitions, - ) - .map { - c(it._1, c(it._2._1.toNullable(), it._2._2.toNullable())) - } - -/** - * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and - * `other` DStream. The supplied org.apache.spark.Partitioner is used to control - * the partitioning of each RDD. - */ -@JvmName("fullOuterJoinArity2") -fun JavaDStreamLike, *, *>.fullOuterJoin( - other: JavaDStreamLike, *, *>, - partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it.toTuple() } - .fullOuterJoin( - other.mapToPair { it.toTuple() }, - partitioner, - ) - .map { - c(it._1, c(it._2._1.toNullable(), it._2._2.toNullable())) - } - -/** - * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is - * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". - */ -@JvmName("saveAsHadoopFilesArity2") -fun JavaDStreamLike, *, *>.saveAsHadoopFiles( - prefix: String, suffix: String, -): Unit = - mapToPair { it.toTuple() } - .saveAsHadoopFiles(prefix, suffix) - -/** - * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is - * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". - */ -@JvmName("saveAsNewAPIHadoopFilesArity2") -fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( - prefix: String, suffix: String, -): Unit = - mapToPair { it.toTuple() } - .saveAsNewAPIHadoopFiles(prefix, suffix) - - - - - -/** - * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to - * generate the RDDs with `numPartitions` partitions. - */ -@JvmName("groupByKeyPair") -fun JavaDStreamLike, *, *>.groupByKey( - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .groupByKey(numPartitions) - .map { it.toPair() } - -/** - * Return a new DStream by applying `groupByKey` on each RDD. The supplied - * org.apache.spark.Partitioner is used to control the partitioning of each RDD. - */ -@JvmName("groupByKeyPair") -fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = - mapToPair { it.toTuple() } - .groupByKey(partitioner) - .map { it.toPair() } - -/** - * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are - * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs - * with `numPartitions` partitions. - */ -@JvmName("reduceByKeyPair") -fun JavaDStreamLike, *, *>.reduceByKey( - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKey(reduceFunc, numPartitions) - .map { it.toPair() } - -/** - * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are - * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control - * the partitioning of each RDD. - */ -@JvmName("reduceByKeyPair") -fun JavaDStreamLike, *, *>.reduceByKey( - partitioner: Partitioner, - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKey(reduceFunc, partitioner) - .map { it.toPair() } - -/** - * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the - * combineByKey for RDDs. Please refer to combineByKey in - * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. - */ -@JvmName("combineByKeyPair") -fun JavaDStreamLike, *, *>.combineByKey( - createCombiner: (V) -> C, - mergeValue: (C, V) -> C, - mergeCombiner: (C, C) -> C, - partitioner: Partitioner, - mapSideCombine: Boolean = true, -): JavaDStream> = - mapToPair { it.toTuple() } - .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) - .map { it.toPair() } - -/** - * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. - * Similar to `DStream.groupByKey()`, but applies it over a sliding window. - * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param numPartitions number of partitions of each RDD in the new DStream; if not specified - * then Spark's default number of partitions will be used - */ -@JvmName("groupByKeyAndWindowPair") -fun JavaDStreamLike, *, *>.groupByKeyAndWindow( - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) - .map { it.toPair() } - -/** - * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. - * Similar to `DStream.groupByKey()`, but applies it over a sliding window. - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param partitioner partitioner for controlling the partitioning of each RDD in the new - * DStream. - */ -@JvmName("groupByKeyAndWindowPair") -fun JavaDStreamLike, *, *>.groupByKeyAndWindow( - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it.toTuple() } - .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) - .map { it.toPair() } - -/** - * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to - * `DStream.reduceByKey()` but applies it over a sliding window. Hash partitioning is used to - * generate the RDDs with `numPartitions` partitions. - * @param reduceFunc associative and commutative reduce function - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param numPartitions number of partitions of each RDD in the new DStream. - */ -@JvmName("reduceByKeyAndWindowPair") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) - .map { it.toPair() } - -/** - * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to - * `DStream.reduceByKey()`, but applies it over a sliding window. - * @param reduceFunc associative and commutative reduce function - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param partitioner partitioner for controlling the partitioning of each RDD - * in the new DStream. - */ -@JvmName("reduceByKeyAndWindowPair") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - partitioner: Partitioner, - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) - .map { it.toPair() } - -/** - * Return a new DStream by applying incremental `reduceByKey` over a sliding window. - * The reduced value of over a new window is calculated using the old window's reduced value : - * 1. reduce the new values that entered the window (e.g., adding new counts) - * - * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) - * - * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. - * However, it is applicable to only "invertible reduce functions". - * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. - * @param reduceFunc associative and commutative reduce function - * @param invReduceFunc inverse reduce function; such that for all y, invertible x: - * `invReduceFunc(reduceFunc(x, y), x) = y` - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param filterFunc Optional function to filter expired key-value pairs; - * only pairs that satisfy the function are retained - */ -@JvmName("reduceByKeyAndWindowPair") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - invReduceFunc: (V, V) -> V, - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), - filterFunc: ((Pair) -> Boolean)? = null, - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKeyAndWindow( - reduceFunc, - invReduceFunc, - windowDuration, - slideDuration, - numPartitions, - filterFunc?.let { - { tuple -> - filterFunc(tuple.toPair()) - } - } - ) - .map { it.toPair() } - -/** - * Return a new DStream by applying incremental `reduceByKey` over a sliding window. - * The reduced value of over a new window is calculated using the old window's reduced value : - * 1. reduce the new values that entered the window (e.g., adding new counts) - * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) - * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. - * However, it is applicable to only "invertible reduce functions". - * @param reduceFunc associative and commutative reduce function - * @param invReduceFunc inverse reduce function - * @param windowDuration width of the window; must be a multiple of this DStream's - * batching interval - * @param slideDuration sliding interval of the window (i.e., the interval after which - * the new DStream will generate RDDs); must be a multiple of this - * DStream's batching interval - * @param partitioner partitioner for controlling the partitioning of each RDD in the new - * DStream. - * @param filterFunc Optional function to filter expired key-value pairs; - * only pairs that satisfy the function are retained - */ -@JvmName("reduceByKeyAndWindowPair") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( - invReduceFunc: (V, V) -> V, - windowDuration: Duration, - slideDuration: Duration = dstream().slideDuration(), - partitioner: Partitioner, - filterFunc: ((Pair) -> Boolean)? = null, - reduceFunc: (V, V) -> V, -): JavaDStream> = - mapToPair { it.toTuple() } - .reduceByKeyAndWindow( - reduceFunc, - invReduceFunc, - windowDuration, - slideDuration, - partitioner, - filterFunc?.let { - { tuple -> - filterFunc(tuple.toPair()) - } - } - ) - .map { it.toPair() } - -/** - * Return a [MapWithStateDStream] by applying a function to every key-value element of - * `this` stream, while maintaining some state data for each unique key. The mapping function - * and other specification (e.g. partitioners, timeouts, initial state data, etc.) of this - * transformation can be specified using `StateSpec` class. The state data is accessible in - * as a parameter of type `State` in the mapping function. - * - * Example of using `mapWithState`: - * {{{ - * // A mapping function that maintains an integer state and return a String - * def mappingFunction(key: String, value: Option[Int], state: State[Int]): Option[String] = { - * // Use state.exists(), state.get(), state.update() and state.remove() - * // to manage state, and return the necessary string - * } - * - * val spec = StateSpec.function(mappingFunction).numPartitions(10) - * - * val mapWithStateDStream = keyValueDStream.mapWithState[StateType, MappedType](spec) - * }}} - * - * @param spec Specification of this transformation - * @tparam StateType Class type of the state data - * @tparam MappedType Class type of the mapped data - */ -@JvmName("mapWithStatePair") -fun JavaDStreamLike, *, *>.mapWithState( - spec: StateSpec, -): JavaMapWithStateDStream = - mapToPair { it.toTuple() } - .mapWithState(spec) - -/** - * Return a new "state" DStream where the state for each key is updated by applying - * the given function on the previous state of the key and the new values of each key. - * In every batch the updateFunc will be called for each state even if there are no new values. - * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. - * @param updateFunc State update function. If `this` function returns None, then - * corresponding state key-value pair will be eliminated. - * @tparam S State type - */ -@JvmName("updateStateByKeyPair") -fun JavaDStreamLike, *, *>.updateStateByKey( - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), - updateFunc: (List, S?) -> S?, -): JavaDStream> = - mapToPair { it.toTuple() } - .updateStateByKey( - { list: List, s: Optional -> - updateFunc(list, s.toNullable()).toOptional() - }, - numPartitions, - ) - .map { it.toPair() } - -/** - * Return a new "state" DStream where the state for each key is updated by applying - * the given function on the previous state of the key and the new values of each key. - * In every batch the updateFunc will be called for each state even if there are no new values. - * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. - * @param updateFunc State update function. Note, that this function may generate a different - * tuple with a different key than the input key. Therefore keys may be removed - * or added in this way. It is up to the developer to decide whether to - * remember the partitioner despite the key being changed. - * @param partitioner Partitioner for controlling the partitioning of each RDD in the new - * DStream - * @tparam S State type - */ -@JvmName("updateStateByKeyPair") -fun JavaDStreamLike, *, *>.updateStateByKey( - partitioner: Partitioner, - updateFunc: (List, S?) -> S?, -): JavaDStream> = - mapToPair { it.toTuple() } - .updateStateByKey( - { list: List, s: Optional -> - updateFunc(list, s.toNullable()).toOptional() - }, - partitioner, - ) - .map { it.toPair() } - -/** - * Return a new "state" DStream where the state for each key is updated by applying - * the given function on the previous state of the key and the new values of the key. - * org.apache.spark.Partitioner is used to control the partitioning of each RDD. - * @param updateFunc State update function. If `this` function returns None, then - * corresponding state key-value pair will be eliminated. - * @param partitioner Partitioner for controlling the partitioning of each RDD in the new - * DStream. - * @param initialRDD initial state value of each key. - * @tparam S State type - */ -@JvmName("updateStateByKeyPair") -fun JavaDStreamLike, *, *>.updateStateByKey( - partitioner: Partitioner, - initialRDD: JavaRDD>, - updateFunc: (List, S?) -> S?, -): JavaDStream> = - mapToPair { it.toTuple() } - .updateStateByKey( - { list: List, s: Optional -> - updateFunc(list, s.toNullable()).toOptional() - }, - partitioner, - initialRDD.mapToPair { it.toTuple() }, - ) - .map { it.toPair() } - -/** - * Return a new DStream by applying a map function to the value of each key-value pairs in - * 'this' DStream without changing the key. - */ -@JvmName("mapValuesPair") -fun JavaDStreamLike, *, *>.mapValues( - mapValuesFunc: (V) -> U, -): JavaDStream> = - mapToPair { it.toTuple() } - .mapValues(mapValuesFunc) - .map { it.toPair() } - -/** - * Return a new DStream by applying a flatmap function to the value of each key-value pairs in - * 'this' DStream without changing the key. - */ -@JvmName("flatMapValuesPair") -fun JavaDStreamLike, *, *>.flatMapValues( - flatMapValuesFunc: (V) -> Iterator, -): JavaDStream> = - mapToPair { it.toTuple() } - .flatMapValues(flatMapValuesFunc) - .map { it.toPair() } - -/** - * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. - * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. - */ -@JvmName("cogroupPair") -fun JavaDStreamLike, *, *>.cogroup( - other: JavaDStreamLike, *, *>, - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream, Iterable>>> = - mapToPair { it.toTuple() } - .cogroup( - other.mapToPair { it.toTuple() }, - numPartitions, - ) - .map { - Pair(it._1, it._2.toPair()) - } - -/** - * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. - * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. - */ -@JvmName("cogroupPair") -fun JavaDStreamLike, *, *>.cogroup( - other: JavaDStreamLike, *, *>, - partitioner: Partitioner, -): JavaDStream, Iterable>>> = - mapToPair { it.toTuple() } - .cogroup( - other.mapToPair { it.toTuple() }, - partitioner, - ) - .map { - Pair(it._1, it._2.toPair()) - } - -/** - * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. - * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. - */ -@JvmName("joinPair") -fun JavaDStreamLike, *, *>.join( - other: JavaDStreamLike, *, *>, - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .join( - other.mapToPair { it.toTuple() }, - numPartitions, - ) - .map { - Pair(it._1, it._2.toPair()) - } - -/** - * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. - * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. - */ -@JvmName("joinPair") -fun JavaDStreamLike, *, *>.join( - other: JavaDStreamLike, *, *>, - partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it.toTuple() } - .join( - other.mapToPair { it.toTuple() }, - partitioner, - ) - .map { - Pair(it._1, it._2.toPair()) - } - -/** - * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and - * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` - * partitions. - */ -@JvmName("leftOuterJoinPair") -fun JavaDStreamLike, *, *>.leftOuterJoin( - other: JavaDStreamLike, *, *>, - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .leftOuterJoin( - other.mapToPair { it.toTuple() }, - numPartitions, - ) - .map { - Pair(it._1, Pair(it._2._1, it._2._2.toNullable())) - } - -/** - * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and - * `other` DStream. The supplied org.apache.spark.Partitioner is used to control - * the partitioning of each RDD. - */ -@JvmName("leftOuterJoinPair") -fun JavaDStreamLike, *, *>.leftOuterJoin( - other: JavaDStreamLike, *, *>, - partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it.toTuple() } - .leftOuterJoin( - other.mapToPair { it.toTuple() }, - partitioner, - ) - .map { - Pair(it._1, Pair(it._2._1, it._2._2.toNullable())) - } - -/** - * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and - * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` - * partitions. - */ -@JvmName("rightOuterJoinPair") -fun JavaDStreamLike, *, *>.rightOuterJoin( - other: JavaDStreamLike, *, *>, - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .rightOuterJoin( - other.mapToPair { it.toTuple() }, - numPartitions, - ) - .map { - Pair(it._1, Pair(it._2._1.toNullable(), it._2._2)) - } - -/** - * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and - * `other` DStream. The supplied org.apache.spark.Partitioner is used to control - * the partitioning of each RDD. - */ -@JvmName("rightOuterJoinPair") -fun JavaDStreamLike, *, *>.rightOuterJoin( - other: JavaDStreamLike, *, *>, - partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it.toTuple() } - .rightOuterJoin( - other.mapToPair { it.toTuple() }, - partitioner, - ) - .map { - Pair(it._1, Pair(it._2._1.toNullable(), it._2._2)) - } - -/** - * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and - * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` - * partitions. - */ -@JvmName("fullOuterJoinPair") -fun JavaDStreamLike, *, *>.fullOuterJoin( - other: JavaDStreamLike, *, *>, - numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it.toTuple() } - .fullOuterJoin( - other.mapToPair { it.toTuple() }, - numPartitions, - ) - .map { - Pair(it._1, Pair(it._2._1.toNullable(), it._2._2.toNullable())) - } - -/** - * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and - * `other` DStream. The supplied org.apache.spark.Partitioner is used to control - * the partitioning of each RDD. - */ -@JvmName("fullOuterJoinPair") -fun JavaDStreamLike, *, *>.fullOuterJoin( - other: JavaDStreamLike, *, *>, - partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it.toTuple() } - .fullOuterJoin( - other.mapToPair { it.toTuple() }, - partitioner, - ) - .map { - Pair(it._1, Pair(it._2._1.toNullable(), it._2._2.toNullable())) - } - -/** - * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is - * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". - */ -@JvmName("saveAsHadoopFilesPair") -fun JavaDStreamLike, *, *>.saveAsHadoopFiles( - prefix: String, suffix: String, -): Unit = - mapToPair { it.toTuple() } - .saveAsHadoopFiles(prefix, suffix) - -/** - * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is - * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". - */ -@JvmName("saveAsNewAPIHadoopFilesPair") -fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( - prefix: String, suffix: String, -): Unit = - mapToPair { it.toTuple() } - .saveAsNewAPIHadoopFiles(prefix, suffix) +fun JavaRDD>.toPairRDD(): JavaPairRDD = JavaPairRDD.fromJavaRDD(this) +@JvmName("arity2ToPairDStream") +fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = + mapToPair(Arity2::toTuple) +@JvmName("pairToPairDStream") +fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = + mapToPair(Pair::toTuple) +///** +// * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to +// * generate the RDDs with `numPartitions` partitions. +// */ +//@JvmName("groupByKeyArity2") +//fun JavaDStreamLike, *, *>.groupByKey( +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .groupByKey(numPartitions) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying `groupByKey` on each RDD. The supplied +// * org.apache.spark.Partitioner is used to control the partitioning of each RDD. +// */ +//@JvmName("groupByKeyArity2") +//fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = +// mapToPair { it.toTuple() } +// .groupByKey(partitioner) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are +// * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs +// * with `numPartitions` partitions. +// */ +//@JvmName("reduceByKeyArity2") +//fun JavaDStreamLike, *, *>.reduceByKey( +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKey(reduceFunc, numPartitions) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are +// * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control +// * the partitioning of each RDD. +// */ +//@JvmName("reduceByKeyArity2") +//fun JavaDStreamLike, *, *>.reduceByKey( +// partitioner: Partitioner, +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKey(reduceFunc, partitioner) +// .map { it.toArity() } +// +///** +// * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the +// * combineByKey for RDDs. Please refer to combineByKey in +// * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. +// */ +//@JvmName("combineByKeyArity2") +//fun JavaDStreamLike, *, *>.combineByKey( +// createCombiner: (V) -> C, +// mergeValue: (C, V) -> C, +// mergeCombiner: (C, C) -> C, +// partitioner: Partitioner, +// mapSideCombine: Boolean = true, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. +// * Similar to `DStream.groupByKey()`, but applies it over a sliding window. +// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param numPartitions number of partitions of each RDD in the new DStream; if not specified +// * then Spark's default number of partitions will be used +// */ +//@JvmName("groupByKeyAndWindowArity2") +//fun JavaDStreamLike, *, *>.groupByKeyAndWindow( +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) +// .map { it.toArity() } +// +///** +// * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. +// * Similar to `DStream.groupByKey()`, but applies it over a sliding window. +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param partitioner partitioner for controlling the partitioning of each RDD in the new +// * DStream. +// */ +//@JvmName("groupByKeyAndWindowArity2") +//fun JavaDStreamLike, *, *>.groupByKeyAndWindow( +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// partitioner: Partitioner, +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to +// * `DStream.reduceByKey()` but applies it over a sliding window. Hash partitioning is used to +// * generate the RDDs with `numPartitions` partitions. +// * @param reduceFunc associative and commutative reduce function +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param numPartitions number of partitions of each RDD in the new DStream. +// */ +//@JvmName("reduceByKeyAndWindowArity2") +//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to +// * `DStream.reduceByKey()`, but applies it over a sliding window. +// * @param reduceFunc associative and commutative reduce function +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param partitioner partitioner for controlling the partitioning of each RDD +// * in the new DStream. +// */ +//@JvmName("reduceByKeyAndWindowArity2") +//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// partitioner: Partitioner, +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying incremental `reduceByKey` over a sliding window. +// * The reduced value of over a new window is calculated using the old window's reduced value : +// * 1. reduce the new values that entered the window (e.g., adding new counts) +// * +// * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) +// * +// * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. +// * However, it is applicable to only "invertible reduce functions". +// * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. +// * @param reduceFunc associative and commutative reduce function +// * @param invReduceFunc inverse reduce function; such that for all y, invertible x: +// * `invReduceFunc(reduceFunc(x, y), x) = y` +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param filterFunc Optional function to filter expired key-value pairs; +// * only pairs that satisfy the function are retained +// */ +//@JvmName("reduceByKeyAndWindowArity2") +//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +// invReduceFunc: (V, V) -> V, +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +// filterFunc: ((Arity2) -> Boolean)? = null, +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKeyAndWindow( +// reduceFunc, +// invReduceFunc, +// windowDuration, +// slideDuration, +// numPartitions, +// filterFunc?.let { +// { tuple -> +// filterFunc(tuple.toArity()) +// } +// } +// ) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying incremental `reduceByKey` over a sliding window. +// * The reduced value of over a new window is calculated using the old window's reduced value : +// * 1. reduce the new values that entered the window (e.g., adding new counts) +// * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) +// * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. +// * However, it is applicable to only "invertible reduce functions". +// * @param reduceFunc associative and commutative reduce function +// * @param invReduceFunc inverse reduce function +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param partitioner partitioner for controlling the partitioning of each RDD in the new +// * DStream. +// * @param filterFunc Optional function to filter expired key-value pairs; +// * only pairs that satisfy the function are retained +// */ +//@JvmName("reduceByKeyAndWindowArity2") +//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +// invReduceFunc: (V, V) -> V, +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// partitioner: Partitioner, +// filterFunc: ((Arity2) -> Boolean)? = null, +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKeyAndWindow( +// reduceFunc, +// invReduceFunc, +// windowDuration, +// slideDuration, +// partitioner, +// filterFunc?.let { +// { tuple -> +// filterFunc(tuple.toArity()) +// } +// } +// ) +// .map { it.toArity() } +// +///** +// * Return a [MapWithStateDStream] by applying a function to every key-value element of +// * `this` stream, while maintaining some state data for each unique key. The mapping function +// * and other specification (e.g. partitioners, timeouts, initial state data, etc.) of this +// * transformation can be specified using `StateSpec` class. The state data is accessible in +// * as a parameter of type `State` in the mapping function. +// * +// * Example of using `mapWithState`: +// * {{{ +// * // A mapping function that maintains an integer state and return a String +// * def mappingFunction(key: String, value: Option[Int], state: State[Int]): Option[String] = { +// * // Use state.exists(), state.get(), state.update() and state.remove() +// * // to manage state, and return the necessary string +// * } +// * +// * val spec = StateSpec.function(mappingFunction).numPartitions(10) +// * +// * val mapWithStateDStream = keyValueDStream.mapWithState[StateType, MappedType](spec) +// * }}} +// * +// * @param spec Specification of this transformation +// * @tparam StateType Class type of the state data +// * @tparam MappedType Class type of the mapped data +// */ +//@JvmName("mapWithStateArity2") +//fun JavaDStreamLike, *, *>.mapWithState( +// spec: StateSpec, +//): JavaMapWithStateDStream = +// mapToPair { it.toTuple() } +// .mapWithState(spec) +// +///** +// * Return a new "state" DStream where the state for each key is updated by applying +// * the given function on the previous state of the key and the new values of each key. +// * In every batch the updateFunc will be called for each state even if there are no new values. +// * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. +// * @param updateFunc State update function. If `this` function returns None, then +// * corresponding state key-value pair will be eliminated. +// * @tparam S State type +// */ +//@JvmName("updateStateByKeyArity2") +//fun JavaDStreamLike, *, *>.updateStateByKey( +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +// updateFunc: (List, S?) -> S?, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .updateStateByKey( +// { list: List, s: Optional -> +// updateFunc(list, s.getOrNull()).asOptional() +// }, +// numPartitions, +// ) +// .map { it.toArity() } +// +///** +// * Return a new "state" DStream where the state for each key is updated by applying +// * the given function on the previous state of the key and the new values of each key. +// * In every batch the updateFunc will be called for each state even if there are no new values. +// * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. +// * @param updateFunc State update function. Note, that this function may generate a different +// * tuple with a different key than the input key. Therefore keys may be removed +// * or added in this way. It is up to the developer to decide whether to +// * remember the partitioner despite the key being changed. +// * @param partitioner Partitioner for controlling the partitioning of each RDD in the new +// * DStream +// * @tparam S State type +// */ +//@JvmName("updateStateByKeyArity2") +//fun JavaDStreamLike, *, *>.updateStateByKey( +// partitioner: Partitioner, +// updateFunc: (List, S?) -> S?, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .updateStateByKey( +// { list: List, s: Optional -> +// updateFunc(list, s.getOrNull()).asOptional() +// }, +// partitioner, +// ) +// .map { it.toArity() } +// +///** +// * Return a new "state" DStream where the state for each key is updated by applying +// * the given function on the previous state of the key and the new values of the key. +// * org.apache.spark.Partitioner is used to control the partitioning of each RDD. +// * @param updateFunc State update function. If `this` function returns None, then +// * corresponding state key-value pair will be eliminated. +// * @param partitioner Partitioner for controlling the partitioning of each RDD in the new +// * DStream. +// * @param initialRDD initial state value of each key. +// * @tparam S State type +// */ +//@JvmName("updateStateByKeyArity2") +//fun JavaDStreamLike, *, *>.updateStateByKey( +// partitioner: Partitioner, +// initialRDD: JavaRDD>, +// updateFunc: (List, S?) -> S?, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .updateStateByKey( +// { list: List, s: Optional -> +// updateFunc(list, s.getOrNull()).asOptional() +// }, +// partitioner, +// initialRDD.mapToPair { it.toTuple() }, +// ) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying a map function to the value of each key-value pairs in +// * 'this' DStream without changing the key. +// */ +//@JvmName("mapValuesArity2") +//fun JavaDStreamLike, *, *>.mapValues( +// mapValuesFunc: (V) -> U, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .mapValues(mapValuesFunc) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying a flatmap function to the value of each key-value pairs in +// * 'this' DStream without changing the key. +// */ +//@JvmName("flatMapValuesArity2") +//fun JavaDStreamLike, *, *>.flatMapValues( +// flatMapValuesFunc: (V) -> Iterator, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .flatMapValues(flatMapValuesFunc) +// .map { it.toArity() } +// +///** +// * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. +// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. +// */ +//@JvmName("cogroupArity2") +//fun JavaDStreamLike, *, *>.cogroup( +// other: JavaDStreamLike, *, *>, +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream, Iterable>>> = +// mapToPair { it.toTuple() } +// .cogroup( +// other.mapToPair { it.toTuple() }, +// numPartitions, +// ) +// .map { +// c(it._1, it._2.toArity()) +// } +// +///** +// * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. +// * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. +// */ +//@JvmName("cogroupArity2") +//fun JavaDStreamLike, *, *>.cogroup( +// other: JavaDStreamLike, *, *>, +// partitioner: Partitioner, +//): JavaDStream, Iterable>>> = +// mapToPair { it.toTuple() } +// .cogroup( +// other.mapToPair { it.toTuple() }, +// partitioner, +// ) +// .map { +// c(it._1, it._2.toArity()) +// } +// +///** +// * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. +// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. +// */ +//@JvmName("joinArity2") +//fun JavaDStreamLike, *, *>.join( +// other: JavaDStreamLike, *, *>, +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .join( +// other.mapToPair { it.toTuple() }, +// numPartitions, +// ) +// .map { +// c(it._1, it._2.toArity()) +// } +// +///** +// * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. +// * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. +// */ +//@JvmName("joinArity2") +//fun JavaDStreamLike, *, *>.join( +// other: JavaDStreamLike, *, *>, +// partitioner: Partitioner, +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .join( +// other.mapToPair { it.toTuple() }, +// partitioner, +// ) +// .map { +// c(it._1, it._2.toArity()) +// } +// +///** +// * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and +// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` +// * partitions. +// */ +//@JvmName("leftOuterJoinArity2") +//fun JavaDStreamLike, *, *>.leftOuterJoin( +// other: JavaDStreamLike, *, *>, +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .leftOuterJoin( +// other.mapToPair { it.toTuple() }, +// numPartitions, +// ) +// .map { +// c(it._1, c(it._2._1, it._2._2.getOrNull())) +// } +// +///** +// * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and +// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control +// * the partitioning of each RDD. +// */ +//@JvmName("leftOuterJoinArity2") +//fun JavaDStreamLike, *, *>.leftOuterJoin( +// other: JavaDStreamLike, *, *>, +// partitioner: Partitioner, +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .leftOuterJoin( +// other.mapToPair { it.toTuple() }, +// partitioner, +// ) +// .map { +// c(it._1, c(it._2._1, it._2._2.getOrNull())) +// } +// +///** +// * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and +// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` +// * partitions. +// */ +//@JvmName("rightOuterJoinArity2") +//fun JavaDStreamLike, *, *>.rightOuterJoin( +// other: JavaDStreamLike, *, *>, +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .rightOuterJoin( +// other.mapToPair { it.toTuple() }, +// numPartitions, +// ) +// .map { +// c(it._1, c(it._2._1.getOrNull(), it._2._2)) +// } +// +///** +// * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and +// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control +// * the partitioning of each RDD. +// */ +//@JvmName("rightOuterJoinArity2") +//fun JavaDStreamLike, *, *>.rightOuterJoin( +// other: JavaDStreamLike, *, *>, +// partitioner: Partitioner, +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .rightOuterJoin( +// other.mapToPair { it.toTuple() }, +// partitioner, +// ) +// .map { +// c(it._1, c(it._2._1.getOrNull(), it._2._2)) +// } +// +///** +// * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and +// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` +// * partitions. +// */ +//@JvmName("fullOuterJoinArity2") +//fun JavaDStreamLike, *, *>.fullOuterJoin( +// other: JavaDStreamLike, *, *>, +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .fullOuterJoin( +// other.mapToPair { it.toTuple() }, +// numPartitions, +// ) +// .map { +// c(it._1, c(it._2._1.getOrNull(), it._2._2.getOrNull())) +// } +// +///** +// * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and +// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control +// * the partitioning of each RDD. +// */ +//@JvmName("fullOuterJoinArity2") +//fun JavaDStreamLike, *, *>.fullOuterJoin( +// other: JavaDStreamLike, *, *>, +// partitioner: Partitioner, +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .fullOuterJoin( +// other.mapToPair { it.toTuple() }, +// partitioner, +// ) +// .map { +// c(it._1, c(it._2._1.getOrNull(), it._2._2.getOrNull())) +// } +// +///** +// * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is +// * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". +// */ +//@JvmName("saveAsHadoopFilesArity2") +//fun JavaDStreamLike, *, *>.saveAsHadoopFiles( +// prefix: String, suffix: String, +//): Unit = +// mapToPair { it.toTuple() } +// .saveAsHadoopFiles(prefix, suffix) +// +///** +// * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is +// * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". +// */ +//@JvmName("saveAsNewAPIHadoopFilesArity2") +//fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( +// prefix: String, suffix: String, +//): Unit = +// mapToPair { it.toTuple() } +// .saveAsNewAPIHadoopFiles(prefix, suffix) +// +// +///** +// * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to +// * generate the RDDs with `numPartitions` partitions. +// */ +//@JvmName("groupByKeyPair") +//fun JavaDStreamLike, *, *>.groupByKey( +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .groupByKey(numPartitions) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying `groupByKey` on each RDD. The supplied +// * org.apache.spark.Partitioner is used to control the partitioning of each RDD. +// */ +//@JvmName("groupByKeyPair") +//fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = +// mapToPair { it.toTuple() } +// .groupByKey(partitioner) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are +// * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs +// * with `numPartitions` partitions. +// */ +//@JvmName("reduceByKeyPair") +//fun JavaDStreamLike, *, *>.reduceByKey( +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKey(reduceFunc, numPartitions) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are +// * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control +// * the partitioning of each RDD. +// */ +//@JvmName("reduceByKeyPair") +//fun JavaDStreamLike, *, *>.reduceByKey( +// partitioner: Partitioner, +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKey(reduceFunc, partitioner) +// .map { it.toPair() } +// +///** +// * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the +// * combineByKey for RDDs. Please refer to combineByKey in +// * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. +// */ +//@JvmName("combineByKeyPair") +//fun JavaDStreamLike, *, *>.combineByKey( +// createCombiner: (V) -> C, +// mergeValue: (C, V) -> C, +// mergeCombiner: (C, C) -> C, +// partitioner: Partitioner, +// mapSideCombine: Boolean = true, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. +// * Similar to `DStream.groupByKey()`, but applies it over a sliding window. +// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param numPartitions number of partitions of each RDD in the new DStream; if not specified +// * then Spark's default number of partitions will be used +// */ +//@JvmName("groupByKeyAndWindowPair") +//fun JavaDStreamLike, *, *>.groupByKeyAndWindow( +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) +// .map { it.toPair() } +// +///** +// * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. +// * Similar to `DStream.groupByKey()`, but applies it over a sliding window. +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param partitioner partitioner for controlling the partitioning of each RDD in the new +// * DStream. +// */ +//@JvmName("groupByKeyAndWindowPair") +//fun JavaDStreamLike, *, *>.groupByKeyAndWindow( +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// partitioner: Partitioner, +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to +// * `DStream.reduceByKey()` but applies it over a sliding window. Hash partitioning is used to +// * generate the RDDs with `numPartitions` partitions. +// * @param reduceFunc associative and commutative reduce function +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param numPartitions number of partitions of each RDD in the new DStream. +// */ +//@JvmName("reduceByKeyAndWindowPair") +//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to +// * `DStream.reduceByKey()`, but applies it over a sliding window. +// * @param reduceFunc associative and commutative reduce function +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param partitioner partitioner for controlling the partitioning of each RDD +// * in the new DStream. +// */ +//@JvmName("reduceByKeyAndWindowPair") +//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// partitioner: Partitioner, +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying incremental `reduceByKey` over a sliding window. +// * The reduced value of over a new window is calculated using the old window's reduced value : +// * 1. reduce the new values that entered the window (e.g., adding new counts) +// * +// * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) +// * +// * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. +// * However, it is applicable to only "invertible reduce functions". +// * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. +// * @param reduceFunc associative and commutative reduce function +// * @param invReduceFunc inverse reduce function; such that for all y, invertible x: +// * `invReduceFunc(reduceFunc(x, y), x) = y` +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param filterFunc Optional function to filter expired key-value pairs; +// * only pairs that satisfy the function are retained +// */ +//@JvmName("reduceByKeyAndWindowPair") +//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +// invReduceFunc: (V, V) -> V, +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +// filterFunc: ((Pair) -> Boolean)? = null, +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKeyAndWindow( +// reduceFunc, +// invReduceFunc, +// windowDuration, +// slideDuration, +// numPartitions, +// filterFunc?.let { +// { tuple -> +// filterFunc(tuple.toPair()) +// } +// } +// ) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying incremental `reduceByKey` over a sliding window. +// * The reduced value of over a new window is calculated using the old window's reduced value : +// * 1. reduce the new values that entered the window (e.g., adding new counts) +// * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) +// * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. +// * However, it is applicable to only "invertible reduce functions". +// * @param reduceFunc associative and commutative reduce function +// * @param invReduceFunc inverse reduce function +// * @param windowDuration width of the window; must be a multiple of this DStream's +// * batching interval +// * @param slideDuration sliding interval of the window (i.e., the interval after which +// * the new DStream will generate RDDs); must be a multiple of this +// * DStream's batching interval +// * @param partitioner partitioner for controlling the partitioning of each RDD in the new +// * DStream. +// * @param filterFunc Optional function to filter expired key-value pairs; +// * only pairs that satisfy the function are retained +// */ +//@JvmName("reduceByKeyAndWindowPair") +//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +// invReduceFunc: (V, V) -> V, +// windowDuration: Duration, +// slideDuration: Duration = dstream().slideDuration(), +// partitioner: Partitioner, +// filterFunc: ((Pair) -> Boolean)? = null, +// reduceFunc: (V, V) -> V, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .reduceByKeyAndWindow( +// reduceFunc, +// invReduceFunc, +// windowDuration, +// slideDuration, +// partitioner, +// filterFunc?.let { +// { tuple -> +// filterFunc(tuple.toPair()) +// } +// } +// ) +// .map { it.toPair() } +// +///** +// * Return a [MapWithStateDStream] by applying a function to every key-value element of +// * `this` stream, while maintaining some state data for each unique key. The mapping function +// * and other specification (e.g. partitioners, timeouts, initial state data, etc.) of this +// * transformation can be specified using `StateSpec` class. The state data is accessible in +// * as a parameter of type `State` in the mapping function. +// * +// * Example of using `mapWithState`: +// * {{{ +// * // A mapping function that maintains an integer state and return a String +// * def mappingFunction(key: String, value: Option[Int], state: State[Int]): Option[String] = { +// * // Use state.exists(), state.get(), state.update() and state.remove() +// * // to manage state, and return the necessary string +// * } +// * +// * val spec = StateSpec.function(mappingFunction).numPartitions(10) +// * +// * val mapWithStateDStream = keyValueDStream.mapWithState[StateType, MappedType](spec) +// * }}} +// * +// * @param spec Specification of this transformation +// * @tparam StateType Class type of the state data +// * @tparam MappedType Class type of the mapped data +// */ +//@JvmName("mapWithStatePair") +//fun JavaDStreamLike, *, *>.mapWithState( +// spec: StateSpec, +//): JavaMapWithStateDStream = +// mapToPair { it.toTuple() } +// .mapWithState(spec) +// +///** +// * Return a new "state" DStream where the state for each key is updated by applying +// * the given function on the previous state of the key and the new values of each key. +// * In every batch the updateFunc will be called for each state even if there are no new values. +// * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. +// * @param updateFunc State update function. If `this` function returns None, then +// * corresponding state key-value pair will be eliminated. +// * @tparam S State type +// */ +//@JvmName("updateStateByKeyPair") +//fun JavaDStreamLike, *, *>.updateStateByKey( +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +// updateFunc: (List, S?) -> S?, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .updateStateByKey( +// { list: List, s: Optional -> +// updateFunc(list, s.getOrNull()).asOptional() +// }, +// numPartitions, +// ) +// .map { it.toPair() } +// +///** +// * Return a new "state" DStream where the state for each key is updated by applying +// * the given function on the previous state of the key and the new values of each key. +// * In every batch the updateFunc will be called for each state even if there are no new values. +// * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. +// * @param updateFunc State update function. Note, that this function may generate a different +// * tuple with a different key than the input key. Therefore keys may be removed +// * or added in this way. It is up to the developer to decide whether to +// * remember the partitioner despite the key being changed. +// * @param partitioner Partitioner for controlling the partitioning of each RDD in the new +// * DStream +// * @tparam S State type +// */ +//@JvmName("updateStateByKeyPair") +//fun JavaDStreamLike, *, *>.updateStateByKey( +// partitioner: Partitioner, +// updateFunc: (List, S?) -> S?, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .updateStateByKey( +// { list: List, s: Optional -> +// updateFunc(list, s.getOrNull()).asOptional() +// }, +// partitioner, +// ) +// .map { it.toPair() } +// +///** +// * Return a new "state" DStream where the state for each key is updated by applying +// * the given function on the previous state of the key and the new values of the key. +// * org.apache.spark.Partitioner is used to control the partitioning of each RDD. +// * @param updateFunc State update function. If `this` function returns None, then +// * corresponding state key-value pair will be eliminated. +// * @param partitioner Partitioner for controlling the partitioning of each RDD in the new +// * DStream. +// * @param initialRDD initial state value of each key. +// * @tparam S State type +// */ +//@JvmName("updateStateByKeyPair") +//fun JavaDStreamLike, *, *>.updateStateByKey( +// partitioner: Partitioner, +// initialRDD: JavaRDD>, +// updateFunc: (List, S?) -> S?, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .updateStateByKey( +// { list: List, s: Optional -> +// updateFunc(list, s.getOrNull()).asOptional() +// }, +// partitioner, +// initialRDD.mapToPair { it.toTuple() }, +// ) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying a map function to the value of each key-value pairs in +// * 'this' DStream without changing the key. +// */ +//@JvmName("mapValuesPair") +//fun JavaDStreamLike, *, *>.mapValues( +// mapValuesFunc: (V) -> U, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .mapValues(mapValuesFunc) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying a flatmap function to the value of each key-value pairs in +// * 'this' DStream without changing the key. +// */ +//@JvmName("flatMapValuesPair") +//fun JavaDStreamLike, *, *>.flatMapValues( +// flatMapValuesFunc: (V) -> Iterator, +//): JavaDStream> = +// mapToPair { it.toTuple() } +// .flatMapValues(flatMapValuesFunc) +// .map { it.toPair() } +// +///** +// * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. +// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. +// */ +//@JvmName("cogroupPair") +//fun JavaDStreamLike, *, *>.cogroup( +// other: JavaDStreamLike, *, *>, +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream, Iterable>>> = +// mapToPair { it.toTuple() } +// .cogroup( +// other.mapToPair { it.toTuple() }, +// numPartitions, +// ) +// .map { +// Pair(it._1, it._2.toPair()) +// } +// +///** +// * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. +// * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. +// */ +//@JvmName("cogroupPair") +//fun JavaDStreamLike, *, *>.cogroup( +// other: JavaDStreamLike, *, *>, +// partitioner: Partitioner, +//): JavaDStream, Iterable>>> = +// mapToPair { it.toTuple() } +// .cogroup( +// other.mapToPair { it.toTuple() }, +// partitioner, +// ) +// .map { +// Pair(it._1, it._2.toPair()) +// } +// +///** +// * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. +// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. +// */ +//@JvmName("joinPair") +//fun JavaDStreamLike, *, *>.join( +// other: JavaDStreamLike, *, *>, +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .join( +// other.mapToPair { it.toTuple() }, +// numPartitions, +// ) +// .map { +// Pair(it._1, it._2.toPair()) +// } +// +///** +// * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. +// * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. +// */ +//@JvmName("joinPair") +//fun JavaDStreamLike, *, *>.join( +// other: JavaDStreamLike, *, *>, +// partitioner: Partitioner, +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .join( +// other.mapToPair { it.toTuple() }, +// partitioner, +// ) +// .map { +// Pair(it._1, it._2.toPair()) +// } +// +///** +// * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and +// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` +// * partitions. +// */ +//@JvmName("leftOuterJoinPair") +//fun JavaDStreamLike, *, *>.leftOuterJoin( +// other: JavaDStreamLike, *, *>, +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .leftOuterJoin( +// other.mapToPair { it.toTuple() }, +// numPartitions, +// ) +// .map { +// Pair(it._1, Pair(it._2._1, it._2._2.getOrNull())) +// } +// +///** +// * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and +// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control +// * the partitioning of each RDD. +// */ +//@JvmName("leftOuterJoinPair") +//fun JavaDStreamLike, *, *>.leftOuterJoin( +// other: JavaDStreamLike, *, *>, +// partitioner: Partitioner, +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .leftOuterJoin( +// other.mapToPair { it.toTuple() }, +// partitioner, +// ) +// .map { +// Pair(it._1, Pair(it._2._1, it._2._2.getOrNull())) +// } +// +///** +// * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and +// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` +// * partitions. +// */ +//@JvmName("rightOuterJoinPair") +//fun JavaDStreamLike, *, *>.rightOuterJoin( +// other: JavaDStreamLike, *, *>, +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .rightOuterJoin( +// other.mapToPair { it.toTuple() }, +// numPartitions, +// ) +// .map { +// Pair(it._1, Pair(it._2._1.getOrNull(), it._2._2)) +// } +// +///** +// * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and +// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control +// * the partitioning of each RDD. +// */ +//@JvmName("rightOuterJoinPair") +//fun JavaDStreamLike, *, *>.rightOuterJoin( +// other: JavaDStreamLike, *, *>, +// partitioner: Partitioner, +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .rightOuterJoin( +// other.mapToPair { it.toTuple() }, +// partitioner, +// ) +// .map { +// Pair(it._1, Pair(it._2._1.getOrNull(), it._2._2)) +// } +// +///** +// * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and +// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` +// * partitions. +// */ +//@JvmName("fullOuterJoinPair") +//fun JavaDStreamLike, *, *>.fullOuterJoin( +// other: JavaDStreamLike, *, *>, +// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .fullOuterJoin( +// other.mapToPair { it.toTuple() }, +// numPartitions, +// ) +// .map { +// Pair(it._1, Pair(it._2._1.getOrNull(), it._2._2.getOrNull())) +// } +// +///** +// * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and +// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control +// * the partitioning of each RDD. +// */ +//@JvmName("fullOuterJoinPair") +//fun JavaDStreamLike, *, *>.fullOuterJoin( +// other: JavaDStreamLike, *, *>, +// partitioner: Partitioner, +//): JavaDStream>> = +// mapToPair { it.toTuple() } +// .fullOuterJoin( +// other.mapToPair { it.toTuple() }, +// partitioner, +// ) +// .map { +// Pair(it._1, Pair(it._2._1.getOrNull(), it._2._2.getOrNull())) +// } +// +///** +// * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is +// * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". +// */ +//@JvmName("saveAsHadoopFilesPair") +//fun JavaDStreamLike, *, *>.saveAsHadoopFiles( +// prefix: String, suffix: String, +//): Unit = +// mapToPair { it.toTuple() } +// .saveAsHadoopFiles(prefix, suffix) +// +///** +// * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is +// * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". +// */ +//@JvmName("saveAsNewAPIHadoopFilesPair") +//fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( +// prefix: String, suffix: String, +//): Unit = +// mapToPair { it.toTuple() } +// .saveAsNewAPIHadoopFiles(prefix, suffix) /** @@ -1231,22 +1222,22 @@ fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( * generate the RDDs with `numPartitions` partitions. */ @JvmName("groupByKeyTuple2") -fun JavaDStreamLike, *, *>.groupByKey( +fun JavaDStream>.groupByKey( numPartitions: Int = dstream().ssc().sc().defaultParallelism(), ): JavaDStream>> = - mapToPair { it } + toPairDStream() .groupByKey(numPartitions) - .map { it } + .toJavaDStream() /** * Return a new DStream by applying `groupByKey` on each RDD. The supplied * org.apache.spark.Partitioner is used to control the partitioning of each RDD. */ @JvmName("groupByKeyTuple2") -fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = - mapToPair { it } +fun JavaDStream>.groupByKey(partitioner: Partitioner): JavaDStream>> = + toPairDStream() .groupByKey(partitioner) - .map { it } + .toJavaDStream() /** * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are @@ -1254,13 +1245,13 @@ fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partition * with `numPartitions` partitions. */ @JvmName("reduceByKeyTuple2") -fun JavaDStreamLike, *, *>.reduceByKey( +fun JavaDStream>.reduceByKey( numPartitions: Int = dstream().ssc().sc().defaultParallelism(), reduceFunc: (V, V) -> V, ): JavaDStream> = - mapToPair { it } + toPairDStream() .reduceByKey(reduceFunc, numPartitions) - .map { it } + .toJavaDStream() /** * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are @@ -1268,13 +1259,13 @@ fun JavaDStreamLike, *, *>.reduceByKey( * the partitioning of each RDD. */ @JvmName("reduceByKeyTuple2") -fun JavaDStreamLike, *, *>.reduceByKey( +fun JavaDStream>.reduceByKey( partitioner: Partitioner, reduceFunc: (V, V) -> V, ): JavaDStream> = - mapToPair { it } + toPairDStream() .reduceByKey(reduceFunc, partitioner) - .map { it } + .toJavaDStream() /** * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the @@ -1282,16 +1273,16 @@ fun JavaDStreamLike, *, *>.reduceByKey( * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. */ @JvmName("combineByKeyTuple2") -fun JavaDStreamLike, *, *>.combineByKey( +fun JavaDStream>.combineByKey( createCombiner: (V) -> C, mergeValue: (C, V) -> C, mergeCombiner: (C, C) -> C, partitioner: Partitioner, mapSideCombine: Boolean = true, ): JavaDStream> = - mapToPair { it } + toPairDStream() .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) - .map { it } + .toJavaDStream() /** * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. @@ -1306,14 +1297,14 @@ fun JavaDStreamLike, *, *>.combineByKey( * then Spark's default number of partitions will be used */ @JvmName("groupByKeyAndWindowTuple2") -fun JavaDStreamLike, *, *>.groupByKeyAndWindow( +fun JavaDStream>.groupByKeyAndWindow( windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), numPartitions: Int = dstream().ssc().sc().defaultParallelism(), ): JavaDStream>> = - mapToPair { it } + toPairDStream() .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) - .map { it } + .toJavaDStream() /** * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. @@ -1327,14 +1318,14 @@ fun JavaDStreamLike, *, *>.groupByKeyAndWindow( * DStream. */ @JvmName("groupByKeyAndWindowTuple2") -fun JavaDStreamLike, *, *>.groupByKeyAndWindow( +fun JavaDStream>.groupByKeyAndWindow( windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), partitioner: Partitioner, ): JavaDStream>> = - mapToPair { it } + toPairDStream() .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) - .map { it } + .toJavaDStream() /** * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to @@ -1349,15 +1340,15 @@ fun JavaDStreamLike, *, *>.groupByKeyAndWindow( * @param numPartitions number of partitions of each RDD in the new DStream. */ @JvmName("reduceByKeyAndWindowTuple2") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +fun JavaDStream>.reduceByKeyAndWindow( windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), numPartitions: Int = dstream().ssc().sc().defaultParallelism(), reduceFunc: (V, V) -> V, ): JavaDStream> = - mapToPair { it } + toPairDStream() .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) - .map { it } + .toJavaDStream() /** * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to @@ -1372,15 +1363,15 @@ fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( * in the new DStream. */ @JvmName("reduceByKeyAndWindowTuple2") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +fun JavaDStream>.reduceByKeyAndWindow( windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), partitioner: Partitioner, reduceFunc: (V, V) -> V, ): JavaDStream> = - mapToPair { it } + toPairDStream() .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) - .map { it } + .toJavaDStream() /** * Return a new DStream by applying incremental `reduceByKey` over a sliding window. @@ -1404,7 +1395,7 @@ fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( * only pairs that satisfy the function are retained */ @JvmName("reduceByKeyAndWindowTuple2") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +fun JavaDStream>.reduceByKeyAndWindow( invReduceFunc: (V, V) -> V, windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), @@ -1412,20 +1403,20 @@ fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( filterFunc: ((Tuple2) -> Boolean)? = null, reduceFunc: (V, V) -> V, ): JavaDStream> = - mapToPair { it } + toPairDStream() .reduceByKeyAndWindow( - reduceFunc, - invReduceFunc, - windowDuration, - slideDuration, - numPartitions, - filterFunc?.let { + /* reduceFunc = */ reduceFunc, + /* invReduceFunc = */ invReduceFunc, + /* windowDuration = */ windowDuration, + /* slideDuration = */ slideDuration, + /* numPartitions = */ numPartitions, + /* filterFunc = */ filterFunc?.let { { tuple -> filterFunc(tuple) } } ) - .map { it } + .toJavaDStream() /** * Return a new DStream by applying incremental `reduceByKey` over a sliding window. @@ -1447,7 +1438,7 @@ fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( * only pairs that satisfy the function are retained */ @JvmName("reduceByKeyAndWindowTuple2") -fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( +fun JavaDStream>.reduceByKeyAndWindow( invReduceFunc: (V, V) -> V, windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), @@ -1455,20 +1446,20 @@ fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( filterFunc: ((Tuple2) -> Boolean)? = null, reduceFunc: (V, V) -> V, ): JavaDStream> = - mapToPair { it } + toPairDStream() .reduceByKeyAndWindow( - reduceFunc, - invReduceFunc, - windowDuration, - slideDuration, - partitioner, - filterFunc?.let { + /* reduceFunc = */ reduceFunc, + /* invReduceFunc = */ invReduceFunc, + /* windowDuration = */ windowDuration, + /* slideDuration = */ slideDuration, + /* partitioner = */ partitioner, + /* filterFunc = */ filterFunc?.let { { tuple -> filterFunc(tuple) } } ) - .map { it } + .toJavaDStream() /** * Return a [MapWithStateDStream] by applying a function to every key-value element of @@ -1495,34 +1486,33 @@ fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( * @tparam MappedType Class type of the mapped data */ @JvmName("mapWithStateTuple2") -fun JavaDStreamLike, *, *>.mapWithState( +fun JavaDStream>.mapWithState( spec: StateSpec, ): JavaMapWithStateDStream = - mapToPair { it } - .mapWithState(spec) + toPairDStream().mapWithState(spec) /** * Return a new "state" DStream where the state for each key is updated by applying * the given function on the previous state of the key and the new values of each key. * In every batch the updateFunc will be called for each state even if there are no new values. * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. - * @param updateFunc State update function. If `this` function returns None, then + * @param updateFunc State update function. If `this` function returns `null`, then * corresponding state key-value pair will be eliminated. * @tparam S State type */ @JvmName("updateStateByKeyTuple2") -fun JavaDStreamLike, *, *>.updateStateByKey( +fun JavaDStream>.updateStateByKey( numPartitions: Int = dstream().ssc().sc().defaultParallelism(), updateFunc: (List, S?) -> S?, ): JavaDStream> = - mapToPair { it } + toPairDStream() .updateStateByKey( { list: List, s: Optional -> - updateFunc(list, s.toNullable()).toOptional() + updateFunc(list, s.getOrNull()).asOptional() }, numPartitions, ) - .map { it } + .toJavaDStream() /** * Return a new "state" DStream where the state for each key is updated by applying @@ -1538,24 +1528,24 @@ fun JavaDStreamLike, *, *>.updateStateByKey( * @tparam S State type */ @JvmName("updateStateByKeyTuple2") -fun JavaDStreamLike, *, *>.updateStateByKey( +fun JavaDStream>.updateStateByKey( partitioner: Partitioner, updateFunc: (List, S?) -> S?, ): JavaDStream> = - mapToPair { it } + toPairDStream() .updateStateByKey( { list: List, s: Optional -> - updateFunc(list, s.toNullable()).toOptional() + updateFunc(list, s.getOrNull()).asOptional() }, partitioner, ) - .map { it } + .toJavaDStream() /** * Return a new "state" DStream where the state for each key is updated by applying * the given function on the previous state of the key and the new values of the key. * org.apache.spark.Partitioner is used to control the partitioning of each RDD. - * @param updateFunc State update function. If `this` function returns None, then + * @param updateFunc State update function. If `this` function returns `null`, then * corresponding state key-value pair will be eliminated. * @param partitioner Partitioner for controlling the partitioning of each RDD in the new * DStream. @@ -1563,116 +1553,110 @@ fun JavaDStreamLike, *, *>.updateStateByKey( * @tparam S State type */ @JvmName("updateStateByKeyTuple2") -fun JavaDStreamLike, *, *>.updateStateByKey( +fun JavaDStream>.updateStateByKey( partitioner: Partitioner, initialRDD: JavaRDD>, updateFunc: (List, S?) -> S?, ): JavaDStream> = - mapToPair { it } + toPairDStream() .updateStateByKey( { list: List, s: Optional -> - updateFunc(list, s.toNullable()).toOptional() + updateFunc(list, s.getOrNull()).asOptional() }, partitioner, - initialRDD.mapToPair { it }, + initialRDD.toPairRDD(), ) - .map { it } + .toJavaDStream() + /** * Return a new DStream by applying a map function to the value of each key-value pairs in * 'this' DStream without changing the key. */ @JvmName("mapValuesTuple2") -fun JavaDStreamLike, *, *>.mapValues( +fun JavaDStream>.mapValues( mapValuesFunc: (V) -> U, ): JavaDStream> = - mapToPair { it } + toPairDStream() .mapValues(mapValuesFunc) - .map { it } + .toJavaDStream() /** * Return a new DStream by applying a flatmap function to the value of each key-value pairs in * 'this' DStream without changing the key. */ @JvmName("flatMapValuesTuple2") -fun JavaDStreamLike, *, *>.flatMapValues( +fun JavaDStream>.flatMapValues( flatMapValuesFunc: (V) -> Iterator, ): JavaDStream> = - mapToPair { it } + toPairDStream() .flatMapValues(flatMapValuesFunc) - .map { it } + .toJavaDStream() /** * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. */ @JvmName("cogroupTuple2") -fun JavaDStreamLike, *, *>.cogroup( - other: JavaDStreamLike, *, *>, +fun JavaDStream>.cogroup( + other: JavaDStream>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), ): JavaDStream, Iterable>>> = - mapToPair { it } + toPairDStream() .cogroup( - other.mapToPair { it }, + other.toPairDStream(), numPartitions, ) - .map { - Tuple2(it._1, it._2) - } + .toJavaDStream() + /** * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. */ @JvmName("cogroupTuple2") -fun JavaDStreamLike, *, *>.cogroup( - other: JavaDStreamLike, *, *>, +fun JavaDStream>.cogroup( + other: JavaDStream>, partitioner: Partitioner, ): JavaDStream, Iterable>>> = - mapToPair { it } + toPairDStream() .cogroup( - other.mapToPair { it }, + other.toPairDStream(), partitioner, ) - .map { - Tuple2(it._1, it._2) - } + .toJavaDStream() /** * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. */ @JvmName("joinTuple2") -fun JavaDStreamLike, *, *>.join( - other: JavaDStreamLike, *, *>, +fun JavaDStream>.join( + other: JavaDStream>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), ): JavaDStream>> = - mapToPair { it } + toPairDStream() .join( - other.mapToPair { it }, + other.toPairDStream(), numPartitions, ) - .map { - Tuple2(it._1, it._2) - } + .toJavaDStream() /** * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. */ @JvmName("joinTuple2") -fun JavaDStreamLike, *, *>.join( - other: JavaDStreamLike, *, *>, +fun JavaDStream>.join( + other: JavaDStream>, partitioner: Partitioner, ): JavaDStream>> = - mapToPair { it } + toPairDStream() .join( - other.mapToPair { it }, + other.toPairDStream(), partitioner, ) - .map { - Tuple2(it._1, it._2) - } + .toJavaDStream() /** * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and @@ -1680,18 +1664,16 @@ fun JavaDStreamLike, *, *>.join( * partitions. */ @JvmName("leftOuterJoinTuple2") -fun JavaDStreamLike, *, *>.leftOuterJoin( - other: JavaDStreamLike, *, *>, +fun JavaDStream>.leftOuterJoin( + other: JavaDStream>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it } +): JavaDStream>>> = + toPairDStream() .leftOuterJoin( - other.mapToPair { it }, + other.toPairDStream(), numPartitions, ) - .map { - Tuple2(it._1, Tuple2(it._2._1, it._2._2.toNullable())) - } + .toJavaDStream() /** * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and @@ -1699,18 +1681,16 @@ fun JavaDStreamLike, *, *>.leftOuterJoin( * the partitioning of each RDD. */ @JvmName("leftOuterJoinTuple2") -fun JavaDStreamLike, *, *>.leftOuterJoin( - other: JavaDStreamLike, *, *>, +fun JavaDStream>.leftOuterJoin( + other: JavaDStream>, partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it } +): JavaDStream>>> = + toPairDStream() .leftOuterJoin( - other.mapToPair { it }, + other.toPairDStream(), partitioner, ) - .map { - Tuple2(it._1, Tuple2(it._2._1, it._2._2.toNullable())) - } + .toJavaDStream() /** * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and @@ -1718,18 +1698,16 @@ fun JavaDStreamLike, *, *>.leftOuterJoin( * partitions. */ @JvmName("rightOuterJoinTuple2") -fun JavaDStreamLike, *, *>.rightOuterJoin( - other: JavaDStreamLike, *, *>, +fun JavaDStream>.rightOuterJoin( + other: JavaDStream>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it } +): JavaDStream, W>>> = + toPairDStream() .rightOuterJoin( - other.mapToPair { it }, + other.toPairDStream(), numPartitions, ) - .map { - Tuple2(it._1, Tuple2(it._2._1.toNullable(), it._2._2)) - } + .toJavaDStream() /** * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and @@ -1737,18 +1715,16 @@ fun JavaDStreamLike, *, *>.rightOuterJoin( * the partitioning of each RDD. */ @JvmName("rightOuterJoinTuple2") -fun JavaDStreamLike, *, *>.rightOuterJoin( - other: JavaDStreamLike, *, *>, +fun JavaDStream>.rightOuterJoin( + other: JavaDStream>, partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it } +): JavaDStream, W>>> = + toPairDStream() .rightOuterJoin( - other.mapToPair { it }, + other.toPairDStream(), partitioner, ) - .map { - Tuple2(it._1, Tuple2(it._2._1.toNullable(), it._2._2)) - } + .toJavaDStream() /** * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and @@ -1756,18 +1732,16 @@ fun JavaDStreamLike, *, *>.rightOuterJoin( * partitions. */ @JvmName("fullOuterJoinTuple2") -fun JavaDStreamLike, *, *>.fullOuterJoin( - other: JavaDStreamLike, *, *>, +fun JavaDStream>.fullOuterJoin( + other: JavaDStream>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -): JavaDStream>> = - mapToPair { it } +): JavaDStream, Optional>>> = + toPairDStream() .fullOuterJoin( - other.mapToPair { it }, + other.toPairDStream(), numPartitions, ) - .map { - Tuple2(it._1, Tuple2(it._2._1.toNullable(), it._2._2.toNullable())) - } + .toJavaDStream() /** * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and @@ -1775,37 +1749,33 @@ fun JavaDStreamLike, *, *>.fullOuterJoin( * the partitioning of each RDD. */ @JvmName("fullOuterJoinTuple2") -fun JavaDStreamLike, *, *>.fullOuterJoin( - other: JavaDStreamLike, *, *>, +fun JavaDStream>.fullOuterJoin( + other: JavaDStream>, partitioner: Partitioner, -): JavaDStream>> = - mapToPair { it } +): JavaDStream, Optional>>> = + toPairDStream() .fullOuterJoin( - other.mapToPair { it }, + other.toPairDStream(), partitioner, ) - .map { - Tuple2(it._1, Tuple2(it._2._1.toNullable(), it._2._2.toNullable())) - } + .toJavaDStream() /** * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". */ @JvmName("saveAsHadoopFilesTuple2") -fun JavaDStreamLike, *, *>.saveAsHadoopFiles( - prefix: String, suffix: String, -): Unit = - mapToPair { it } - .saveAsHadoopFiles(prefix, suffix) +fun JavaDStream>.saveAsHadoopFiles( + prefix: String, + suffix: String, +): Unit = toPairDStream().saveAsHadoopFiles(prefix, suffix) /** * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". */ @JvmName("saveAsNewAPIHadoopFilesTuple2") -fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( - prefix: String, suffix: String, -): Unit = - mapToPair { it } - .saveAsNewAPIHadoopFiles(prefix, suffix) +fun JavaDStream>.saveAsNewAPIHadoopFiles( + prefix: String, + suffix: String, +): Unit = toPairDStream().saveAsNewAPIHadoopFiles(prefix, suffix) From aff3edcdf3ac3c2accfa0d40e29a3ebed036c116 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 23 Mar 2022 13:59:14 +0100 Subject: [PATCH 099/213] added deprecation notice to arities --- .../jetbrains/kotlinx/spark/api/Arities.kt | 142 ++++++++++++++---- .../kotlinx/spark/api/DatasetFunctionTest.kt | 19 +++ 2 files changed, 131 insertions(+), 30 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt index 6dcb1666..ddd3fc2e 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt @@ -44,58 +44,140 @@ package org.jetbrains.kotlinx.spark.api import java.io.Serializable +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple1(_1)", "scala.Tuple1")) data class Arity1(val _1: T1): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple2(_1, _2)", "scala.Tuple2")) data class Arity2(val _1: T1, val _2: T2): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple3(_1, _2, _3)", "scala.Tuple3")) data class Arity3(val _1: T1, val _2: T2, val _3: T3): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple4(_1, _2, _3, _4)", "scala.Tuple4")) data class Arity4(val _1: T1, val _2: T2, val _3: T3, val _4: T4): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple5(_1, _2, _3, _4, _5)", "scala.Tuple5")) data class Arity5(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple6(_1, _2, _3, _4, _5, _6)", "scala.Tuple6")) data class Arity6(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple7(_1, _2, _3, _4, _5, _6, _7)", "scala.Tuple7")) data class Arity7(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple8(_1, _2, _3, _4, _5, _6, _7, _8)", "scala.Tuple8")) data class Arity8(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple9(_1, _2, _3, _4, _5, _6, _7, _8, _9)", "scala.Tuple9")) data class Arity9(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple10(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10)", "scala.Tuple10")) data class Arity10(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple11(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11)", "scala.Tuple11")) data class Arity11(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple12(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12)", "scala.Tuple12")) data class Arity12(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple13(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13)", "scala.Tuple13")) data class Arity13(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple14(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14)", "scala.Tuple14")) data class Arity14(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple15(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)", "scala.Tuple15")) data class Arity15(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)", "scala.Tuple16")) data class Arity16(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple17(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)", "scala.Tuple17")) data class Arity17(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple18(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)", "scala.Tuple18")) data class Arity18(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple19(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)", "scala.Tuple19")) data class Arity19(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple20(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)", "scala.Tuple20")) data class Arity20(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)", "scala.Tuple21")) data class Arity21(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21): Serializable + +@Deprecated("Use Scala tuples instead.", ReplaceWith("Tuple22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)", "scala.Tuple22")) data class Arity22(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22): Serializable -data class Arity23(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23): Serializable -data class Arity24(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24): Serializable -data class Arity25(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24, val _25: T25): Serializable -data class Arity26(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24, val _25: T25, val _26: T26): Serializable -fun c(_1: T1) = Arity1(_1) -fun c(_1: T1, _2: T2) = Arity2(_1, _2) -fun c(_1: T1, _2: T2, _3: T3) = Arity3(_1, _2, _3) -fun c(_1: T1, _2: T2, _3: T3, _4: T4) = Arity4(_1, _2, _3, _4) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5) = Arity5(_1, _2, _3, _4, _5) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6) = Arity6(_1, _2, _3, _4, _5, _6) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7) = Arity7(_1, _2, _3, _4, _5, _6, _7) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8) = Arity8(_1, _2, _3, _4, _5, _6, _7, _8) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9) = Arity9(_1, _2, _3, _4, _5, _6, _7, _8, _9) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10) = Arity10(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11) = Arity11(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12) = Arity12(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13) = Arity13(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14) = Arity14(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15) = Arity15(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16) = Arity16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17) = Arity17(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18) = Arity18(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19) = Arity19(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20) = Arity20(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21) = Arity21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22) = Arity22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23) = Arity23(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23, _24: T24) = Arity24(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23, _24: T24, _25: T25) = Arity25(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25) -fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23, _24: T24, _25: T25, _26: T26) = Arity26(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26) + + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1)")) +fun c(_1: T1): Arity1 = Arity1(_1) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2)")) +fun c(_1: T1, _2: T2): Arity2 = Arity2(_1, _2) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3)")) +fun c(_1: T1, _2: T2, _3: T3): Arity3 = Arity3(_1, _2, _3) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4): Arity4 = Arity4(_1, _2, _3, _4) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5): Arity5 = Arity5(_1, _2, _3, _4, _5) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6): Arity6 = Arity6(_1, _2, _3, _4, _5, _6) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7): Arity7 = Arity7(_1, _2, _3, _4, _5, _6, _7) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8): Arity8 = Arity8(_1, _2, _3, _4, _5, _6, _7, _8) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9): Arity9 = Arity9(_1, _2, _3, _4, _5, _6, _7, _8, _9) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10): Arity10 = Arity10(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11): Arity11 = Arity11(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12): Arity12 = Arity12(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13): Arity13 = Arity13(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14): Arity14 = Arity14(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15): Arity15 = Arity15(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16): Arity16 = Arity16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17): Arity17 = Arity17(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18): Arity18 = Arity18(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19): Arity19 = Arity19(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20): Arity20 = Arity20(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21): Arity21 = Arity21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21) + +@Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)")) +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22): Arity22 = Arity22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) + + infix operator fun Arity1.plus(that: Arity1) = Arity2(this._1, that._1) infix operator fun Arity1.plus(that: Arity2) = Arity3(this._1, that._1, that._2) infix operator fun Arity2.plus(that: Arity1) = Arity3(this._1, this._2, that._1) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt index 495948e3..3354338b 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api import ch.tutteli.atrium.api.fluent.en_GB.* From c19d6d83ca04ab88522e65d5be51e14972bfc8da Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 23 Mar 2022 16:00:58 +0100 Subject: [PATCH 100/213] adding all tuple functions from scalaTuplesInKotlin, starting library conversion --- .../jetbrains/kotlinx/spark/api/Arities.kt | 24 + .../kotlinx/spark/api/Conversions.kt | 52 +- .../jetbrains/kotlinx/spark/api/Dataset.kt | 50 +- .../api/tuples/DestructuredTupleBuilders.kt | 79 +++ .../kotlinx/spark/api/tuples/DropFunctions.kt | 95 ++++ .../spark/api/tuples/ProductDestructuring.kt | 306 +++++++++++ .../spark/api/tuples/ProductExtensions.kt | 155 ++++++ .../api/tuples/ProductTextualAccessors.kt | 183 ++++++ .../api/tuples/SameTypeProductExtensions.kt | 169 ++++++ .../kotlinx/spark/api/tuples/TupleBuilders.kt | 192 +++++++ .../spark/api/tuples/TupleConcatenation.kt | 519 ++++++++++++++++++ .../kotlinx/spark/api/tuples/TupleCopy.kt | 47 ++ .../spark/api/tuples/TupleExtending.kt | 155 ++++++ 13 files changed, 2000 insertions(+), 26 deletions(-) create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductDestructuring.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt index ddd3fc2e..0a671e78 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt @@ -110,6 +110,18 @@ data class Arity21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22)", "scala.Tuple22")) data class Arity22(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22): Serializable +@Deprecated("Use Scala tuples instead. They only reach 22 values.") +data class Arity23(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23): Serializable + +@Deprecated("Use Scala tuples instead. They only reach 22 values.") +data class Arity24(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24): Serializable + +@Deprecated("Use Scala tuples instead. They only reach 22 values.") +data class Arity25(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24, val _25: T25): Serializable + +@Deprecated("Use Scala tuples instead. They only reach 22 values.") +data class Arity26(val _1: T1, val _2: T2, val _3: T3, val _4: T4, val _5: T5, val _6: T6, val _7: T7, val _8: T8, val _9: T9, val _10: T10, val _11: T11, val _12: T12, val _13: T13, val _14: T14, val _15: T15, val _16: T16, val _17: T17, val _18: T18, val _19: T19, val _20: T20, val _21: T21, val _22: T22, val _23: T23, val _24: T24, val _25: T25, val _26: T26): Serializable + @Deprecated("Use Scala tuples instead.", ReplaceWith("t(_1)")) fun c(_1: T1): Arity1 = Arity1(_1) @@ -177,6 +189,18 @@ fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22): Arity22 = Arity22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) +@Deprecated("Use Scala tuples instead. They only reach 22 values.") +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23): Arity23 = Arity23(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23) + +@Deprecated("Use Scala tuples instead. They only reach 22 values.") +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23, _24: T24): Arity24 = Arity24(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24) + +@Deprecated("Use Scala tuples instead. They only reach 22 values.") +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23, _24: T24, _25: T25): Arity25 = Arity25(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25) + +@Deprecated("Use Scala tuples instead. They only reach 22 values.") +fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23, _24: T24, _25: T25, _26: T26): Arity26 = Arity26(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26) + infix operator fun Arity1.plus(that: Arity1) = Arity2(this._1, that._1) infix operator fun Arity1.plus(that: Arity2) = Arity3(this._1, that._1, that._2) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index 8b67a1bc..7a9fe75f 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -175,6 +175,7 @@ fun Pair.toTuple(): Tuple2 = Tuple2(first, seco /** * Returns a new [Arity2] based on the arguments in the current [Pair]. */ +@Deprecated("Use Scala tuples instead.", ReplaceWith("this.toTuple()", "scala.Tuple2")) fun Pair.toArity(): Arity2 = Arity2(first, second) /** @@ -185,17 +186,19 @@ fun Tuple2.toPair(): Pair = Pair(_1(), _2()) /** * Returns a new [Pair] based on the arguments in the current [Arity2]. */ +@Deprecated("Use Scala tuples instead.") fun Arity2.toPair(): Pair = Pair(_1, _2) /** * Returns a new [Tuple3] based on the arguments in the current [Triple]. */ -fun Triple.toTuple(): Tuple3 = Tuple3(first, second, third) +//fun Triple.toTuple(): Tuple3 = Tuple3(first, second, third) /** * Returns a new [Arity3] based on the arguments in the current [Triple]. */ +@Deprecated("Use Scala tuples instead.", ReplaceWith("this.toTuple()", "scala.Tuple3")) fun Triple.toArity(): Arity3 = Arity3(first, second, third) /** @@ -206,225 +209,270 @@ fun Tuple3.toTriple(): Triple = Triple Arity3.toTriple(): Triple = Triple(_1, _2, _3) /** * Returns a new Arity1 based on this Tuple1. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple1.toArity(): Arity1 = Arity1(this._1()) /** * Returns a new Arity2 based on this Tuple2. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple2.toArity(): Arity2 = Arity2(this._1(), this._2()) /** * Returns a new Arity3 based on this Tuple3. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple3.toArity(): Arity3 = Arity3(this._1(), this._2(), this._3()) /** * Returns a new Arity4 based on this Tuple4. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple4.toArity(): Arity4 = Arity4(this._1(), this._2(), this._3(), this._4()) /** * Returns a new Arity5 based on this Tuple5. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple5.toArity(): Arity5 = Arity5(this._1(), this._2(), this._3(), this._4(), this._5()) /** * Returns a new Arity6 based on this Tuple6. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple6.toArity(): Arity6 = Arity6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) /** * Returns a new Arity7 based on this Tuple7. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple7.toArity(): Arity7 = Arity7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) /** * Returns a new Arity8 based on this Tuple8. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple8.toArity(): Arity8 = Arity8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) /** * Returns a new Arity9 based on this Tuple9. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple9.toArity(): Arity9 = Arity9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) /** * Returns a new Arity10 based on this Tuple10. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple10.toArity(): Arity10 = Arity10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) /** * Returns a new Arity11 based on this Tuple11. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple11.toArity(): Arity11 = Arity11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) /** * Returns a new Arity12 based on this Tuple12. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple12.toArity(): Arity12 = Arity12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) /** * Returns a new Arity13 based on this Tuple13. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple13.toArity(): Arity13 = Arity13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) /** * Returns a new Arity14 based on this Tuple14. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple14.toArity(): Arity14 = Arity14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) /** * Returns a new Arity15 based on this Tuple15. - **/ + **/@Deprecated("Use Scala tuples instead.") + fun Tuple15.toArity(): Arity15 = Arity15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) /** * Returns a new Arity16 based on this Tuple16. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple16.toArity(): Arity16 = Arity16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) /** * Returns a new Arity17 based on this Tuple17. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple17.toArity(): Arity17 = Arity17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) /** * Returns a new Arity18 based on this Tuple18. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple18.toArity(): Arity18 = Arity18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) /** * Returns a new Arity19 based on this Tuple19. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple19.toArity(): Arity19 = Arity19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) /** * Returns a new Arity20 based on this Tuple20. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple20.toArity(): Arity20 = Arity20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) /** * Returns a new Arity21 based on this Tuple21. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple21.toArity(): Arity21 = Arity21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) /** * Returns a new Arity22 based on this Tuple22. **/ +@Deprecated("Use Scala tuples instead.") fun Tuple22.toArity(): Arity22 = Arity22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) /** * Returns a new Tuple1 based on this Arity1. **/ +@Deprecated("Use Scala tuples instead.") fun Arity1.toTuple(): Tuple1 = Tuple1(this._1) /** * Returns a new Tuple2 based on this Arity2. **/ +@Deprecated("Use Scala tuples instead.") fun Arity2.toTuple(): Tuple2 = Tuple2(this._1, this._2) /** * Returns a new Tuple3 based on this Arity3. **/ +@Deprecated("Use Scala tuples instead.") fun Arity3.toTuple(): Tuple3 = Tuple3(this._1, this._2, this._3) /** * Returns a new Tuple4 based on this Arity4. **/ +@Deprecated("Use Scala tuples instead.") fun Arity4.toTuple(): Tuple4 = Tuple4(this._1, this._2, this._3, this._4) /** * Returns a new Tuple5 based on this Arity5. **/ +@Deprecated("Use Scala tuples instead.") fun Arity5.toTuple(): Tuple5 = Tuple5(this._1, this._2, this._3, this._4, this._5) /** * Returns a new Tuple6 based on this Arity6. **/ +@Deprecated("Use Scala tuples instead.") fun Arity6.toTuple(): Tuple6 = Tuple6(this._1, this._2, this._3, this._4, this._5, this._6) /** * Returns a new Tuple7 based on this Arity7. **/ +@Deprecated("Use Scala tuples instead.") fun Arity7.toTuple(): Tuple7 = Tuple7(this._1, this._2, this._3, this._4, this._5, this._6, this._7) /** * Returns a new Tuple8 based on this Arity8. **/ +@Deprecated("Use Scala tuples instead.") fun Arity8.toTuple(): Tuple8 = Tuple8(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8) /** * Returns a new Tuple9 based on this Arity9. **/ +@Deprecated("Use Scala tuples instead.") fun Arity9.toTuple(): Tuple9 = Tuple9(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9) /** * Returns a new Tuple10 based on this Arity10. **/ +@Deprecated("Use Scala tuples instead.") fun Arity10.toTuple(): Tuple10 = Tuple10(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10) /** * Returns a new Tuple11 based on this Arity11. **/ +@Deprecated("Use Scala tuples instead.") fun Arity11.toTuple(): Tuple11 = Tuple11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11) /** * Returns a new Tuple12 based on this Arity12. **/ +@Deprecated("Use Scala tuples instead.") fun Arity12.toTuple(): Tuple12 = Tuple12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12) /** * Returns a new Tuple13 based on this Arity13. **/ +@Deprecated("Use Scala tuples instead.") fun Arity13.toTuple(): Tuple13 = Tuple13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13) /** * Returns a new Tuple14 based on this Arity14. **/ +@Deprecated("Use Scala tuples instead.") fun Arity14.toTuple(): Tuple14 = Tuple14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14) /** * Returns a new Tuple15 based on this Arity15. **/ +@Deprecated("Use Scala tuples instead.") fun Arity15.toTuple(): Tuple15 = Tuple15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15) /** * Returns a new Tuple16 based on this Arity16. **/ +@Deprecated("Use Scala tuples instead.") fun Arity16.toTuple(): Tuple16 = Tuple16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16) /** * Returns a new Tuple17 based on this Arity17. **/ +@Deprecated("Use Scala tuples instead.") fun Arity17.toTuple(): Tuple17 = Tuple17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17) /** * Returns a new Tuple18 based on this Arity18. **/ +@Deprecated("Use Scala tuples instead.") fun Arity18.toTuple(): Tuple18 = Tuple18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18) /** * Returns a new Tuple19 based on this Arity19. **/ +@Deprecated("Use Scala tuples instead.") fun Arity19.toTuple(): Tuple19 = Tuple19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19) /** * Returns a new Tuple20 based on this Arity20. **/ +@Deprecated("Use Scala tuples instead.") fun Arity20.toTuple(): Tuple20 = Tuple20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20) /** * Returns a new Tuple21 based on this Arity21. **/ +@Deprecated("Use Scala tuples instead.") fun Arity21.toTuple(): Tuple21 = Tuple21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21) /** * Returns a new Tuple22 based on this Arity22. **/ +@Deprecated("Use Scala tuples instead.") fun Arity22.toTuple(): Tuple22 = Tuple22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt index 31227762..467a3078 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt @@ -40,6 +40,9 @@ import org.apache.spark.sql.KeyValueGroupedDataset import org.apache.spark.sql.TypedColumn import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions import scala.Tuple2 +import scala.Tuple3 +import scala.Tuple4 +import scala.Tuple5 import kotlin.reflect.KProperty1 @@ -144,6 +147,7 @@ inline fun Dataset>.takeKeys(): Dataset = map * Maps the Dataset to only retain the "keys" or [Arity2._1] values. */ @JvmName("takeKeysArity2") +@Deprecated("Use Scala tuples instead.") inline fun Dataset>.takeKeys(): Dataset = map { it._1 } /** @@ -164,6 +168,7 @@ inline fun Dataset>.takeValues(): Dataset = ma * Maps the Dataset to only retain the "values" or [Arity2._2] values. */ @JvmName("takeValuesArity2") +@Deprecated("Use Scala tuples instead.") inline fun Dataset>.takeValues(): Dataset = map { it._2 } /** DEPRECATED: Use [as] or [to] for this. */ @@ -250,11 +255,10 @@ fun Dataset.debug(): Dataset = also { KSparkExtensions.debug(it) } * @param right right dataset * @param col join condition * - * @return dataset of pairs where right element is forced nullable + * @return dataset of [Tuple2] where right element is forced nullable */ -inline fun Dataset.leftJoin(right: Dataset, col: Column): Dataset> { - return joinWith(right, col, "left").map { it._1 to it._2 } -} +inline fun Dataset.leftJoin(right: Dataset, col: Column): Dataset> = + joinWith(right, col, "left") /** * Alias for [Dataset.joinWith] which passes "right" argument @@ -264,11 +268,10 @@ inline fun Dataset.leftJoin(right: Dataset, * @param right right dataset * @param col join condition * - * @return dataset of [Pair] where left element is forced nullable + * @return dataset of [Tuple2] where left element is forced nullable */ -inline fun Dataset.rightJoin(right: Dataset, col: Column): Dataset> { - return joinWith(right, col, "right").map { it._1 to it._2 } -} +inline fun Dataset.rightJoin(right: Dataset, col: Column): Dataset> = + joinWith(right, col, "right") /** * Alias for [Dataset.joinWith] which passes "inner" argument @@ -277,11 +280,10 @@ inline fun Dataset.rightJoin(right: Dataset, * @param right right dataset * @param col join condition * - * @return resulting dataset of [Pair] + * @return resulting dataset of [Tuple2] */ -inline fun Dataset.innerJoin(right: Dataset, col: Column): Dataset> { - return joinWith(right, col, "inner").map { it._1 to it._2 } -} +inline fun Dataset.innerJoin(right: Dataset, col: Column): Dataset> = + joinWith(right, col, "inner") /** * Alias for [Dataset.joinWith] which passes "full" argument @@ -291,14 +293,12 @@ inline fun Dataset.innerJoin(right: Dataset, col: C * @param right right dataset * @param col join condition * - * @return dataset of [Pair] where both elements are forced nullable + * @return dataset of [Tuple2] where both elements are forced nullable */ inline fun Dataset.fullJoin( right: Dataset, col: Column, -): Dataset> { - return joinWith(right, col, "full").map { it._1 to it._2 } -} +): Dataset> = joinWith(right, col, "full") /** * Alias for [Dataset.sort] which forces user to provide sorted columns from the source dataset @@ -318,10 +318,12 @@ fun Dataset>.sortByKey(): Dataset> = sort fun Dataset>.sortByValue(): Dataset> = sort("_2") /** Returns a dataset sorted by the first (`_1`) value of each [Arity2] inside. */ +@Deprecated("Use Scala tuples instead.") @JvmName("sortByArity2Key") fun Dataset>.sortByKey(): Dataset> = sort("_1") /** Returns a dataset sorted by the second (`_2`) value of each [Arity2] inside. */ +@Deprecated("Use Scala tuples instead.") @JvmName("sortByArity2Value") fun Dataset>.sortByValue(): Dataset> = sort("_2") @@ -400,11 +402,11 @@ inline fun Dataset.selectTyped( inline fun Dataset.selectTyped( c1: TypedColumn, c2: TypedColumn, -): Dataset> = +): Dataset> = select( c1 as TypedColumn, c2 as TypedColumn, - ).map { Pair(it._1(), it._2()) } + ) /** * Returns a new Dataset by computing the given [Column] expressions for each element. @@ -414,12 +416,12 @@ inline fun Dataset.selectType c1: TypedColumn, c2: TypedColumn, c3: TypedColumn, -): Dataset> = +): Dataset> = select( c1 as TypedColumn, c2 as TypedColumn, c3 as TypedColumn, - ).map { Triple(it._1(), it._2(), it._3()) } + ) /** * Returns a new Dataset by computing the given [Column] expressions for each element. @@ -430,13 +432,13 @@ inline fun Dataset, c3: TypedColumn, c4: TypedColumn, -): Dataset> = +): Dataset> = select( c1 as TypedColumn, c2 as TypedColumn, c3 as TypedColumn, c4 as TypedColumn, - ).map { Arity4(it._1(), it._2(), it._3(), it._4()) } + ) /** * Returns a new Dataset by computing the given [Column] expressions for each element. @@ -448,12 +450,12 @@ inline fun , c4: TypedColumn, c5: TypedColumn, -): Dataset> = +): Dataset> = select( c1 as TypedColumn, c2 as TypedColumn, c3 as TypedColumn, c4 as TypedColumn, c5 as TypedColumn, - ).map { Arity5(it._1(), it._2(), it._3(), it._4(), it._5()) } + ) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt new file mode 100644 index 00000000..8e258aab --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt @@ -0,0 +1,79 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +@file:Suppress("RemoveExplicitTypeArguments") + +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.Tuple2 +import scala.Tuple3 +import scala.Tuple4 +import scala.Tuple5 +import scala.Tuple6 +import scala.Tuple7 +import scala.Tuple8 +import scala.Tuple9 +import scala.Tuple10 +import scala.Tuple11 +import scala.Tuple12 +import scala.Tuple13 +import scala.Tuple14 +import scala.Tuple15 +import scala.Tuple16 +import scala.Tuple17 +import scala.Tuple18 +import scala.Tuple19 +import scala.Tuple20 +import scala.Tuple21 +import scala.Tuple22 + +/** + * This file provides a descriptive way to create Tuples using [t]. + * + * For instance: + * ```val yourTuple = 1 t "test" t a``` + * + */ + +/** + * Returns a new Tuple2 of the given arguments. + **/ +@JvmName("tInfix") +infix fun T1.t(other: T2): Tuple2 = Tuple2(this, other) + +infix fun Tuple2.t(next: T3): Tuple3 = Tuple3(this._1(), this._2(), next) +infix fun Tuple3.t(next: T4): Tuple4 = Tuple4(this._1(), this._2(), this._3(), next) +infix fun Tuple4.t(next: T5): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), next) +infix fun Tuple5.t(next: T6): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), next) +infix fun Tuple6.t(next: T7): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), next) +infix fun Tuple7.t(next: T8): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), next) +infix fun Tuple8.t(next: T9): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), next) +infix fun Tuple9.t(next: T10): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), next) +infix fun Tuple10.t(next: T11): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), next) +infix fun Tuple11.t(next: T12): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), next) +infix fun Tuple12.t(next: T13): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), next) +infix fun Tuple13.t(next: T14): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), next) +infix fun Tuple14.t(next: T15): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), next) +infix fun Tuple15.t(next: T16): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), next) +infix fun Tuple16.t(next: T17): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), next) +infix fun Tuple17.t(next: T18): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), next) +infix fun Tuple18.t(next: T19): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), next) +infix fun Tuple19.t(next: T20): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), next) +infix fun Tuple20.t(next: T21): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), next) +infix fun Tuple21.t(next: T22): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), next) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt new file mode 100644 index 00000000..5c12b06f --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt @@ -0,0 +1,95 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.Tuple1 +import scala.Tuple2 +import scala.Tuple3 +import scala.Tuple4 +import scala.Tuple5 +import scala.Tuple6 +import scala.Tuple7 +import scala.Tuple8 +import scala.Tuple9 +import scala.Tuple10 +import scala.Tuple11 +import scala.Tuple12 +import scala.Tuple13 +import scala.Tuple14 +import scala.Tuple15 +import scala.Tuple16 +import scala.Tuple17 +import scala.Tuple18 +import scala.Tuple19 +import scala.Tuple20 +import scala.Tuple21 +import scala.Tuple22 + +/** + * This file contains functions to lower the amount of dimensions of tuples. + * This can be done using [dropFirst] and [dropLast]. + * + * For example: + * ```val yourTuple: Tuple2 = tupleOf(1, "test", a).dropLast()``` + * + */ + +fun Tuple2<*, T1>.dropFirst(): Tuple1 = Tuple1(this._2()) +fun Tuple2.dropLast(): Tuple1 = Tuple1(this._1()) +fun Tuple3<*, T1, T2>.dropFirst(): Tuple2 = Tuple2(this._2(), this._3()) +fun Tuple3.dropLast(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple4<*, T1, T2, T3>.dropFirst(): Tuple3 = Tuple3(this._2(), this._3(), this._4()) +fun Tuple4.dropLast(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple5<*, T1, T2, T3, T4>.dropFirst(): Tuple4 = Tuple4(this._2(), this._3(), this._4(), this._5()) +fun Tuple5.dropLast(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple6<*, T1, T2, T3, T4, T5>.dropFirst(): Tuple5 = Tuple5(this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple6.dropLast(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple7<*, T1, T2, T3, T4, T5, T6>.dropFirst(): Tuple6 = Tuple6(this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple7.dropLast(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple8<*, T1, T2, T3, T4, T5, T6, T7>.dropFirst(): Tuple7 = Tuple7(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8.dropLast(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple9<*, T1, T2, T3, T4, T5, T6, T7, T8>.dropFirst(): Tuple8 = Tuple8(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9.dropLast(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple10<*, T1, T2, T3, T4, T5, T6, T7, T8, T9>.dropFirst(): Tuple9 = Tuple9(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10.dropLast(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple11<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>.dropFirst(): Tuple10 = Tuple10(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11.dropLast(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple12<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.dropFirst(): Tuple11 = Tuple11(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12.dropLast(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple13<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.dropFirst(): Tuple12 = Tuple12(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13.dropLast(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple14<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.dropFirst(): Tuple13 = Tuple13(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14.dropLast(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple15<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.dropFirst(): Tuple14 = Tuple14(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15.dropLast(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple16<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.dropFirst(): Tuple15 = Tuple15(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16.dropLast(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple17<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.dropFirst(): Tuple16 = Tuple16(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17.dropLast(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple18<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.dropFirst(): Tuple17 = Tuple17(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18.dropLast(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple19<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.dropFirst(): Tuple18 = Tuple18(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19.dropLast(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple20<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.dropFirst(): Tuple19 = Tuple19(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20.dropLast(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple21<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.dropFirst(): Tuple20 = Tuple20(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21.dropLast(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple22<*, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.dropFirst(): Tuple21 = Tuple21(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22.dropLast(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductDestructuring.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductDestructuring.kt new file mode 100644 index 00000000..e5abefa8 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductDestructuring.kt @@ -0,0 +1,306 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.Product1 +import scala.Product2 +import scala.Product3 +import scala.Product4 +import scala.Product5 +import scala.Product6 +import scala.Product7 +import scala.Product8 +import scala.Product9 +import scala.Product10 +import scala.Product11 +import scala.Product12 +import scala.Product13 +import scala.Product14 +import scala.Product15 +import scala.Product16 +import scala.Product17 +import scala.Product18 +import scala.Product19 +import scala.Product20 +import scala.Product21 +import scala.Product22 + +/** + * + * This file provides the operator functions to destructuring for Scala classes implementing ProductX, like Tuples. + * + * This means you can type `val (a, b, c, d) = yourTuple` to unpack its values, + * similar to how [Pair], [Triple] and other data classes work in Kotlin. + * + */ + +operator fun Product1.component1(): T = this._1() +operator fun Product2.component1(): T = this._1() +operator fun Product2<*, T>.component2(): T = this._2() +operator fun Product3.component1(): T = this._1() +operator fun Product3<*, T, *>.component2(): T = this._2() +operator fun Product3<*, *, T>.component3(): T = this._3() +operator fun Product4.component1(): T = this._1() +operator fun Product4<*, T, *, *>.component2(): T = this._2() +operator fun Product4<*, *, T, *>.component3(): T = this._3() +operator fun Product4<*, *, *, T>.component4(): T = this._4() +operator fun Product5.component1(): T = this._1() +operator fun Product5<*, T, *, *, *>.component2(): T = this._2() +operator fun Product5<*, *, T, *, *>.component3(): T = this._3() +operator fun Product5<*, *, *, T, *>.component4(): T = this._4() +operator fun Product5<*, *, *, *, T>.component5(): T = this._5() +operator fun Product6.component1(): T = this._1() +operator fun Product6<*, T, *, *, *, *>.component2(): T = this._2() +operator fun Product6<*, *, T, *, *, *>.component3(): T = this._3() +operator fun Product6<*, *, *, T, *, *>.component4(): T = this._4() +operator fun Product6<*, *, *, *, T, *>.component5(): T = this._5() +operator fun Product6<*, *, *, *, *, T>.component6(): T = this._6() +operator fun Product7.component1(): T = this._1() +operator fun Product7<*, T, *, *, *, *, *>.component2(): T = this._2() +operator fun Product7<*, *, T, *, *, *, *>.component3(): T = this._3() +operator fun Product7<*, *, *, T, *, *, *>.component4(): T = this._4() +operator fun Product7<*, *, *, *, T, *, *>.component5(): T = this._5() +operator fun Product7<*, *, *, *, *, T, *>.component6(): T = this._6() +operator fun Product7<*, *, *, *, *, *, T>.component7(): T = this._7() +operator fun Product8.component1(): T = this._1() +operator fun Product8<*, T, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product8<*, *, T, *, *, *, *, *>.component3(): T = this._3() +operator fun Product8<*, *, *, T, *, *, *, *>.component4(): T = this._4() +operator fun Product8<*, *, *, *, T, *, *, *>.component5(): T = this._5() +operator fun Product8<*, *, *, *, *, T, *, *>.component6(): T = this._6() +operator fun Product8<*, *, *, *, *, *, T, *>.component7(): T = this._7() +operator fun Product8<*, *, *, *, *, *, *, T>.component8(): T = this._8() +operator fun Product9.component1(): T = this._1() +operator fun Product9<*, T, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product9<*, *, T, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product9<*, *, *, T, *, *, *, *, *>.component4(): T = this._4() +operator fun Product9<*, *, *, *, T, *, *, *, *>.component5(): T = this._5() +operator fun Product9<*, *, *, *, *, T, *, *, *>.component6(): T = this._6() +operator fun Product9<*, *, *, *, *, *, T, *, *>.component7(): T = this._7() +operator fun Product9<*, *, *, *, *, *, *, T, *>.component8(): T = this._8() +operator fun Product9<*, *, *, *, *, *, *, *, T>.component9(): T = this._9() +operator fun Product10.component1(): T = this._1() +operator fun Product10<*, T, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product10<*, *, T, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product10<*, *, *, T, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product10<*, *, *, *, T, *, *, *, *, *>.component5(): T = this._5() +operator fun Product10<*, *, *, *, *, T, *, *, *, *>.component6(): T = this._6() +operator fun Product10<*, *, *, *, *, *, T, *, *, *>.component7(): T = this._7() +operator fun Product10<*, *, *, *, *, *, *, T, *, *>.component8(): T = this._8() +operator fun Product10<*, *, *, *, *, *, *, *, T, *>.component9(): T = this._9() +operator fun Product10<*, *, *, *, *, *, *, *, *, T>.component10(): T = this._10() +operator fun Product11.component1(): T = this._1() +operator fun Product11<*, T, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product11<*, *, T, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product11<*, *, *, T, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product11<*, *, *, *, T, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product11<*, *, *, *, *, T, *, *, *, *, *>.component6(): T = this._6() +operator fun Product11<*, *, *, *, *, *, T, *, *, *, *>.component7(): T = this._7() +operator fun Product11<*, *, *, *, *, *, *, T, *, *, *>.component8(): T = this._8() +operator fun Product11<*, *, *, *, *, *, *, *, T, *, *>.component9(): T = this._9() +operator fun Product11<*, *, *, *, *, *, *, *, *, T, *>.component10(): T = this._10() +operator fun Product11<*, *, *, *, *, *, *, *, *, *, T>.component11(): T = this._11() +operator fun Product12.component1(): T = this._1() +operator fun Product12<*, T, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product12<*, *, T, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product12<*, *, *, T, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product12<*, *, *, *, T, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product12<*, *, *, *, *, T, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product12<*, *, *, *, *, *, T, *, *, *, *, *>.component7(): T = this._7() +operator fun Product12<*, *, *, *, *, *, *, T, *, *, *, *>.component8(): T = this._8() +operator fun Product12<*, *, *, *, *, *, *, *, T, *, *, *>.component9(): T = this._9() +operator fun Product12<*, *, *, *, *, *, *, *, *, T, *, *>.component10(): T = this._10() +operator fun Product12<*, *, *, *, *, *, *, *, *, *, T, *>.component11(): T = this._11() +operator fun Product12<*, *, *, *, *, *, *, *, *, *, *, T>.component12(): T = this._12() +operator fun Product13.component1(): T = this._1() +operator fun Product13<*, T, *, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product13<*, *, T, *, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product13<*, *, *, T, *, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product13<*, *, *, *, T, *, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product13<*, *, *, *, *, T, *, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product13<*, *, *, *, *, *, T, *, *, *, *, *, *>.component7(): T = this._7() +operator fun Product13<*, *, *, *, *, *, *, T, *, *, *, *, *>.component8(): T = this._8() +operator fun Product13<*, *, *, *, *, *, *, *, T, *, *, *, *>.component9(): T = this._9() +operator fun Product13<*, *, *, *, *, *, *, *, *, T, *, *, *>.component10(): T = this._10() +operator fun Product13<*, *, *, *, *, *, *, *, *, *, T, *, *>.component11(): T = this._11() +operator fun Product13<*, *, *, *, *, *, *, *, *, *, *, T, *>.component12(): T = this._12() +operator fun Product13<*, *, *, *, *, *, *, *, *, *, *, *, T>.component13(): T = this._13() +operator fun Product14.component1(): T = this._1() +operator fun Product14<*, T, *, *, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product14<*, *, T, *, *, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product14<*, *, *, T, *, *, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product14<*, *, *, *, T, *, *, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product14<*, *, *, *, *, T, *, *, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product14<*, *, *, *, *, *, T, *, *, *, *, *, *, *>.component7(): T = this._7() +operator fun Product14<*, *, *, *, *, *, *, T, *, *, *, *, *, *>.component8(): T = this._8() +operator fun Product14<*, *, *, *, *, *, *, *, T, *, *, *, *, *>.component9(): T = this._9() +operator fun Product14<*, *, *, *, *, *, *, *, *, T, *, *, *, *>.component10(): T = this._10() +operator fun Product14<*, *, *, *, *, *, *, *, *, *, T, *, *, *>.component11(): T = this._11() +operator fun Product14<*, *, *, *, *, *, *, *, *, *, *, T, *, *>.component12(): T = this._12() +operator fun Product14<*, *, *, *, *, *, *, *, *, *, *, *, T, *>.component13(): T = this._13() +operator fun Product14<*, *, *, *, *, *, *, *, *, *, *, *, *, T>.component14(): T = this._14() +operator fun Product15.component1(): T = this._1() +operator fun Product15<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product15<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product15<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product15<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product15<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product15<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *>.component7(): T = this._7() +operator fun Product15<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *>.component8(): T = this._8() +operator fun Product15<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *>.component9(): T = this._9() +operator fun Product15<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *>.component10(): T = this._10() +operator fun Product15<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *>.component11(): T = this._11() +operator fun Product15<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *>.component12(): T = this._12() +operator fun Product15<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *>.component13(): T = this._13() +operator fun Product15<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *>.component14(): T = this._14() +operator fun Product15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.component15(): T = this._15() +operator fun Product16.component1(): T = this._1() +operator fun Product16<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product16<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product16<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product16<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product16<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product16<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>.component7(): T = this._7() +operator fun Product16<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>.component8(): T = this._8() +operator fun Product16<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>.component9(): T = this._9() +operator fun Product16<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>.component10(): T = this._10() +operator fun Product16<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>.component11(): T = this._11() +operator fun Product16<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>.component12(): T = this._12() +operator fun Product16<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>.component13(): T = this._13() +operator fun Product16<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>.component14(): T = this._14() +operator fun Product16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>.component15(): T = this._15() +operator fun Product16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.component16(): T = this._16() +operator fun Product17.component1(): T = this._1() +operator fun Product17<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product17<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product17<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product17<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product17<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product17<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>.component7(): T = this._7() +operator fun Product17<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>.component8(): T = this._8() +operator fun Product17<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>.component9(): T = this._9() +operator fun Product17<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>.component10(): T = this._10() +operator fun Product17<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>.component11(): T = this._11() +operator fun Product17<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>.component12(): T = this._12() +operator fun Product17<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>.component13(): T = this._13() +operator fun Product17<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>.component14(): T = this._14() +operator fun Product17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>.component15(): T = this._15() +operator fun Product17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>.component16(): T = this._16() +operator fun Product17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.component17(): T = this._17() +operator fun Product18.component1(): T = this._1() +operator fun Product18<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product18<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product18<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product18<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product18<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product18<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>.component7(): T = this._7() +operator fun Product18<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>.component8(): T = this._8() +operator fun Product18<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>.component9(): T = this._9() +operator fun Product18<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>.component10(): T = this._10() +operator fun Product18<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>.component11(): T = this._11() +operator fun Product18<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>.component12(): T = this._12() +operator fun Product18<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>.component13(): T = this._13() +operator fun Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>.component14(): T = this._14() +operator fun Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>.component15(): T = this._15() +operator fun Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>.component16(): T = this._16() +operator fun Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>.component17(): T = this._17() +operator fun Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.component18(): T = this._18() +operator fun Product19.component1(): T = this._1() +operator fun Product19<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product19<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product19<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product19<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product19<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product19<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>.component7(): T = this._7() +operator fun Product19<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>.component8(): T = this._8() +operator fun Product19<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>.component9(): T = this._9() +operator fun Product19<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>.component10(): T = this._10() +operator fun Product19<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>.component11(): T = this._11() +operator fun Product19<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>.component12(): T = this._12() +operator fun Product19<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>.component13(): T = this._13() +operator fun Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>.component14(): T = this._14() +operator fun Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>.component15(): T = this._15() +operator fun Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>.component16(): T = this._16() +operator fun Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>.component17(): T = this._17() +operator fun Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>.component18(): T = this._18() +operator fun Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.component19(): T = this._19() +operator fun Product20.component1(): T = this._1() +operator fun Product20<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product20<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product20<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product20<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product20<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product20<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>.component7(): T = this._7() +operator fun Product20<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>.component8(): T = this._8() +operator fun Product20<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>.component9(): T = this._9() +operator fun Product20<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>.component10(): T = this._10() +operator fun Product20<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>.component11(): T = this._11() +operator fun Product20<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>.component12(): T = this._12() +operator fun Product20<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>.component13(): T = this._13() +operator fun Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>.component14(): T = this._14() +operator fun Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>.component15(): T = this._15() +operator fun Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>.component16(): T = this._16() +operator fun Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>.component17(): T = this._17() +operator fun Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>.component18(): T = this._18() +operator fun Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>.component19(): T = this._19() +operator fun Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.component20(): T = this._20() +operator fun Product21.component1(): T = this._1() +operator fun Product21<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product21<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product21<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product21<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product21<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product21<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component7(): T = this._7() +operator fun Product21<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>.component8(): T = this._8() +operator fun Product21<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>.component9(): T = this._9() +operator fun Product21<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>.component10(): T = this._10() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>.component11(): T = this._11() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>.component12(): T = this._12() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>.component13(): T = this._13() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>.component14(): T = this._14() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>.component15(): T = this._15() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>.component16(): T = this._16() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>.component17(): T = this._17() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>.component18(): T = this._18() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>.component19(): T = this._19() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>.component20(): T = this._20() +operator fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.component21(): T = this._21() +operator fun Product22.component1(): T = this._1() +operator fun Product22<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component2(): T = this._2() +operator fun Product22<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component3(): T = this._3() +operator fun Product22<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component4(): T = this._4() +operator fun Product22<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component5(): T = this._5() +operator fun Product22<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component6(): T = this._6() +operator fun Product22<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component7(): T = this._7() +operator fun Product22<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.component8(): T = this._8() +operator fun Product22<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>.component9(): T = this._9() +operator fun Product22<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>.component10(): T = this._10() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>.component11(): T = this._11() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>.component12(): T = this._12() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>.component13(): T = this._13() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>.component14(): T = this._14() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>.component15(): T = this._15() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>.component16(): T = this._16() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>.component17(): T = this._17() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>.component18(): T = this._18() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>.component19(): T = this._19() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>.component20(): T = this._20() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>.component21(): T = this._21() +operator fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.component22(): T = this._22() diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt new file mode 100644 index 00000000..ff8427a0 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt @@ -0,0 +1,155 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.Product +import scala.collection.JavaConverters +import kotlin.jvm.Throws + +/** + * Extra extensions for Scala [Product]s such as Tuples. + * + * For example: + * + * ```kotlin + * 1 in tupleOf(1, 2, 3) == true + * + * for (x in tupleOf("a", "b", "c")) { ... } + * + * val a: List = tupleOf(1, "a", 3L).asIterable().toList() + * + * tupleOf(1, 2, 3).size == 3 + * + * tupleOf(1, 2, 3)[0] == 1 + * + * tupleOf(1, 1, 2)[1..2] == tupleOf(1, 2, 2)[0..1] + * ``` + * + */ + +/** Tests whether this iterator contains a given value as an element. + * Note: may not terminate for infinite iterators. + * + * @param item the element to test. + * @return `true` if this iterator produces some value that + * is equal (as determined by `==`) to `elem`, `false` otherwise. + * @note Reuse: After calling this method, one should discard the iterator it was called on. + * Using it is undefined and subject to change. + */ +operator fun Product.contains(item: Any?): Boolean = productIterator().contains(item) + +/** + * An iterator over all the elements of this product. + * @return in the default implementation, an `Iterator` + */ +operator fun Product.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator()) + +/** + * Converts this product to an `Any?` iterable. + */ +fun Product.asIterable(): Iterable = object : Iterable { + override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator()) +} + +/** The size of this product. + * @return for a product `A(x,,1,,, ..., x,,k,,)`, returns `k` + */ +val Product.size: Int + get() = productArity() + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) +operator fun Product.get(n: Int): Any? = productElement(n) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ +fun Product.getOrNull(n: Int): Any? = if (n in 0 until size) productElement(n) else null + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * The result is cast to the given type [T]. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @throws ClassCastException + * @return the element `n` elements after the first element + */ +@Suppress("UNCHECKED_CAST") +@Throws(IndexOutOfBoundsException::class, ClassCastException::class) +fun Product.getAs(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * The result is cast to the given type [T]. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds or unable to be cast + */ +@Suppress("UNCHECKED_CAST") +fun Product.getAsOrNull(n: Int): T? = getOrNull(n) as? T + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) +operator fun Product.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ +fun Product.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * The results are cast to the given type [T]. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @throws ClassCastException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class, ClassCastException::class) +fun Product.getAs(indexRange: IntRange): List = indexRange.map(::getAs) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * The results are cast to the given type [T]. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` is out of bounds or unable to be cast + */ +fun Product.getAsOrNull(indexRange: IntRange): List = indexRange.map(::getAsOrNull) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt new file mode 100644 index 00000000..5779ecad --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt @@ -0,0 +1,183 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.Product1 +import scala.Product2 +import scala.Product3 +import scala.Product4 +import scala.Product5 +import scala.Product6 +import scala.Product7 +import scala.Product8 +import scala.Product9 +import scala.Product10 +import scala.Product11 +import scala.Product12 +import scala.Product13 +import scala.Product14 +import scala.Product15 +import scala.Product16 +import scala.Product17 +import scala.Product18 +import scala.Product19 +import scala.Product20 +import scala.Product21 +import scala.Product22 + +/** + * + * This file provides the functions `yourTuple.first()` and `yourTuple.last()` to access + * the value you require. + * + */ + +/** Returns the first value of this Tuple or Product. */ +fun Product1.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product1.last(): T = this._1() + +/** Returns the first value of this Tuple or Product. */ +fun Product2.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product2<*, T>.last(): T = this._2() + +/** Returns the first value of this Tuple or Product. */ +fun Product3.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product3<*, *, T>.last(): T = this._3() + +/** Returns the first value of this Tuple or Product. */ +fun Product4.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product4<*, *, *, T>.last(): T = this._4() + +/** Returns the first value of this Tuple or Product. */ +fun Product5.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product5<*, *, *, *, T>.last(): T = this._5() + +/** Returns the first value of this Tuple or Product. */ +fun Product6.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product6<*, *, *, *, *, T>.last(): T = this._6() + +/** Returns the first value of this Tuple or Product. */ +fun Product7.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product7<*, *, *, *, *, *, T>.last(): T = this._7() + +/** Returns the first value of this Tuple or Product. */ +fun Product8.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product8<*, *, *, *, *, *, *, T>.last(): T = this._8() + +/** Returns the first value of this Tuple or Product. */ +fun Product9.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product9<*, *, *, *, *, *, *, *, T>.last(): T = this._9() + +/** Returns the first value of this Tuple or Product. */ +fun Product10.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product10<*, *, *, *, *, *, *, *, *, T>.last(): T = this._10() + +/** Returns the first value of this Tuple or Product. */ +fun Product11.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product11<*, *, *, *, *, *, *, *, *, *, T>.last(): T = this._11() + +/** Returns the first value of this Tuple or Product. */ +fun Product12.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product12<*, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._12() + +/** Returns the first value of this Tuple or Product. */ +fun Product13.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product13<*, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._13() + +/** Returns the first value of this Tuple or Product. */ +fun Product14.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product14<*, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._14() + +/** Returns the first value of this Tuple or Product. */ +fun Product15.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._15() + +/** Returns the first value of this Tuple or Product. */ +fun Product16.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._16() + +/** Returns the first value of this Tuple or Product. */ +fun Product17.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._17() + +/** Returns the first value of this Tuple or Product. */ +fun Product18.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._18() + +/** Returns the first value of this Tuple or Product. */ +fun Product19.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._19() + +/** Returns the first value of this Tuple or Product. */ +fun Product20.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._20() + +/** Returns the first value of this Tuple or Product. */ +fun Product21.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._21() + +/** Returns the first value of this Tuple or Product. */ +fun Product22.first(): T = this._1() + +/** Returns the last value of this Tuple or Product. */ +fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._22() + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt new file mode 100644 index 00000000..36c90122 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt @@ -0,0 +1,169 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +@file:Suppress("UNCHECKED_CAST", "RemoveExplicitTypeArguments") + +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.Product1 +import scala.Product2 +import scala.Product3 +import scala.Product4 +import scala.Product5 +import scala.Product6 +import scala.Product7 +import scala.Product8 +import scala.Product9 +import scala.Product10 +import scala.Product11 +import scala.Product12 +import scala.Product13 +import scala.Product14 +import scala.Product15 +import scala.Product16 +import scala.Product17 +import scala.Product18 +import scala.Product19 +import scala.Product20 +import scala.Product21 +import scala.Product22 +import scala.collection.JavaConverters + +/** + * This file provides quality of life extensions for Products/Tuples where each of its types is the same. + * This includes converting to [Iterable] or getting an [Iterator] of a Product/Tuple, + * as well as taking a single value or slice from a Tuple/Product. + * + */ + +operator fun Product1.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product2.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product3.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product4.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product5.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product6.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product7.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product8.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product9.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product10.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product11.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product12.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product13.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product14.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product15.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product16.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product17.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product18.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product19.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product20.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product21.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +operator fun Product22.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +fun Product1.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product2.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product3.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product4.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product5.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product6.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product7.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product8.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product9.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product10.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product11.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product12.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product13.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product14.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product15.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product16.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product17.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product18.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product19.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product20.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product21.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +fun Product22.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +fun Product1.toList(): List = listOf(this._1()) +fun Product2.toList(): List = listOf(this._1(), this._2()) +fun Product3.toList(): List = listOf(this._1(), this._2(), this._3()) +fun Product4.toList(): List = listOf(this._1(), this._2(), this._3(), this._4()) +fun Product5.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Product6.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Product7.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Product8.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Product9.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Product10.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Product11.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Product12.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Product13.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Product14.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Product15.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Product16.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Product17.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Product18.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Product19.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Product20.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Product21.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Product22.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) + +operator fun Product1.get(index: Int): T = productElement(index) as T +operator fun Product2.get(index: Int): T = productElement(index) as T +operator fun Product3.get(index: Int): T = productElement(index) as T +operator fun Product4.get(index: Int): T = productElement(index) as T +operator fun Product5.get(index: Int): T = productElement(index) as T +operator fun Product6.get(index: Int): T = productElement(index) as T +operator fun Product7.get(index: Int): T = productElement(index) as T +operator fun Product8.get(index: Int): T = productElement(index) as T +operator fun Product9.get(index: Int): T = productElement(index) as T +operator fun Product10.get(index: Int): T = productElement(index) as T +operator fun Product11.get(index: Int): T = productElement(index) as T +operator fun Product12.get(index: Int): T = productElement(index) as T +operator fun Product13.get(index: Int): T = productElement(index) as T +operator fun Product14.get(index: Int): T = productElement(index) as T +operator fun Product15.get(index: Int): T = productElement(index) as T +operator fun Product16.get(index: Int): T = productElement(index) as T +operator fun Product17.get(index: Int): T = productElement(index) as T +operator fun Product18.get(index: Int): T = productElement(index) as T +operator fun Product19.get(index: Int): T = productElement(index) as T +operator fun Product20.get(index: Int): T = productElement(index) as T +operator fun Product21.get(index: Int): T = productElement(index) as T +operator fun Product22.get(index: Int): T = productElement(index) as T + +operator fun Product1.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product2.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product3.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product4.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product5.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product6.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product7.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product8.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product9.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product10.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product11.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product12.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product13.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product14.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product15.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product16.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product17.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product18.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product19.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product20.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product21.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +operator fun Product22.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt new file mode 100644 index 00000000..d6725ab4 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt @@ -0,0 +1,192 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +@file:Suppress("FunctionName", "RemoveExplicitTypeArguments", "DuplicatedCode") +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.Tuple1 +import scala.Tuple2 +import scala.Tuple3 +import scala.Tuple4 +import scala.Tuple5 +import scala.Tuple6 +import scala.Tuple7 +import scala.Tuple8 +import scala.Tuple9 +import scala.Tuple10 +import scala.Tuple11 +import scala.Tuple12 +import scala.Tuple13 +import scala.Tuple14 +import scala.Tuple15 +import scala.Tuple16 +import scala.Tuple17 +import scala.Tuple18 +import scala.Tuple19 +import scala.Tuple20 +import scala.Tuple21 +import scala.Tuple22 + +/** + * This file contains simple functional Tuple builders in the form of `tupleOf()`. + * + * This allows you to easily create the correct type of tuple with correct types like + * ```val yourTuple = tupleOf(1, "test", a)``` + * or + * ```val yourTuple = t(1, "test", a)``` + * + * As replacement of `to` there is + * ```val tuple: Tuple2 = 5 t "test"``` + */ + + + +/** Returns a new Tuple1 of the given arguments. */ +fun tupleOf(_1: T1): Tuple1 = Tuple1(_1) + +/** Returns a new Tuple2 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2): Tuple2 = Tuple2(_1, _2) + +/** Returns a new Tuple3 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3): Tuple3 = Tuple3(_1, _2, _3) + +/** Returns a new Tuple4 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4): Tuple4 = Tuple4(_1, _2, _3, _4) + +/** Returns a new Tuple5 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5): Tuple5 = Tuple5(_1, _2, _3, _4, _5) + +/** Returns a new Tuple6 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6): Tuple6 = Tuple6(_1, _2, _3, _4, _5, _6) + +/** Returns a new Tuple7 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7): Tuple7 = Tuple7(_1, _2, _3, _4, _5, _6, _7) + +/** Returns a new Tuple8 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8): Tuple8 = Tuple8(_1, _2, _3, _4, _5, _6, _7, _8) + +/** Returns a new Tuple9 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9): Tuple9 = Tuple9(_1, _2, _3, _4, _5, _6, _7, _8, _9) + +/** Returns a new Tuple10 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10): Tuple10 = Tuple10(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10) + +/** Returns a new Tuple11 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11): Tuple11 = Tuple11(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11) + +/** Returns a new Tuple12 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12): Tuple12 = Tuple12(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12) + +/** Returns a new Tuple13 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13): Tuple13 = Tuple13(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13) + +/** Returns a new Tuple14 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14): Tuple14 = Tuple14(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14) + +/** Returns a new Tuple15 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15): Tuple15 = Tuple15(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15) + +/** Returns a new Tuple16 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16): Tuple16 = Tuple16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) + +/** Returns a new Tuple17 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17): Tuple17 = Tuple17(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17) + +/** Returns a new Tuple18 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18): Tuple18 = Tuple18(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18) + +/** Returns a new Tuple19 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19): Tuple19 = Tuple19(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19) + +/** Returns a new Tuple20 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20): Tuple20 = Tuple20(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20) + +/** Returns a new Tuple21 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21): Tuple21 = Tuple21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21) + +/** Returns a new Tuple22 of the given arguments. */ +fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22): Tuple22 = Tuple22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) + + + +/** Returns a new Tuple1 of the given arguments. */ +fun t(_1: T1): Tuple1 = Tuple1(_1) + +/** Returns a new Tuple2 of the given arguments. */ +fun t(_1: T1, _2: T2): Tuple2 = Tuple2(_1, _2) + +/** Returns a new Tuple3 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3): Tuple3 = Tuple3(_1, _2, _3) + +/** Returns a new Tuple4 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4): Tuple4 = Tuple4(_1, _2, _3, _4) + +/** Returns a new Tuple5 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5): Tuple5 = Tuple5(_1, _2, _3, _4, _5) + +/** Returns a new Tuple6 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6): Tuple6 = Tuple6(_1, _2, _3, _4, _5, _6) + +/** Returns a new Tuple7 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7): Tuple7 = Tuple7(_1, _2, _3, _4, _5, _6, _7) + +/** Returns a new Tuple8 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8): Tuple8 = Tuple8(_1, _2, _3, _4, _5, _6, _7, _8) + +/** Returns a new Tuple9 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9): Tuple9 = Tuple9(_1, _2, _3, _4, _5, _6, _7, _8, _9) + +/** Returns a new Tuple10 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10): Tuple10 = Tuple10(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10) + +/** Returns a new Tuple11 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11): Tuple11 = Tuple11(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11) + +/** Returns a new Tuple12 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12): Tuple12 = Tuple12(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12) + +/** Returns a new Tuple13 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13): Tuple13 = Tuple13(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13) + +/** Returns a new Tuple14 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14): Tuple14 = Tuple14(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14) + +/** Returns a new Tuple15 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15): Tuple15 = Tuple15(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15) + +/** Returns a new Tuple16 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16): Tuple16 = Tuple16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) + +/** Returns a new Tuple17 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17): Tuple17 = Tuple17(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17) + +/** Returns a new Tuple18 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18): Tuple18 = Tuple18(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18) + +/** Returns a new Tuple19 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19): Tuple19 = Tuple19(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19) + +/** Returns a new Tuple20 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20): Tuple20 = Tuple20(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20) + +/** Returns a new Tuple21 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21): Tuple21 = Tuple21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21) + +/** Returns a new Tuple22 of the given arguments. */ +fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22): Tuple22 = Tuple22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt new file mode 100644 index 00000000..77926349 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt @@ -0,0 +1,519 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +@file:Suppress("FunctionName", "RemoveExplicitTypeArguments") +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.Tuple1 +import scala.Tuple2 +import scala.Tuple3 +import scala.Tuple4 +import scala.Tuple5 +import scala.Tuple6 +import scala.Tuple7 +import scala.Tuple8 +import scala.Tuple9 +import scala.Tuple10 +import scala.Tuple11 +import scala.Tuple12 +import scala.Tuple13 +import scala.Tuple14 +import scala.Tuple15 +import scala.Tuple16 +import scala.Tuple17 +import scala.Tuple18 +import scala.Tuple19 +import scala.Tuple20 +import scala.Tuple21 +import scala.Tuple22 + +/** + * This file provides functions to easily merge two separate tuples into one. + * + * For example (using tupleOf() to create a new tuple): + * ```tupleOf(a, b) concat tupleOf(c, d) == tupleOf(a, b, c, d)``` + * or using the shorthand: + * ```tupleOf(a, b) + tupleOf(c, d) == tupleOf(a, b, c, d)``` + * + * + */ + +infix fun Tuple1.concat(other: Tuple1): Tuple2 = Tuple2(this._1(), other._1()) +infix fun Tuple1.concat(other: Tuple2): Tuple3 = Tuple3(this._1(), other._1(), other._2()) +infix fun Tuple1.concat(other: Tuple3): Tuple4 = Tuple4(this._1(), other._1(), other._2(), other._3()) +infix fun Tuple1.concat(other: Tuple4): Tuple5 = Tuple5(this._1(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple1.concat(other: Tuple5): Tuple6 = Tuple6(this._1(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple1.concat(other: Tuple6): Tuple7 = Tuple7(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple1.concat(other: Tuple7): Tuple8 = Tuple8(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple1.concat(other: Tuple8): Tuple9 = Tuple9(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple1.concat(other: Tuple9): Tuple10 = Tuple10(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple1.concat(other: Tuple10): Tuple11 = Tuple11(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple1.concat(other: Tuple11): Tuple12 = Tuple12(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple1.concat(other: Tuple12): Tuple13 = Tuple13(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun Tuple1.concat(other: Tuple13): Tuple14 = Tuple14(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +infix fun Tuple1.concat(other: Tuple14): Tuple15 = Tuple15(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +infix fun Tuple1.concat(other: Tuple15): Tuple16 = Tuple16(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +infix fun Tuple1.concat(other: Tuple16): Tuple17 = Tuple17(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +infix fun Tuple1.concat(other: Tuple17): Tuple18 = Tuple18(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +infix fun Tuple1.concat(other: Tuple18): Tuple19 = Tuple19(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) +infix fun Tuple1.concat(other: Tuple19): Tuple20 = Tuple20(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) +infix fun Tuple1.concat(other: Tuple20): Tuple21 = Tuple21(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) +infix fun Tuple1.concat(other: Tuple21): Tuple22 = Tuple22(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20(), other._21()) +infix fun Tuple2.concat(other: Tuple1): Tuple3 = Tuple3(this._1(), this._2(), other._1()) +infix fun Tuple2.concat(other: Tuple2): Tuple4 = Tuple4(this._1(), this._2(), other._1(), other._2()) +infix fun Tuple2.concat(other: Tuple3): Tuple5 = Tuple5(this._1(), this._2(), other._1(), other._2(), other._3()) +infix fun Tuple2.concat(other: Tuple4): Tuple6 = Tuple6(this._1(), this._2(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple2.concat(other: Tuple5): Tuple7 = Tuple7(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple2.concat(other: Tuple6): Tuple8 = Tuple8(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple2.concat(other: Tuple7): Tuple9 = Tuple9(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple2.concat(other: Tuple8): Tuple10 = Tuple10(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple2.concat(other: Tuple9): Tuple11 = Tuple11(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple2.concat(other: Tuple10): Tuple12 = Tuple12(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple2.concat(other: Tuple11): Tuple13 = Tuple13(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple2.concat(other: Tuple12): Tuple14 = Tuple14(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun Tuple2.concat(other: Tuple13): Tuple15 = Tuple15(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +infix fun Tuple2.concat(other: Tuple14): Tuple16 = Tuple16(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +infix fun Tuple2.concat(other: Tuple15): Tuple17 = Tuple17(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +infix fun Tuple2.concat(other: Tuple16): Tuple18 = Tuple18(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +infix fun Tuple2.concat(other: Tuple17): Tuple19 = Tuple19(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +infix fun Tuple2.concat(other: Tuple18): Tuple20 = Tuple20(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) +infix fun Tuple2.concat(other: Tuple19): Tuple21 = Tuple21(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) +infix fun Tuple2.concat(other: Tuple20): Tuple22 = Tuple22(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) +infix fun Tuple3.concat(other: Tuple1): Tuple4 = Tuple4(this._1(), this._2(), this._3(), other._1()) +infix fun Tuple3.concat(other: Tuple2): Tuple5 = Tuple5(this._1(), this._2(), this._3(), other._1(), other._2()) +infix fun Tuple3.concat(other: Tuple3): Tuple6 = Tuple6(this._1(), this._2(), this._3(), other._1(), other._2(), other._3()) +infix fun Tuple3.concat(other: Tuple4): Tuple7 = Tuple7(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple3.concat(other: Tuple5): Tuple8 = Tuple8(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple3.concat(other: Tuple6): Tuple9 = Tuple9(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple3.concat(other: Tuple7): Tuple10 = Tuple10(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple3.concat(other: Tuple8): Tuple11 = Tuple11(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple3.concat(other: Tuple9): Tuple12 = Tuple12(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple3.concat(other: Tuple10): Tuple13 = Tuple13(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple3.concat(other: Tuple11): Tuple14 = Tuple14(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple3.concat(other: Tuple12): Tuple15 = Tuple15(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun Tuple3.concat(other: Tuple13): Tuple16 = Tuple16(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +infix fun Tuple3.concat(other: Tuple14): Tuple17 = Tuple17(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +infix fun Tuple3.concat(other: Tuple15): Tuple18 = Tuple18(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +infix fun Tuple3.concat(other: Tuple16): Tuple19 = Tuple19(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +infix fun Tuple3.concat(other: Tuple17): Tuple20 = Tuple20(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +infix fun Tuple3.concat(other: Tuple18): Tuple21 = Tuple21(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) +infix fun Tuple3.concat(other: Tuple19): Tuple22 = Tuple22(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) +infix fun Tuple4.concat(other: Tuple1): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), other._1()) +infix fun Tuple4.concat(other: Tuple2): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), other._1(), other._2()) +infix fun Tuple4.concat(other: Tuple3): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3()) +infix fun Tuple4.concat(other: Tuple4): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple4.concat(other: Tuple5): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple4.concat(other: Tuple6): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple4.concat(other: Tuple7): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple4.concat(other: Tuple8): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple4.concat(other: Tuple9): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple4.concat(other: Tuple10): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple4.concat(other: Tuple11): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple4.concat(other: Tuple12): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun Tuple4.concat(other: Tuple13): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +infix fun Tuple4.concat(other: Tuple14): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +infix fun Tuple4.concat(other: Tuple15): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +infix fun Tuple4.concat(other: Tuple16): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +infix fun Tuple4.concat(other: Tuple17): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +infix fun Tuple4.concat(other: Tuple18): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) +infix fun Tuple5.concat(other: Tuple1): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), other._1()) +infix fun Tuple5.concat(other: Tuple2): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2()) +infix fun Tuple5.concat(other: Tuple3): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3()) +infix fun Tuple5.concat(other: Tuple4): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple5.concat(other: Tuple5): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple5.concat(other: Tuple6): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple5.concat(other: Tuple7): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple5.concat(other: Tuple8): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple5.concat(other: Tuple9): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple5.concat(other: Tuple10): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple5.concat(other: Tuple11): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple5.concat(other: Tuple12): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun Tuple5.concat(other: Tuple13): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +infix fun Tuple5.concat(other: Tuple14): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +infix fun Tuple5.concat(other: Tuple15): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +infix fun Tuple5.concat(other: Tuple16): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +infix fun Tuple5.concat(other: Tuple17): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +infix fun Tuple6.concat(other: Tuple1): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1()) +infix fun Tuple6.concat(other: Tuple2): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2()) +infix fun Tuple6.concat(other: Tuple3): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3()) +infix fun Tuple6.concat(other: Tuple4): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple6.concat(other: Tuple5): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple6.concat(other: Tuple6): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple6.concat(other: Tuple7): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple6.concat(other: Tuple8): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple6.concat(other: Tuple9): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple6.concat(other: Tuple10): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple6.concat(other: Tuple11): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple6.concat(other: Tuple12): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun Tuple6.concat(other: Tuple13): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +infix fun Tuple6.concat(other: Tuple14): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +infix fun Tuple6.concat(other: Tuple15): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +infix fun Tuple6.concat(other: Tuple16): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +infix fun Tuple7.concat(other: Tuple1): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1()) +infix fun Tuple7.concat(other: Tuple2): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2()) +infix fun Tuple7.concat(other: Tuple3): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3()) +infix fun Tuple7.concat(other: Tuple4): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple7.concat(other: Tuple5): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple7.concat(other: Tuple6): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple7.concat(other: Tuple7): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple7.concat(other: Tuple8): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple7.concat(other: Tuple9): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple7.concat(other: Tuple10): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple7.concat(other: Tuple11): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple7.concat(other: Tuple12): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun Tuple7.concat(other: Tuple13): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +infix fun Tuple7.concat(other: Tuple14): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +infix fun Tuple7.concat(other: Tuple15): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +infix fun Tuple8.concat(other: Tuple1): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1()) +infix fun Tuple8.concat(other: Tuple2): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2()) +infix fun Tuple8.concat(other: Tuple3): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3()) +infix fun Tuple8.concat(other: Tuple4): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple8.concat(other: Tuple5): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple8.concat(other: Tuple6): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple8.concat(other: Tuple7): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple8.concat(other: Tuple8): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple8.concat(other: Tuple9): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple8.concat(other: Tuple10): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple8.concat(other: Tuple11): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple8.concat(other: Tuple12): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun Tuple8.concat(other: Tuple13): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +infix fun Tuple8.concat(other: Tuple14): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +infix fun Tuple9.concat(other: Tuple1): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1()) +infix fun Tuple9.concat(other: Tuple2): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2()) +infix fun Tuple9.concat(other: Tuple3): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3()) +infix fun Tuple9.concat(other: Tuple4): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple9.concat(other: Tuple5): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple9.concat(other: Tuple6): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple9.concat(other: Tuple7): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple9.concat(other: Tuple8): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple9.concat(other: Tuple9): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple9.concat(other: Tuple10): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple9.concat(other: Tuple11): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple9.concat(other: Tuple12): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun Tuple9.concat(other: Tuple13): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +infix fun Tuple10.concat(other: Tuple1): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1()) +infix fun Tuple10.concat(other: Tuple2): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2()) +infix fun Tuple10.concat(other: Tuple3): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3()) +infix fun Tuple10.concat(other: Tuple4): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple10.concat(other: Tuple5): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple10.concat(other: Tuple6): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple10.concat(other: Tuple7): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple10.concat(other: Tuple8): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple10.concat(other: Tuple9): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple10.concat(other: Tuple10): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple10.concat(other: Tuple11): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple10.concat(other: Tuple12): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun Tuple11.concat(other: Tuple1): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1()) +infix fun Tuple11.concat(other: Tuple2): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2()) +infix fun Tuple11.concat(other: Tuple3): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3()) +infix fun Tuple11.concat(other: Tuple4): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple11.concat(other: Tuple5): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple11.concat(other: Tuple6): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple11.concat(other: Tuple7): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple11.concat(other: Tuple8): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple11.concat(other: Tuple9): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple11.concat(other: Tuple10): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple11.concat(other: Tuple11): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun Tuple12.concat(other: Tuple1): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1()) +infix fun Tuple12.concat(other: Tuple2): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2()) +infix fun Tuple12.concat(other: Tuple3): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3()) +infix fun Tuple12.concat(other: Tuple4): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple12.concat(other: Tuple5): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple12.concat(other: Tuple6): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple12.concat(other: Tuple7): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple12.concat(other: Tuple8): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple12.concat(other: Tuple9): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple12.concat(other: Tuple10): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun Tuple13.concat(other: Tuple1): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1()) +infix fun Tuple13.concat(other: Tuple2): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2()) +infix fun Tuple13.concat(other: Tuple3): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3()) +infix fun Tuple13.concat(other: Tuple4): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple13.concat(other: Tuple5): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple13.concat(other: Tuple6): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple13.concat(other: Tuple7): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple13.concat(other: Tuple8): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple13.concat(other: Tuple9): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun Tuple14.concat(other: Tuple1): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1()) +infix fun Tuple14.concat(other: Tuple2): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2()) +infix fun Tuple14.concat(other: Tuple3): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3()) +infix fun Tuple14.concat(other: Tuple4): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple14.concat(other: Tuple5): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple14.concat(other: Tuple6): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple14.concat(other: Tuple7): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple14.concat(other: Tuple8): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun Tuple15.concat(other: Tuple1): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1()) +infix fun Tuple15.concat(other: Tuple2): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2()) +infix fun Tuple15.concat(other: Tuple3): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3()) +infix fun Tuple15.concat(other: Tuple4): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple15.concat(other: Tuple5): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple15.concat(other: Tuple6): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple15.concat(other: Tuple7): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun Tuple16.concat(other: Tuple1): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1()) +infix fun Tuple16.concat(other: Tuple2): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2()) +infix fun Tuple16.concat(other: Tuple3): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3()) +infix fun Tuple16.concat(other: Tuple4): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple16.concat(other: Tuple5): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple16.concat(other: Tuple6): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun Tuple17.concat(other: Tuple1): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1()) +infix fun Tuple17.concat(other: Tuple2): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2()) +infix fun Tuple17.concat(other: Tuple3): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3()) +infix fun Tuple17.concat(other: Tuple4): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple17.concat(other: Tuple5): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun Tuple18.concat(other: Tuple1): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1()) +infix fun Tuple18.concat(other: Tuple2): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2()) +infix fun Tuple18.concat(other: Tuple3): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3()) +infix fun Tuple18.concat(other: Tuple4): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3(), other._4()) +infix fun Tuple19.concat(other: Tuple1): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1()) +infix fun Tuple19.concat(other: Tuple2): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2()) +infix fun Tuple19.concat(other: Tuple3): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2(), other._3()) +infix fun Tuple20.concat(other: Tuple1): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1()) +infix fun Tuple20.concat(other: Tuple2): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1(), other._2()) +infix fun Tuple21.concat(other: Tuple1): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), other._1()) + +operator fun Tuple1.plus(other: Tuple1): Tuple2 = Tuple2(this._1(), other._1()) +operator fun Tuple1.plus(other: Tuple2): Tuple3 = Tuple3(this._1(), other._1(), other._2()) +operator fun Tuple1.plus(other: Tuple3): Tuple4 = Tuple4(this._1(), other._1(), other._2(), other._3()) +operator fun Tuple1.plus(other: Tuple4): Tuple5 = Tuple5(this._1(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple1.plus(other: Tuple5): Tuple6 = Tuple6(this._1(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple1.plus(other: Tuple6): Tuple7 = Tuple7(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple1.plus(other: Tuple7): Tuple8 = Tuple8(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple1.plus(other: Tuple8): Tuple9 = Tuple9(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple1.plus(other: Tuple9): Tuple10 = Tuple10(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple1.plus(other: Tuple10): Tuple11 = Tuple11(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple1.plus(other: Tuple11): Tuple12 = Tuple12(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple1.plus(other: Tuple12): Tuple13 = Tuple13(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun Tuple1.plus(other: Tuple13): Tuple14 = Tuple14(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +operator fun Tuple1.plus(other: Tuple14): Tuple15 = Tuple15(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +operator fun Tuple1.plus(other: Tuple15): Tuple16 = Tuple16(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +operator fun Tuple1.plus(other: Tuple16): Tuple17 = Tuple17(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +operator fun Tuple1.plus(other: Tuple17): Tuple18 = Tuple18(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +operator fun Tuple1.plus(other: Tuple18): Tuple19 = Tuple19(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) +operator fun Tuple1.plus(other: Tuple19): Tuple20 = Tuple20(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) +operator fun Tuple1.plus(other: Tuple20): Tuple21 = Tuple21(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) +operator fun Tuple1.plus(other: Tuple21): Tuple22 = Tuple22(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20(), other._21()) +operator fun Tuple2.plus(other: Tuple1): Tuple3 = Tuple3(this._1(), this._2(), other._1()) +operator fun Tuple2.plus(other: Tuple2): Tuple4 = Tuple4(this._1(), this._2(), other._1(), other._2()) +operator fun Tuple2.plus(other: Tuple3): Tuple5 = Tuple5(this._1(), this._2(), other._1(), other._2(), other._3()) +operator fun Tuple2.plus(other: Tuple4): Tuple6 = Tuple6(this._1(), this._2(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple2.plus(other: Tuple5): Tuple7 = Tuple7(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple2.plus(other: Tuple6): Tuple8 = Tuple8(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple2.plus(other: Tuple7): Tuple9 = Tuple9(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple2.plus(other: Tuple8): Tuple10 = Tuple10(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple2.plus(other: Tuple9): Tuple11 = Tuple11(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple2.plus(other: Tuple10): Tuple12 = Tuple12(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple2.plus(other: Tuple11): Tuple13 = Tuple13(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple2.plus(other: Tuple12): Tuple14 = Tuple14(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun Tuple2.plus(other: Tuple13): Tuple15 = Tuple15(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +operator fun Tuple2.plus(other: Tuple14): Tuple16 = Tuple16(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +operator fun Tuple2.plus(other: Tuple15): Tuple17 = Tuple17(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +operator fun Tuple2.plus(other: Tuple16): Tuple18 = Tuple18(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +operator fun Tuple2.plus(other: Tuple17): Tuple19 = Tuple19(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +operator fun Tuple2.plus(other: Tuple18): Tuple20 = Tuple20(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) +operator fun Tuple2.plus(other: Tuple19): Tuple21 = Tuple21(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) +operator fun Tuple2.plus(other: Tuple20): Tuple22 = Tuple22(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) +operator fun Tuple3.plus(other: Tuple1): Tuple4 = Tuple4(this._1(), this._2(), this._3(), other._1()) +operator fun Tuple3.plus(other: Tuple2): Tuple5 = Tuple5(this._1(), this._2(), this._3(), other._1(), other._2()) +operator fun Tuple3.plus(other: Tuple3): Tuple6 = Tuple6(this._1(), this._2(), this._3(), other._1(), other._2(), other._3()) +operator fun Tuple3.plus(other: Tuple4): Tuple7 = Tuple7(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple3.plus(other: Tuple5): Tuple8 = Tuple8(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple3.plus(other: Tuple6): Tuple9 = Tuple9(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple3.plus(other: Tuple7): Tuple10 = Tuple10(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple3.plus(other: Tuple8): Tuple11 = Tuple11(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple3.plus(other: Tuple9): Tuple12 = Tuple12(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple3.plus(other: Tuple10): Tuple13 = Tuple13(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple3.plus(other: Tuple11): Tuple14 = Tuple14(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple3.plus(other: Tuple12): Tuple15 = Tuple15(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun Tuple3.plus(other: Tuple13): Tuple16 = Tuple16(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +operator fun Tuple3.plus(other: Tuple14): Tuple17 = Tuple17(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +operator fun Tuple3.plus(other: Tuple15): Tuple18 = Tuple18(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +operator fun Tuple3.plus(other: Tuple16): Tuple19 = Tuple19(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +operator fun Tuple3.plus(other: Tuple17): Tuple20 = Tuple20(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +operator fun Tuple3.plus(other: Tuple18): Tuple21 = Tuple21(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) +operator fun Tuple3.plus(other: Tuple19): Tuple22 = Tuple22(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) +operator fun Tuple4.plus(other: Tuple1): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), other._1()) +operator fun Tuple4.plus(other: Tuple2): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), other._1(), other._2()) +operator fun Tuple4.plus(other: Tuple3): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3()) +operator fun Tuple4.plus(other: Tuple4): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple4.plus(other: Tuple5): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple4.plus(other: Tuple6): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple4.plus(other: Tuple7): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple4.plus(other: Tuple8): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple4.plus(other: Tuple9): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple4.plus(other: Tuple10): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple4.plus(other: Tuple11): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple4.plus(other: Tuple12): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun Tuple4.plus(other: Tuple13): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +operator fun Tuple4.plus(other: Tuple14): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +operator fun Tuple4.plus(other: Tuple15): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +operator fun Tuple4.plus(other: Tuple16): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +operator fun Tuple4.plus(other: Tuple17): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +operator fun Tuple4.plus(other: Tuple18): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) +operator fun Tuple5.plus(other: Tuple1): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), other._1()) +operator fun Tuple5.plus(other: Tuple2): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2()) +operator fun Tuple5.plus(other: Tuple3): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3()) +operator fun Tuple5.plus(other: Tuple4): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple5.plus(other: Tuple5): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple5.plus(other: Tuple6): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple5.plus(other: Tuple7): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple5.plus(other: Tuple8): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple5.plus(other: Tuple9): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple5.plus(other: Tuple10): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple5.plus(other: Tuple11): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple5.plus(other: Tuple12): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun Tuple5.plus(other: Tuple13): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +operator fun Tuple5.plus(other: Tuple14): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +operator fun Tuple5.plus(other: Tuple15): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +operator fun Tuple5.plus(other: Tuple16): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +operator fun Tuple5.plus(other: Tuple17): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +operator fun Tuple6.plus(other: Tuple1): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1()) +operator fun Tuple6.plus(other: Tuple2): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2()) +operator fun Tuple6.plus(other: Tuple3): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3()) +operator fun Tuple6.plus(other: Tuple4): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple6.plus(other: Tuple5): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple6.plus(other: Tuple6): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple6.plus(other: Tuple7): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple6.plus(other: Tuple8): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple6.plus(other: Tuple9): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple6.plus(other: Tuple10): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple6.plus(other: Tuple11): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple6.plus(other: Tuple12): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun Tuple6.plus(other: Tuple13): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +operator fun Tuple6.plus(other: Tuple14): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +operator fun Tuple6.plus(other: Tuple15): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +operator fun Tuple6.plus(other: Tuple16): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +operator fun Tuple7.plus(other: Tuple1): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1()) +operator fun Tuple7.plus(other: Tuple2): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2()) +operator fun Tuple7.plus(other: Tuple3): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3()) +operator fun Tuple7.plus(other: Tuple4): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple7.plus(other: Tuple5): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple7.plus(other: Tuple6): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple7.plus(other: Tuple7): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple7.plus(other: Tuple8): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple7.plus(other: Tuple9): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple7.plus(other: Tuple10): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple7.plus(other: Tuple11): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple7.plus(other: Tuple12): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun Tuple7.plus(other: Tuple13): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +operator fun Tuple7.plus(other: Tuple14): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +operator fun Tuple7.plus(other: Tuple15): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +operator fun Tuple8.plus(other: Tuple1): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1()) +operator fun Tuple8.plus(other: Tuple2): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2()) +operator fun Tuple8.plus(other: Tuple3): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3()) +operator fun Tuple8.plus(other: Tuple4): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple8.plus(other: Tuple5): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple8.plus(other: Tuple6): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple8.plus(other: Tuple7): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple8.plus(other: Tuple8): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple8.plus(other: Tuple9): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple8.plus(other: Tuple10): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple8.plus(other: Tuple11): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple8.plus(other: Tuple12): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun Tuple8.plus(other: Tuple13): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +operator fun Tuple8.plus(other: Tuple14): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +operator fun Tuple9.plus(other: Tuple1): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1()) +operator fun Tuple9.plus(other: Tuple2): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2()) +operator fun Tuple9.plus(other: Tuple3): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3()) +operator fun Tuple9.plus(other: Tuple4): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple9.plus(other: Tuple5): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple9.plus(other: Tuple6): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple9.plus(other: Tuple7): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple9.plus(other: Tuple8): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple9.plus(other: Tuple9): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple9.plus(other: Tuple10): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple9.plus(other: Tuple11): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple9.plus(other: Tuple12): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun Tuple9.plus(other: Tuple13): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +operator fun Tuple10.plus(other: Tuple1): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1()) +operator fun Tuple10.plus(other: Tuple2): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2()) +operator fun Tuple10.plus(other: Tuple3): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3()) +operator fun Tuple10.plus(other: Tuple4): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple10.plus(other: Tuple5): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple10.plus(other: Tuple6): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple10.plus(other: Tuple7): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple10.plus(other: Tuple8): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple10.plus(other: Tuple9): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple10.plus(other: Tuple10): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple10.plus(other: Tuple11): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple10.plus(other: Tuple12): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun Tuple11.plus(other: Tuple1): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1()) +operator fun Tuple11.plus(other: Tuple2): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2()) +operator fun Tuple11.plus(other: Tuple3): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3()) +operator fun Tuple11.plus(other: Tuple4): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple11.plus(other: Tuple5): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple11.plus(other: Tuple6): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple11.plus(other: Tuple7): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple11.plus(other: Tuple8): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple11.plus(other: Tuple9): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple11.plus(other: Tuple10): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple11.plus(other: Tuple11): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun Tuple12.plus(other: Tuple1): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1()) +operator fun Tuple12.plus(other: Tuple2): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2()) +operator fun Tuple12.plus(other: Tuple3): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3()) +operator fun Tuple12.plus(other: Tuple4): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple12.plus(other: Tuple5): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple12.plus(other: Tuple6): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple12.plus(other: Tuple7): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple12.plus(other: Tuple8): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple12.plus(other: Tuple9): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple12.plus(other: Tuple10): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun Tuple13.plus(other: Tuple1): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1()) +operator fun Tuple13.plus(other: Tuple2): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2()) +operator fun Tuple13.plus(other: Tuple3): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3()) +operator fun Tuple13.plus(other: Tuple4): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple13.plus(other: Tuple5): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple13.plus(other: Tuple6): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple13.plus(other: Tuple7): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple13.plus(other: Tuple8): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple13.plus(other: Tuple9): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun Tuple14.plus(other: Tuple1): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1()) +operator fun Tuple14.plus(other: Tuple2): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2()) +operator fun Tuple14.plus(other: Tuple3): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3()) +operator fun Tuple14.plus(other: Tuple4): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple14.plus(other: Tuple5): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple14.plus(other: Tuple6): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple14.plus(other: Tuple7): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple14.plus(other: Tuple8): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun Tuple15.plus(other: Tuple1): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1()) +operator fun Tuple15.plus(other: Tuple2): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2()) +operator fun Tuple15.plus(other: Tuple3): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3()) +operator fun Tuple15.plus(other: Tuple4): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple15.plus(other: Tuple5): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple15.plus(other: Tuple6): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple15.plus(other: Tuple7): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun Tuple16.plus(other: Tuple1): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1()) +operator fun Tuple16.plus(other: Tuple2): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2()) +operator fun Tuple16.plus(other: Tuple3): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3()) +operator fun Tuple16.plus(other: Tuple4): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple16.plus(other: Tuple5): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple16.plus(other: Tuple6): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun Tuple17.plus(other: Tuple1): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1()) +operator fun Tuple17.plus(other: Tuple2): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2()) +operator fun Tuple17.plus(other: Tuple3): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3()) +operator fun Tuple17.plus(other: Tuple4): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple17.plus(other: Tuple5): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun Tuple18.plus(other: Tuple1): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1()) +operator fun Tuple18.plus(other: Tuple2): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2()) +operator fun Tuple18.plus(other: Tuple3): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3()) +operator fun Tuple18.plus(other: Tuple4): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3(), other._4()) +operator fun Tuple19.plus(other: Tuple1): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1()) +operator fun Tuple19.plus(other: Tuple2): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2()) +operator fun Tuple19.plus(other: Tuple3): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2(), other._3()) +operator fun Tuple20.plus(other: Tuple1): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1()) +operator fun Tuple20.plus(other: Tuple2): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1(), other._2()) +operator fun Tuple21.plus(other: Tuple1): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), other._1()) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt new file mode 100644 index 00000000..9f921e77 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt @@ -0,0 +1,47 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +@file:Suppress("EXTENSION_SHADOWED_BY_MEMBER") + +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.* + +fun Tuple1.copy(_1: T1 = this._1()): Tuple1 = Tuple1(_1) +fun Tuple2.copy(_1: T1 = this._1(), _2: T2 = this._2()): Tuple2 = Tuple2(_1, _2) +fun Tuple3.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3()): Tuple3 = Tuple3(_1, _2, _3) +fun Tuple4.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4()): Tuple4 = Tuple4(_1, _2, _3, _4) +fun Tuple5.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5()): Tuple5 = Tuple5(_1, _2, _3, _4, _5) +fun Tuple6.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6()): Tuple6 = Tuple6(_1, _2, _3, _4, _5, _6) +fun Tuple7.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7()): Tuple7 = Tuple7(_1, _2, _3, _4, _5, _6, _7) +fun Tuple8.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8()): Tuple8 = Tuple8(_1, _2, _3, _4, _5, _6, _7, _8) +fun Tuple9.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9()): Tuple9 = Tuple9(_1, _2, _3, _4, _5, _6, _7, _8, _9) +fun Tuple10.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10()): Tuple10 = Tuple10(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10) +fun Tuple11.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11()): Tuple11 = Tuple11(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11) +fun Tuple12.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12()): Tuple12 = Tuple12(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12) +fun Tuple13.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13()): Tuple13 = Tuple13(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13) +fun Tuple14.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14()): Tuple14 = Tuple14(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14) +fun Tuple15.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15()): Tuple15 = Tuple15(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15) +fun Tuple16.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16()): Tuple16 = Tuple16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) +fun Tuple17.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17()): Tuple17 = Tuple17(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17) +fun Tuple18.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18()): Tuple18 = Tuple18(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18) +fun Tuple19.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19()): Tuple19 = Tuple19(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19) +fun Tuple20.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19(), _20: T20 = this._20()): Tuple20 = Tuple20(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20) +fun Tuple21.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19(), _20: T20 = this._20(), _21: T21 = this._21()): Tuple21 = Tuple21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21) +fun Tuple22.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19(), _20: T20 = this._20(), _21: T21 = this._21(), _22: T22 = this._22()): Tuple22 = Tuple22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt new file mode 100644 index 00000000..13e6c5c7 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt @@ -0,0 +1,155 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +@file:Suppress("FunctionName", "RemoveExplicitTypeArguments") +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.Tuple1 +import scala.Tuple2 +import scala.Tuple3 +import scala.Tuple4 +import scala.Tuple5 +import scala.Tuple6 +import scala.Tuple7 +import scala.Tuple8 +import scala.Tuple9 +import scala.Tuple10 +import scala.Tuple11 +import scala.Tuple12 +import scala.Tuple13 +import scala.Tuple14 +import scala.Tuple15 +import scala.Tuple16 +import scala.Tuple17 +import scala.Tuple18 +import scala.Tuple19 +import scala.Tuple20 +import scala.Tuple21 +import scala.Tuple22 + +/** + * This file provides functions to easily extend Scala Tuples. + * + * This means you can easily create a new tuple appended-, or prepended by a new value or tuple. + * + * For example (using tupleOf() to create a new tuple): + * ```tupleOf(a, b).appendedBy(c) == tupleOf(a, b, c)``` + * and + * ```tupleOf(a, b).prependedBy(c) == tupleOf(c, a, b)``` + * + * or in shorthand: + * ```tupleOf(a, b)..c == tupleOf(a, b, c)``` + * and + * ```c..tupleOf(a, b) == tupleOf(c, a, b)``` + * + * Note that ```tupleOf(a, b)..tupleOf(c, d)``` will not work due to it being ambiguous: + * It could mean both ```tupleOf(a, b, tupleOf(c, d))``` and ```tupleOf(tupleOf(a, b), c, d)```. + * So, for two tuples, you must use [appendedBy] and [prependedBy] explicitly. + * + * For concatenating two tuples, see [org.jetbrains.kotlinx.spark.api.tuples.concat]. + * + */ + +fun Tuple1.appendedBy(other: T2): Tuple2 = Tuple2(this._1(), other) +fun Tuple2.appendedBy(other: T3): Tuple3 = Tuple3(this._1(), this._2(), other) +fun Tuple3.appendedBy(other: T4): Tuple4 = Tuple4(this._1(), this._2(), this._3(), other) +fun Tuple4.appendedBy(other: T5): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), other) +fun Tuple5.appendedBy(other: T6): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), other) +fun Tuple6.appendedBy(other: T7): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other) +fun Tuple7.appendedBy(other: T8): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other) +fun Tuple8.appendedBy(other: T9): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other) +fun Tuple9.appendedBy(other: T10): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other) +fun Tuple10.appendedBy(other: T11): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other) +fun Tuple11.appendedBy(other: T12): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other) +fun Tuple12.appendedBy(other: T13): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other) +fun Tuple13.appendedBy(other: T14): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other) +fun Tuple14.appendedBy(other: T15): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other) +fun Tuple15.appendedBy(other: T16): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other) +fun Tuple16.appendedBy(other: T17): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other) +fun Tuple17.appendedBy(other: T18): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other) +fun Tuple18.appendedBy(other: T19): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other) +fun Tuple19.appendedBy(other: T20): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other) +fun Tuple20.appendedBy(other: T21): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other) +fun Tuple21.appendedBy(other: T22): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), other) + +fun Tuple1.prependedBy(other: T1): Tuple2 = Tuple2(other, this._1()) +fun Tuple2.prependedBy(other: T1): Tuple3 = Tuple3(other, this._1(), this._2()) +fun Tuple3.prependedBy(other: T1): Tuple4 = Tuple4(other, this._1(), this._2(), this._3()) +fun Tuple4.prependedBy(other: T1): Tuple5 = Tuple5(other, this._1(), this._2(), this._3(), this._4()) +fun Tuple5.prependedBy(other: T1): Tuple6 = Tuple6(other, this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple6.prependedBy(other: T1): Tuple7 = Tuple7(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple7.prependedBy(other: T1): Tuple8 = Tuple8(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple8.prependedBy(other: T1): Tuple9 = Tuple9(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple9.prependedBy(other: T1): Tuple10 = Tuple10(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple10.prependedBy(other: T1): Tuple11 = Tuple11(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple11.prependedBy(other: T1): Tuple12 = Tuple12(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple12.prependedBy(other: T1): Tuple13 = Tuple13(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple13.prependedBy(other: T1): Tuple14 = Tuple14(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple14.prependedBy(other: T1): Tuple15 = Tuple15(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple15.prependedBy(other: T1): Tuple16 = Tuple16(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple16.prependedBy(other: T1): Tuple17 = Tuple17(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple17.prependedBy(other: T1): Tuple18 = Tuple18(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple18.prependedBy(other: T1): Tuple19 = Tuple19(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple19.prependedBy(other: T1): Tuple20 = Tuple20(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple20.prependedBy(other: T1): Tuple21 = Tuple21(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple21.prependedBy(other: T1): Tuple22 = Tuple22(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) + +operator fun Tuple1.rangeTo(other: T2): Tuple2 = this.appendedBy(other) +operator fun Tuple2.rangeTo(other: T3): Tuple3 = this.appendedBy(other) +operator fun Tuple3.rangeTo(other: T4): Tuple4 = this.appendedBy(other) +operator fun Tuple4.rangeTo(other: T5): Tuple5 = this.appendedBy(other) +operator fun Tuple5.rangeTo(other: T6): Tuple6 = this.appendedBy(other) +operator fun Tuple6.rangeTo(other: T7): Tuple7 = this.appendedBy(other) +operator fun Tuple7.rangeTo(other: T8): Tuple8 = this.appendedBy(other) +operator fun Tuple8.rangeTo(other: T9): Tuple9 = this.appendedBy(other) +operator fun Tuple9.rangeTo(other: T10): Tuple10 = this.appendedBy(other) +operator fun Tuple10.rangeTo(other: T11): Tuple11 = this.appendedBy(other) +operator fun Tuple11.rangeTo(other: T12): Tuple12 = this.appendedBy(other) +operator fun Tuple12.rangeTo(other: T13): Tuple13 = this.appendedBy(other) +operator fun Tuple13.rangeTo(other: T14): Tuple14 = this.appendedBy(other) +operator fun Tuple14.rangeTo(other: T15): Tuple15 = this.appendedBy(other) +operator fun Tuple15.rangeTo(other: T16): Tuple16 = this.appendedBy(other) +operator fun Tuple16.rangeTo(other: T17): Tuple17 = this.appendedBy(other) +operator fun Tuple17.rangeTo(other: T18): Tuple18 = this.appendedBy(other) +operator fun Tuple18.rangeTo(other: T19): Tuple19 = this.appendedBy(other) +operator fun Tuple19.rangeTo(other: T20): Tuple20 = this.appendedBy(other) +operator fun Tuple20.rangeTo(other: T21): Tuple21 = this.appendedBy(other) +operator fun Tuple21.rangeTo(other: T22): Tuple22 = this.appendedBy(other) + +operator fun T1.rangeTo(other: Tuple1): Tuple2 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple2): Tuple3 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple3): Tuple4 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple4): Tuple5 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple5): Tuple6 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple6): Tuple7 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple7): Tuple8 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple8): Tuple9 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple9): Tuple10 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple10): Tuple11 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple11): Tuple12 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple12): Tuple13 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple13): Tuple14 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple14): Tuple15 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple15): Tuple16 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple16): Tuple17 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple17): Tuple18 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple18): Tuple19 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple19): Tuple20 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple20): Tuple21 = other.prependedBy(this) +operator fun T1.rangeTo(other: Tuple21): Tuple22 = other.prependedBy(this) From 4b821921b04c96621e88f7c3ca4f78e7ca4e0715 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 23 Mar 2022 18:35:06 +0100 Subject: [PATCH 101/213] adding all tuple functions from scalaTuplesInKotlin, starting library conversion --- .../jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt | 4 ++-- .../org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt index 81b7c0bf..ec840dbe 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt @@ -38,6 +38,7 @@ import org.apache.spark.sql.KeyValueGroupedDataset import org.apache.spark.sql.streaming.GroupState import org.apache.spark.sql.streaming.GroupStateTimeout import org.apache.spark.sql.streaming.OutputMode +import scala.Tuple2 /** @@ -79,9 +80,8 @@ inline fun KeyValueGroupedDataset.mapGroups( * Note that you need to use [reduceGroupsK] always instead of the Java- or Scala-specific * [KeyValueGroupedDataset.reduceGroups] to make the compiler work. */ -inline fun KeyValueGroupedDataset.reduceGroupsK(noinline func: (VALUE, VALUE) -> VALUE): Dataset> = +inline fun KeyValueGroupedDataset.reduceGroupsK(noinline func: (VALUE, VALUE) -> VALUE): Dataset> = reduceGroups(ReduceFunction(func)) - .map { t -> t._1() to t._2() } /** * (Kotlin-specific) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt index 1926be42..df3525ef 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt @@ -124,8 +124,8 @@ class UDFRegisterTest : ShouldSpec({ should("succeed in dataset") { val dataset: Dataset = listOf( - NormalClass(name="a", age =10), - NormalClass(name="b", age =20) + NormalClass(name = "a", age = 10), + NormalClass(name = "b", age = 20) ).toDS() val udfWrapper = udf.register("nameConcatAge") { name, age -> @@ -150,7 +150,7 @@ class UDFRegisterTest : ShouldSpec({ should("return NormalClass") { listOf("a" to 1, "b" to 2).toDS().toDF().createOrReplaceTempView("test2") udf.register("toNormalClass") { a, b -> - NormalClass(b, a) + NormalClass(b, a) } spark.sql("select toNormalClass(first, second) from test2").show() } @@ -160,6 +160,7 @@ class UDFRegisterTest : ShouldSpec({ } }) + data class NormalClass( val age: Int, val name: String From 558c3ab019fb70ddf2e864b43339c3e331d9135c Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 24 Mar 2022 22:58:25 +0100 Subject: [PATCH 102/213] adding updated tuple functions from scala tuples in kotlin library. Examples are updated to take advantage of them and are converted away from Arities/Pairs towards tuples. Still experimental notation in some places --- .../spark/examples/CachedOperations.kt | 9 +- .../kotlinx/spark/examples/Collect.kt | 3 +- .../jetbrains/kotlinx/spark/examples/Group.kt | 9 +- .../jetbrains/kotlinx/spark/examples/Join.kt | 7 +- .../jetbrains/kotlinx/spark/examples/Main.kt | 35 +- .../spark/examples/MapAndListOperations.kt | 13 +- .../kotlinx/spark/examples/WordCount.kt | 9 +- .../api/tuples/DestructuredTupleBuilders.kt | 46 +- .../kotlinx/spark/api/tuples/DropFunctions.kt | 2 + .../kotlinx/spark/api/tuples/EmptyTuple.kt | 44 ++ .../api/tuples/ProductTextualAccessors.kt | 509 +++++++++++++++++- .../kotlinx/spark/api/tuples/TupleBuilders.kt | 6 +- .../spark/api/tuples/TupleConcatenation.kt | 94 ++++ .../kotlinx/spark/api/tuples/TupleCopy.kt | 24 + .../spark/api/tuples/TupleExtending.kt | 98 ++-- .../kotlinx/spark/api/DatasetFunctionTest.kt | 109 ++-- .../kotlinx/spark/api/EncodingTest.kt | 27 +- 17 files changed, 873 insertions(+), 171 deletions(-) create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt index e8e4dbf1..d3fff24b 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt @@ -20,17 +20,20 @@ package org.jetbrains.kotlinx.spark.examples import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.* fun main() { withSpark { dsOf(1, 2, 3, 4, 5) - .map { it to (it + 2) } + .map { t(it, (it + 2)) } .withCached { showDS() - filter { it.first % 2 == 0 }.showDS() + filter { it._1 % 2 == 0 }.showDS() + } + .map { (first, second) -> + t(first, second, (first + second) * 2) } - .map { c(it.first, it.second, (it.first + it.second) * 2) } .show() } } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Collect.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Collect.kt index 685c1ae4..a956bd15 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Collect.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Collect.kt @@ -21,6 +21,7 @@ package org.jetbrains.kotlinx.spark.examples import org.apache.spark.sql.Row import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.* fun main() { withSpark { @@ -39,7 +40,7 @@ fun main() { } dsOf(1, 2, 3) - .map { c(it, it + 1, it + 2) } + .map { t(it, it + 1, it + 2) } .to() .select("_1") .collectAsList() diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt index 89ded8e3..5b5f8a41 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt @@ -20,12 +20,17 @@ package org.jetbrains.kotlinx.spark.examples import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.t fun main() { withSpark { - dsOf(c(1, "a"), c(1, "b"), c(2, "c")) + dsOf( + t(1, "a"), + t(1, "b"), + t(2, "c"), + ) .groupByKey { it._1 } - .reduceGroupsK { a, b -> c(a._1 + b._1, a._2 + b._2) } + .reduceGroupsK { a, b -> t(a._1 + b._1, a._2 + b._2) } .show() } } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Join.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Join.kt index 594a2474..52193c1d 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Join.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Join.kt @@ -20,6 +20,7 @@ package org.jetbrains.kotlinx.spark.examples import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.* data class Left(val id: Int, val name: String) @@ -32,10 +33,12 @@ fun main() { val first = dsOf(Left(1, "a"), Left(2, "b")) val second = dsOf(Right(1, 100), Right(3, 300)) first - .leftJoin(second, first.col("id").eq(second.col("id"))) + .leftJoin(second, first.col("id") eq second.col("id")) .debugCodegen() .also { it.show() } - .map { c(it.first.id, it.first.name, it.second?.value) } + .map { (left, right) -> + t + left.id + left.name + right?.value + } .show() } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt index a253c12d..f7f3157f 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt @@ -20,36 +20,47 @@ package org.jetbrains.kotlinx.spark.examples import org.apache.spark.api.java.function.ReduceFunction +import org.apache.spark.sql.Dataset import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.* +import scala.* data class Q(val id: Int, val text: T) object Main { @JvmStatic fun main(args: Array) { - val spark = SparkSession .builder() .master("local[2]") - .appName("Simple Application").orCreate - - val triples = spark - .toDS(listOf(Q(1, 1 to null), Q(2, 2 to "22"), Q(3, 3 to "333"))) - .map { (a, b) -> a + b.first to b.second?.length } - .map { it to 1 } - .map { (a, b) -> Triple(a.first, a.second, b) } + .appName("Simple Application") + .getOrCreate() + val triples: Dataset> = spark + .toDS( + listOf( + Q(1, t(1, null)), + Q(2, t(2, "22")), + Q(3, t(3, "333")), + ) + ) + .map { (a, b) -> t(a + b._1, b._2?.length) } + .map { it: Tuple2 -> it + 1 } // add counter val pairs = spark - .toDS(listOf(2 to "hell", 4 to "moon", 6 to "berry")) + .toDS(listOf( + t + 2 + "hell", + t + 4 + "moon", + t + 6 + "berry", + )) triples - .leftJoin(pairs, triples.col("first").multiply(2).eq(pairs.col("first"))) + .leftJoin(pairs, triples.col("first").multiply(2) eq pairs.col("first")) // .also { it.printSchema() } - .map { (triple, pair) -> Five(triple.first, triple.second, triple.third, pair?.first, pair?.second) } + .map { (triple, pair) -> Five(triple._1, triple._2, triple._3, pair?._1, pair?._2) } .groupByKey { it.a } .reduceGroupsK { v1, v2 -> v1.copy(a = v1.a + v2.a, b = v1.a + v2.a) } - .map { it.second } + .map { it._2 } .repartition(1) .withCached { write() diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt index c67504cf..15892cfd 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt @@ -20,16 +20,21 @@ package org.jetbrains.kotlinx.spark.examples import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.* fun main() { withSpark(props = mapOf("spark.sql.codegen.wholeStage" to true)) { dsOf( - mapOf(1 to c(1, 2, 3), 2 to c(1, 2, 3)), - mapOf(3 to c(1, 2, 3), 4 to c(1, 2, 3)), + mapOf(1 to t(1, 2, 3), 2 to t(1, 2, 3)), + mapOf(3 to t(1, 2, 3), 4 to t(1, 2, 3)), ) - .flatMap { it.toList().map { p -> listOf(p.first, p.second._1, p.second._2, p.second._3) }.iterator() } + .flatMap { + it.toList() + .map { (first, tuple) -> listOf(first, tuple._1, tuple._2, tuple._3) } + .iterator() + } .flatten() - .map { c(it) } + .map { tupleOf(it) } .also { it.printSchema() } .distinct() .sort("_1") diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt index 996e36be..42ab48c4 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt @@ -21,6 +21,7 @@ package org.jetbrains.kotlinx.spark.examples import org.apache.spark.sql.Dataset import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.* const val MEANINGFUL_WORD_LENGTH = 4 @@ -33,15 +34,15 @@ fun main() { .flatten() .cleanup() .groupByKey { it } - .mapGroups { k, iter -> k to iter.asSequence().count() } - .sort { arrayOf(it.col("second").desc()) } + .mapGroups { k, iter -> t + k + iter.asSequence().count() } + .sort { arrayOf(it.col("_2").desc()) } .limit(20) - .map { it.second to it.first } + .map { it.swap() } .show(false) } } -fun Dataset.cleanup() = +fun Dataset.cleanup(): Dataset = filter { it.isNotBlank() } .map { it.trim(',', ' ', '\n', ':', '.', ';', '?', '!', '"', '\'', '\t', ' ') } .filter { !it.endsWith("n’t") } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt index 8e258aab..a3ca0824 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt @@ -54,26 +54,26 @@ import scala.Tuple22 /** * Returns a new Tuple2 of the given arguments. **/ -@JvmName("tInfix") -infix fun T1.t(other: T2): Tuple2 = Tuple2(this, other) - -infix fun Tuple2.t(next: T3): Tuple3 = Tuple3(this._1(), this._2(), next) -infix fun Tuple3.t(next: T4): Tuple4 = Tuple4(this._1(), this._2(), this._3(), next) -infix fun Tuple4.t(next: T5): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), next) -infix fun Tuple5.t(next: T6): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), next) -infix fun Tuple6.t(next: T7): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), next) -infix fun Tuple7.t(next: T8): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), next) -infix fun Tuple8.t(next: T9): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), next) -infix fun Tuple9.t(next: T10): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), next) -infix fun Tuple10.t(next: T11): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), next) -infix fun Tuple11.t(next: T12): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), next) -infix fun Tuple12.t(next: T13): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), next) -infix fun Tuple13.t(next: T14): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), next) -infix fun Tuple14.t(next: T15): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), next) -infix fun Tuple15.t(next: T16): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), next) -infix fun Tuple16.t(next: T17): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), next) -infix fun Tuple17.t(next: T18): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), next) -infix fun Tuple18.t(next: T19): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), next) -infix fun Tuple19.t(next: T20): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), next) -infix fun Tuple20.t(next: T21): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), next) -infix fun Tuple21.t(next: T22): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), next) +//@JvmName("tInfix") +//infix fun T1.t(other: T2): Tuple2 = Tuple2(this, other) +// +//infix fun Tuple2.t(next: T3): Tuple3 = Tuple3(this._1(), this._2(), next) +//infix fun Tuple3.t(next: T4): Tuple4 = Tuple4(this._1(), this._2(), this._3(), next) +//infix fun Tuple4.t(next: T5): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), next) +//infix fun Tuple5.t(next: T6): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), next) +//infix fun Tuple6.t(next: T7): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), next) +//infix fun Tuple7.t(next: T8): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), next) +//infix fun Tuple8.t(next: T9): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), next) +//infix fun Tuple9.t(next: T10): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), next) +//infix fun Tuple10.t(next: T11): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), next) +//infix fun Tuple11.t(next: T12): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), next) +//infix fun Tuple12.t(next: T13): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), next) +//infix fun Tuple13.t(next: T14): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), next) +//infix fun Tuple14.t(next: T15): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), next) +//infix fun Tuple15.t(next: T16): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), next) +//infix fun Tuple16.t(next: T17): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), next) +//infix fun Tuple17.t(next: T18): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), next) +//infix fun Tuple18.t(next: T19): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), next) +//infix fun Tuple19.t(next: T20): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), next) +//infix fun Tuple20.t(next: T21): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), next) +//infix fun Tuple21.t(next: T22): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), next) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt index 5c12b06f..cfc730f3 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt @@ -51,6 +51,8 @@ import scala.Tuple22 * */ +fun Tuple1<*>.dropFirst(): EmptyTuple = EmptyTuple +fun Tuple1<*>.dropLast(): EmptyTuple = EmptyTuple fun Tuple2<*, T1>.dropFirst(): Tuple1 = Tuple1(this._2()) fun Tuple2.dropLast(): Tuple1 = Tuple1(this._1()) fun Tuple3<*, T1, T2>.dropFirst(): Tuple2 = Tuple2(this._2(), this._3()) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt new file mode 100644 index 00000000..925866dc --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt @@ -0,0 +1,44 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.* +import java.io.Serializable + +/** + * Just as in Scala3, we provide the [EmptyTuple]. It is the result of dropping the last item from a [Tuple1] + * or when calling `tupleOf()` for instance. + * + * It can also be used to create tuples like: + * ```kotlin + * val tuple: Tuple3 = t + 1 + 5L + "test" + * ``` + * if you really want to. + */ + +object EmptyTuple : Product, Serializable { + override fun canEqual(that: Any?): Boolean = that == EmptyTuple + override fun productElement(n: Int): Nothing = throw IndexOutOfBoundsException("EmptyTuple has no members") + override fun productArity(): Int = 0 + override fun toString(): String = "()" +} + +public val t: EmptyTuple = EmptyTuple +public fun emptyTuple(): EmptyTuple = EmptyTuple diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt index 5779ecad..4e9c676b 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt @@ -17,6 +17,8 @@ * limitations under the License. * =LICENSEEND= */ +@file:Suppress("ObjectPropertyName") + package org.jetbrains.kotlinx.spark.api.tuples import scala.Product1 @@ -49,135 +51,640 @@ import scala.Product22 * */ +val Product1._1: T get() = this._1() + /** Returns the first value of this Tuple or Product. */ fun Product1.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product1.last(): T = this._1() +val Product2._1: T get() = this._1() + +val Product2<*, T>._2: T get() = this._2() + /** Returns the first value of this Tuple or Product. */ fun Product2.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product2<*, T>.last(): T = this._2() +val Product3._1: T get() = this._1() + +val Product3<*, T, *>._2: T get() = this._2() + +val Product3<*, *, T>._3: T get() = this._3() + /** Returns the first value of this Tuple or Product. */ fun Product3.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product3<*, *, T>.last(): T = this._3() +val Product4._1: T get() = this._1() + +val Product4<*, T, *, *>._2: T get() = this._2() + +val Product4<*, *, T, *>._3: T get() = this._3() + +val Product4<*, *, *, T>._4: T get() = this._4() + /** Returns the first value of this Tuple or Product. */ fun Product4.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product4<*, *, *, T>.last(): T = this._4() +val Product5._1: T get() = this._1() + +val Product5<*, T, *, *, *>._2: T get() = this._2() + +val Product5<*, *, T, *, *>._3: T get() = this._3() + +val Product5<*, *, *, T, *>._4: T get() = this._4() + +val Product5<*, *, *, *, T>._5: T get() = this._5() + /** Returns the first value of this Tuple or Product. */ fun Product5.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product5<*, *, *, *, T>.last(): T = this._5() +val Product6._1: T get() = this._1() + +val Product6<*, T, *, *, *, *>._2: T get() = this._2() + +val Product6<*, *, T, *, *, *>._3: T get() = this._3() + +val Product6<*, *, *, T, *, *>._4: T get() = this._4() + +val Product6<*, *, *, *, T, *>._5: T get() = this._5() + +val Product6<*, *, *, *, *, T>._6: T get() = this._6() + /** Returns the first value of this Tuple or Product. */ fun Product6.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product6<*, *, *, *, *, T>.last(): T = this._6() +val Product7._1: T get() = this._1() + +val Product7<*, T, *, *, *, *, *>._2: T get() = this._2() + +val Product7<*, *, T, *, *, *, *>._3: T get() = this._3() + +val Product7<*, *, *, T, *, *, *>._4: T get() = this._4() + +val Product7<*, *, *, *, T, *, *>._5: T get() = this._5() + +val Product7<*, *, *, *, *, T, *>._6: T get() = this._6() + +val Product7<*, *, *, *, *, *, T>._7: T get() = this._7() + /** Returns the first value of this Tuple or Product. */ fun Product7.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product7<*, *, *, *, *, *, T>.last(): T = this._7() +val Product8._1: T get() = this._1() + +val Product8<*, T, *, *, *, *, *, *>._2: T get() = this._2() + +val Product8<*, *, T, *, *, *, *, *>._3: T get() = this._3() + +val Product8<*, *, *, T, *, *, *, *>._4: T get() = this._4() + +val Product8<*, *, *, *, T, *, *, *>._5: T get() = this._5() + +val Product8<*, *, *, *, *, T, *, *>._6: T get() = this._6() + +val Product8<*, *, *, *, *, *, T, *>._7: T get() = this._7() + +val Product8<*, *, *, *, *, *, *, T>._8: T get() = this._8() + /** Returns the first value of this Tuple or Product. */ fun Product8.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product8<*, *, *, *, *, *, *, T>.last(): T = this._8() +val Product9._1: T get() = this._1() + +val Product9<*, T, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product9<*, *, T, *, *, *, *, *, *>._3: T get() = this._3() + +val Product9<*, *, *, T, *, *, *, *, *>._4: T get() = this._4() + +val Product9<*, *, *, *, T, *, *, *, *>._5: T get() = this._5() + +val Product9<*, *, *, *, *, T, *, *, *>._6: T get() = this._6() + +val Product9<*, *, *, *, *, *, T, *, *>._7: T get() = this._7() + +val Product9<*, *, *, *, *, *, *, T, *>._8: T get() = this._8() + +val Product9<*, *, *, *, *, *, *, *, T>._9: T get() = this._9() + /** Returns the first value of this Tuple or Product. */ fun Product9.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product9<*, *, *, *, *, *, *, *, T>.last(): T = this._9() +val Product10._1: T get() = this._1() + +val Product10<*, T, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product10<*, *, T, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product10<*, *, *, T, *, *, *, *, *, *>._4: T get() = this._4() + +val Product10<*, *, *, *, T, *, *, *, *, *>._5: T get() = this._5() + +val Product10<*, *, *, *, *, T, *, *, *, *>._6: T get() = this._6() + +val Product10<*, *, *, *, *, *, T, *, *, *>._7: T get() = this._7() + +val Product10<*, *, *, *, *, *, *, T, *, *>._8: T get() = this._8() + +val Product10<*, *, *, *, *, *, *, *, T, *>._9: T get() = this._9() + +val Product10<*, *, *, *, *, *, *, *, *, T>._10: T get() = this._10() + /** Returns the first value of this Tuple or Product. */ fun Product10.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product10<*, *, *, *, *, *, *, *, *, T>.last(): T = this._10() +val Product11._1: T get() = this._1() + +val Product11<*, T, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product11<*, *, T, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product11<*, *, *, T, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product11<*, *, *, *, T, *, *, *, *, *, *>._5: T get() = this._5() + +val Product11<*, *, *, *, *, T, *, *, *, *, *>._6: T get() = this._6() + +val Product11<*, *, *, *, *, *, T, *, *, *, *>._7: T get() = this._7() + +val Product11<*, *, *, *, *, *, *, T, *, *, *>._8: T get() = this._8() + +val Product11<*, *, *, *, *, *, *, *, T, *, *>._9: T get() = this._9() + +val Product11<*, *, *, *, *, *, *, *, *, T, *>._10: T get() = this._10() + +val Product11<*, *, *, *, *, *, *, *, *, *, T>._11: T get() = this._11() + /** Returns the first value of this Tuple or Product. */ fun Product11.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product11<*, *, *, *, *, *, *, *, *, *, T>.last(): T = this._11() +val Product12._1: T get() = this._1() + +val Product12<*, T, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product12<*, *, T, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product12<*, *, *, T, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product12<*, *, *, *, T, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product12<*, *, *, *, *, T, *, *, *, *, *, *>._6: T get() = this._6() + +val Product12<*, *, *, *, *, *, T, *, *, *, *, *>._7: T get() = this._7() + +val Product12<*, *, *, *, *, *, *, T, *, *, *, *>._8: T get() = this._8() + +val Product12<*, *, *, *, *, *, *, *, T, *, *, *>._9: T get() = this._9() + +val Product12<*, *, *, *, *, *, *, *, *, T, *, *>._10: T get() = this._10() + +val Product12<*, *, *, *, *, *, *, *, *, *, T, *>._11: T get() = this._11() + +val Product12<*, *, *, *, *, *, *, *, *, *, *, T>._12: T get() = this._12() + /** Returns the first value of this Tuple or Product. */ fun Product12.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product12<*, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._12() +val Product13._1: T get() = this._1() + +val Product13<*, T, *, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product13<*, *, T, *, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product13<*, *, *, T, *, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product13<*, *, *, *, T, *, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product13<*, *, *, *, *, T, *, *, *, *, *, *, *>._6: T get() = this._6() + +val Product13<*, *, *, *, *, *, T, *, *, *, *, *, *>._7: T get() = this._7() + +val Product13<*, *, *, *, *, *, *, T, *, *, *, *, *>._8: T get() = this._8() + +val Product13<*, *, *, *, *, *, *, *, T, *, *, *, *>._9: T get() = this._9() + +val Product13<*, *, *, *, *, *, *, *, *, T, *, *, *>._10: T get() = this._10() + +val Product13<*, *, *, *, *, *, *, *, *, *, T, *, *>._11: T get() = this._11() + +val Product13<*, *, *, *, *, *, *, *, *, *, *, T, *>._12: T get() = this._12() + +val Product13<*, *, *, *, *, *, *, *, *, *, *, *, T>._13: T get() = this._13() + /** Returns the first value of this Tuple or Product. */ fun Product13.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product13<*, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._13() +val Product14._1: T get() = this._1() + +val Product14<*, T, *, *, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product14<*, *, T, *, *, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product14<*, *, *, T, *, *, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product14<*, *, *, *, T, *, *, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product14<*, *, *, *, *, T, *, *, *, *, *, *, *, *>._6: T get() = this._6() + +val Product14<*, *, *, *, *, *, T, *, *, *, *, *, *, *>._7: T get() = this._7() + +val Product14<*, *, *, *, *, *, *, T, *, *, *, *, *, *>._8: T get() = this._8() + +val Product14<*, *, *, *, *, *, *, *, T, *, *, *, *, *>._9: T get() = this._9() + +val Product14<*, *, *, *, *, *, *, *, *, T, *, *, *, *>._10: T get() = this._10() + +val Product14<*, *, *, *, *, *, *, *, *, *, T, *, *, *>._11: T get() = this._11() + +val Product14<*, *, *, *, *, *, *, *, *, *, *, T, *, *>._12: T get() = this._12() + +val Product14<*, *, *, *, *, *, *, *, *, *, *, *, T, *>._13: T get() = this._13() + +val Product14<*, *, *, *, *, *, *, *, *, *, *, *, *, T>._14: T get() = this._14() + /** Returns the first value of this Tuple or Product. */ fun Product14.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product14<*, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._14() +val Product15._1: T get() = this._1() + +val Product15<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product15<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product15<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product15<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product15<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *>._6: T get() = this._6() + +val Product15<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *>._7: T get() = this._7() + +val Product15<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *>._8: T get() = this._8() + +val Product15<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *>._9: T get() = this._9() + +val Product15<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *>._10: T get() = this._10() + +val Product15<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *>._11: T get() = this._11() + +val Product15<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *>._12: T get() = this._12() + +val Product15<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *>._13: T get() = this._13() + +val Product15<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *>._14: T get() = this._14() + +val Product15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T>._15: T get() = this._15() + /** Returns the first value of this Tuple or Product. */ fun Product15.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._15() +val Product16._1: T get() = this._1() + +val Product16<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product16<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product16<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product16<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product16<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>._6: T get() = this._6() + +val Product16<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>._7: T get() = this._7() + +val Product16<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>._8: T get() = this._8() + +val Product16<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>._9: T get() = this._9() + +val Product16<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>._10: T get() = this._10() + +val Product16<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>._11: T get() = this._11() + +val Product16<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>._12: T get() = this._12() + +val Product16<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>._13: T get() = this._13() + +val Product16<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>._14: T get() = this._14() + +val Product16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>._15: T get() = this._15() + +val Product16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>._16: T get() = this._16() + /** Returns the first value of this Tuple or Product. */ fun Product16.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._16() +val Product17._1: T get() = this._1() + +val Product17<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product17<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product17<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product17<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product17<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>._6: T get() = this._6() + +val Product17<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>._7: T get() = this._7() + +val Product17<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>._8: T get() = this._8() + +val Product17<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>._9: T get() = this._9() + +val Product17<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>._10: T get() = this._10() + +val Product17<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>._11: T get() = this._11() + +val Product17<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>._12: T get() = this._12() + +val Product17<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>._13: T get() = this._13() + +val Product17<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>._14: T get() = this._14() + +val Product17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>._15: T get() = this._15() + +val Product17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>._16: T get() = this._16() + +val Product17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>._17: T get() = this._17() + /** Returns the first value of this Tuple or Product. */ fun Product17.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._17() +val Product18._1: T get() = this._1() + +val Product18<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product18<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product18<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product18<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product18<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>._6: T get() = this._6() + +val Product18<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>._7: T get() = this._7() + +val Product18<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>._8: T get() = this._8() + +val Product18<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>._9: T get() = this._9() + +val Product18<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>._10: T get() = this._10() + +val Product18<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>._11: T get() = this._11() + +val Product18<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>._12: T get() = this._12() + +val Product18<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>._13: T get() = this._13() + +val Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>._14: T get() = this._14() + +val Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>._15: T get() = this._15() + +val Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>._16: T get() = this._16() + +val Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>._17: T get() = this._17() + +val Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>._18: T get() = this._18() + /** Returns the first value of this Tuple or Product. */ fun Product18.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._18() +val Product19._1: T get() = this._1() + +val Product19<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product19<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product19<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product19<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product19<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>._6: T get() = this._6() + +val Product19<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>._7: T get() = this._7() + +val Product19<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>._8: T get() = this._8() + +val Product19<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>._9: T get() = this._9() + +val Product19<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>._10: T get() = this._10() + +val Product19<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>._11: T get() = this._11() + +val Product19<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>._12: T get() = this._12() + +val Product19<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>._13: T get() = this._13() + +val Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>._14: T get() = this._14() + +val Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>._15: T get() = this._15() + +val Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>._16: T get() = this._16() + +val Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>._17: T get() = this._17() + +val Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>._18: T get() = this._18() + +val Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>._19: T get() = this._19() + /** Returns the first value of this Tuple or Product. */ fun Product19.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._19() +val Product20._1: T get() = this._1() + +val Product20<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product20<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product20<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product20<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product20<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._6: T get() = this._6() + +val Product20<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>._7: T get() = this._7() + +val Product20<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>._8: T get() = this._8() + +val Product20<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>._9: T get() = this._9() + +val Product20<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>._10: T get() = this._10() + +val Product20<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>._11: T get() = this._11() + +val Product20<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>._12: T get() = this._12() + +val Product20<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>._13: T get() = this._13() + +val Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>._14: T get() = this._14() + +val Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>._15: T get() = this._15() + +val Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>._16: T get() = this._16() + +val Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>._17: T get() = this._17() + +val Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>._18: T get() = this._18() + +val Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>._19: T get() = this._19() + +val Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>._20: T get() = this._20() + /** Returns the first value of this Tuple or Product. */ fun Product20.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._20() +val Product21._1: T get() = this._1() + +val Product21<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product21<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product21<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product21<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product21<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._6: T get() = this._6() + +val Product21<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._7: T get() = this._7() + +val Product21<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>._8: T get() = this._8() + +val Product21<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>._9: T get() = this._9() + +val Product21<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>._10: T get() = this._10() + +val Product21<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>._11: T get() = this._11() + +val Product21<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>._12: T get() = this._12() + +val Product21<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>._13: T get() = this._13() + +val Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>._14: T get() = this._14() + +val Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>._15: T get() = this._15() + +val Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>._16: T get() = this._16() + +val Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>._17: T get() = this._17() + +val Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>._18: T get() = this._18() + +val Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>._19: T get() = this._19() + +val Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>._20: T get() = this._20() + +val Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>._21: T get() = this._21() + /** Returns the first value of this Tuple or Product. */ fun Product21.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._21() +val Product22._1: T get() = this._1() + +val Product22<*, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._2: T get() = this._2() + +val Product22<*, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._3: T get() = this._3() + +val Product22<*, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._4: T get() = this._4() + +val Product22<*, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._5: T get() = this._5() + +val Product22<*, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._6: T get() = this._6() + +val Product22<*, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._7: T get() = this._7() + +val Product22<*, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *, *>._8: T get() = this._8() + +val Product22<*, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *, *>._9: T get() = this._9() + +val Product22<*, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *, *>._10: T get() = this._10() + +val Product22<*, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *, *>._11: T get() = this._11() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *, *>._12: T get() = this._12() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *, *>._13: T get() = this._13() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *, *>._14: T get() = this._14() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *, *>._15: T get() = this._15() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *, *>._16: T get() = this._16() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *, *>._17: T get() = this._17() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *, *>._18: T get() = this._18() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *, *>._19: T get() = this._19() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *, *>._20: T get() = this._20() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T, *>._21: T get() = this._21() + +val Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>._22: T get() = this._22() + /** Returns the first value of this Tuple or Product. */ fun Product22.first(): T = this._1() /** Returns the last value of this Tuple or Product. */ fun Product22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T>.last(): T = this._22() - diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt index d6725ab4..d31b8247 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt @@ -57,6 +57,9 @@ import scala.Tuple22 +/** Returns the instance of Tuple0. */ +fun tupleOf(): EmptyTuple = EmptyTuple + /** Returns a new Tuple1 of the given arguments. */ fun tupleOf(_1: T1): Tuple1 = Tuple1(_1) @@ -123,7 +126,8 @@ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22): Tuple22 = Tuple22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) - +/** Returns the instance of Tuple0. */ +fun t(): EmptyTuple = EmptyTuple /** Returns a new Tuple1 of the given arguments. */ fun t(_1: T1): Tuple1 = Tuple1(_1) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt index 77926349..19406090 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt @@ -51,9 +51,16 @@ import scala.Tuple22 * or using the shorthand: * ```tupleOf(a, b) + tupleOf(c, d) == tupleOf(a, b, c, d)``` * + * If you mean to create ```tupleOf(a, b, tupleOf(c, d))``` or ```tupleOf(tupleOf(a, b), c, d)```, + * use [appendedBy] and [prependedBy] explicitly: + * ```t(a, b).appendedBy(t(c, d)) == t(a, b, t(c, d))``` + * or wrap it in another [Tuple1]: + * ```t(a, b) + t(t(c, d)) == t(a, b, t(c, d))``` * */ +infix fun EmptyTuple.concat(other: Tuple1): Tuple1 = other.clone() +infix fun Tuple1.concat(other: EmptyTuple): Tuple1 = this.clone() infix fun Tuple1.concat(other: Tuple1): Tuple2 = Tuple2(this._1(), other._1()) infix fun Tuple1.concat(other: Tuple2): Tuple3 = Tuple3(this._1(), other._1(), other._2()) infix fun Tuple1.concat(other: Tuple3): Tuple4 = Tuple4(this._1(), other._1(), other._2(), other._3()) @@ -75,6 +82,8 @@ infix fun Tuple1.concat(other: Tuple19): Tuple20 = Tuple20(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) infix fun Tuple1.concat(other: Tuple20): Tuple21 = Tuple21(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) infix fun Tuple1.concat(other: Tuple21): Tuple22 = Tuple22(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20(), other._21()) +infix fun EmptyTuple.concat(other: Tuple2): Tuple2 = other.clone() +infix fun Tuple2.concat(other: EmptyTuple): Tuple2 = this.clone() infix fun Tuple2.concat(other: Tuple1): Tuple3 = Tuple3(this._1(), this._2(), other._1()) infix fun Tuple2.concat(other: Tuple2): Tuple4 = Tuple4(this._1(), this._2(), other._1(), other._2()) infix fun Tuple2.concat(other: Tuple3): Tuple5 = Tuple5(this._1(), this._2(), other._1(), other._2(), other._3()) @@ -95,6 +104,8 @@ infix fun Tuple2.concat(other: Tuple18): Tuple20 = Tuple20(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) infix fun Tuple2.concat(other: Tuple19): Tuple21 = Tuple21(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) infix fun Tuple2.concat(other: Tuple20): Tuple22 = Tuple22(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) +infix fun EmptyTuple.concat(other: Tuple3): Tuple3 = other.clone() +infix fun Tuple3.concat(other: EmptyTuple): Tuple3 = this.clone() infix fun Tuple3.concat(other: Tuple1): Tuple4 = Tuple4(this._1(), this._2(), this._3(), other._1()) infix fun Tuple3.concat(other: Tuple2): Tuple5 = Tuple5(this._1(), this._2(), this._3(), other._1(), other._2()) infix fun Tuple3.concat(other: Tuple3): Tuple6 = Tuple6(this._1(), this._2(), this._3(), other._1(), other._2(), other._3()) @@ -114,6 +125,8 @@ infix fun Tuple3.concat(other: Tuple17): Tuple20 = Tuple20(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) infix fun Tuple3.concat(other: Tuple18): Tuple21 = Tuple21(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) infix fun Tuple3.concat(other: Tuple19): Tuple22 = Tuple22(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) +infix fun EmptyTuple.concat(other: Tuple4): Tuple4 = other.clone() +infix fun Tuple4.concat(other: EmptyTuple): Tuple4 = this.clone() infix fun Tuple4.concat(other: Tuple1): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), other._1()) infix fun Tuple4.concat(other: Tuple2): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), other._1(), other._2()) infix fun Tuple4.concat(other: Tuple3): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3()) @@ -132,6 +145,8 @@ infix fun Tuple4.concat(other: Tuple16): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) infix fun Tuple4.concat(other: Tuple17): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) infix fun Tuple4.concat(other: Tuple18): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) +infix fun EmptyTuple.concat(other: Tuple5): Tuple5 = other.clone() +infix fun Tuple5.concat(other: EmptyTuple): Tuple5 = this.clone() infix fun Tuple5.concat(other: Tuple1): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), other._1()) infix fun Tuple5.concat(other: Tuple2): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2()) infix fun Tuple5.concat(other: Tuple3): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3()) @@ -149,6 +164,8 @@ infix fun Tuple5.concat(other: Tuple15): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) infix fun Tuple5.concat(other: Tuple16): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) infix fun Tuple5.concat(other: Tuple17): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +infix fun EmptyTuple.concat(other: Tuple6): Tuple6 = other.clone() +infix fun Tuple6.concat(other: EmptyTuple): Tuple6 = this.clone() infix fun Tuple6.concat(other: Tuple1): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1()) infix fun Tuple6.concat(other: Tuple2): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2()) infix fun Tuple6.concat(other: Tuple3): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3()) @@ -165,6 +182,8 @@ infix fun Tuple6.concat(other: Tuple14): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) infix fun Tuple6.concat(other: Tuple15): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) infix fun Tuple6.concat(other: Tuple16): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +infix fun EmptyTuple.concat(other: Tuple7): Tuple7 = other.clone() +infix fun Tuple7.concat(other: EmptyTuple): Tuple7 = this.clone() infix fun Tuple7.concat(other: Tuple1): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1()) infix fun Tuple7.concat(other: Tuple2): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2()) infix fun Tuple7.concat(other: Tuple3): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3()) @@ -180,6 +199,8 @@ infix fun Tuple7.concat(other: Tuple13): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) infix fun Tuple7.concat(other: Tuple14): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) infix fun Tuple7.concat(other: Tuple15): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +infix fun EmptyTuple.concat(other: Tuple8): Tuple8 = other.clone() +infix fun Tuple8.concat(other: EmptyTuple): Tuple8 = this.clone() infix fun Tuple8.concat(other: Tuple1): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1()) infix fun Tuple8.concat(other: Tuple2): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2()) infix fun Tuple8.concat(other: Tuple3): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3()) @@ -194,6 +215,8 @@ infix fun Tuple8.concat(other: Tuple12): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) infix fun Tuple8.concat(other: Tuple13): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) infix fun Tuple8.concat(other: Tuple14): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +infix fun EmptyTuple.concat(other: Tuple9): Tuple9 = other.clone() +infix fun Tuple9.concat(other: EmptyTuple): Tuple9 = this.clone() infix fun Tuple9.concat(other: Tuple1): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1()) infix fun Tuple9.concat(other: Tuple2): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2()) infix fun Tuple9.concat(other: Tuple3): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3()) @@ -207,6 +230,8 @@ infix fun Tuple9.concat(other: Tuple11): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) infix fun Tuple9.concat(other: Tuple12): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) infix fun Tuple9.concat(other: Tuple13): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +infix fun EmptyTuple.concat(other: Tuple10): Tuple10 = other.clone() +infix fun Tuple10.concat(other: EmptyTuple): Tuple10 = this.clone() infix fun Tuple10.concat(other: Tuple1): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1()) infix fun Tuple10.concat(other: Tuple2): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2()) infix fun Tuple10.concat(other: Tuple3): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3()) @@ -219,6 +244,8 @@ infix fun Tuple10.concat(other: Tuple10): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) infix fun Tuple10.concat(other: Tuple11): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) infix fun Tuple10.concat(other: Tuple12): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +infix fun EmptyTuple.concat(other: Tuple11): Tuple11 = other.clone() +infix fun Tuple11.concat(other: EmptyTuple): Tuple11 = this.clone() infix fun Tuple11.concat(other: Tuple1): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1()) infix fun Tuple11.concat(other: Tuple2): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2()) infix fun Tuple11.concat(other: Tuple3): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3()) @@ -230,6 +257,8 @@ infix fun Tuple11.concat(other: Tuple9): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) infix fun Tuple11.concat(other: Tuple10): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) infix fun Tuple11.concat(other: Tuple11): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +infix fun EmptyTuple.concat(other: Tuple12): Tuple12 = other.clone() +infix fun Tuple12.concat(other: EmptyTuple): Tuple12 = this.clone() infix fun Tuple12.concat(other: Tuple1): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1()) infix fun Tuple12.concat(other: Tuple2): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2()) infix fun Tuple12.concat(other: Tuple3): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3()) @@ -240,6 +269,8 @@ infix fun Tuple12.concat(other: Tuple8): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) infix fun Tuple12.concat(other: Tuple9): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) infix fun Tuple12.concat(other: Tuple10): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +infix fun EmptyTuple.concat(other: Tuple13): Tuple13 = other.clone() +infix fun Tuple13.concat(other: EmptyTuple): Tuple13 = this.clone() infix fun Tuple13.concat(other: Tuple1): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1()) infix fun Tuple13.concat(other: Tuple2): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2()) infix fun Tuple13.concat(other: Tuple3): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3()) @@ -249,6 +280,8 @@ infix fun Tuple13.concat(other: Tuple7): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) infix fun Tuple13.concat(other: Tuple8): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) infix fun Tuple13.concat(other: Tuple9): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +infix fun EmptyTuple.concat(other: Tuple14): Tuple14 = other.clone() +infix fun Tuple14.concat(other: EmptyTuple): Tuple14 = this.clone() infix fun Tuple14.concat(other: Tuple1): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1()) infix fun Tuple14.concat(other: Tuple2): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2()) infix fun Tuple14.concat(other: Tuple3): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3()) @@ -257,6 +290,8 @@ infix fun Tuple14.concat(other: Tuple6): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) infix fun Tuple14.concat(other: Tuple7): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) infix fun Tuple14.concat(other: Tuple8): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +infix fun EmptyTuple.concat(other: Tuple15): Tuple15 = other.clone() +infix fun Tuple15.concat(other: EmptyTuple): Tuple15 = this.clone() infix fun Tuple15.concat(other: Tuple1): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1()) infix fun Tuple15.concat(other: Tuple2): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2()) infix fun Tuple15.concat(other: Tuple3): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3()) @@ -264,28 +299,44 @@ infix fun Tuple15.concat(other: Tuple5): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5()) infix fun Tuple15.concat(other: Tuple6): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) infix fun Tuple15.concat(other: Tuple7): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +infix fun EmptyTuple.concat(other: Tuple16): Tuple16 = other.clone() +infix fun Tuple16.concat(other: EmptyTuple): Tuple16 = this.clone() infix fun Tuple16.concat(other: Tuple1): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1()) infix fun Tuple16.concat(other: Tuple2): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2()) infix fun Tuple16.concat(other: Tuple3): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3()) infix fun Tuple16.concat(other: Tuple4): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4()) infix fun Tuple16.concat(other: Tuple5): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5()) infix fun Tuple16.concat(other: Tuple6): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +infix fun EmptyTuple.concat(other: Tuple17): Tuple17 = other.clone() +infix fun Tuple17.concat(other: EmptyTuple): Tuple17 = this.clone() infix fun Tuple17.concat(other: Tuple1): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1()) infix fun Tuple17.concat(other: Tuple2): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2()) infix fun Tuple17.concat(other: Tuple3): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3()) infix fun Tuple17.concat(other: Tuple4): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4()) infix fun Tuple17.concat(other: Tuple5): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4(), other._5()) +infix fun EmptyTuple.concat(other: Tuple18): Tuple18 = other.clone() +infix fun Tuple18.concat(other: EmptyTuple): Tuple18 = this.clone() infix fun Tuple18.concat(other: Tuple1): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1()) infix fun Tuple18.concat(other: Tuple2): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2()) infix fun Tuple18.concat(other: Tuple3): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3()) infix fun Tuple18.concat(other: Tuple4): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3(), other._4()) +infix fun EmptyTuple.concat(other: Tuple19): Tuple19 = other.clone() +infix fun Tuple19.concat(other: EmptyTuple): Tuple19 = this.clone() infix fun Tuple19.concat(other: Tuple1): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1()) infix fun Tuple19.concat(other: Tuple2): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2()) infix fun Tuple19.concat(other: Tuple3): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2(), other._3()) +infix fun EmptyTuple.concat(other: Tuple20): Tuple20 = other.clone() +infix fun Tuple20.concat(other: EmptyTuple): Tuple20 = this.clone() infix fun Tuple20.concat(other: Tuple1): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1()) infix fun Tuple20.concat(other: Tuple2): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1(), other._2()) +infix fun EmptyTuple.concat(other: Tuple21): Tuple21 = other.clone() +infix fun Tuple21.concat(other: EmptyTuple): Tuple21 = this.clone() infix fun Tuple21.concat(other: Tuple1): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), other._1()) +infix fun EmptyTuple.concat(other: Tuple22): Tuple22 = other.clone() +infix fun Tuple22.concat(other: EmptyTuple): Tuple22 = this.clone() +operator fun EmptyTuple.plus(other: Tuple1): Tuple1 = other.clone() +operator fun Tuple1.plus(other: EmptyTuple): Tuple1 = this.clone() operator fun Tuple1.plus(other: Tuple1): Tuple2 = Tuple2(this._1(), other._1()) operator fun Tuple1.plus(other: Tuple2): Tuple3 = Tuple3(this._1(), other._1(), other._2()) operator fun Tuple1.plus(other: Tuple3): Tuple4 = Tuple4(this._1(), other._1(), other._2(), other._3()) @@ -307,6 +358,8 @@ operator fun Tuple1.plus(other: Tuple19): Tuple20 = Tuple20(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) operator fun Tuple1.plus(other: Tuple20): Tuple21 = Tuple21(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) operator fun Tuple1.plus(other: Tuple21): Tuple22 = Tuple22(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20(), other._21()) +operator fun EmptyTuple.plus(other: Tuple2): Tuple2 = other.clone() +operator fun Tuple2.plus(other: EmptyTuple): Tuple2 = this.clone() operator fun Tuple2.plus(other: Tuple1): Tuple3 = Tuple3(this._1(), this._2(), other._1()) operator fun Tuple2.plus(other: Tuple2): Tuple4 = Tuple4(this._1(), this._2(), other._1(), other._2()) operator fun Tuple2.plus(other: Tuple3): Tuple5 = Tuple5(this._1(), this._2(), other._1(), other._2(), other._3()) @@ -327,6 +380,8 @@ operator fun Tuple2.plus(other: Tuple18): Tuple20 = Tuple20(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) operator fun Tuple2.plus(other: Tuple19): Tuple21 = Tuple21(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) operator fun Tuple2.plus(other: Tuple20): Tuple22 = Tuple22(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) +operator fun EmptyTuple.plus(other: Tuple3): Tuple3 = other.clone() +operator fun Tuple3.plus(other: EmptyTuple): Tuple3 = this.clone() operator fun Tuple3.plus(other: Tuple1): Tuple4 = Tuple4(this._1(), this._2(), this._3(), other._1()) operator fun Tuple3.plus(other: Tuple2): Tuple5 = Tuple5(this._1(), this._2(), this._3(), other._1(), other._2()) operator fun Tuple3.plus(other: Tuple3): Tuple6 = Tuple6(this._1(), this._2(), this._3(), other._1(), other._2(), other._3()) @@ -346,6 +401,8 @@ operator fun Tuple3.plus(other: Tuple17): Tuple20 = Tuple20(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) operator fun Tuple3.plus(other: Tuple18): Tuple21 = Tuple21(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) operator fun Tuple3.plus(other: Tuple19): Tuple22 = Tuple22(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) +operator fun EmptyTuple.plus(other: Tuple4): Tuple4 = other.clone() +operator fun Tuple4.plus(other: EmptyTuple): Tuple4 = this.clone() operator fun Tuple4.plus(other: Tuple1): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), other._1()) operator fun Tuple4.plus(other: Tuple2): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), other._1(), other._2()) operator fun Tuple4.plus(other: Tuple3): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3()) @@ -364,6 +421,8 @@ operator fun Tuple4.plus(other: Tuple16): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) operator fun Tuple4.plus(other: Tuple17): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) operator fun Tuple4.plus(other: Tuple18): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) +operator fun EmptyTuple.plus(other: Tuple5): Tuple5 = other.clone() +operator fun Tuple5.plus(other: EmptyTuple): Tuple5 = this.clone() operator fun Tuple5.plus(other: Tuple1): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), other._1()) operator fun Tuple5.plus(other: Tuple2): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2()) operator fun Tuple5.plus(other: Tuple3): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3()) @@ -381,6 +440,8 @@ operator fun Tuple5.plus(other: Tuple15): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) operator fun Tuple5.plus(other: Tuple16): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) operator fun Tuple5.plus(other: Tuple17): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) +operator fun EmptyTuple.plus(other: Tuple6): Tuple6 = other.clone() +operator fun Tuple6.plus(other: EmptyTuple): Tuple6 = this.clone() operator fun Tuple6.plus(other: Tuple1): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1()) operator fun Tuple6.plus(other: Tuple2): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2()) operator fun Tuple6.plus(other: Tuple3): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3()) @@ -397,6 +458,8 @@ operator fun Tuple6.plus(other: Tuple14): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) operator fun Tuple6.plus(other: Tuple15): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) operator fun Tuple6.plus(other: Tuple16): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) +operator fun EmptyTuple.plus(other: Tuple7): Tuple7 = other.clone() +operator fun Tuple7.plus(other: EmptyTuple): Tuple7 = this.clone() operator fun Tuple7.plus(other: Tuple1): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1()) operator fun Tuple7.plus(other: Tuple2): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2()) operator fun Tuple7.plus(other: Tuple3): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3()) @@ -412,6 +475,8 @@ operator fun Tuple7.plus(other: Tuple13): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) operator fun Tuple7.plus(other: Tuple14): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) operator fun Tuple7.plus(other: Tuple15): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) +operator fun EmptyTuple.plus(other: Tuple8): Tuple8 = other.clone() +operator fun Tuple8.plus(other: EmptyTuple): Tuple8 = this.clone() operator fun Tuple8.plus(other: Tuple1): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1()) operator fun Tuple8.plus(other: Tuple2): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2()) operator fun Tuple8.plus(other: Tuple3): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3()) @@ -426,6 +491,8 @@ operator fun Tuple8.plus(other: Tuple12): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) operator fun Tuple8.plus(other: Tuple13): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) operator fun Tuple8.plus(other: Tuple14): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) +operator fun EmptyTuple.plus(other: Tuple9): Tuple9 = other.clone() +operator fun Tuple9.plus(other: EmptyTuple): Tuple9 = this.clone() operator fun Tuple9.plus(other: Tuple1): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1()) operator fun Tuple9.plus(other: Tuple2): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2()) operator fun Tuple9.plus(other: Tuple3): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3()) @@ -439,6 +506,8 @@ operator fun Tuple9.plus(other: Tuple11): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) operator fun Tuple9.plus(other: Tuple12): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) operator fun Tuple9.plus(other: Tuple13): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) +operator fun EmptyTuple.plus(other: Tuple10): Tuple10 = other.clone() +operator fun Tuple10.plus(other: EmptyTuple): Tuple10 = this.clone() operator fun Tuple10.plus(other: Tuple1): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1()) operator fun Tuple10.plus(other: Tuple2): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2()) operator fun Tuple10.plus(other: Tuple3): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3()) @@ -451,6 +520,8 @@ operator fun Tuple10.plus(other: Tuple10): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) operator fun Tuple10.plus(other: Tuple11): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) operator fun Tuple10.plus(other: Tuple12): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) +operator fun EmptyTuple.plus(other: Tuple11): Tuple11 = other.clone() +operator fun Tuple11.plus(other: EmptyTuple): Tuple11 = this.clone() operator fun Tuple11.plus(other: Tuple1): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1()) operator fun Tuple11.plus(other: Tuple2): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2()) operator fun Tuple11.plus(other: Tuple3): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3()) @@ -462,6 +533,8 @@ operator fun Tuple11.plus(other: Tuple9): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) operator fun Tuple11.plus(other: Tuple10): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) operator fun Tuple11.plus(other: Tuple11): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) +operator fun EmptyTuple.plus(other: Tuple12): Tuple12 = other.clone() +operator fun Tuple12.plus(other: EmptyTuple): Tuple12 = this.clone() operator fun Tuple12.plus(other: Tuple1): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1()) operator fun Tuple12.plus(other: Tuple2): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2()) operator fun Tuple12.plus(other: Tuple3): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3()) @@ -472,6 +545,8 @@ operator fun Tuple12.plus(other: Tuple8): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) operator fun Tuple12.plus(other: Tuple9): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) operator fun Tuple12.plus(other: Tuple10): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) +operator fun EmptyTuple.plus(other: Tuple13): Tuple13 = other.clone() +operator fun Tuple13.plus(other: EmptyTuple): Tuple13 = this.clone() operator fun Tuple13.plus(other: Tuple1): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1()) operator fun Tuple13.plus(other: Tuple2): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2()) operator fun Tuple13.plus(other: Tuple3): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3()) @@ -481,6 +556,8 @@ operator fun Tuple13.plus(other: Tuple7): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) operator fun Tuple13.plus(other: Tuple8): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) operator fun Tuple13.plus(other: Tuple9): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) +operator fun EmptyTuple.plus(other: Tuple14): Tuple14 = other.clone() +operator fun Tuple14.plus(other: EmptyTuple): Tuple14 = this.clone() operator fun Tuple14.plus(other: Tuple1): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1()) operator fun Tuple14.plus(other: Tuple2): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2()) operator fun Tuple14.plus(other: Tuple3): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3()) @@ -489,6 +566,8 @@ operator fun Tuple14.plus(other: Tuple6): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) operator fun Tuple14.plus(other: Tuple7): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) operator fun Tuple14.plus(other: Tuple8): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) +operator fun EmptyTuple.plus(other: Tuple15): Tuple15 = other.clone() +operator fun Tuple15.plus(other: EmptyTuple): Tuple15 = this.clone() operator fun Tuple15.plus(other: Tuple1): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1()) operator fun Tuple15.plus(other: Tuple2): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2()) operator fun Tuple15.plus(other: Tuple3): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3()) @@ -496,24 +575,39 @@ operator fun Tuple15.plus(other: Tuple5): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5()) operator fun Tuple15.plus(other: Tuple6): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) operator fun Tuple15.plus(other: Tuple7): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) +operator fun EmptyTuple.plus(other: Tuple16): Tuple16 = other.clone() +operator fun Tuple16.plus(other: EmptyTuple): Tuple16 = this.clone() operator fun Tuple16.plus(other: Tuple1): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1()) operator fun Tuple16.plus(other: Tuple2): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2()) operator fun Tuple16.plus(other: Tuple3): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3()) operator fun Tuple16.plus(other: Tuple4): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4()) operator fun Tuple16.plus(other: Tuple5): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5()) operator fun Tuple16.plus(other: Tuple6): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) +operator fun EmptyTuple.plus(other: Tuple17): Tuple17 = other.clone() +operator fun Tuple17.plus(other: EmptyTuple): Tuple17 = this.clone() operator fun Tuple17.plus(other: Tuple1): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1()) operator fun Tuple17.plus(other: Tuple2): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2()) operator fun Tuple17.plus(other: Tuple3): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3()) operator fun Tuple17.plus(other: Tuple4): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4()) operator fun Tuple17.plus(other: Tuple5): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4(), other._5()) +operator fun EmptyTuple.plus(other: Tuple18): Tuple18 = other.clone() +operator fun Tuple18.plus(other: EmptyTuple): Tuple18 = this.clone() operator fun Tuple18.plus(other: Tuple1): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1()) operator fun Tuple18.plus(other: Tuple2): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2()) operator fun Tuple18.plus(other: Tuple3): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3()) operator fun Tuple18.plus(other: Tuple4): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3(), other._4()) +operator fun EmptyTuple.plus(other: Tuple19): Tuple19 = other.clone() +operator fun Tuple19.plus(other: EmptyTuple): Tuple19 = this.clone() operator fun Tuple19.plus(other: Tuple1): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1()) operator fun Tuple19.plus(other: Tuple2): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2()) operator fun Tuple19.plus(other: Tuple3): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2(), other._3()) +operator fun EmptyTuple.plus(other: Tuple20): Tuple20 = other.clone() +operator fun Tuple20.plus(other: EmptyTuple): Tuple20 = this.clone() operator fun Tuple20.plus(other: Tuple1): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1()) operator fun Tuple20.plus(other: Tuple2): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1(), other._2()) +operator fun EmptyTuple.plus(other: Tuple21): Tuple21 = other.clone() +operator fun Tuple21.plus(other: EmptyTuple): Tuple21 = this.clone() operator fun Tuple21.plus(other: Tuple1): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), other._1()) +operator fun EmptyTuple.plus(other: Tuple22): Tuple22 = other.clone() +operator fun Tuple22.plus(other: EmptyTuple): Tuple22 = this.clone() + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt index 9f921e77..1b9a37d3 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt @@ -23,25 +23,49 @@ package org.jetbrains.kotlinx.spark.api.tuples import scala.* +fun EmptyTuple.clone(): EmptyTuple = EmptyTuple +fun EmptyTuple.copy(): EmptyTuple = EmptyTuple +fun Tuple1.clone(): Tuple1 = Tuple1(this._1()) fun Tuple1.copy(_1: T1 = this._1()): Tuple1 = Tuple1(_1) +fun Tuple2.clone(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple2.copy(_1: T1 = this._1(), _2: T2 = this._2()): Tuple2 = Tuple2(_1, _2) +fun Tuple3.clone(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) fun Tuple3.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3()): Tuple3 = Tuple3(_1, _2, _3) +fun Tuple4.clone(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) fun Tuple4.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4()): Tuple4 = Tuple4(_1, _2, _3, _4) +fun Tuple5.clone(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) fun Tuple5.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5()): Tuple5 = Tuple5(_1, _2, _3, _4, _5) +fun Tuple6.clone(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) fun Tuple6.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6()): Tuple6 = Tuple6(_1, _2, _3, _4, _5, _6) +fun Tuple7.clone(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) fun Tuple7.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7()): Tuple7 = Tuple7(_1, _2, _3, _4, _5, _6, _7) +fun Tuple8.clone(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) fun Tuple8.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8()): Tuple8 = Tuple8(_1, _2, _3, _4, _5, _6, _7, _8) +fun Tuple9.clone(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) fun Tuple9.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9()): Tuple9 = Tuple9(_1, _2, _3, _4, _5, _6, _7, _8, _9) +fun Tuple10.clone(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) fun Tuple10.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10()): Tuple10 = Tuple10(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10) +fun Tuple11.clone(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) fun Tuple11.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11()): Tuple11 = Tuple11(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11) +fun Tuple12.clone(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) fun Tuple12.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12()): Tuple12 = Tuple12(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12) +fun Tuple13.clone(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) fun Tuple13.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13()): Tuple13 = Tuple13(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13) +fun Tuple14.clone(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) fun Tuple14.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14()): Tuple14 = Tuple14(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14) +fun Tuple15.clone(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) fun Tuple15.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15()): Tuple15 = Tuple15(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15) +fun Tuple16.clone(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) fun Tuple16.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16()): Tuple16 = Tuple16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) +fun Tuple17.clone(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) fun Tuple17.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17()): Tuple17 = Tuple17(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17) +fun Tuple18.clone(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) fun Tuple18.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18()): Tuple18 = Tuple18(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18) +fun Tuple19.clone(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) fun Tuple19.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19()): Tuple19 = Tuple19(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19) +fun Tuple20.clone(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) fun Tuple20.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19(), _20: T20 = this._20()): Tuple20 = Tuple20(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20) +fun Tuple21.clone(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) fun Tuple21.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19(), _20: T20 = this._20(), _21: T21 = this._21()): Tuple21 = Tuple21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21) +fun Tuple22.clone(): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) fun Tuple22.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19(), _20: T20 = this._20(), _21: T21 = this._21(), _22: T22 = this._22()): Tuple22 = Tuple22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt index 13e6c5c7..ed8762c6 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt @@ -54,18 +54,19 @@ import scala.Tuple22 * ```tupleOf(a, b).prependedBy(c) == tupleOf(c, a, b)``` * * or in shorthand: - * ```tupleOf(a, b)..c == tupleOf(a, b, c)``` + * ```tupleOf(a, b) + c == tupleOf(a, b, c)``` * and - * ```c..tupleOf(a, b) == tupleOf(c, a, b)``` + * ```c + tupleOf(a, b) == tupleOf(c, a, b)``` * - * Note that ```tupleOf(a, b)..tupleOf(c, d)``` will not work due to it being ambiguous: - * It could mean both ```tupleOf(a, b, tupleOf(c, d))``` and ```tupleOf(tupleOf(a, b), c, d)```. - * So, for two tuples, you must use [appendedBy] and [prependedBy] explicitly. + * Note that ```tupleOf(a, b) + tupleOf(c, d)``` will merge the two into ```tupleOf(a, b, c, d)```: + * If you mean to create ```tupleOf(a, b, tupleOf(c, d))``` or ```tupleOf(tupleOf(a, b), c, d)```, + * use [appendedBy] and [prependedBy] explicitly. * * For concatenating two tuples, see [org.jetbrains.kotlinx.spark.api.tuples.concat]. * */ +fun EmptyTuple.appendedBy(other: T1): Tuple1 = Tuple1(other) fun Tuple1.appendedBy(other: T2): Tuple2 = Tuple2(this._1(), other) fun Tuple2.appendedBy(other: T3): Tuple3 = Tuple3(this._1(), this._2(), other) fun Tuple3.appendedBy(other: T4): Tuple4 = Tuple4(this._1(), this._2(), this._3(), other) @@ -88,6 +89,7 @@ fun Tuple20.appendedBy(other: T21): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other) fun Tuple21.appendedBy(other: T22): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), other) +fun EmptyTuple.prependedBy(other: T1): Tuple1 = Tuple1(other) fun Tuple1.prependedBy(other: T1): Tuple2 = Tuple2(other, this._1()) fun Tuple2.prependedBy(other: T1): Tuple3 = Tuple3(other, this._1(), this._2()) fun Tuple3.prependedBy(other: T1): Tuple4 = Tuple4(other, this._1(), this._2(), this._3()) @@ -110,46 +112,48 @@ fun Tuple20.prependedBy(other: T1): Tuple21 = Tuple21(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) fun Tuple21.prependedBy(other: T1): Tuple22 = Tuple22(other, this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) -operator fun Tuple1.rangeTo(other: T2): Tuple2 = this.appendedBy(other) -operator fun Tuple2.rangeTo(other: T3): Tuple3 = this.appendedBy(other) -operator fun Tuple3.rangeTo(other: T4): Tuple4 = this.appendedBy(other) -operator fun Tuple4.rangeTo(other: T5): Tuple5 = this.appendedBy(other) -operator fun Tuple5.rangeTo(other: T6): Tuple6 = this.appendedBy(other) -operator fun Tuple6.rangeTo(other: T7): Tuple7 = this.appendedBy(other) -operator fun Tuple7.rangeTo(other: T8): Tuple8 = this.appendedBy(other) -operator fun Tuple8.rangeTo(other: T9): Tuple9 = this.appendedBy(other) -operator fun Tuple9.rangeTo(other: T10): Tuple10 = this.appendedBy(other) -operator fun Tuple10.rangeTo(other: T11): Tuple11 = this.appendedBy(other) -operator fun Tuple11.rangeTo(other: T12): Tuple12 = this.appendedBy(other) -operator fun Tuple12.rangeTo(other: T13): Tuple13 = this.appendedBy(other) -operator fun Tuple13.rangeTo(other: T14): Tuple14 = this.appendedBy(other) -operator fun Tuple14.rangeTo(other: T15): Tuple15 = this.appendedBy(other) -operator fun Tuple15.rangeTo(other: T16): Tuple16 = this.appendedBy(other) -operator fun Tuple16.rangeTo(other: T17): Tuple17 = this.appendedBy(other) -operator fun Tuple17.rangeTo(other: T18): Tuple18 = this.appendedBy(other) -operator fun Tuple18.rangeTo(other: T19): Tuple19 = this.appendedBy(other) -operator fun Tuple19.rangeTo(other: T20): Tuple20 = this.appendedBy(other) -operator fun Tuple20.rangeTo(other: T21): Tuple21 = this.appendedBy(other) -operator fun Tuple21.rangeTo(other: T22): Tuple22 = this.appendedBy(other) +operator fun EmptyTuple.plus(other: T1): Tuple1 = Tuple1(other) +operator fun Tuple1.plus(other: T2): Tuple2 = this.appendedBy(other) +operator fun Tuple2.plus(other: T3): Tuple3 = this.appendedBy(other) +operator fun Tuple3.plus(other: T4): Tuple4 = this.appendedBy(other) +operator fun Tuple4.plus(other: T5): Tuple5 = this.appendedBy(other) +operator fun Tuple5.plus(other: T6): Tuple6 = this.appendedBy(other) +operator fun Tuple6.plus(other: T7): Tuple7 = this.appendedBy(other) +operator fun Tuple7.plus(other: T8): Tuple8 = this.appendedBy(other) +operator fun Tuple8.plus(other: T9): Tuple9 = this.appendedBy(other) +operator fun Tuple9.plus(other: T10): Tuple10 = this.appendedBy(other) +operator fun Tuple10.plus(other: T11): Tuple11 = this.appendedBy(other) +operator fun Tuple11.plus(other: T12): Tuple12 = this.appendedBy(other) +operator fun Tuple12.plus(other: T13): Tuple13 = this.appendedBy(other) +operator fun Tuple13.plus(other: T14): Tuple14 = this.appendedBy(other) +operator fun Tuple14.plus(other: T15): Tuple15 = this.appendedBy(other) +operator fun Tuple15.plus(other: T16): Tuple16 = this.appendedBy(other) +operator fun Tuple16.plus(other: T17): Tuple17 = this.appendedBy(other) +operator fun Tuple17.plus(other: T18): Tuple18 = this.appendedBy(other) +operator fun Tuple18.plus(other: T19): Tuple19 = this.appendedBy(other) +operator fun Tuple19.plus(other: T20): Tuple20 = this.appendedBy(other) +operator fun Tuple20.plus(other: T21): Tuple21 = this.appendedBy(other) +operator fun Tuple21.plus(other: T22): Tuple22 = this.appendedBy(other) -operator fun T1.rangeTo(other: Tuple1): Tuple2 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple2): Tuple3 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple3): Tuple4 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple4): Tuple5 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple5): Tuple6 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple6): Tuple7 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple7): Tuple8 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple8): Tuple9 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple9): Tuple10 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple10): Tuple11 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple11): Tuple12 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple12): Tuple13 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple13): Tuple14 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple14): Tuple15 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple15): Tuple16 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple16): Tuple17 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple17): Tuple18 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple18): Tuple19 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple19): Tuple20 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple20): Tuple21 = other.prependedBy(this) -operator fun T1.rangeTo(other: Tuple21): Tuple22 = other.prependedBy(this) +operator fun T1.plus(other: EmptyTuple): Tuple1 = Tuple1(this) +operator fun T1.plus(other: Tuple1): Tuple2 = other.prependedBy(this) +operator fun T1.plus(other: Tuple2): Tuple3 = other.prependedBy(this) +operator fun T1.plus(other: Tuple3): Tuple4 = other.prependedBy(this) +operator fun T1.plus(other: Tuple4): Tuple5 = other.prependedBy(this) +operator fun T1.plus(other: Tuple5): Tuple6 = other.prependedBy(this) +operator fun T1.plus(other: Tuple6): Tuple7 = other.prependedBy(this) +operator fun T1.plus(other: Tuple7): Tuple8 = other.prependedBy(this) +operator fun T1.plus(other: Tuple8): Tuple9 = other.prependedBy(this) +operator fun T1.plus(other: Tuple9): Tuple10 = other.prependedBy(this) +operator fun T1.plus(other: Tuple10): Tuple11 = other.prependedBy(this) +operator fun T1.plus(other: Tuple11): Tuple12 = other.prependedBy(this) +operator fun T1.plus(other: Tuple12): Tuple13 = other.prependedBy(this) +operator fun T1.plus(other: Tuple13): Tuple14 = other.prependedBy(this) +operator fun T1.plus(other: Tuple14): Tuple15 = other.prependedBy(this) +operator fun T1.plus(other: Tuple15): Tuple16 = other.prependedBy(this) +operator fun T1.plus(other: Tuple16): Tuple17 = other.prependedBy(this) +operator fun T1.plus(other: Tuple17): Tuple18 = other.prependedBy(this) +operator fun T1.plus(other: Tuple18): Tuple19 = other.prependedBy(this) +operator fun T1.plus(other: Tuple19): Tuple20 = other.prependedBy(this) +operator fun T1.plus(other: Tuple20): Tuple21 = other.prependedBy(this) +operator fun T1.plus(other: Tuple21): Tuple22 = other.prependedBy(this) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt index 3354338b..871bd08c 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt @@ -31,8 +31,11 @@ import org.apache.spark.sql.Dataset import org.apache.spark.sql.functions import org.apache.spark.sql.streaming.GroupState import org.apache.spark.sql.streaming.GroupStateTimeout +import org.jetbrains.kotlinx.spark.api.tuples.* import scala.Tuple2 import scala.Tuple3 +import scala.Tuple4 +import scala.Tuple5 import java.io.Serializable class DatasetFunctionTest : ShouldSpec({ @@ -42,23 +45,25 @@ class DatasetFunctionTest : ShouldSpec({ should("handle cached operations") { val result = dsOf(1, 2, 3, 4, 5) - .map { it to (it + 2) } + .map { t(it, it + 2) } .withCached { expect(collectAsList()).contains.inAnyOrder.only.values( - 1 to 3, - 2 to 4, - 3 to 5, - 4 to 6, - 5 to 7 + t(1, 3), + t(2, 4), + t(3, 5), + t(4, 6), + t(5, 7), ) - val next = filter { it.first % 2 == 0 } - expect(next.collectAsList()).contains.inAnyOrder.only.values(2 to 4, 4 to 6) + val next = filter { it._1 % 2 == 0 } + expect(next.collectAsList()).contains.inAnyOrder.only.values(t(2, 4), t(4, 6)) next } - .map { c(it.first, it.second, (it.first + it.second) * 2) } + .map { + it + (it._1 + it._2) * 2 + } .collectAsList() - expect(result).contains.inOrder.only.values(c(2, 4, 12), c(4, 6, 20)) + expect(result).contains.inOrder.only.values(t(2, 4, 12), t(4, 6, 20)) } should("handle join operations") { @@ -69,10 +74,10 @@ class DatasetFunctionTest : ShouldSpec({ val first = dsOf(Left(1, "a"), Left(2, "b")) val second = dsOf(Right(1, 100), Right(3, 300)) val result = first - .leftJoin(second, first.col("id").eq(second.col("id"))) - .map { c(it.first.id, it.first.name, it.second?.value) } + .leftJoin(second, first.col("id") eq second.col("id")) + .map { t + it._1.id + it._1.name + it._2?.value } .collectAsList() - expect(result).contains.inOrder.only.values(c(1, "a", 100), c(2, "b", null)) + expect(result).contains.inOrder.only.values(t(1, "a", 100), t(2, "b", null)) } should("handle map operations") { @@ -121,16 +126,16 @@ class DatasetFunctionTest : ShouldSpec({ } should("Have Kotlin ready functions in place of overload ambiguity") { - val dataset: Pair = dsOf( + val dataset: Tuple2 = dsOf( SomeClass(intArrayOf(1, 2, 3), 1), SomeClass(intArrayOf(4, 3, 2), 1), ) .groupByKey { it: SomeClass -> it.b } .reduceGroupsK { v1: SomeClass, v2: SomeClass -> v1 } - .filter { it: Pair -> true } // not sure why this does work, but reduce doesn't - .reduceK { v1: Pair, v2: Pair -> v1 } + .filter { it: Tuple2 -> true } // not sure why this does work, but reduce doesn't + .reduceK { v1: Tuple2, v2: Tuple2 -> v1 } - dataset.second.a shouldBe intArrayOf(1, 2, 3) + dataset._2.a shouldBe intArrayOf(1, 2, 3) } } } @@ -139,24 +144,24 @@ class DatasetFunctionTest : ShouldSpec({ withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { should("perform flat map on grouped datasets") { - val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") + val groupedDataset = listOf(t(1, "a"), t(1, "b"), t(2, "c")) .toDS() - .groupByKey { it.first } + .groupByKey { it._1 } val flatMapped = groupedDataset.flatMapGroups { key, values -> val collected = values.asSequence().toList() if (collected.size > 1) collected.iterator() - else emptyList>().iterator() + else emptyList>().iterator() } flatMapped.count() shouldBe 2 } should("perform map group with state and timeout conf on grouped datasets") { - val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") + val groupedDataset = listOf(t(1, "a"), t(1, "b"), t(2, "c")) .toDS() - .groupByKey { it.first } + .groupByKey { it._1 } val mappedWithStateTimeoutConf = groupedDataset.mapGroupsWithState(GroupStateTimeout.NoTimeout()) { key, values, state: GroupState -> @@ -166,16 +171,16 @@ class DatasetFunctionTest : ShouldSpec({ s = key s shouldBe key - s!! to collected.map { it.second } + t(s!!, collected.map { it._2 }) } mappedWithStateTimeoutConf.count() shouldBe 2 } should("perform map group with state on grouped datasets") { - val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") + val groupedDataset = listOf(t(1, "a"), t(1, "b"), t(2, "c")) .toDS() - .groupByKey { it.first } + .groupByKey { it._1 } val mappedWithState = groupedDataset.mapGroupsWithState { key, values, state: GroupState -> var s by state @@ -184,16 +189,16 @@ class DatasetFunctionTest : ShouldSpec({ s = key s shouldBe key - s!! to collected.map { it.second } + t(s!!, collected.map { it._2 }) } mappedWithState.count() shouldBe 2 } should("perform flat map group with state on grouped datasets") { - val groupedDataset = listOf(1 to "a", 1 to "b", 2 to "c") + val groupedDataset = listOf(t(1, "a"), t(1, "b"), t(2, "c")) .toDS() - .groupByKey { it.first } + .groupByKey { it._1 } val flatMappedWithState = groupedDataset.mapGroupsWithState { key, values, state: GroupState -> var s by state @@ -203,25 +208,25 @@ class DatasetFunctionTest : ShouldSpec({ s shouldBe key if (collected.size > 1) collected.iterator() - else emptyList>().iterator() + else emptyList>().iterator() } flatMappedWithState.count() shouldBe 2 } should("be able to cogroup grouped datasets") { - val groupedDataset1 = listOf(1 to "a", 1 to "b", 2 to "c") + val groupedDataset1 = listOf(1 to "a", 1 to "b", 2 to "c").map { it.toTuple() } .toDS() - .groupByKey { it.first } + .groupByKey { it._1 } - val groupedDataset2 = listOf(1 to "d", 5 to "e", 3 to "f") + val groupedDataset2 = listOf(1 to "d", 5 to "e", 3 to "f").map { it.toTuple() } .toDS() - .groupByKey { it.first } + .groupByKey { it._1 } val cogrouped = groupedDataset1.cogroup(groupedDataset2) { key, left, right -> listOf( key to (left.asSequence() + right.asSequence()) - .map { it.second } + .map { it._2 } .toList() ).iterator() } @@ -265,11 +270,11 @@ class DatasetFunctionTest : ShouldSpec({ should("Convert JavaPairRDD to Dataset") { val rdd3: JavaPairRDD = sc.parallelizePairs( - listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) + listOf(t(1, 1.0), t(2, 2.0), t(3, 3.0)) ) val dataset3: Dataset> = rdd3.toDS() - dataset3.toList>() shouldBe listOf(Tuple2(1, 1.0), Tuple2(2, 2.0), Tuple2(3, 3.0)) + dataset3.toList>() shouldBe listOf(t(1, 1.0), t(2, 2.0), t(3, 3.0)) } should("Convert Kotlin Serializable data class RDD to Dataset") { @@ -284,13 +289,13 @@ class DatasetFunctionTest : ShouldSpec({ } } - should("Convert Arity RDD to Dataset") { + should("Convert Tuple RDD to Dataset") { val rdd5 = sc.parallelize( - listOf(c(1.0, 4)) + listOf(t(1.0, 4)) ) val dataset5 = rdd5.toDS() - dataset5.toList>() shouldBe listOf(c(1.0, 4)) + dataset5.toList>() shouldBe listOf(t(1.0, 4)) } should("Convert List RDD to Dataset") { @@ -302,23 +307,11 @@ class DatasetFunctionTest : ShouldSpec({ dataset6.toList>() shouldBe listOf(listOf(1, 2, 3), listOf(4, 5, 6)) } - should("Sort Arity2 Dataset") { - val list = listOf( - c(1, 6), - c(2, 5), - c(3, 4), - ) - val dataset = list.toDS() - - dataset.sortByKey().collectAsList() shouldBe list.sortedBy { it._1 } - dataset.sortByValue().collectAsList() shouldBe list.sortedBy { it._2 } - } - should("Sort Tuple2 Dataset") { val list = listOf( - Tuple2(1, 6), - Tuple2(2, 5), - Tuple2(3, 4), + t(1, 6), + t(2, 5), + t(3, 4), ) val dataset = list.toDS() @@ -355,20 +348,20 @@ class DatasetFunctionTest : ShouldSpec({ ) newDS1WithAs.collectAsList() - val newDS2: Dataset> = dataset.selectTyped( + val newDS2: Dataset> = dataset.selectTyped( col(SomeClass::a), // NOTE: this only works on 3.0, returning a data class with an array in it col(SomeClass::b), ) newDS2.collectAsList() - val newDS3: Dataset> = dataset.selectTyped( + val newDS3: Dataset> = dataset.selectTyped( col(SomeClass::a), col(SomeClass::b), col(SomeClass::b), ) newDS3.collectAsList() - val newDS4: Dataset> = dataset.selectTyped( + val newDS4: Dataset> = dataset.selectTyped( col(SomeClass::a), col(SomeClass::b), col(SomeClass::b), @@ -376,7 +369,7 @@ class DatasetFunctionTest : ShouldSpec({ ) newDS4.collectAsList() - val newDS5: Dataset> = dataset.selectTyped( + val newDS5: Dataset> = dataset.selectTyped( col(SomeClass::a), col(SomeClass::b), col(SomeClass::b), diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt index 9d37194a..e053e05b 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt @@ -26,6 +26,7 @@ import io.kotest.matchers.shouldBe import org.apache.spark.sql.Dataset import org.apache.spark.sql.types.Decimal import org.apache.spark.unsafe.types.CalendarInterval +import org.jetbrains.kotlinx.spark.api.tuples.* import scala.Product import scala.Tuple1 import scala.Tuple2 @@ -124,7 +125,7 @@ class EncodingTest : ShouldSpec({ } should("be able to serialize binary") { - val byteArrayTriple = c("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3)) + val byteArrayTriple = t("Hello there".encodeToByteArray(), 1, intArrayOf(1, 2, 3)) val dataset = dsOf(byteArrayTriple) val (a, b, c) = dataset.collectAsList().single() @@ -134,13 +135,13 @@ class EncodingTest : ShouldSpec({ } should("be able to serialize Decimal") { - val decimalPair = c(Decimal().set(50), 12) + val decimalPair = t(Decimal().set(50), 12) val dataset = dsOf(decimalPair) dataset.collectAsList() shouldBe listOf(decimalPair) } should("be able to serialize BigDecimal") { - val decimalPair = c(BigDecimal.TEN, 12) + val decimalPair = t(BigDecimal.TEN, 12) val dataset = dsOf(decimalPair) val (a, b) = dataset.collectAsList().single() a.compareTo(BigDecimal.TEN) shouldBe 0 @@ -155,23 +156,23 @@ class EncodingTest : ShouldSpec({ should("Be able to serialize Scala Tuples including data classes") { val dataset = dsOf( - Tuple2("a", Tuple3("a", 1, LonLat(1.0, 1.0))), - Tuple2("b", Tuple3("b", 2, LonLat(1.0, 2.0))), + t("a", t("a", 1, LonLat(1.0, 1.0))), + t("b", t("b", 2, LonLat(1.0, 2.0))), ) dataset.show() val asList = dataset.takeAsList(2) - asList.first() shouldBe Tuple2("a", Tuple3("a", 1, LonLat(1.0, 1.0))) + asList.first() shouldBe t("a", t("a", 1, LonLat(1.0, 1.0))) } should("Be able to serialize data classes with tuples") { val dataset = dsOf( - DataClassWithTuple(Tuple3(5L, "test", Tuple1(""))), - DataClassWithTuple(Tuple3(6L, "tessst", Tuple1(""))), + DataClassWithTuple(t(5L, "test", t(""))), + DataClassWithTuple(t(6L, "tessst", t(""))), ) dataset.show() val asList = dataset.takeAsList(2) - asList.first().tuple shouldBe Tuple3(5L, "test", Tuple1("")) + asList.first().tuple shouldBe t(5L, "test", t("")) } } } @@ -187,15 +188,15 @@ class EncodingTest : ShouldSpec({ } should("contain all generic primitives with complex schema") { - val primitives = c(1, 1.0, 1.toFloat(), 1.toByte(), LocalDate.now(), true) - val primitives2 = c(2, 2.0, 2.toFloat(), 2.toByte(), LocalDate.now().plusDays(1), false) + val primitives = t(1, 1.0, 1.toFloat(), 1.toByte(), LocalDate.now(), true) + val primitives2 = t(2, 2.0, 2.toFloat(), 2.toByte(), LocalDate.now().plusDays(1), false) val tuples = dsOf(primitives, primitives2).collectAsList() expect(tuples).contains.inAnyOrder.only.values(primitives, primitives2) } should("contain all generic primitives with complex nullable schema") { - val primitives = c(1, 1.0, 1.toFloat(), 1.toByte(), LocalDate.now(), true) - val nulls = c(null, null, null, null, null, null) + val primitives = t(1, 1.0, 1.toFloat(), 1.toByte(), LocalDate.now(), true) + val nulls = t(null, null, null, null, null, null) val tuples = dsOf(primitives, nulls).collectAsList() expect(tuples).contains.inAnyOrder.only.values(primitives, nulls) } From a4557109cb09115b30d25a412475da7a8aef0fb2 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 25 Mar 2022 18:49:55 +0100 Subject: [PATCH 103/213] now using X for tuples, updated code and examples/tests. Currently, writing tests for tuple functions. --- .../spark/examples/CachedOperations.kt | 4 +- .../jetbrains/kotlinx/spark/examples/Group.kt | 9 +- .../jetbrains/kotlinx/spark/examples/Join.kt | 2 +- .../jetbrains/kotlinx/spark/examples/Main.kt | 25 +- .../kotlinx/spark/examples/WordCount.kt | 4 +- .../kotlinx/spark/api/Conversions.kt | 2 +- .../api/tuples/DestructuredTupleBuilders.kt | 53 ++-- .../kotlinx/spark/api/tuples/EmptyTuple.kt | 7 - .../spark/api/tuples/ProductExtensions.kt | 10 +- .../api/tuples/SameTypeProductExtensions.kt | 133 ++++++--- .../kotlinx/spark/api/tuples/TupleBuilders.kt | 280 +++++++++++++++--- .../spark/api/tuples/TupleExtending.kt | 2 + .../kotlinx/spark/api/DatasetFunctionTest.kt | 28 +- .../jetbrains/kotlinx/spark/api/TuplesTest.kt | 154 ++++++++++ 14 files changed, 548 insertions(+), 165 deletions(-) create mode 100644 kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt index d3fff24b..81190361 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt @@ -25,14 +25,14 @@ import org.jetbrains.kotlinx.spark.api.tuples.* fun main() { withSpark { dsOf(1, 2, 3, 4, 5) - .map { t(it, (it + 2)) } + .map { it X (it + 2) } .withCached { showDS() filter { it._1 % 2 == 0 }.showDS() } .map { (first, second) -> - t(first, second, (first + second) * 2) + first X second X (first + second) * 2 } .show() } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt index 5b5f8a41..f4d35097 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt @@ -20,17 +20,18 @@ package org.jetbrains.kotlinx.spark.examples import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.* import org.jetbrains.kotlinx.spark.api.tuples.t fun main() { withSpark { dsOf( - t(1, "a"), - t(1, "b"), - t(2, "c"), + 1 X "a", + 1 X "b", + 2 X "c", ) .groupByKey { it._1 } - .reduceGroupsK { a, b -> t(a._1 + b._1, a._2 + b._2) } + .reduceGroupsK { a, b -> tupleOf(_1 = a._1 + b._1, _2 = a._2 + b._2) } .show() } } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Join.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Join.kt index 52193c1d..684674f2 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Join.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Join.kt @@ -37,7 +37,7 @@ fun main() { .debugCodegen() .also { it.show() } .map { (left, right) -> - t + left.id + left.name + right?.value + left.id X left.name X right?.value } .show() diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt index f7f3157f..920716bf 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt @@ -39,23 +39,28 @@ object Main { val triples: Dataset> = spark .toDS( listOf( - Q(1, t(1, null)), - Q(2, t(2, "22")), - Q(3, t(3, "333")), + Q(1, 1 X null), + Q(2, 2 X "22"), + Q(3, 3 X "333"), ) ) - .map { (a, b) -> t(a + b._1, b._2?.length) } + .map { (a, b) -> t(_1 = a + b._1, _2 = b._2?.length) } .map { it: Tuple2 -> it + 1 } // add counter val pairs = spark - .toDS(listOf( - t + 2 + "hell", - t + 4 + "moon", - t + 6 + "berry", - )) + .toDS( + listOf( + 2 X "hell", + 4 X "moon", + 6 X "berry", + ) + ) triples - .leftJoin(pairs, triples.col("first").multiply(2) eq pairs.col("first")) + .leftJoin( + right = pairs, + col = triples("_1").multiply(2) eq pairs("_1"), + ) // .also { it.printSchema() } .map { (triple, pair) -> Five(triple._1, triple._2, triple._3, pair?._1, pair?._2) } .groupByKey { it.a } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt index 42ab48c4..c08a9df5 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/WordCount.kt @@ -34,8 +34,8 @@ fun main() { .flatten() .cleanup() .groupByKey { it } - .mapGroups { k, iter -> t + k + iter.asSequence().count() } - .sort { arrayOf(it.col("_2").desc()) } + .mapGroups { k, iter -> k X iter.asSequence().count() } + .sort { arrayOf(it(colName = "_2").desc()) } .limit(20) .map { it.swap() } .show(false) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index 7a9fe75f..6db44e42 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -193,7 +193,7 @@ fun Arity2.toPair(): Pair = Pair(_1, _2) /** * Returns a new [Tuple3] based on the arguments in the current [Triple]. */ -//fun Triple.toTuple(): Tuple3 = Tuple3(first, second, third) +fun Triple.toTuple(): Tuple3 = Tuple3(first, second, third) /** * Returns a new [Arity3] based on the arguments in the current [Triple]. diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt index a3ca0824..96f395f2 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt @@ -17,7 +17,7 @@ * limitations under the License. * =LICENSEEND= */ -@file:Suppress("RemoveExplicitTypeArguments") +@file:Suppress("RemoveExplicitTypeArguments", "FunctionName") package org.jetbrains.kotlinx.spark.api.tuples @@ -44,36 +44,37 @@ import scala.Tuple21 import scala.Tuple22 /** - * This file provides a descriptive way to create Tuples using [t]. + * This file provides a descriptive way to create Tuples using [X]. * * For instance: - * ```val yourTuple = 1 t "test" t a``` + * ```val yourTuple = 1 X "test" X a``` * */ /** * Returns a new Tuple2 of the given arguments. + * @see tupleOf + * @see t **/ -//@JvmName("tInfix") -//infix fun T1.t(other: T2): Tuple2 = Tuple2(this, other) -// -//infix fun Tuple2.t(next: T3): Tuple3 = Tuple3(this._1(), this._2(), next) -//infix fun Tuple3.t(next: T4): Tuple4 = Tuple4(this._1(), this._2(), this._3(), next) -//infix fun Tuple4.t(next: T5): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), next) -//infix fun Tuple5.t(next: T6): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), next) -//infix fun Tuple6.t(next: T7): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), next) -//infix fun Tuple7.t(next: T8): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), next) -//infix fun Tuple8.t(next: T9): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), next) -//infix fun Tuple9.t(next: T10): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), next) -//infix fun Tuple10.t(next: T11): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), next) -//infix fun Tuple11.t(next: T12): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), next) -//infix fun Tuple12.t(next: T13): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), next) -//infix fun Tuple13.t(next: T14): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), next) -//infix fun Tuple14.t(next: T15): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), next) -//infix fun Tuple15.t(next: T16): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), next) -//infix fun Tuple16.t(next: T17): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), next) -//infix fun Tuple17.t(next: T18): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), next) -//infix fun Tuple18.t(next: T19): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), next) -//infix fun Tuple19.t(next: T20): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), next) -//infix fun Tuple20.t(next: T21): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), next) -//infix fun Tuple21.t(next: T22): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), next) +infix fun T1.X(other: T2): Tuple2 = Tuple2(this, other) + +infix fun Tuple2.X(next: T3): Tuple3 = Tuple3(this._1(), this._2(), next) +infix fun Tuple3.X(next: T4): Tuple4 = Tuple4(this._1(), this._2(), this._3(), next) +infix fun Tuple4.X(next: T5): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), next) +infix fun Tuple5.X(next: T6): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), next) +infix fun Tuple6.X(next: T7): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), next) +infix fun Tuple7.X(next: T8): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), next) +infix fun Tuple8.X(next: T9): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), next) +infix fun Tuple9.X(next: T10): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), next) +infix fun Tuple10.X(next: T11): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), next) +infix fun Tuple11.X(next: T12): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), next) +infix fun Tuple12.X(next: T13): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), next) +infix fun Tuple13.X(next: T14): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), next) +infix fun Tuple14.X(next: T15): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), next) +infix fun Tuple15.X(next: T16): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), next) +infix fun Tuple16.X(next: T17): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), next) +infix fun Tuple17.X(next: T18): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), next) +infix fun Tuple18.X(next: T19): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), next) +infix fun Tuple19.X(next: T20): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), next) +infix fun Tuple20.X(next: T21): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), next) +infix fun Tuple21.X(next: T22): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), next) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt index 925866dc..0420c8b7 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt @@ -25,12 +25,6 @@ import java.io.Serializable /** * Just as in Scala3, we provide the [EmptyTuple]. It is the result of dropping the last item from a [Tuple1] * or when calling `tupleOf()` for instance. - * - * It can also be used to create tuples like: - * ```kotlin - * val tuple: Tuple3 = t + 1 + 5L + "test" - * ``` - * if you really want to. */ object EmptyTuple : Product, Serializable { @@ -40,5 +34,4 @@ object EmptyTuple : Product, Serializable { override fun toString(): String = "()" } -public val t: EmptyTuple = EmptyTuple public fun emptyTuple(): EmptyTuple = EmptyTuple diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt index ff8427a0..cca9d553 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt @@ -25,6 +25,8 @@ import kotlin.jvm.Throws /** * Extra extensions for Scala [Product]s such as Tuples. + * In most cases, the functions of `SameTypeProductExtensions.kt` will be used + * instead of these. But these help for the overview and generic case. * * For example: * @@ -103,7 +105,7 @@ fun Product.getOrNull(n: Int): Any? = if (n in 0 until size) productElement(n) e */ @Suppress("UNCHECKED_CAST") @Throws(IndexOutOfBoundsException::class, ClassCastException::class) -fun Product.getAs(n: Int): T = productElement(n) as T +inline fun Product.getAs(n: Int): T = productElement(n) as T /** The n'th element of this product, 0-based. In other words, for a * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. @@ -113,7 +115,7 @@ fun Product.getAs(n: Int): T = productElement(n) as T * @return the element `n` elements after the first element, `null` if out of bounds or unable to be cast */ @Suppress("UNCHECKED_CAST") -fun Product.getAsOrNull(n: Int): T? = getOrNull(n) as? T +inline fun Product.getAsOrNull(n: Int): T? = getOrNull(n) as? T /** The range of n'th elements of this product, 0-based. In other words, for a * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. @@ -143,7 +145,7 @@ fun Product.getOrNull(indexRange: IntRange): List = indexRange.map(::getOr * @return the elements in [indexRange] */ @Throws(IndexOutOfBoundsException::class, ClassCastException::class) -fun Product.getAs(indexRange: IntRange): List = indexRange.map(::getAs) +inline fun Product.getAs(indexRange: IntRange): List = indexRange.map(::getAs) /** The range of n'th elements of this product, 0-based. In other words, for a * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. @@ -152,4 +154,4 @@ fun Product.getAs(indexRange: IntRange): List = indexRange.map(::getAs) * @param indexRange the indices of the elements to return * @return the elements in [indexRange], `null` is out of bounds or unable to be cast */ -fun Product.getAsOrNull(indexRange: IntRange): List = indexRange.map(::getAsOrNull) +inline fun Product.getAsOrNull(indexRange: IntRange): List = indexRange.map(::getAsOrNull) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt index 36c90122..84644cb9 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt @@ -121,49 +121,94 @@ fun Product21.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) fun Product22.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) -operator fun Product1.get(index: Int): T = productElement(index) as T -operator fun Product2.get(index: Int): T = productElement(index) as T -operator fun Product3.get(index: Int): T = productElement(index) as T -operator fun Product4.get(index: Int): T = productElement(index) as T -operator fun Product5.get(index: Int): T = productElement(index) as T -operator fun Product6.get(index: Int): T = productElement(index) as T -operator fun Product7.get(index: Int): T = productElement(index) as T -operator fun Product8.get(index: Int): T = productElement(index) as T -operator fun Product9.get(index: Int): T = productElement(index) as T -operator fun Product10.get(index: Int): T = productElement(index) as T -operator fun Product11.get(index: Int): T = productElement(index) as T -operator fun Product12.get(index: Int): T = productElement(index) as T -operator fun Product13.get(index: Int): T = productElement(index) as T -operator fun Product14.get(index: Int): T = productElement(index) as T -operator fun Product15.get(index: Int): T = productElement(index) as T -operator fun Product16.get(index: Int): T = productElement(index) as T -operator fun Product17.get(index: Int): T = productElement(index) as T -operator fun Product18.get(index: Int): T = productElement(index) as T -operator fun Product19.get(index: Int): T = productElement(index) as T -operator fun Product20.get(index: Int): T = productElement(index) as T -operator fun Product21.get(index: Int): T = productElement(index) as T -operator fun Product22.get(index: Int): T = productElement(index) as T +operator fun Product1.get(n: Int): T = productElement(n) as T +operator fun Product2.get(n: Int): T = productElement(n) as T +operator fun Product3.get(n: Int): T = productElement(n) as T +operator fun Product4.get(n: Int): T = productElement(n) as T +operator fun Product5.get(n: Int): T = productElement(n) as T +operator fun Product6.get(n: Int): T = productElement(n) as T +operator fun Product7.get(n: Int): T = productElement(n) as T +operator fun Product8.get(n: Int): T = productElement(n) as T +operator fun Product9.get(n: Int): T = productElement(n) as T +operator fun Product10.get(n: Int): T = productElement(n) as T +operator fun Product11.get(n: Int): T = productElement(n) as T +operator fun Product12.get(n: Int): T = productElement(n) as T +operator fun Product13.get(n: Int): T = productElement(n) as T +operator fun Product14.get(n: Int): T = productElement(n) as T +operator fun Product15.get(n: Int): T = productElement(n) as T +operator fun Product16.get(n: Int): T = productElement(n) as T +operator fun Product17.get(n: Int): T = productElement(n) as T +operator fun Product18.get(n: Int): T = productElement(n) as T +operator fun Product19.get(n: Int): T = productElement(n) as T +operator fun Product20.get(n: Int): T = productElement(n) as T +operator fun Product21.get(n: Int): T = productElement(n) as T +operator fun Product22.get(n: Int): T = productElement(n) as T -operator fun Product1.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product2.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product3.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product4.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product5.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product6.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product7.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product8.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product9.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product10.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product11.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product12.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product13.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product14.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product15.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product16.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product17.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product18.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product19.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product20.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product21.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } -operator fun Product22.get(indexRange: IntRange): List = indexRange.map { productElement(it) as T } +fun Product1.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product2.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product3.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product4.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product5.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product6.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product7.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product8.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product9.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product10.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product11.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product12.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product13.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product14.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product15.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product16.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product17.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product18.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product19.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product20.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product21.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +fun Product22.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) +operator fun Product1.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product2.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product3.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product4.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product5.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product6.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product7.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product8.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product9.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product10.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product11.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product12.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product13.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product14.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product15.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product16.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product17.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product18.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product19.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product20.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product21.get(indexRange: IntRange): List = indexRange.map(::get) +operator fun Product22.get(indexRange: IntRange): List = indexRange.map(::get) + +fun Product1.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product2.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product3.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product4.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product5.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product6.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product7.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product8.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product9.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product10.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product11.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product12.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product13.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product14.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product15.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product16.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product17.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product18.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product19.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product20.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product21.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) +fun Product22.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt index d31b8247..365be37a 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt @@ -52,145 +52,327 @@ import scala.Tuple22 * ```val yourTuple = t(1, "test", a)``` * * As replacement of `to` there is - * ```val tuple: Tuple2 = 5 t "test"``` + * ```val tuple: Tuple2 = 5 X "test"``` */ - - -/** Returns the instance of Tuple0. */ +/** + * Returns the instance of Tuple0. + * @see t + */ fun tupleOf(): EmptyTuple = EmptyTuple -/** Returns a new Tuple1 of the given arguments. */ + +/** + * Returns a new Tuple1 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1): Tuple1 = Tuple1(_1) -/** Returns a new Tuple2 of the given arguments. */ +/** + * Returns a new Tuple2 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2): Tuple2 = Tuple2(_1, _2) -/** Returns a new Tuple3 of the given arguments. */ +/** + * Returns a new Tuple3 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3): Tuple3 = Tuple3(_1, _2, _3) -/** Returns a new Tuple4 of the given arguments. */ +/** + * Returns a new Tuple4 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4): Tuple4 = Tuple4(_1, _2, _3, _4) -/** Returns a new Tuple5 of the given arguments. */ +/** + * Returns a new Tuple5 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5): Tuple5 = Tuple5(_1, _2, _3, _4, _5) -/** Returns a new Tuple6 of the given arguments. */ +/** + * Returns a new Tuple6 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6): Tuple6 = Tuple6(_1, _2, _3, _4, _5, _6) -/** Returns a new Tuple7 of the given arguments. */ +/** + * Returns a new Tuple7 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7): Tuple7 = Tuple7(_1, _2, _3, _4, _5, _6, _7) -/** Returns a new Tuple8 of the given arguments. */ +/** + * Returns a new Tuple8 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8): Tuple8 = Tuple8(_1, _2, _3, _4, _5, _6, _7, _8) -/** Returns a new Tuple9 of the given arguments. */ +/** + * Returns a new Tuple9 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9): Tuple9 = Tuple9(_1, _2, _3, _4, _5, _6, _7, _8, _9) -/** Returns a new Tuple10 of the given arguments. */ +/** + * Returns a new Tuple10 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10): Tuple10 = Tuple10(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10) -/** Returns a new Tuple11 of the given arguments. */ +/** + * Returns a new Tuple11 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11): Tuple11 = Tuple11(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11) -/** Returns a new Tuple12 of the given arguments. */ +/** + * Returns a new Tuple12 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12): Tuple12 = Tuple12(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12) -/** Returns a new Tuple13 of the given arguments. */ +/** + * Returns a new Tuple13 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13): Tuple13 = Tuple13(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13) -/** Returns a new Tuple14 of the given arguments. */ +/** + * Returns a new Tuple14 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14): Tuple14 = Tuple14(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14) -/** Returns a new Tuple15 of the given arguments. */ +/** + * Returns a new Tuple15 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15): Tuple15 = Tuple15(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15) -/** Returns a new Tuple16 of the given arguments. */ +/** + * Returns a new Tuple16 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16): Tuple16 = Tuple16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) -/** Returns a new Tuple17 of the given arguments. */ +/** + * Returns a new Tuple17 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17): Tuple17 = Tuple17(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17) -/** Returns a new Tuple18 of the given arguments. */ +/** + * Returns a new Tuple18 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18): Tuple18 = Tuple18(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18) -/** Returns a new Tuple19 of the given arguments. */ +/** + * Returns a new Tuple19 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19): Tuple19 = Tuple19(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19) -/** Returns a new Tuple20 of the given arguments. */ +/** + * Returns a new Tuple20 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20): Tuple20 = Tuple20(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20) -/** Returns a new Tuple21 of the given arguments. */ +/** + * Returns a new Tuple21 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21): Tuple21 = Tuple21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21) -/** Returns a new Tuple22 of the given arguments. */ +/** + * Returns a new Tuple22 of the given arguments. + * @see t + * @see X + */ fun tupleOf(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22): Tuple22 = Tuple22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) -/** Returns the instance of Tuple0. */ +/** + * Returns the instance of Tuple0. + * @see tupleOf + */ fun t(): EmptyTuple = EmptyTuple -/** Returns a new Tuple1 of the given arguments. */ + +/** + * Returns a new Tuple1 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1): Tuple1 = Tuple1(_1) -/** Returns a new Tuple2 of the given arguments. */ +/** + * Returns a new Tuple2 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2): Tuple2 = Tuple2(_1, _2) -/** Returns a new Tuple3 of the given arguments. */ +/** + * Returns a new Tuple3 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3): Tuple3 = Tuple3(_1, _2, _3) -/** Returns a new Tuple4 of the given arguments. */ +/** + * Returns a new Tuple4 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4): Tuple4 = Tuple4(_1, _2, _3, _4) -/** Returns a new Tuple5 of the given arguments. */ +/** + * Returns a new Tuple5 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5): Tuple5 = Tuple5(_1, _2, _3, _4, _5) -/** Returns a new Tuple6 of the given arguments. */ +/** + * Returns a new Tuple6 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6): Tuple6 = Tuple6(_1, _2, _3, _4, _5, _6) -/** Returns a new Tuple7 of the given arguments. */ +/** + * Returns a new Tuple7 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7): Tuple7 = Tuple7(_1, _2, _3, _4, _5, _6, _7) -/** Returns a new Tuple8 of the given arguments. */ +/** + * Returns a new Tuple8 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8): Tuple8 = Tuple8(_1, _2, _3, _4, _5, _6, _7, _8) -/** Returns a new Tuple9 of the given arguments. */ +/** + * Returns a new Tuple9 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9): Tuple9 = Tuple9(_1, _2, _3, _4, _5, _6, _7, _8, _9) -/** Returns a new Tuple10 of the given arguments. */ +/** + * Returns a new Tuple10 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10): Tuple10 = Tuple10(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10) -/** Returns a new Tuple11 of the given arguments. */ +/** + * Returns a new Tuple11 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11): Tuple11 = Tuple11(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11) -/** Returns a new Tuple12 of the given arguments. */ +/** + * Returns a new Tuple12 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12): Tuple12 = Tuple12(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12) -/** Returns a new Tuple13 of the given arguments. */ +/** + * Returns a new Tuple13 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13): Tuple13 = Tuple13(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13) -/** Returns a new Tuple14 of the given arguments. */ +/** + * Returns a new Tuple14 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14): Tuple14 = Tuple14(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14) -/** Returns a new Tuple15 of the given arguments. */ +/** + * Returns a new Tuple15 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15): Tuple15 = Tuple15(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15) -/** Returns a new Tuple16 of the given arguments. */ +/** + * Returns a new Tuple16 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16): Tuple16 = Tuple16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) -/** Returns a new Tuple17 of the given arguments. */ +/** + * Returns a new Tuple17 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17): Tuple17 = Tuple17(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17) -/** Returns a new Tuple18 of the given arguments. */ +/** + * Returns a new Tuple18 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18): Tuple18 = Tuple18(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18) -/** Returns a new Tuple19 of the given arguments. */ +/** + * Returns a new Tuple19 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19): Tuple19 = Tuple19(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19) -/** Returns a new Tuple20 of the given arguments. */ +/** + * Returns a new Tuple20 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20): Tuple20 = Tuple20(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20) -/** Returns a new Tuple21 of the given arguments. */ +/** + * Returns a new Tuple21 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21): Tuple21 = Tuple21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21) -/** Returns a new Tuple22 of the given arguments. */ +/** + * Returns a new Tuple22 of the given arguments. + * @see tupleOf + * @see X + */ fun t(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22): Tuple22 = Tuple22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt index ed8762c6..595d1883 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt @@ -62,6 +62,8 @@ import scala.Tuple22 * If you mean to create ```tupleOf(a, b, tupleOf(c, d))``` or ```tupleOf(tupleOf(a, b), c, d)```, * use [appendedBy] and [prependedBy] explicitly. * + * Note that [String.plus] concatenates any object to the string, so prepending it like ```myString + myTuple``` won't work. + * * For concatenating two tuples, see [org.jetbrains.kotlinx.spark.api.tuples.concat]. * */ diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt index 871bd08c..2459d5e7 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt @@ -45,25 +45,25 @@ class DatasetFunctionTest : ShouldSpec({ should("handle cached operations") { val result = dsOf(1, 2, 3, 4, 5) - .map { t(it, it + 2) } + .map { it X (it + 2) } .withCached { expect(collectAsList()).contains.inAnyOrder.only.values( - t(1, 3), - t(2, 4), - t(3, 5), - t(4, 6), - t(5, 7), + 1 X 3, + 2 X 4, + 3 X 5, + 4 X 6, + 5 X 7, ) val next = filter { it._1 % 2 == 0 } - expect(next.collectAsList()).contains.inAnyOrder.only.values(t(2, 4), t(4, 6)) + expect(next.collectAsList()).contains.inAnyOrder.only.values(2 X 4, 4 X 6) next } - .map { + .map { it: Tuple2 -> it + (it._1 + it._2) * 2 } .collectAsList() - expect(result).contains.inOrder.only.values(t(2, 4, 12), t(4, 6, 20)) + expect(result).contains.inOrder.only.values(2 X 4 X 12, 4 X 6 X 20) } should("handle join operations") { @@ -75,7 +75,7 @@ class DatasetFunctionTest : ShouldSpec({ val second = dsOf(Right(1, 100), Right(3, 300)) val result = first .leftJoin(second, first.col("id") eq second.col("id")) - .map { t + it._1.id + it._1.name + it._2?.value } + .map { it._1.id X it._1.name X it._2?.value } .collectAsList() expect(result).contains.inOrder.only.values(t(1, "a", 100), t(2, "b", null)) } @@ -171,7 +171,7 @@ class DatasetFunctionTest : ShouldSpec({ s = key s shouldBe key - t(s!!, collected.map { it._2 }) + s!! X collected.map { it._2 } } mappedWithStateTimeoutConf.count() shouldBe 2 @@ -189,7 +189,7 @@ class DatasetFunctionTest : ShouldSpec({ s = key s shouldBe key - t(s!!, collected.map { it._2 }) + s!! X collected.map { it._2 } } mappedWithState.count() shouldBe 2 @@ -225,9 +225,7 @@ class DatasetFunctionTest : ShouldSpec({ val cogrouped = groupedDataset1.cogroup(groupedDataset2) { key, left, right -> listOf( - key to (left.asSequence() + right.asSequence()) - .map { it._2 } - .toList() + key to (left.asSequence() + right.asSequence()).map { it._2 }.toList() ).iterator() } diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt new file mode 100644 index 00000000..4f1e5aa4 --- /dev/null +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt @@ -0,0 +1,154 @@ +package org.jetbrains.kotlinx.spark.api + +import io.kotest.assertions.throwables.shouldThrow +import io.kotest.core.spec.style.ShouldSpec +import io.kotest.matchers.collections.shouldNotBeIn +import io.kotest.matchers.shouldBe +import io.kotest.matchers.shouldNotBe +import org.jetbrains.kotlinx.spark.api.tuples.* +import org.jetbrains.kotlinx.spark.api.* +import scala.Tuple3 + +class TuplesTest : ShouldSpec({ + context("Test tuple extensions") { + + should("Support different ways to create tuples") { + listOf( + 1 X 2L X "3", + (1 X 2L) + "3", + 1 + (2L X "3"), + 1 + t() + 2L + "3", + t() + 1 + 2L + "3", + tupleOf() + 1 + 2L + "3", + EmptyTuple + 1 + 2L + "3", + emptyTuple() + 1 + 2L + "3", + t(1, 2L, "3"), + tupleOf(1, 2L, "3"), + t(1, 2L) + "3", + t(1, 2L) + t("3"), + t(1) + t(2L, "3"), + t(1) + t(2L) + t("3"), + t() + t(1, 2L, "3"), + t(1) + t() + t(2L, "3"), + t(1, 2L) + t() + t("3"), + t(1, 2L, "3") + t(), + 1 + t(2L) + "3", + 1 + t(2L, "3"), + Triple(1, 2L, "3").toTuple(), + (1 to 2L).toTuple().appendedBy("3"), + (2L to "3").toTuple().prependedBy(1), + ).forEach { + it shouldBe Tuple3(1, 2L, "3") + } + } + + should("Merge tuples with +, append/prepend other values") { + t() + 1 shouldBe t(1) + t(1) + 2L shouldBe t(1, 2L) + t(1, 2L) + "3" shouldBe t(1, 2L, "3") + 1 + t() shouldBe t(1) + 2L + t(1) shouldBe t(2L, 1) + 2L + t(1, "3") shouldBe t(2L, 1, "3") + + // NOTE! String.plus is a thing + "3" + t(1, 2L) shouldNotBe t("3", 1, 2L) + + t() + t(1) shouldBe t(1) + t(1) + t(2L) shouldBe t(1, 2L) + t(1, 2L) + t("3") shouldBe t(1, 2L, "3") + t(1) + t(2L, "3") shouldBe t(1, 2L, "3") + + t() concat t(1) shouldBe t(1) + t(1) concat t(2L) shouldBe t(1, 2L) + t(1, 2L) concat t("3") shouldBe t(1, 2L, "3") + t(1) concat t(2L, "3") shouldBe t(1, 2L, "3") + + // tuple inside other tuple + t().appendedBy(t(1)) shouldBe t(t(1)) + t() + t(t(1)) shouldBe t(t(1)) + t().prependedBy(t(1)) shouldBe t(t(1)) + t(t(1)) + t() shouldBe t(t(1)) + + t(1).appendedBy(t(2L)) shouldBe t(1, t(2L)) + t(1) + t(t(2L)) shouldBe t(1, t(2L)) + t(1).prependedBy(t(2L)) shouldBe t(t(2L), 1) + t(t(2L)) + t(1) shouldBe t(t(2L), 1) + } + + should("Have drop functions") { + t(1, 2L).dropLast() shouldBe t(1) + t(1, 2L).dropFirst() shouldBe t(2L) + t(1, 2L, "3").dropLast() shouldBe t(1, 2L) + t(1, 2L, "3").dropFirst() shouldBe t(2L, "3") + + t(1).dropLast() shouldBe emptyTuple() + t(1).dropFirst() shouldBe emptyTuple() + } + + should("Have Tuple destructuring") { + val (a: Int, b: Double, c: Long, d: String, e: Char, f: Float, g: Short, h: Byte, i: UInt, j: UByte, k: UShort, l: ULong) = + 1 X 2.0 X 3L X "4" X '5' X 6F X 7.toShort() X 8.toByte() X 9.toUInt() X 10.toUByte() X 11.toUShort() X 12.toULong() // etc... + a shouldBe 1 + b shouldBe 2.0 + c shouldBe 3L + d shouldBe "4" + e shouldBe '5' + f shouldBe 6F + g shouldBe 7.toShort() + h shouldBe 8.toByte() + i shouldBe 9.toUInt() + j shouldBe 10.toUByte() + k shouldBe 11.toUShort() + l shouldBe 12.toULong() + } + + should("Have other helpful extensions") { + (0 !in tupleOf()) shouldBe true + (1 in tupleOf(1, 2, 3)) shouldBe true + (0 !in tupleOf(1, 2, 3)) shouldBe true + tupleOf(1, 2, 3).iterator().asSequence().toSet() shouldBe setOf(1, 2, 3) + for (it in tupleOf(1, 1, 1)) { + it shouldBe 1 + } + tupleOf(1, 2, 3).toList().isNotEmpty() shouldBe true + tupleOf(1, 2, 3).asIterable().none { + it > 4 + } shouldBe true + tupleOf(1, 2, 3, 4, 5).size shouldBe 5 + tupleOf(1, 2, 3, 4)[0] shouldBe 1 + shouldThrow { tupleOf(1, 2L)[5] } + tupleOf(1).getOrNull(5) shouldBe null + + shouldThrow { tupleOf(1).getAs(5) } + shouldThrow { tupleOf(1).getAs(0) } + + tupleOf(1).getAsOrNull(5) shouldBe null + tupleOf(1).getAsOrNull(0) shouldBe null + + + tupleOf(1, 2, 3).toTriple() shouldBe Triple(1, 2, 3) + + tupleOf(1, 2, 3, 4, 5, 6, 7)[1..3] + .containsAll(listOf(2, 3, 4)) shouldBe true + tupleOf(1, 1, 2)[1..2] shouldBe tupleOf(1, 2, 2)[0..1] + + tupleOf(1, 2, 3, 4, 5)[2] shouldBe 3 + + shouldThrow { tupleOf(1, 1, 2)[1..5] } + (null in tupleOf(1, 1, 2).getOrNull(1..5)) shouldBe true + + + + tupleOf(1, 2) shouldBe tupleOf(2, 1).swap() + tupleOf(1 to "Test") shouldBe tupleOf(1 to "Test") + val a: List = tupleOf(A(), B()).toList() + } + + + } +}) + +interface Super + +class A : Super +class B : Super \ No newline at end of file From 9acd8b9c61ac7b26b767b445ec82b75dfc3c7cb2 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 28 Mar 2022 14:25:19 +0200 Subject: [PATCH 104/213] updating tuple docs --- .../api/tuples/SameTypeProductExtensions.kt | 833 ++++++++++++++++++ .../jetbrains/kotlinx/spark/api/TuplesTest.kt | 3 +- 2 files changed, 835 insertions(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt index 84644cb9..1f139cb1 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt @@ -52,163 +52,996 @@ import scala.collection.JavaConverters * */ +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product1.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product2.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product3.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product4.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product5.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product6.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product7.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product8.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product9.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product10.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product11.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product12.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product13.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product14.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product15.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product16.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product17.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product18.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product19.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product20.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product21.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) + +/** Allows this product to be iterated over. Returns an iterator of type [T]. */ operator fun Product22.iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) +/** Returns this product as an iterable of type [T]. */ fun Product1.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product2.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product3.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product4.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product5.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product6.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product7.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product8.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product9.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product10.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product11.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product12.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product13.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product14.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product15.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product16.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product17.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product18.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product19.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product20.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product21.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } + +/** Returns this product as an iterable of type [T]. */ fun Product22.asIterable(): Iterable = object : Iterable { override fun iterator(): Iterator = JavaConverters.asJavaIterator(productIterator().map { it as T }) } +/** Returns list of type [T] for this product. */ fun Product1.toList(): List = listOf(this._1()) + +/** Returns list of type [T] for this product. */ fun Product2.toList(): List = listOf(this._1(), this._2()) + +/** Returns list of type [T] for this product. */ fun Product3.toList(): List = listOf(this._1(), this._2(), this._3()) + +/** Returns list of type [T] for this product. */ fun Product4.toList(): List = listOf(this._1(), this._2(), this._3(), this._4()) + +/** Returns list of type [T] for this product. */ fun Product5.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5()) + +/** Returns list of type [T] for this product. */ fun Product6.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) + +/** Returns list of type [T] for this product. */ fun Product7.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) + +/** Returns list of type [T] for this product. */ fun Product8.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) + +/** Returns list of type [T] for this product. */ fun Product9.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) + +/** Returns list of type [T] for this product. */ fun Product10.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) + +/** Returns list of type [T] for this product. */ fun Product11.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) + +/** Returns list of type [T] for this product. */ fun Product12.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) + +/** Returns list of type [T] for this product. */ fun Product13.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) + +/** Returns list of type [T] for this product. */ fun Product14.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) + +/** Returns list of type [T] for this product. */ fun Product15.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) + +/** Returns list of type [T] for this product. */ fun Product16.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) + +/** Returns list of type [T] for this product. */ fun Product17.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) + +/** Returns list of type [T] for this product. */ fun Product18.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) + +/** Returns list of type [T] for this product. */ fun Product19.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) + +/** Returns list of type [T] for this product. */ fun Product20.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) + +/** Returns list of type [T] for this product. */ fun Product21.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) + +/** Returns list of type [T] for this product. */ fun Product22.toList(): List = listOf(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product1.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product2.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product3.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product4.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product5.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product6.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product7.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product8.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product9.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product10.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product11.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product12.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product13.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product14.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product15.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product16.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product17.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product18.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product19.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product20.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product21.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @throws IndexOutOfBoundsException + * @return the element `n` elements after the first element + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product22.get(n: Int): T = productElement(n) as T + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product1.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product2.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product3.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product4.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product5.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product6.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product7.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product8.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product9.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product10.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product11.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product12.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product13.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product14.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product15.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product16.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product17.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product18.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product19.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product20.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product21.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The n'th element of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param n the index of the element to return + * @return the element `n` elements after the first element, `null` if out of bounds + */ fun Product22.getOrNull(n: Int): T? = (if (n in 0 until size) productElement(n) as T else null) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product1.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product2.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product3.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product4.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product5.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product6.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product7.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product8.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product9.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product10.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product11.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product12.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product13.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product14.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product15.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product16.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product17.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product18.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product19.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product20.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product21.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @throws IndexOutOfBoundsException + * @return the elements in [indexRange] + */ +@Throws(IndexOutOfBoundsException::class) operator fun Product22.get(indexRange: IntRange): List = indexRange.map(::get) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product1.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product2.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product3.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product4.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product5.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product6.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product7.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product8.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product9.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product10.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product11.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product12.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product13.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product14.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product15.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product16.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product17.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product18.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product19.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product20.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product21.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) + +/** The range of n'th elements of this product, 0-based. In other words, for a + * product `A(x,,1,,, ..., x,,k,,)`, returns `x,,(n+1),,` where `0 <= n < k`. + * + * @param indexRange the indices of the elements to return + * @return the elements in [indexRange], `null` if out of bounds + */ fun Product22.getOrNull(indexRange: IntRange): List = indexRange.map(::getOrNull) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt index 4f1e5aa4..4c16d28e 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt @@ -117,7 +117,8 @@ class TuplesTest : ShouldSpec({ tupleOf(1, 2, 3, 4, 5).size shouldBe 5 tupleOf(1, 2, 3, 4)[0] shouldBe 1 shouldThrow { tupleOf(1, 2L)[5] } - tupleOf(1).getOrNull(5) shouldBe null + + tupleOf(1, 2, 3).getOrNull(5) shouldBe null shouldThrow { tupleOf(1).getAs(5) } shouldThrow { tupleOf(1).getAs(0) } From d96dd55be6a48c39a8d2832cc077a48b4a52c28b Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 28 Mar 2022 16:05:10 +0200 Subject: [PATCH 105/213] updating docs and tests --- .../spark/api/tuples/TupleConcatenation.kt | 176 +++++++++--------- .../kotlinx/spark/api/tuples/TupleCopy.kt | 23 --- ...xtensions.kt => TypedProductExtensions.kt} | 0 .../jetbrains/kotlinx/spark/api/TuplesTest.kt | 38 +++- 4 files changed, 118 insertions(+), 119 deletions(-) rename kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/{SameTypeProductExtensions.kt => TypedProductExtensions.kt} (100%) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt index 19406090..fe07a4ff 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt @@ -59,8 +59,8 @@ import scala.Tuple22 * */ -infix fun EmptyTuple.concat(other: Tuple1): Tuple1 = other.clone() -infix fun Tuple1.concat(other: EmptyTuple): Tuple1 = this.clone() +infix fun EmptyTuple.concat(other: Tuple1): Tuple1 = other.copy() +infix fun Tuple1.concat(other: EmptyTuple): Tuple1 = this.copy() infix fun Tuple1.concat(other: Tuple1): Tuple2 = Tuple2(this._1(), other._1()) infix fun Tuple1.concat(other: Tuple2): Tuple3 = Tuple3(this._1(), other._1(), other._2()) infix fun Tuple1.concat(other: Tuple3): Tuple4 = Tuple4(this._1(), other._1(), other._2(), other._3()) @@ -82,8 +82,8 @@ infix fun Tuple1.concat(other: Tuple19): Tuple20 = Tuple20(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) infix fun Tuple1.concat(other: Tuple20): Tuple21 = Tuple21(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) infix fun Tuple1.concat(other: Tuple21): Tuple22 = Tuple22(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20(), other._21()) -infix fun EmptyTuple.concat(other: Tuple2): Tuple2 = other.clone() -infix fun Tuple2.concat(other: EmptyTuple): Tuple2 = this.clone() +infix fun EmptyTuple.concat(other: Tuple2): Tuple2 = other.copy() +infix fun Tuple2.concat(other: EmptyTuple): Tuple2 = this.copy() infix fun Tuple2.concat(other: Tuple1): Tuple3 = Tuple3(this._1(), this._2(), other._1()) infix fun Tuple2.concat(other: Tuple2): Tuple4 = Tuple4(this._1(), this._2(), other._1(), other._2()) infix fun Tuple2.concat(other: Tuple3): Tuple5 = Tuple5(this._1(), this._2(), other._1(), other._2(), other._3()) @@ -104,8 +104,8 @@ infix fun Tuple2.concat(other: Tuple18): Tuple20 = Tuple20(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) infix fun Tuple2.concat(other: Tuple19): Tuple21 = Tuple21(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) infix fun Tuple2.concat(other: Tuple20): Tuple22 = Tuple22(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) -infix fun EmptyTuple.concat(other: Tuple3): Tuple3 = other.clone() -infix fun Tuple3.concat(other: EmptyTuple): Tuple3 = this.clone() +infix fun EmptyTuple.concat(other: Tuple3): Tuple3 = other.copy() +infix fun Tuple3.concat(other: EmptyTuple): Tuple3 = this.copy() infix fun Tuple3.concat(other: Tuple1): Tuple4 = Tuple4(this._1(), this._2(), this._3(), other._1()) infix fun Tuple3.concat(other: Tuple2): Tuple5 = Tuple5(this._1(), this._2(), this._3(), other._1(), other._2()) infix fun Tuple3.concat(other: Tuple3): Tuple6 = Tuple6(this._1(), this._2(), this._3(), other._1(), other._2(), other._3()) @@ -125,8 +125,8 @@ infix fun Tuple3.concat(other: Tuple17): Tuple20 = Tuple20(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) infix fun Tuple3.concat(other: Tuple18): Tuple21 = Tuple21(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) infix fun Tuple3.concat(other: Tuple19): Tuple22 = Tuple22(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) -infix fun EmptyTuple.concat(other: Tuple4): Tuple4 = other.clone() -infix fun Tuple4.concat(other: EmptyTuple): Tuple4 = this.clone() +infix fun EmptyTuple.concat(other: Tuple4): Tuple4 = other.copy() +infix fun Tuple4.concat(other: EmptyTuple): Tuple4 = this.copy() infix fun Tuple4.concat(other: Tuple1): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), other._1()) infix fun Tuple4.concat(other: Tuple2): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), other._1(), other._2()) infix fun Tuple4.concat(other: Tuple3): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3()) @@ -145,8 +145,8 @@ infix fun Tuple4.concat(other: Tuple16): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) infix fun Tuple4.concat(other: Tuple17): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) infix fun Tuple4.concat(other: Tuple18): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) -infix fun EmptyTuple.concat(other: Tuple5): Tuple5 = other.clone() -infix fun Tuple5.concat(other: EmptyTuple): Tuple5 = this.clone() +infix fun EmptyTuple.concat(other: Tuple5): Tuple5 = other.copy() +infix fun Tuple5.concat(other: EmptyTuple): Tuple5 = this.copy() infix fun Tuple5.concat(other: Tuple1): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), other._1()) infix fun Tuple5.concat(other: Tuple2): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2()) infix fun Tuple5.concat(other: Tuple3): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3()) @@ -164,8 +164,8 @@ infix fun Tuple5.concat(other: Tuple15): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) infix fun Tuple5.concat(other: Tuple16): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) infix fun Tuple5.concat(other: Tuple17): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) -infix fun EmptyTuple.concat(other: Tuple6): Tuple6 = other.clone() -infix fun Tuple6.concat(other: EmptyTuple): Tuple6 = this.clone() +infix fun EmptyTuple.concat(other: Tuple6): Tuple6 = other.copy() +infix fun Tuple6.concat(other: EmptyTuple): Tuple6 = this.copy() infix fun Tuple6.concat(other: Tuple1): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1()) infix fun Tuple6.concat(other: Tuple2): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2()) infix fun Tuple6.concat(other: Tuple3): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3()) @@ -182,8 +182,8 @@ infix fun Tuple6.concat(other: Tuple14): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) infix fun Tuple6.concat(other: Tuple15): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) infix fun Tuple6.concat(other: Tuple16): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) -infix fun EmptyTuple.concat(other: Tuple7): Tuple7 = other.clone() -infix fun Tuple7.concat(other: EmptyTuple): Tuple7 = this.clone() +infix fun EmptyTuple.concat(other: Tuple7): Tuple7 = other.copy() +infix fun Tuple7.concat(other: EmptyTuple): Tuple7 = this.copy() infix fun Tuple7.concat(other: Tuple1): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1()) infix fun Tuple7.concat(other: Tuple2): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2()) infix fun Tuple7.concat(other: Tuple3): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3()) @@ -199,8 +199,8 @@ infix fun Tuple7.concat(other: Tuple13): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) infix fun Tuple7.concat(other: Tuple14): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) infix fun Tuple7.concat(other: Tuple15): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) -infix fun EmptyTuple.concat(other: Tuple8): Tuple8 = other.clone() -infix fun Tuple8.concat(other: EmptyTuple): Tuple8 = this.clone() +infix fun EmptyTuple.concat(other: Tuple8): Tuple8 = other.copy() +infix fun Tuple8.concat(other: EmptyTuple): Tuple8 = this.copy() infix fun Tuple8.concat(other: Tuple1): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1()) infix fun Tuple8.concat(other: Tuple2): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2()) infix fun Tuple8.concat(other: Tuple3): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3()) @@ -215,8 +215,8 @@ infix fun Tuple8.concat(other: Tuple12): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) infix fun Tuple8.concat(other: Tuple13): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) infix fun Tuple8.concat(other: Tuple14): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) -infix fun EmptyTuple.concat(other: Tuple9): Tuple9 = other.clone() -infix fun Tuple9.concat(other: EmptyTuple): Tuple9 = this.clone() +infix fun EmptyTuple.concat(other: Tuple9): Tuple9 = other.copy() +infix fun Tuple9.concat(other: EmptyTuple): Tuple9 = this.copy() infix fun Tuple9.concat(other: Tuple1): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1()) infix fun Tuple9.concat(other: Tuple2): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2()) infix fun Tuple9.concat(other: Tuple3): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3()) @@ -230,8 +230,8 @@ infix fun Tuple9.concat(other: Tuple11): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) infix fun Tuple9.concat(other: Tuple12): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) infix fun Tuple9.concat(other: Tuple13): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) -infix fun EmptyTuple.concat(other: Tuple10): Tuple10 = other.clone() -infix fun Tuple10.concat(other: EmptyTuple): Tuple10 = this.clone() +infix fun EmptyTuple.concat(other: Tuple10): Tuple10 = other.copy() +infix fun Tuple10.concat(other: EmptyTuple): Tuple10 = this.copy() infix fun Tuple10.concat(other: Tuple1): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1()) infix fun Tuple10.concat(other: Tuple2): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2()) infix fun Tuple10.concat(other: Tuple3): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3()) @@ -244,8 +244,8 @@ infix fun Tuple10.concat(other: Tuple10): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) infix fun Tuple10.concat(other: Tuple11): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) infix fun Tuple10.concat(other: Tuple12): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) -infix fun EmptyTuple.concat(other: Tuple11): Tuple11 = other.clone() -infix fun Tuple11.concat(other: EmptyTuple): Tuple11 = this.clone() +infix fun EmptyTuple.concat(other: Tuple11): Tuple11 = other.copy() +infix fun Tuple11.concat(other: EmptyTuple): Tuple11 = this.copy() infix fun Tuple11.concat(other: Tuple1): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1()) infix fun Tuple11.concat(other: Tuple2): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2()) infix fun Tuple11.concat(other: Tuple3): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3()) @@ -257,8 +257,8 @@ infix fun Tuple11.concat(other: Tuple9): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) infix fun Tuple11.concat(other: Tuple10): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) infix fun Tuple11.concat(other: Tuple11): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) -infix fun EmptyTuple.concat(other: Tuple12): Tuple12 = other.clone() -infix fun Tuple12.concat(other: EmptyTuple): Tuple12 = this.clone() +infix fun EmptyTuple.concat(other: Tuple12): Tuple12 = other.copy() +infix fun Tuple12.concat(other: EmptyTuple): Tuple12 = this.copy() infix fun Tuple12.concat(other: Tuple1): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1()) infix fun Tuple12.concat(other: Tuple2): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2()) infix fun Tuple12.concat(other: Tuple3): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3()) @@ -269,8 +269,8 @@ infix fun Tuple12.concat(other: Tuple8): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) infix fun Tuple12.concat(other: Tuple9): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) infix fun Tuple12.concat(other: Tuple10): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) -infix fun EmptyTuple.concat(other: Tuple13): Tuple13 = other.clone() -infix fun Tuple13.concat(other: EmptyTuple): Tuple13 = this.clone() +infix fun EmptyTuple.concat(other: Tuple13): Tuple13 = other.copy() +infix fun Tuple13.concat(other: EmptyTuple): Tuple13 = this.copy() infix fun Tuple13.concat(other: Tuple1): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1()) infix fun Tuple13.concat(other: Tuple2): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2()) infix fun Tuple13.concat(other: Tuple3): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3()) @@ -280,8 +280,8 @@ infix fun Tuple13.concat(other: Tuple7): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) infix fun Tuple13.concat(other: Tuple8): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) infix fun Tuple13.concat(other: Tuple9): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) -infix fun EmptyTuple.concat(other: Tuple14): Tuple14 = other.clone() -infix fun Tuple14.concat(other: EmptyTuple): Tuple14 = this.clone() +infix fun EmptyTuple.concat(other: Tuple14): Tuple14 = other.copy() +infix fun Tuple14.concat(other: EmptyTuple): Tuple14 = this.copy() infix fun Tuple14.concat(other: Tuple1): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1()) infix fun Tuple14.concat(other: Tuple2): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2()) infix fun Tuple14.concat(other: Tuple3): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3()) @@ -290,8 +290,8 @@ infix fun Tuple14.concat(other: Tuple6): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) infix fun Tuple14.concat(other: Tuple7): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) infix fun Tuple14.concat(other: Tuple8): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) -infix fun EmptyTuple.concat(other: Tuple15): Tuple15 = other.clone() -infix fun Tuple15.concat(other: EmptyTuple): Tuple15 = this.clone() +infix fun EmptyTuple.concat(other: Tuple15): Tuple15 = other.copy() +infix fun Tuple15.concat(other: EmptyTuple): Tuple15 = this.copy() infix fun Tuple15.concat(other: Tuple1): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1()) infix fun Tuple15.concat(other: Tuple2): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2()) infix fun Tuple15.concat(other: Tuple3): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3()) @@ -299,44 +299,44 @@ infix fun Tuple15.concat(other: Tuple5): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5()) infix fun Tuple15.concat(other: Tuple6): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) infix fun Tuple15.concat(other: Tuple7): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) -infix fun EmptyTuple.concat(other: Tuple16): Tuple16 = other.clone() -infix fun Tuple16.concat(other: EmptyTuple): Tuple16 = this.clone() +infix fun EmptyTuple.concat(other: Tuple16): Tuple16 = other.copy() +infix fun Tuple16.concat(other: EmptyTuple): Tuple16 = this.copy() infix fun Tuple16.concat(other: Tuple1): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1()) infix fun Tuple16.concat(other: Tuple2): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2()) infix fun Tuple16.concat(other: Tuple3): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3()) infix fun Tuple16.concat(other: Tuple4): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4()) infix fun Tuple16.concat(other: Tuple5): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5()) infix fun Tuple16.concat(other: Tuple6): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) -infix fun EmptyTuple.concat(other: Tuple17): Tuple17 = other.clone() -infix fun Tuple17.concat(other: EmptyTuple): Tuple17 = this.clone() +infix fun EmptyTuple.concat(other: Tuple17): Tuple17 = other.copy() +infix fun Tuple17.concat(other: EmptyTuple): Tuple17 = this.copy() infix fun Tuple17.concat(other: Tuple1): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1()) infix fun Tuple17.concat(other: Tuple2): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2()) infix fun Tuple17.concat(other: Tuple3): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3()) infix fun Tuple17.concat(other: Tuple4): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4()) infix fun Tuple17.concat(other: Tuple5): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4(), other._5()) -infix fun EmptyTuple.concat(other: Tuple18): Tuple18 = other.clone() -infix fun Tuple18.concat(other: EmptyTuple): Tuple18 = this.clone() +infix fun EmptyTuple.concat(other: Tuple18): Tuple18 = other.copy() +infix fun Tuple18.concat(other: EmptyTuple): Tuple18 = this.copy() infix fun Tuple18.concat(other: Tuple1): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1()) infix fun Tuple18.concat(other: Tuple2): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2()) infix fun Tuple18.concat(other: Tuple3): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3()) infix fun Tuple18.concat(other: Tuple4): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3(), other._4()) -infix fun EmptyTuple.concat(other: Tuple19): Tuple19 = other.clone() -infix fun Tuple19.concat(other: EmptyTuple): Tuple19 = this.clone() +infix fun EmptyTuple.concat(other: Tuple19): Tuple19 = other.copy() +infix fun Tuple19.concat(other: EmptyTuple): Tuple19 = this.copy() infix fun Tuple19.concat(other: Tuple1): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1()) infix fun Tuple19.concat(other: Tuple2): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2()) infix fun Tuple19.concat(other: Tuple3): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2(), other._3()) -infix fun EmptyTuple.concat(other: Tuple20): Tuple20 = other.clone() -infix fun Tuple20.concat(other: EmptyTuple): Tuple20 = this.clone() +infix fun EmptyTuple.concat(other: Tuple20): Tuple20 = other.copy() +infix fun Tuple20.concat(other: EmptyTuple): Tuple20 = this.copy() infix fun Tuple20.concat(other: Tuple1): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1()) infix fun Tuple20.concat(other: Tuple2): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1(), other._2()) -infix fun EmptyTuple.concat(other: Tuple21): Tuple21 = other.clone() -infix fun Tuple21.concat(other: EmptyTuple): Tuple21 = this.clone() +infix fun EmptyTuple.concat(other: Tuple21): Tuple21 = other.copy() +infix fun Tuple21.concat(other: EmptyTuple): Tuple21 = this.copy() infix fun Tuple21.concat(other: Tuple1): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), other._1()) -infix fun EmptyTuple.concat(other: Tuple22): Tuple22 = other.clone() -infix fun Tuple22.concat(other: EmptyTuple): Tuple22 = this.clone() +infix fun EmptyTuple.concat(other: Tuple22): Tuple22 = other.copy() +infix fun Tuple22.concat(other: EmptyTuple): Tuple22 = this.copy() -operator fun EmptyTuple.plus(other: Tuple1): Tuple1 = other.clone() -operator fun Tuple1.plus(other: EmptyTuple): Tuple1 = this.clone() +operator fun EmptyTuple.plus(other: Tuple1): Tuple1 = other.copy() +operator fun Tuple1.plus(other: EmptyTuple): Tuple1 = this.copy() operator fun Tuple1.plus(other: Tuple1): Tuple2 = Tuple2(this._1(), other._1()) operator fun Tuple1.plus(other: Tuple2): Tuple3 = Tuple3(this._1(), other._1(), other._2()) operator fun Tuple1.plus(other: Tuple3): Tuple4 = Tuple4(this._1(), other._1(), other._2(), other._3()) @@ -358,8 +358,8 @@ operator fun Tuple1.plus(other: Tuple19): Tuple20 = Tuple20(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) operator fun Tuple1.plus(other: Tuple20): Tuple21 = Tuple21(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) operator fun Tuple1.plus(other: Tuple21): Tuple22 = Tuple22(this._1(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20(), other._21()) -operator fun EmptyTuple.plus(other: Tuple2): Tuple2 = other.clone() -operator fun Tuple2.plus(other: EmptyTuple): Tuple2 = this.clone() +operator fun EmptyTuple.plus(other: Tuple2): Tuple2 = other.copy() +operator fun Tuple2.plus(other: EmptyTuple): Tuple2 = this.copy() operator fun Tuple2.plus(other: Tuple1): Tuple3 = Tuple3(this._1(), this._2(), other._1()) operator fun Tuple2.plus(other: Tuple2): Tuple4 = Tuple4(this._1(), this._2(), other._1(), other._2()) operator fun Tuple2.plus(other: Tuple3): Tuple5 = Tuple5(this._1(), this._2(), other._1(), other._2(), other._3()) @@ -380,8 +380,8 @@ operator fun Tuple2.plus(other: Tuple18): Tuple20 = Tuple20(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) operator fun Tuple2.plus(other: Tuple19): Tuple21 = Tuple21(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) operator fun Tuple2.plus(other: Tuple20): Tuple22 = Tuple22(this._1(), this._2(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19(), other._20()) -operator fun EmptyTuple.plus(other: Tuple3): Tuple3 = other.clone() -operator fun Tuple3.plus(other: EmptyTuple): Tuple3 = this.clone() +operator fun EmptyTuple.plus(other: Tuple3): Tuple3 = other.copy() +operator fun Tuple3.plus(other: EmptyTuple): Tuple3 = this.copy() operator fun Tuple3.plus(other: Tuple1): Tuple4 = Tuple4(this._1(), this._2(), this._3(), other._1()) operator fun Tuple3.plus(other: Tuple2): Tuple5 = Tuple5(this._1(), this._2(), this._3(), other._1(), other._2()) operator fun Tuple3.plus(other: Tuple3): Tuple6 = Tuple6(this._1(), this._2(), this._3(), other._1(), other._2(), other._3()) @@ -401,8 +401,8 @@ operator fun Tuple3.plus(other: Tuple17): Tuple20 = Tuple20(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) operator fun Tuple3.plus(other: Tuple18): Tuple21 = Tuple21(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) operator fun Tuple3.plus(other: Tuple19): Tuple22 = Tuple22(this._1(), this._2(), this._3(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18(), other._19()) -operator fun EmptyTuple.plus(other: Tuple4): Tuple4 = other.clone() -operator fun Tuple4.plus(other: EmptyTuple): Tuple4 = this.clone() +operator fun EmptyTuple.plus(other: Tuple4): Tuple4 = other.copy() +operator fun Tuple4.plus(other: EmptyTuple): Tuple4 = this.copy() operator fun Tuple4.plus(other: Tuple1): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), other._1()) operator fun Tuple4.plus(other: Tuple2): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), other._1(), other._2()) operator fun Tuple4.plus(other: Tuple3): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3()) @@ -421,8 +421,8 @@ operator fun Tuple4.plus(other: Tuple16): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) operator fun Tuple4.plus(other: Tuple17): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) operator fun Tuple4.plus(other: Tuple18): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17(), other._18()) -operator fun EmptyTuple.plus(other: Tuple5): Tuple5 = other.clone() -operator fun Tuple5.plus(other: EmptyTuple): Tuple5 = this.clone() +operator fun EmptyTuple.plus(other: Tuple5): Tuple5 = other.copy() +operator fun Tuple5.plus(other: EmptyTuple): Tuple5 = this.copy() operator fun Tuple5.plus(other: Tuple1): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), other._1()) operator fun Tuple5.plus(other: Tuple2): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2()) operator fun Tuple5.plus(other: Tuple3): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3()) @@ -440,8 +440,8 @@ operator fun Tuple5.plus(other: Tuple15): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) operator fun Tuple5.plus(other: Tuple16): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) operator fun Tuple5.plus(other: Tuple17): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16(), other._17()) -operator fun EmptyTuple.plus(other: Tuple6): Tuple6 = other.clone() -operator fun Tuple6.plus(other: EmptyTuple): Tuple6 = this.clone() +operator fun EmptyTuple.plus(other: Tuple6): Tuple6 = other.copy() +operator fun Tuple6.plus(other: EmptyTuple): Tuple6 = this.copy() operator fun Tuple6.plus(other: Tuple1): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1()) operator fun Tuple6.plus(other: Tuple2): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2()) operator fun Tuple6.plus(other: Tuple3): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3()) @@ -458,8 +458,8 @@ operator fun Tuple6.plus(other: Tuple14): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) operator fun Tuple6.plus(other: Tuple15): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) operator fun Tuple6.plus(other: Tuple16): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15(), other._16()) -operator fun EmptyTuple.plus(other: Tuple7): Tuple7 = other.clone() -operator fun Tuple7.plus(other: EmptyTuple): Tuple7 = this.clone() +operator fun EmptyTuple.plus(other: Tuple7): Tuple7 = other.copy() +operator fun Tuple7.plus(other: EmptyTuple): Tuple7 = this.copy() operator fun Tuple7.plus(other: Tuple1): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1()) operator fun Tuple7.plus(other: Tuple2): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2()) operator fun Tuple7.plus(other: Tuple3): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3()) @@ -475,8 +475,8 @@ operator fun Tuple7.plus(other: Tuple13): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) operator fun Tuple7.plus(other: Tuple14): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) operator fun Tuple7.plus(other: Tuple15): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14(), other._15()) -operator fun EmptyTuple.plus(other: Tuple8): Tuple8 = other.clone() -operator fun Tuple8.plus(other: EmptyTuple): Tuple8 = this.clone() +operator fun EmptyTuple.plus(other: Tuple8): Tuple8 = other.copy() +operator fun Tuple8.plus(other: EmptyTuple): Tuple8 = this.copy() operator fun Tuple8.plus(other: Tuple1): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1()) operator fun Tuple8.plus(other: Tuple2): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2()) operator fun Tuple8.plus(other: Tuple3): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3()) @@ -491,8 +491,8 @@ operator fun Tuple8.plus(other: Tuple12): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) operator fun Tuple8.plus(other: Tuple13): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) operator fun Tuple8.plus(other: Tuple14): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13(), other._14()) -operator fun EmptyTuple.plus(other: Tuple9): Tuple9 = other.clone() -operator fun Tuple9.plus(other: EmptyTuple): Tuple9 = this.clone() +operator fun EmptyTuple.plus(other: Tuple9): Tuple9 = other.copy() +operator fun Tuple9.plus(other: EmptyTuple): Tuple9 = this.copy() operator fun Tuple9.plus(other: Tuple1): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1()) operator fun Tuple9.plus(other: Tuple2): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2()) operator fun Tuple9.plus(other: Tuple3): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3()) @@ -506,8 +506,8 @@ operator fun Tuple9.plus(other: Tuple11): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) operator fun Tuple9.plus(other: Tuple12): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) operator fun Tuple9.plus(other: Tuple13): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12(), other._13()) -operator fun EmptyTuple.plus(other: Tuple10): Tuple10 = other.clone() -operator fun Tuple10.plus(other: EmptyTuple): Tuple10 = this.clone() +operator fun EmptyTuple.plus(other: Tuple10): Tuple10 = other.copy() +operator fun Tuple10.plus(other: EmptyTuple): Tuple10 = this.copy() operator fun Tuple10.plus(other: Tuple1): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1()) operator fun Tuple10.plus(other: Tuple2): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2()) operator fun Tuple10.plus(other: Tuple3): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3()) @@ -520,8 +520,8 @@ operator fun Tuple10.plus(other: Tuple10): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) operator fun Tuple10.plus(other: Tuple11): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) operator fun Tuple10.plus(other: Tuple12): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11(), other._12()) -operator fun EmptyTuple.plus(other: Tuple11): Tuple11 = other.clone() -operator fun Tuple11.plus(other: EmptyTuple): Tuple11 = this.clone() +operator fun EmptyTuple.plus(other: Tuple11): Tuple11 = other.copy() +operator fun Tuple11.plus(other: EmptyTuple): Tuple11 = this.copy() operator fun Tuple11.plus(other: Tuple1): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1()) operator fun Tuple11.plus(other: Tuple2): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2()) operator fun Tuple11.plus(other: Tuple3): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3()) @@ -533,8 +533,8 @@ operator fun Tuple11.plus(other: Tuple9): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) operator fun Tuple11.plus(other: Tuple10): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) operator fun Tuple11.plus(other: Tuple11): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10(), other._11()) -operator fun EmptyTuple.plus(other: Tuple12): Tuple12 = other.clone() -operator fun Tuple12.plus(other: EmptyTuple): Tuple12 = this.clone() +operator fun EmptyTuple.plus(other: Tuple12): Tuple12 = other.copy() +operator fun Tuple12.plus(other: EmptyTuple): Tuple12 = this.copy() operator fun Tuple12.plus(other: Tuple1): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1()) operator fun Tuple12.plus(other: Tuple2): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2()) operator fun Tuple12.plus(other: Tuple3): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3()) @@ -545,8 +545,8 @@ operator fun Tuple12.plus(other: Tuple8): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) operator fun Tuple12.plus(other: Tuple9): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) operator fun Tuple12.plus(other: Tuple10): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9(), other._10()) -operator fun EmptyTuple.plus(other: Tuple13): Tuple13 = other.clone() -operator fun Tuple13.plus(other: EmptyTuple): Tuple13 = this.clone() +operator fun EmptyTuple.plus(other: Tuple13): Tuple13 = other.copy() +operator fun Tuple13.plus(other: EmptyTuple): Tuple13 = this.copy() operator fun Tuple13.plus(other: Tuple1): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1()) operator fun Tuple13.plus(other: Tuple2): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2()) operator fun Tuple13.plus(other: Tuple3): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3()) @@ -556,8 +556,8 @@ operator fun Tuple13.plus(other: Tuple7): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) operator fun Tuple13.plus(other: Tuple8): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) operator fun Tuple13.plus(other: Tuple9): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8(), other._9()) -operator fun EmptyTuple.plus(other: Tuple14): Tuple14 = other.clone() -operator fun Tuple14.plus(other: EmptyTuple): Tuple14 = this.clone() +operator fun EmptyTuple.plus(other: Tuple14): Tuple14 = other.copy() +operator fun Tuple14.plus(other: EmptyTuple): Tuple14 = this.copy() operator fun Tuple14.plus(other: Tuple1): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1()) operator fun Tuple14.plus(other: Tuple2): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2()) operator fun Tuple14.plus(other: Tuple3): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3()) @@ -566,8 +566,8 @@ operator fun Tuple14.plus(other: Tuple6): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) operator fun Tuple14.plus(other: Tuple7): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) operator fun Tuple14.plus(other: Tuple8): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7(), other._8()) -operator fun EmptyTuple.plus(other: Tuple15): Tuple15 = other.clone() -operator fun Tuple15.plus(other: EmptyTuple): Tuple15 = this.clone() +operator fun EmptyTuple.plus(other: Tuple15): Tuple15 = other.copy() +operator fun Tuple15.plus(other: EmptyTuple): Tuple15 = this.copy() operator fun Tuple15.plus(other: Tuple1): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1()) operator fun Tuple15.plus(other: Tuple2): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2()) operator fun Tuple15.plus(other: Tuple3): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3()) @@ -575,39 +575,39 @@ operator fun Tuple15.plus(other: Tuple5): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5()) operator fun Tuple15.plus(other: Tuple6): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) operator fun Tuple15.plus(other: Tuple7): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6(), other._7()) -operator fun EmptyTuple.plus(other: Tuple16): Tuple16 = other.clone() -operator fun Tuple16.plus(other: EmptyTuple): Tuple16 = this.clone() +operator fun EmptyTuple.plus(other: Tuple16): Tuple16 = other.copy() +operator fun Tuple16.plus(other: EmptyTuple): Tuple16 = this.copy() operator fun Tuple16.plus(other: Tuple1): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1()) operator fun Tuple16.plus(other: Tuple2): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2()) operator fun Tuple16.plus(other: Tuple3): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3()) operator fun Tuple16.plus(other: Tuple4): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4()) operator fun Tuple16.plus(other: Tuple5): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5()) operator fun Tuple16.plus(other: Tuple6): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), other._1(), other._2(), other._3(), other._4(), other._5(), other._6()) -operator fun EmptyTuple.plus(other: Tuple17): Tuple17 = other.clone() -operator fun Tuple17.plus(other: EmptyTuple): Tuple17 = this.clone() +operator fun EmptyTuple.plus(other: Tuple17): Tuple17 = other.copy() +operator fun Tuple17.plus(other: EmptyTuple): Tuple17 = this.copy() operator fun Tuple17.plus(other: Tuple1): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1()) operator fun Tuple17.plus(other: Tuple2): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2()) operator fun Tuple17.plus(other: Tuple3): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3()) operator fun Tuple17.plus(other: Tuple4): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4()) operator fun Tuple17.plus(other: Tuple5): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), other._1(), other._2(), other._3(), other._4(), other._5()) -operator fun EmptyTuple.plus(other: Tuple18): Tuple18 = other.clone() -operator fun Tuple18.plus(other: EmptyTuple): Tuple18 = this.clone() +operator fun EmptyTuple.plus(other: Tuple18): Tuple18 = other.copy() +operator fun Tuple18.plus(other: EmptyTuple): Tuple18 = this.copy() operator fun Tuple18.plus(other: Tuple1): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1()) operator fun Tuple18.plus(other: Tuple2): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2()) operator fun Tuple18.plus(other: Tuple3): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3()) operator fun Tuple18.plus(other: Tuple4): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), other._1(), other._2(), other._3(), other._4()) -operator fun EmptyTuple.plus(other: Tuple19): Tuple19 = other.clone() -operator fun Tuple19.plus(other: EmptyTuple): Tuple19 = this.clone() +operator fun EmptyTuple.plus(other: Tuple19): Tuple19 = other.copy() +operator fun Tuple19.plus(other: EmptyTuple): Tuple19 = this.copy() operator fun Tuple19.plus(other: Tuple1): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1()) operator fun Tuple19.plus(other: Tuple2): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2()) operator fun Tuple19.plus(other: Tuple3): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), other._1(), other._2(), other._3()) -operator fun EmptyTuple.plus(other: Tuple20): Tuple20 = other.clone() -operator fun Tuple20.plus(other: EmptyTuple): Tuple20 = this.clone() +operator fun EmptyTuple.plus(other: Tuple20): Tuple20 = other.copy() +operator fun Tuple20.plus(other: EmptyTuple): Tuple20 = this.copy() operator fun Tuple20.plus(other: Tuple1): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1()) operator fun Tuple20.plus(other: Tuple2): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), other._1(), other._2()) -operator fun EmptyTuple.plus(other: Tuple21): Tuple21 = other.clone() -operator fun Tuple21.plus(other: EmptyTuple): Tuple21 = this.clone() +operator fun EmptyTuple.plus(other: Tuple21): Tuple21 = other.copy() +operator fun Tuple21.plus(other: EmptyTuple): Tuple21 = this.copy() operator fun Tuple21.plus(other: Tuple1): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), other._1()) -operator fun EmptyTuple.plus(other: Tuple22): Tuple22 = other.clone() -operator fun Tuple22.plus(other: EmptyTuple): Tuple22 = this.clone() +operator fun EmptyTuple.plus(other: Tuple22): Tuple22 = other.copy() +operator fun Tuple22.plus(other: EmptyTuple): Tuple22 = this.copy() diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt index 1b9a37d3..8d5ea7ac 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt @@ -23,49 +23,26 @@ package org.jetbrains.kotlinx.spark.api.tuples import scala.* -fun EmptyTuple.clone(): EmptyTuple = EmptyTuple fun EmptyTuple.copy(): EmptyTuple = EmptyTuple -fun Tuple1.clone(): Tuple1 = Tuple1(this._1()) fun Tuple1.copy(_1: T1 = this._1()): Tuple1 = Tuple1(_1) -fun Tuple2.clone(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple2.copy(_1: T1 = this._1(), _2: T2 = this._2()): Tuple2 = Tuple2(_1, _2) -fun Tuple3.clone(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) fun Tuple3.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3()): Tuple3 = Tuple3(_1, _2, _3) -fun Tuple4.clone(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) fun Tuple4.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4()): Tuple4 = Tuple4(_1, _2, _3, _4) -fun Tuple5.clone(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) fun Tuple5.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5()): Tuple5 = Tuple5(_1, _2, _3, _4, _5) -fun Tuple6.clone(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) fun Tuple6.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6()): Tuple6 = Tuple6(_1, _2, _3, _4, _5, _6) -fun Tuple7.clone(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) fun Tuple7.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7()): Tuple7 = Tuple7(_1, _2, _3, _4, _5, _6, _7) -fun Tuple8.clone(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) fun Tuple8.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8()): Tuple8 = Tuple8(_1, _2, _3, _4, _5, _6, _7, _8) -fun Tuple9.clone(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) fun Tuple9.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9()): Tuple9 = Tuple9(_1, _2, _3, _4, _5, _6, _7, _8, _9) -fun Tuple10.clone(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) fun Tuple10.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10()): Tuple10 = Tuple10(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10) -fun Tuple11.clone(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) fun Tuple11.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11()): Tuple11 = Tuple11(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11) -fun Tuple12.clone(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) fun Tuple12.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12()): Tuple12 = Tuple12(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12) -fun Tuple13.clone(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) fun Tuple13.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13()): Tuple13 = Tuple13(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13) -fun Tuple14.clone(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) fun Tuple14.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14()): Tuple14 = Tuple14(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14) -fun Tuple15.clone(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) fun Tuple15.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15()): Tuple15 = Tuple15(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15) -fun Tuple16.clone(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) fun Tuple16.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16()): Tuple16 = Tuple16(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16) -fun Tuple17.clone(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) fun Tuple17.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17()): Tuple17 = Tuple17(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17) -fun Tuple18.clone(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) fun Tuple18.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18()): Tuple18 = Tuple18(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18) -fun Tuple19.clone(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) fun Tuple19.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19()): Tuple19 = Tuple19(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19) -fun Tuple20.clone(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) fun Tuple20.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19(), _20: T20 = this._20()): Tuple20 = Tuple20(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20) -fun Tuple21.clone(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) fun Tuple21.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19(), _20: T20 = this._20(), _21: T21 = this._21()): Tuple21 = Tuple21(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21) -fun Tuple22.clone(): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) fun Tuple22.copy(_1: T1 = this._1(), _2: T2 = this._2(), _3: T3 = this._3(), _4: T4 = this._4(), _5: T5 = this._5(), _6: T6 = this._6(), _7: T7 = this._7(), _8: T8 = this._8(), _9: T9 = this._9(), _10: T10 = this._10(), _11: T11 = this._11(), _12: T12 = this._12(), _13: T13 = this._13(), _14: T14 = this._14(), _15: T15 = this._15(), _16: T16 = this._16(), _17: T17 = this._17(), _18: T18 = this._18(), _19: T19 = this._19(), _20: T20 = this._20(), _21: T21 = this._21(), _22: T22 = this._22()): Tuple22 = Tuple22(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TypedProductExtensions.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/SameTypeProductExtensions.kt rename to kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TypedProductExtensions.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt index 4c16d28e..720368fa 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt @@ -8,7 +8,9 @@ import io.kotest.matchers.shouldNotBe import org.jetbrains.kotlinx.spark.api.tuples.* import org.jetbrains.kotlinx.spark.api.* import scala.Tuple3 +import io.kotest.matchers.types.shouldBeInstanceOf +@Suppress("ShouldBeInstanceOfInspection", "RedundantLambdaArrow", "USELESS_IS_CHECK") class TuplesTest : ShouldSpec({ context("Test tuple extensions") { @@ -111,14 +113,20 @@ class TuplesTest : ShouldSpec({ it shouldBe 1 } tupleOf(1, 2, 3).toList().isNotEmpty() shouldBe true - tupleOf(1, 2, 3).asIterable().none { - it > 4 - } shouldBe true + tupleOf(1, 2, 3).asIterable().none { it > 4 } shouldBe true tupleOf(1, 2, 3, 4, 5).size shouldBe 5 tupleOf(1, 2, 3, 4)[0] shouldBe 1 shouldThrow { tupleOf(1, 2L)[5] } - tupleOf(1, 2, 3).getOrNull(5) shouldBe null + tupleOf(1 to 3, arrayOf(1), A()).getOrNull(5).let { + (it is Any?) shouldBe true + it shouldBe null + } + + tupleOf(1, 2, 3).getOrNull(5).let { + (it is Int?) shouldBe true + it shouldBe null + } shouldThrow { tupleOf(1).getAs(5) } shouldThrow { tupleOf(1).getAs(0) } @@ -129,8 +137,10 @@ class TuplesTest : ShouldSpec({ tupleOf(1, 2, 3).toTriple() shouldBe Triple(1, 2, 3) - tupleOf(1, 2, 3, 4, 5, 6, 7)[1..3] - .containsAll(listOf(2, 3, 4)) shouldBe true + tupleOf(1, 2, 3, 4, 5, 6, 7)[1..3].let { + it.shouldBeInstanceOf>() + it.containsAll(listOf(2, 3, 4)) shouldBe true + } tupleOf(1, 1, 2)[1..2] shouldBe tupleOf(1, 2, 2)[0..1] tupleOf(1, 2, 3, 4, 5)[2] shouldBe 3 @@ -138,13 +148,25 @@ class TuplesTest : ShouldSpec({ shouldThrow { tupleOf(1, 1, 2)[1..5] } (null in tupleOf(1, 1, 2).getOrNull(1..5)) shouldBe true - - tupleOf(1, 2) shouldBe tupleOf(2, 1).swap() tupleOf(1 to "Test") shouldBe tupleOf(1 to "Test") val a: List = tupleOf(A(), B()).toList() } + should("Have copy methods for tuples, Kotlin data class style") { + t().copy() shouldBe t() + + t(1, 2).copy(_1 = 0) shouldBe t(0, 2) + t(1, 2).copy(_2 = 0) shouldBe t(1, 0) + + t(1, 2).copy() shouldBe t(1, 2) + t(1, 2, 3, 4, 5).copy() shouldBe t(1, 2, 3, 4, 5) + + // when specifying all parameters, the Scala version will be used + t(1, 2).copy(3, 4) shouldBe t(3, 4) + // unless explicitly giving parameters + t(1, 2).copy(_1 = 3, _2 = 4) shouldBe t(3, 4) + } } }) From 2367d8762fe614e0eaf321925232a4ececd7ece0 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 29 Mar 2022 12:36:56 +0200 Subject: [PATCH 106/213] calming qodana --- .../jetbrains/kotlinx/spark/api/Arities.kt | 325 ++++++++++++++++++ 1 file changed, 325 insertions(+) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt index 0a671e78..93371fee 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt @@ -202,328 +202,653 @@ fun c(_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22, _23: T23, _24: T24, _25: T25, _26: T26): Arity26 = Arity26(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, _16, _17, _18, _19, _20, _21, _22, _23, _24, _25, _26) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity1) = Arity2(this._1, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity2) = Arity3(this._1, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity1) = Arity3(this._1, this._2, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity3) = Arity4(this._1, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity2) = Arity4(this._1, this._2, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity1) = Arity4(this._1, this._2, this._3, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity4) = Arity5(this._1, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity3) = Arity5(this._1, this._2, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity2) = Arity5(this._1, this._2, this._3, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity1) = Arity5(this._1, this._2, this._3, this._4, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity5) = Arity6(this._1, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity4) = Arity6(this._1, this._2, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity3) = Arity6(this._1, this._2, this._3, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity2) = Arity6(this._1, this._2, this._3, this._4, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity1) = Arity6(this._1, this._2, this._3, this._4, this._5, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity6) = Arity7(this._1, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity5) = Arity7(this._1, this._2, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity4) = Arity7(this._1, this._2, this._3, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity3) = Arity7(this._1, this._2, this._3, this._4, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity2) = Arity7(this._1, this._2, this._3, this._4, this._5, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity1) = Arity7(this._1, this._2, this._3, this._4, this._5, this._6, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity7) = Arity8(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity6) = Arity8(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity5) = Arity8(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity4) = Arity8(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity3) = Arity8(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity2) = Arity8(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity1) = Arity8(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity8) = Arity9(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity7) = Arity9(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity6) = Arity9(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity5) = Arity9(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity4) = Arity9(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity3) = Arity9(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity2) = Arity9(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity1) = Arity9(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity9) = Arity10(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity8) = Arity10(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity7) = Arity10(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity6) = Arity10(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity5) = Arity10(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity4) = Arity10(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity3) = Arity10(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity2) = Arity10(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity1) = Arity10(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity10) = Arity11(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity9) = Arity11(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity8) = Arity11(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity7) = Arity11(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity6) = Arity11(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity5) = Arity11(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity4) = Arity11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity3) = Arity11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity2) = Arity11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity1) = Arity11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity11) = Arity12(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity10) = Arity12(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity9) = Arity12(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity8) = Arity12(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity7) = Arity12(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity6) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity5) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity4) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity3) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity2) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity1) = Arity12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity12) = Arity13(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity11) = Arity13(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity10) = Arity13(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity9) = Arity13(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity8) = Arity13(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity7) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity6) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity5) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity4) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity3) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity2) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity1) = Arity13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity13) = Arity14(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity12) = Arity14(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity11) = Arity14(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity10) = Arity14(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity9) = Arity14(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity8) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity7) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity6) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity5) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity4) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity3) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity2) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity1) = Arity14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity14) = Arity15(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity13) = Arity15(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity12) = Arity15(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity11) = Arity15(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity10) = Arity15(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity9) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity8) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity7) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity6) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity5) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity4) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity3) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity2) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity1) = Arity15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity15) = Arity16(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity14) = Arity16(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity13) = Arity16(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity12) = Arity16(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity11) = Arity16(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity10) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity9) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity8) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity7) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity6) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity5) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity4) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity3) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity2) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity1) = Arity16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity16) = Arity17(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity15) = Arity17(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity14) = Arity17(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity13) = Arity17(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity12) = Arity17(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity11) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity10) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity9) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity8) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity7) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity6) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity5) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity4) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity3) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity2) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity16.plus(that: Arity1) = Arity17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity17) = Arity18(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity16) = Arity18(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity15) = Arity18(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity14) = Arity18(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity13) = Arity18(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity12) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity11) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity10) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity9) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity8) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity7) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity6) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity5) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity4) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity3) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity16.plus(that: Arity2) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity17.plus(that: Arity1) = Arity18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity18) = Arity19(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity17) = Arity19(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity16) = Arity19(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity15) = Arity19(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity14) = Arity19(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity13) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity12) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity11) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity10) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity9) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity8) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity7) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity6) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity5) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity4) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity16.plus(that: Arity3) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity17.plus(that: Arity2) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity18.plus(that: Arity1) = Arity19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity19) = Arity20(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity18) = Arity20(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity17) = Arity20(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity16) = Arity20(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity15) = Arity20(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity14) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity13) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity12) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity11) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity10) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity9) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity8) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity7) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity6) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity5) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity16.plus(that: Arity4) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity17.plus(that: Arity3) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity18.plus(that: Arity2) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity19.plus(that: Arity1) = Arity20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity20) = Arity21(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity19) = Arity21(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity18) = Arity21(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity17) = Arity21(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity16) = Arity21(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity15) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity14) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity13) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity12) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity11) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity10) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity9) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity8) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity7) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity6) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity16.plus(that: Arity5) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity17.plus(that: Arity4) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity18.plus(that: Arity3) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity19.plus(that: Arity2) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity20.plus(that: Arity1) = Arity21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity21) = Arity22(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity20) = Arity22(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity19) = Arity22(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity18) = Arity22(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity17) = Arity22(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity16) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity15) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity14) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity13) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity12) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity11) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity10) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity9) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity8) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity7) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity16.plus(that: Arity6) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity17.plus(that: Arity5) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity18.plus(that: Arity4) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity19.plus(that: Arity3) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity20.plus(that: Arity2) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity21.plus(that: Arity1) = Arity22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity22) = Arity23(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity21) = Arity23(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity20) = Arity23(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity19) = Arity23(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity18) = Arity23(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity17) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity16) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity15) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity14) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity13) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity12) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity11) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity10) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity9) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity8) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity16.plus(that: Arity7) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity17.plus(that: Arity6) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity18.plus(that: Arity5) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity19.plus(that: Arity4) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity20.plus(that: Arity3) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity21.plus(that: Arity2) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity22.plus(that: Arity1) = Arity23(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity23) = Arity24(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity22) = Arity24(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity21) = Arity24(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity20) = Arity24(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity19) = Arity24(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity18) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity17) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity16) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity15) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity14) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity13) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity12) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity11) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity10) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity9) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity16.plus(that: Arity8) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity17.plus(that: Arity7) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity18.plus(that: Arity6) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity19.plus(that: Arity5) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity20.plus(that: Arity4) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity21.plus(that: Arity3) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity22.plus(that: Arity2) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity23.plus(that: Arity1) = Arity24(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity24) = Arity25(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23, that._24) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity23) = Arity25(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity22) = Arity25(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity21) = Arity25(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity20) = Arity25(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity19) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity18) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity17) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity16) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity15) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity14) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity13) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity12) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity11) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity10) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity16.plus(that: Arity9) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity17.plus(that: Arity8) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity18.plus(that: Arity7) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity19.plus(that: Arity6) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity20.plus(that: Arity5) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity21.plus(that: Arity4) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity22.plus(that: Arity3) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity23.plus(that: Arity2) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity24.plus(that: Arity1) = Arity25(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, this._24, that._1) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity1.plus(that: Arity25) = Arity26(this._1, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23, that._24, that._25) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity2.plus(that: Arity24) = Arity26(this._1, this._2, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23, that._24) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity3.plus(that: Arity23) = Arity26(this._1, this._2, this._3, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22, that._23) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity4.plus(that: Arity22) = Arity26(this._1, this._2, this._3, this._4, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21, that._22) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity5.plus(that: Arity21) = Arity26(this._1, this._2, this._3, this._4, this._5, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20, that._21) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity6.plus(that: Arity20) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19, that._20) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity7.plus(that: Arity19) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18, that._19) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity8.plus(that: Arity18) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17, that._18) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity9.plus(that: Arity17) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16, that._17) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity10.plus(that: Arity16) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15, that._16) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity11.plus(that: Arity15) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14, that._15) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity12.plus(that: Arity14) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13, that._14) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity13.plus(that: Arity13) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12, that._13) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity14.plus(that: Arity12) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11, that._12) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity15.plus(that: Arity11) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10, that._11) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity16.plus(that: Arity10) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9, that._10) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity17.plus(that: Arity9) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8, that._9) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity18.plus(that: Arity8) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, that._1, that._2, that._3, that._4, that._5, that._6, that._7, that._8) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity19.plus(that: Arity7) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, that._1, that._2, that._3, that._4, that._5, that._6, that._7) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity20.plus(that: Arity6) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, that._1, that._2, that._3, that._4, that._5, that._6) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity21.plus(that: Arity5) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, that._1, that._2, that._3, that._4, that._5) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity22.plus(that: Arity4) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, that._1, that._2, that._3, that._4) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity23.plus(that: Arity3) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, that._1, that._2, that._3) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity24.plus(that: Arity2) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, this._24, that._1, that._2) +@Deprecated("Use Scala tuples instead.") infix operator fun Arity25.plus(that: Arity1) = Arity26(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22, this._23, this._24, this._25, that._1) From fb01c775e8531b0cd8e77e5c7539ce805d3d9ac3 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 29 Mar 2022 14:05:13 +0200 Subject: [PATCH 107/213] added tuple zip-functions --- .../kotlinx/spark/api/tuples/ZipTuples.kt | 541 ++++++++++++++++++ .../jetbrains/kotlinx/spark/api/TuplesTest.kt | 18 + 2 files changed, 559 insertions(+) create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ZipTuples.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ZipTuples.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ZipTuples.kt new file mode 100644 index 00000000..f003e507 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ZipTuples.kt @@ -0,0 +1,541 @@ +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.* + +/** + * This file provides zip-functions to all Tuple variants. + * Given two tuples, `t(a1, ..., an)` and `t(a1, ..., an)`, returns a tuple + * `t(t(a1, b1), ..., t(an, bn))`. If the two tuples have different sizes, + * the extra elements of the larger tuple will be disregarded. + * The result is typed as `TupleX, ..., Tuple2>`. + * + */ + +infix fun Tuple1<*>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple1<*>): EmptyTuple = EmptyTuple +infix fun Tuple1.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple2): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple3): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple4): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple5): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple6): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple7): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple8): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple9): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple10): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple11): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple12): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple13): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple14): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple15): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple16): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple17): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple18): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple19): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple20): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple21): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple1.zip(other: Tuple22): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple2<*, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple2<*, *>): EmptyTuple = EmptyTuple +infix fun Tuple2.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple2.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple3): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple4): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple5): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple6): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple7): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple8): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple9): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple10): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple11): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple12): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple13): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple14): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple15): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple16): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple17): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple18): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple19): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple20): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple21): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple2.zip(other: Tuple22): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple3<*, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple3<*, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple3.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple3.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple3.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple4): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple5): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple6): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple7): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple8): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple9): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple10): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple11): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple12): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple13): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple14): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple15): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple16): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple17): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple18): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple19): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple20): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple21): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple3.zip(other: Tuple22): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple4<*, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple4<*, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple4.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple4.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple4.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple4.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple5): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple6): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple7): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple8): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple9): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple10): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple11): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple12): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple13): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple14): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple15): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple16): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple17): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple18): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple19): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple20): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple21): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple4.zip(other: Tuple22): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple5<*, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple5<*, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple5.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple5.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple5.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple5.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple5.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple6): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple7): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple8): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple9): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple10): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple11): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple12): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple13): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple14): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple15): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple16): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple17): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple18): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple19): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple20): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple21): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple5.zip(other: Tuple22): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple6<*, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple6<*, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple6.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple6.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple6.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple6.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple6.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple6.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple7): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple8): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple9): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple10): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple11): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple12): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple13): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple14): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple15): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple16): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple17): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple18): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple19): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple20): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple21): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple6.zip(other: Tuple22): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple7<*, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple7<*, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple7.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple7.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple7.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple7.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple7.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple7.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple7.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple8): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple9): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple10): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple11): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple12): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple13): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple14): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple15): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple16): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple17): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple18): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple19): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple20): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple21): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple7.zip(other: Tuple22): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple8<*, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple8<*, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple8.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple8.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple8.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple8.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple8.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple8.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple8.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple8.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple9): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple10): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple11): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple12): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple13): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple14): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple15): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple16): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple17): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple18): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple19): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple20): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple21): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple8.zip(other: Tuple22): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple9<*, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple9<*, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple9.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple9.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple9.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple9.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple9.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple9.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple9.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple9.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple9.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple10): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple11): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple12): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple13): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple14): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple15): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple16): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple17): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple18): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple19): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple20): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple21): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple9.zip(other: Tuple22): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple10<*, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple10<*, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple10.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple10.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple10.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple10.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple10.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple10.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple10.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple10.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple10.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple10.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple11): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple12): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple13): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple14): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple15): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple16): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple17): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple18): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple19): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple20): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple21): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple10.zip(other: Tuple22): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple11<*, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple11<*, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple11.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple11.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple11.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple11.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple11.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple11.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple11.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple11.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple11.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple11.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple11.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple12): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple13): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple14): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple15): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple16): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple17): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple18): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple19): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple20): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple21): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple11.zip(other: Tuple22): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple12<*, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple12<*, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple12.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple12.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple12.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple12.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple12.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple12.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple12.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple12.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple12.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple12.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple12.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple12.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple12.zip(other: Tuple13): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple12.zip(other: Tuple14): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple12.zip(other: Tuple15): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple12.zip(other: Tuple16): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple12.zip(other: Tuple17): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple12.zip(other: Tuple18): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple12.zip(other: Tuple19): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple12.zip(other: Tuple20): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple12.zip(other: Tuple21): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple12.zip(other: Tuple22): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple13<*, *, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple13.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple13.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple13.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple13.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple13.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple13.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple13.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple13.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple13.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple13.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple13.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple13.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple13.zip(other: Tuple13): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple13.zip(other: Tuple14): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple13.zip(other: Tuple15): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple13.zip(other: Tuple16): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple13.zip(other: Tuple17): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple13.zip(other: Tuple18): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple13.zip(other: Tuple19): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple13.zip(other: Tuple20): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple13.zip(other: Tuple21): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple13.zip(other: Tuple22): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple14.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple14.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple14.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple14.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple14.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple14.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple14.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple14.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple14.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple14.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple14.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple14.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple14.zip(other: Tuple13): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple14.zip(other: Tuple14): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple14.zip(other: Tuple15): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple14.zip(other: Tuple16): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple14.zip(other: Tuple17): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple14.zip(other: Tuple18): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple14.zip(other: Tuple19): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple14.zip(other: Tuple20): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple14.zip(other: Tuple21): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple14.zip(other: Tuple22): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple15.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple15.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple15.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple15.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple15.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple15.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple15.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple15.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple15.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple15.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple15.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple15.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple15.zip(other: Tuple13): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple15.zip(other: Tuple14): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple15.zip(other: Tuple15): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple15.zip(other: Tuple16): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple15.zip(other: Tuple17): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple15.zip(other: Tuple18): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple15.zip(other: Tuple19): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple15.zip(other: Tuple20): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple15.zip(other: Tuple21): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple15.zip(other: Tuple22): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple16.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple16.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple16.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple16.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple16.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple16.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple16.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple16.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple16.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple16.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple16.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple16.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple16.zip(other: Tuple13): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple16.zip(other: Tuple14): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple16.zip(other: Tuple15): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple16.zip(other: Tuple16): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple16.zip(other: Tuple17): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple16.zip(other: Tuple18): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple16.zip(other: Tuple19): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple16.zip(other: Tuple20): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple16.zip(other: Tuple21): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple16.zip(other: Tuple22): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple17.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple17.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple17.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple17.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple17.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple17.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple17.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple17.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple17.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple17.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple17.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple17.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple17.zip(other: Tuple13): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple17.zip(other: Tuple14): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple17.zip(other: Tuple15): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple17.zip(other: Tuple16): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple17.zip(other: Tuple17): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple17.zip(other: Tuple18): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple17.zip(other: Tuple19): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple17.zip(other: Tuple20): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple17.zip(other: Tuple21): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple17.zip(other: Tuple22): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple18.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple18.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple18.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple18.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple18.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple18.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple18.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple18.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple18.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple18.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple18.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple18.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple18.zip(other: Tuple13): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple18.zip(other: Tuple14): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple18.zip(other: Tuple15): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple18.zip(other: Tuple16): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple18.zip(other: Tuple17): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple18.zip(other: Tuple18): Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18())) +infix fun Tuple18.zip(other: Tuple19): Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18())) +infix fun Tuple18.zip(other: Tuple20): Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18())) +infix fun Tuple18.zip(other: Tuple21): Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18())) +infix fun Tuple18.zip(other: Tuple22): Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18())) +infix fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple19.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple19.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple19.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple19.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple19.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple19.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple19.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple19.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple19.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple19.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple19.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple19.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple19.zip(other: Tuple13): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple19.zip(other: Tuple14): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple19.zip(other: Tuple15): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple19.zip(other: Tuple16): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple19.zip(other: Tuple17): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple19.zip(other: Tuple18): Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18())) +infix fun Tuple19.zip(other: Tuple19): Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19())) +infix fun Tuple19.zip(other: Tuple20): Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19())) +infix fun Tuple19.zip(other: Tuple21): Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19())) +infix fun Tuple19.zip(other: Tuple22): Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19())) +infix fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple20.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple20.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple20.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple20.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple20.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple20.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple20.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple20.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple20.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple20.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple20.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple20.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple20.zip(other: Tuple13): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple20.zip(other: Tuple14): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple20.zip(other: Tuple15): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple20.zip(other: Tuple16): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple20.zip(other: Tuple17): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple20.zip(other: Tuple18): Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18())) +infix fun Tuple20.zip(other: Tuple19): Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19())) +infix fun Tuple20.zip(other: Tuple20): Tuple20, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple20, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19()), Tuple2(this._20(), other._20())) +infix fun Tuple20.zip(other: Tuple21): Tuple20, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple20, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19()), Tuple2(this._20(), other._20())) +infix fun Tuple20.zip(other: Tuple22): Tuple20, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple20, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19()), Tuple2(this._20(), other._20())) +infix fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple21.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple21.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple21.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple21.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple21.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple21.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple21.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple21.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple21.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple21.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple21.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple21.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple21.zip(other: Tuple13): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple21.zip(other: Tuple14): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple21.zip(other: Tuple15): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple21.zip(other: Tuple16): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple21.zip(other: Tuple17): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple21.zip(other: Tuple18): Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18())) +infix fun Tuple21.zip(other: Tuple19): Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19())) +infix fun Tuple21.zip(other: Tuple20): Tuple20, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple20, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19()), Tuple2(this._20(), other._20())) +infix fun Tuple21.zip(other: Tuple21): Tuple21, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple21, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19()), Tuple2(this._20(), other._20()), Tuple2(this._21(), other._21())) +infix fun Tuple21.zip(other: Tuple22): Tuple21, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple21, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19()), Tuple2(this._20(), other._20()), Tuple2(this._21(), other._21())) +infix fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple +infix fun EmptyTuple.zip(other: Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>): EmptyTuple = EmptyTuple +infix fun Tuple22.zip(other: Tuple1): Tuple1> = Tuple1>(Tuple2(this._1(), other._1())) +infix fun Tuple22.zip(other: Tuple2): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2())) +infix fun Tuple22.zip(other: Tuple3): Tuple3, Tuple2, Tuple2> = Tuple3, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3())) +infix fun Tuple22.zip(other: Tuple4): Tuple4, Tuple2, Tuple2, Tuple2> = Tuple4, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4())) +infix fun Tuple22.zip(other: Tuple5): Tuple5, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple5, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5())) +infix fun Tuple22.zip(other: Tuple6): Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple6, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6())) +infix fun Tuple22.zip(other: Tuple7): Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple7, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7())) +infix fun Tuple22.zip(other: Tuple8): Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple8, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8())) +infix fun Tuple22.zip(other: Tuple9): Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple9, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9())) +infix fun Tuple22.zip(other: Tuple10): Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple10, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10())) +infix fun Tuple22.zip(other: Tuple11): Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple11, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11())) +infix fun Tuple22.zip(other: Tuple12): Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple12, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12())) +infix fun Tuple22.zip(other: Tuple13): Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple13, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13())) +infix fun Tuple22.zip(other: Tuple14): Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple14, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14())) +infix fun Tuple22.zip(other: Tuple15): Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple15, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15())) +infix fun Tuple22.zip(other: Tuple16): Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple16, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16())) +infix fun Tuple22.zip(other: Tuple17): Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple17, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17())) +infix fun Tuple22.zip(other: Tuple18): Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple18, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18())) +infix fun Tuple22.zip(other: Tuple19): Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple19, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19())) +infix fun Tuple22.zip(other: Tuple20): Tuple20, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple20, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19()), Tuple2(this._20(), other._20())) +infix fun Tuple22.zip(other: Tuple21): Tuple21, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple21, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19()), Tuple2(this._20(), other._20()), Tuple2(this._21(), other._21())) +infix fun Tuple22.zip(other: Tuple22): Tuple22, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2> = Tuple22, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2, Tuple2>(Tuple2(this._1(), other._1()), Tuple2(this._2(), other._2()), Tuple2(this._3(), other._3()), Tuple2(this._4(), other._4()), Tuple2(this._5(), other._5()), Tuple2(this._6(), other._6()), Tuple2(this._7(), other._7()), Tuple2(this._8(), other._8()), Tuple2(this._9(), other._9()), Tuple2(this._10(), other._10()), Tuple2(this._11(), other._11()), Tuple2(this._12(), other._12()), Tuple2(this._13(), other._13()), Tuple2(this._14(), other._14()), Tuple2(this._15(), other._15()), Tuple2(this._16(), other._16()), Tuple2(this._17(), other._17()), Tuple2(this._18(), other._18()), Tuple2(this._19(), other._19()), Tuple2(this._20(), other._20()), Tuple2(this._21(), other._21()), Tuple2(this._22(), other._22())) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt index 720368fa..903da505 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt @@ -9,6 +9,8 @@ import org.jetbrains.kotlinx.spark.api.tuples.* import org.jetbrains.kotlinx.spark.api.* import scala.Tuple3 import io.kotest.matchers.types.shouldBeInstanceOf +import scala.Tuple1 +import scala.Tuple2 @Suppress("ShouldBeInstanceOfInspection", "RedundantLambdaArrow", "USELESS_IS_CHECK") class TuplesTest : ShouldSpec({ @@ -168,6 +170,22 @@ class TuplesTest : ShouldSpec({ t(1, 2).copy(_1 = 3, _2 = 4) shouldBe t(3, 4) } + should("Zip tuples") { + + (t(1, 2) zip t(3, 4)) shouldBe t(t(1, 3), t(2, 4)) + (t(1, 2, 3, 4, 5, 6) zip t("a", "b")) shouldBe t(t(1, "a"), t(2, "b")) + + (t(1, 2, 3, 4) zip t()) shouldBe t() + (t() zip t(1, 2, 3, 4)) shouldBe t() + + val a: Tuple2, Tuple2> = t("1", 2.0) zip t(3, 4L, "", "") + val b: Tuple3, Tuple2, Tuple2> = t("1", 2.0, 5f) zip t(3, 4L, "", "") + + val c: Tuple2, List>, Tuple2, Long>, Int>> = + t(1, mapOf(1 to "a")) zip t("13", 1L) zip t(listOf(null, 1), 1, 'c') + + } + } }) From b2350d2e136d0bc95b459b14c2a39eb317f1c39d Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 29 Mar 2022 14:37:24 +0200 Subject: [PATCH 108/213] updated readme for tuples --- README.md | 57 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/README.md b/README.md index 498334d2..79c25b73 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ We have opened a Spark Project Improvement Proposal: [Kotlin support for Apache - [toList and toArray](#tolist-and-toarray-methods) - [Column infix/operator functions](#column-infixoperator-functions) - [Overload Resolution Ambiguity](#overload-resolution-ambiguity) + - [Tuples](#tuples) - [Examples](#examples) - [Reporting issues/Support](#reporting-issuessupport) - [Code of Conduct](#code-of-conduct) @@ -204,6 +205,62 @@ We had to implement the functions `reduceGroups` and `reduce` for Kotlin separat We have a special example of work with this function in the [Groups example](https://github.com/JetBrains/kotlin-spark-api/blob/main/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt). +### Tuples + +Inspired by [ScalaTuplesInKotlin](https://github.com/Jolanrensen/ScalaTuplesInKotlin), the API introduces a lot of helper- extension functions +to make working with Scala Tuples a breeze in your Kotlin Spark projects. While working with data classes is encouraged, +for pair-like Datasets / RDDs / DStreams Scala Tuples are recommended, both for the useful helper functions, as well as Spark performance. +To enable these features +simply add +```kotlin +import org.jetbrains.kotlinx.spark.api.tuples.* +``` +to the start of your file. + +Tuple creation can be done in the following manners: +```kotlin +val a: Tuple2 = tupleOf(1, 2L) +val b: Tuple3 = t("test", 1.0, 2) +val c: Tuple3 = 5f X "aaa" X 1 +``` +Tuples can be expanded and merged like this: +```kotlin +// expand +tupleOf(1, 2).appendedBy(3) == tupleOf(1, 2, 3) +tupleOf(1, 2) + 3 == tupleOf(1, 2, 3) +tupleOf(2, 3).prependedBy(1) == tupleOf(1, 2, 3) +1 + tupleOf(2, 3) == tupleOf(1, 2, 3) + +// merge +tupleOf(1, 2) concat tupleOf(3, 4) == tupleOf(1, 2, 3, 4) +tupleOf(1, 2) + tupleOf(3, 4) == tupleOf(1, 2, 3, 4) + +// extend tuple instead of merging with it +tupleOf(1, 2).appendedBy(tupleOf(3, 4)) == tupleOf(1, 2, tupleOf(3, 4)) +tupleOf(1, 2) + tupleOf(tupleOf(3, 4)) == tupleOf(1, 2, tupleOf(3, 4)) +``` + +The concept of `EmptyTuple` from Scala 3 is also already present: +```kotlin +tupleOf(1).dropLast() == tupleOf() == emptyTuple() +``` + +Finally, all these helper functions are also baked in: + +- `componentX()` for destructuring: `val (a, b) = tuple` +- `dropLast() / dropFirst()` +- `contains(x)` for `if (x in tuple) { ... }` +- `iterator()` for `for (x in tuple) { ... }` +- `asIterable()` +- `size` +- `get(n) / get(i..j)` for `tuple[1] / tuple[i..j]` +- `getOrNull(n) / getOrNull(i..j)` +- `getAs(n) / getAs(i..j)` +- `getAsOrNull(n) / getAsOrNull(i..j)` +- `zip` +- `copy(_1 = ..., _5 = ...)` +- `first() / last()` +- `_1`, `_6` etc. (instead of `_1()`, `_6()`) ## Examples From 2b5d283461ee8acaa1f6bd1032f982fd42c9895d Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 29 Mar 2022 17:36:25 +0200 Subject: [PATCH 109/213] added map and take(Last)n functions. Needs docs and more tests --- .../kotlinx/spark/api/tuples/MapTuples.kt | 26 + .../kotlinx/spark/api/tuples/TupleTakeN.kt | 513 ++++++++++++++++++ .../jetbrains/kotlinx/spark/api/TuplesTest.kt | 15 + 3 files changed, 554 insertions(+) create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTakeN.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt new file mode 100644 index 00000000..ee2f517b --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt @@ -0,0 +1,26 @@ +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.* + +fun Tuple1.map(func: (T) -> R): Tuple1 = Tuple1(func(this._1())) +fun Tuple2.map(func: (T) -> R): Tuple2 = Tuple2(func(this._1()), func(this._2())) +fun Tuple3.map(func: (T) -> R): Tuple3 = Tuple3(func(this._1()), func(this._2()), func(this._3())) +fun Tuple4.map(func: (T) -> R): Tuple4 = Tuple4(func(this._1()), func(this._2()), func(this._3()), func(this._4())) +fun Tuple5.map(func: (T) -> R): Tuple5 = Tuple5(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5())) +fun Tuple6.map(func: (T) -> R): Tuple6 = Tuple6(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6())) +fun Tuple7.map(func: (T) -> R): Tuple7 = Tuple7(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7())) +fun Tuple8.map(func: (T) -> R): Tuple8 = Tuple8(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8())) +fun Tuple9.map(func: (T) -> R): Tuple9 = Tuple9(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9())) +fun Tuple10.map(func: (T) -> R): Tuple10 = Tuple10(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10())) +fun Tuple11.map(func: (T) -> R): Tuple11 = Tuple11(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11())) +fun Tuple12.map(func: (T) -> R): Tuple12 = Tuple12(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12())) +fun Tuple13.map(func: (T) -> R): Tuple13 = Tuple13(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13())) +fun Tuple14.map(func: (T) -> R): Tuple14 = Tuple14(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14())) +fun Tuple15.map(func: (T) -> R): Tuple15 = Tuple15(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15())) +fun Tuple16.map(func: (T) -> R): Tuple16 = Tuple16(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15()), func(this._16())) +fun Tuple17.map(func: (T) -> R): Tuple17 = Tuple17(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15()), func(this._16()), func(this._17())) +fun Tuple18.map(func: (T) -> R): Tuple18 = Tuple18(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15()), func(this._16()), func(this._17()), func(this._18())) +fun Tuple19.map(func: (T) -> R): Tuple19 = Tuple19(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15()), func(this._16()), func(this._17()), func(this._18()), func(this._19())) +fun Tuple20.map(func: (T) -> R): Tuple20 = Tuple20(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15()), func(this._16()), func(this._17()), func(this._18()), func(this._19()), func(this._20())) +fun Tuple21.map(func: (T) -> R): Tuple21 = Tuple21(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15()), func(this._16()), func(this._17()), func(this._18()), func(this._19()), func(this._20()), func(this._21())) +fun Tuple22.map(func: (T) -> R): Tuple22 = Tuple22(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15()), func(this._16()), func(this._17()), func(this._18()), func(this._19()), func(this._20()), func(this._21()), func(this._22())) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTakeN.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTakeN.kt new file mode 100644 index 00000000..76fd0f09 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTakeN.kt @@ -0,0 +1,513 @@ +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.* + +fun Tuple1.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple2.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple2.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple3.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple3.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple3.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple4.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple4.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple4.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple4.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple5.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple5.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple5.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple5.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple5.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple6.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple6.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple6.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple6.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple6.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple6.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple7.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple7.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple7.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple7.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple7.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple7.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple7.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple8.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple8.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple8.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple8.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple8.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple8.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple8.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple8.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple9.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple9.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple9.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple9.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple9.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple9.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple9.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple9.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple9.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple10.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple10.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple10.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple10.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple10.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple10.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple10.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple10.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple10.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple10.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple11.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple11.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple11.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple11.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple11.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple11.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple11.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple11.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple11.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple11.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple11.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple12.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple12.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple12.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple12.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple12.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple12.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple12.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple12.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple12.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple12.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple12.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple12.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple13.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple13.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple13.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple13.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple13.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple13.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple13.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple13.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple13.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple13.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple13.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple13.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple13.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple14.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple14.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple14.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple14.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple14.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple14.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple14.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple14.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple14.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple14.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple14.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple14.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple14.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple14.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple15.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple15.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple15.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple15.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple15.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple15.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple15.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple15.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple15.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple15.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple15.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple15.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple15.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple15.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple15.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple16.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple16.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple16.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple16.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple16.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple16.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple16.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple16.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple16.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple16.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple16.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple16.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple16.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple16.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple16.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple16.take16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple17.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple17.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple17.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple17.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple17.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple17.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple17.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple17.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple17.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple17.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple17.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple17.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple17.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple17.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple17.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple17.take16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple17.take17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple18.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple18.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple18.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple18.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple18.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple18.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple18.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple18.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple18.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple18.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple18.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple18.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple18.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple18.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple18.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple18.take16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple18.take17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple18.take18(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple19.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple19.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple19.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple19.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple19.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple19.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple19.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple19.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple19.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple19.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple19.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple19.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple19.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple19.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple19.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple19.take16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple19.take17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple19.take18(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple19.take19(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple20.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple20.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple20.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple20.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple20.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple20.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple20.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple20.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple20.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple20.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple20.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple20.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple20.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple20.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple20.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple20.take16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple20.take17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple20.take18(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple20.take19(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple20.take20(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple21.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple21.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple21.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple21.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple21.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple21.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple21.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple21.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple21.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple21.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple21.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple21.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple21.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple21.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple21.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple21.take16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple21.take17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple21.take18(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple21.take19(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple21.take20(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple21.take21(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple22.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple22.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple22.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple22.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple22.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple22.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple22.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple22.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple22.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple22.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple22.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple22.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple22.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple22.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple22.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple22.take16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple22.take17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple22.take18(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple22.take19(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple22.take20(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple22.take21(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple22.take22(): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) + + + +fun Tuple1.takeLast1(): Tuple1 = Tuple1(this._1()) +fun Tuple2.takeLast2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple2<*, T2>.takeLast1(): Tuple1 = Tuple1(this._2()) +fun Tuple3.takeLast3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple3<*, T2, T3>.takeLast2(): Tuple2 = Tuple2(this._2(), this._3()) +fun Tuple3<*, *, T3>.takeLast1(): Tuple1 = Tuple1(this._3()) +fun Tuple4.takeLast4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple4<*, T2, T3, T4>.takeLast3(): Tuple3 = Tuple3(this._2(), this._3(), this._4()) +fun Tuple4<*, *, T3, T4>.takeLast2(): Tuple2 = Tuple2(this._3(), this._4()) +fun Tuple4<*, *, *, T4>.takeLast1(): Tuple1 = Tuple1(this._4()) +fun Tuple5.takeLast5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple5<*, T2, T3, T4, T5>.takeLast4(): Tuple4 = Tuple4(this._2(), this._3(), this._4(), this._5()) +fun Tuple5<*, *, T3, T4, T5>.takeLast3(): Tuple3 = Tuple3(this._3(), this._4(), this._5()) +fun Tuple5<*, *, *, T4, T5>.takeLast2(): Tuple2 = Tuple2(this._4(), this._5()) +fun Tuple5<*, *, *, *, T5>.takeLast1(): Tuple1 = Tuple1(this._5()) +fun Tuple6.takeLast6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple6<*, T2, T3, T4, T5, T6>.takeLast5(): Tuple5 = Tuple5(this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple6<*, *, T3, T4, T5, T6>.takeLast4(): Tuple4 = Tuple4(this._3(), this._4(), this._5(), this._6()) +fun Tuple6<*, *, *, T4, T5, T6>.takeLast3(): Tuple3 = Tuple3(this._4(), this._5(), this._6()) +fun Tuple6<*, *, *, *, T5, T6>.takeLast2(): Tuple2 = Tuple2(this._5(), this._6()) +fun Tuple6<*, *, *, *, *, T6>.takeLast1(): Tuple1 = Tuple1(this._6()) +fun Tuple7.takeLast7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple7<*, T2, T3, T4, T5, T6, T7>.takeLast6(): Tuple6 = Tuple6(this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple7<*, *, T3, T4, T5, T6, T7>.takeLast5(): Tuple5 = Tuple5(this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple7<*, *, *, T4, T5, T6, T7>.takeLast4(): Tuple4 = Tuple4(this._4(), this._5(), this._6(), this._7()) +fun Tuple7<*, *, *, *, T5, T6, T7>.takeLast3(): Tuple3 = Tuple3(this._5(), this._6(), this._7()) +fun Tuple7<*, *, *, *, *, T6, T7>.takeLast2(): Tuple2 = Tuple2(this._6(), this._7()) +fun Tuple7<*, *, *, *, *, *, T7>.takeLast1(): Tuple1 = Tuple1(this._7()) +fun Tuple8.takeLast8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8<*, T2, T3, T4, T5, T6, T7, T8>.takeLast7(): Tuple7 = Tuple7(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8<*, *, T3, T4, T5, T6, T7, T8>.takeLast6(): Tuple6 = Tuple6(this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8<*, *, *, T4, T5, T6, T7, T8>.takeLast5(): Tuple5 = Tuple5(this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8<*, *, *, *, T5, T6, T7, T8>.takeLast4(): Tuple4 = Tuple4(this._5(), this._6(), this._7(), this._8()) +fun Tuple8<*, *, *, *, *, T6, T7, T8>.takeLast3(): Tuple3 = Tuple3(this._6(), this._7(), this._8()) +fun Tuple8<*, *, *, *, *, *, T7, T8>.takeLast2(): Tuple2 = Tuple2(this._7(), this._8()) +fun Tuple8<*, *, *, *, *, *, *, T8>.takeLast1(): Tuple1 = Tuple1(this._8()) +fun Tuple9.takeLast9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, T2, T3, T4, T5, T6, T7, T8, T9>.takeLast8(): Tuple8 = Tuple8(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, *, T3, T4, T5, T6, T7, T8, T9>.takeLast7(): Tuple7 = Tuple7(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, *, *, T4, T5, T6, T7, T8, T9>.takeLast6(): Tuple6 = Tuple6(this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, *, *, *, T5, T6, T7, T8, T9>.takeLast5(): Tuple5 = Tuple5(this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, *, *, *, *, T6, T7, T8, T9>.takeLast4(): Tuple4 = Tuple4(this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, *, *, *, *, *, T7, T8, T9>.takeLast3(): Tuple3 = Tuple3(this._7(), this._8(), this._9()) +fun Tuple9<*, *, *, *, *, *, *, T8, T9>.takeLast2(): Tuple2 = Tuple2(this._8(), this._9()) +fun Tuple9<*, *, *, *, *, *, *, *, T9>.takeLast1(): Tuple1 = Tuple1(this._9()) +fun Tuple10.takeLast10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, T2, T3, T4, T5, T6, T7, T8, T9, T10>.takeLast9(): Tuple9 = Tuple9(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, T3, T4, T5, T6, T7, T8, T9, T10>.takeLast8(): Tuple8 = Tuple8(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, T4, T5, T6, T7, T8, T9, T10>.takeLast7(): Tuple7 = Tuple7(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, *, T5, T6, T7, T8, T9, T10>.takeLast6(): Tuple6 = Tuple6(this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, *, *, T6, T7, T8, T9, T10>.takeLast5(): Tuple5 = Tuple5(this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, *, *, *, T7, T8, T9, T10>.takeLast4(): Tuple4 = Tuple4(this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, *, *, *, *, T8, T9, T10>.takeLast3(): Tuple3 = Tuple3(this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, *, *, *, *, *, T9, T10>.takeLast2(): Tuple2 = Tuple2(this._9(), this._10()) +fun Tuple10<*, *, *, *, *, *, *, *, *, T10>.takeLast1(): Tuple1 = Tuple1(this._10()) +fun Tuple11.takeLast11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.takeLast10(): Tuple10 = Tuple10(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11>.takeLast9(): Tuple9 = Tuple9(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11>.takeLast8(): Tuple8 = Tuple8(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, T5, T6, T7, T8, T9, T10, T11>.takeLast7(): Tuple7 = Tuple7(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, *, T6, T7, T8, T9, T10, T11>.takeLast6(): Tuple6 = Tuple6(this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, *, *, T7, T8, T9, T10, T11>.takeLast5(): Tuple5 = Tuple5(this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, *, *, *, T8, T9, T10, T11>.takeLast4(): Tuple4 = Tuple4(this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, *, *, *, *, T9, T10, T11>.takeLast3(): Tuple3 = Tuple3(this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, *, *, *, *, *, T10, T11>.takeLast2(): Tuple2 = Tuple2(this._10(), this._11()) +fun Tuple11<*, *, *, *, *, *, *, *, *, *, T11>.takeLast1(): Tuple1 = Tuple1(this._11()) +fun Tuple12.takeLast12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.takeLast11(): Tuple11 = Tuple11(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.takeLast10(): Tuple10 = Tuple10(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12>.takeLast9(): Tuple9 = Tuple9(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12>.takeLast8(): Tuple8 = Tuple8(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12>.takeLast7(): Tuple7 = Tuple7(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12>.takeLast6(): Tuple6 = Tuple6(this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, *, T8, T9, T10, T11, T12>.takeLast5(): Tuple5 = Tuple5(this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, *, *, T9, T10, T11, T12>.takeLast4(): Tuple4 = Tuple4(this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, *, *, *, T10, T11, T12>.takeLast3(): Tuple3 = Tuple3(this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, *, *, *, *, T11, T12>.takeLast2(): Tuple2 = Tuple2(this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, *, *, *, *, *, T12>.takeLast1(): Tuple1 = Tuple1(this._12()) +fun Tuple13.takeLast13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.takeLast12(): Tuple12 = Tuple12(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.takeLast11(): Tuple11 = Tuple11(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.takeLast10(): Tuple10 = Tuple10(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13>.takeLast9(): Tuple9 = Tuple9(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13>.takeLast8(): Tuple8 = Tuple8(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13>.takeLast7(): Tuple7 = Tuple7(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13>.takeLast6(): Tuple6 = Tuple6(this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13>.takeLast5(): Tuple5 = Tuple5(this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13>.takeLast4(): Tuple4 = Tuple4(this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, *, *, T11, T12, T13>.takeLast3(): Tuple3 = Tuple3(this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, T12, T13>.takeLast2(): Tuple2 = Tuple2(this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, *, T13>.takeLast1(): Tuple1 = Tuple1(this._13()) +fun Tuple14.takeLast14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.takeLast13(): Tuple13 = Tuple13(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.takeLast12(): Tuple12 = Tuple12(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.takeLast11(): Tuple11 = Tuple11(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.takeLast10(): Tuple10 = Tuple10(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14>.takeLast9(): Tuple9 = Tuple9(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14>.takeLast8(): Tuple8 = Tuple8(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14>.takeLast7(): Tuple7 = Tuple7(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14>.takeLast6(): Tuple6 = Tuple6(this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14>.takeLast5(): Tuple5 = Tuple5(this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14>.takeLast4(): Tuple4 = Tuple4(this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14>.takeLast3(): Tuple3 = Tuple3(this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14>.takeLast2(): Tuple2 = Tuple2(this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, *, T14>.takeLast1(): Tuple1 = Tuple1(this._14()) +fun Tuple15.takeLast15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.takeLast14(): Tuple14 = Tuple14(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.takeLast13(): Tuple13 = Tuple13(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.takeLast12(): Tuple12 = Tuple12(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.takeLast11(): Tuple11 = Tuple11(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.takeLast10(): Tuple10 = Tuple10(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15>.takeLast9(): Tuple9 = Tuple9(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15>.takeLast8(): Tuple8 = Tuple8(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15>.takeLast7(): Tuple7 = Tuple7(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15>.takeLast6(): Tuple6 = Tuple6(this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15>.takeLast5(): Tuple5 = Tuple5(this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15>.takeLast4(): Tuple4 = Tuple4(this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15>.takeLast3(): Tuple3 = Tuple3(this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15>.takeLast2(): Tuple2 = Tuple2(this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15>.takeLast1(): Tuple1 = Tuple1(this._15()) +fun Tuple16.takeLast16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.takeLast15(): Tuple15 = Tuple15(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.takeLast14(): Tuple14 = Tuple14(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.takeLast13(): Tuple13 = Tuple13(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.takeLast12(): Tuple12 = Tuple12(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.takeLast11(): Tuple11 = Tuple11(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.takeLast10(): Tuple10 = Tuple10(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16>.takeLast9(): Tuple9 = Tuple9(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16>.takeLast8(): Tuple8 = Tuple8(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16>.takeLast7(): Tuple7 = Tuple7(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16>.takeLast6(): Tuple6 = Tuple6(this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16>.takeLast5(): Tuple5 = Tuple5(this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16>.takeLast4(): Tuple4 = Tuple4(this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16>.takeLast3(): Tuple3 = Tuple3(this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16>.takeLast2(): Tuple2 = Tuple2(this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16>.takeLast1(): Tuple1 = Tuple1(this._16()) +fun Tuple17.takeLast17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast16(): Tuple16 = Tuple16(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast15(): Tuple15 = Tuple15(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast14(): Tuple14 = Tuple14(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast13(): Tuple13 = Tuple13(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast12(): Tuple12 = Tuple12(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast11(): Tuple11 = Tuple11(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast10(): Tuple10 = Tuple10(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast9(): Tuple9 = Tuple9(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast8(): Tuple8 = Tuple8(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17>.takeLast7(): Tuple7 = Tuple7(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17>.takeLast6(): Tuple6 = Tuple6(this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17>.takeLast5(): Tuple5 = Tuple5(this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17>.takeLast4(): Tuple4 = Tuple4(this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17>.takeLast3(): Tuple3 = Tuple3(this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17>.takeLast2(): Tuple2 = Tuple2(this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17>.takeLast1(): Tuple1 = Tuple1(this._17()) +fun Tuple18.takeLast18(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast17(): Tuple17 = Tuple17(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast16(): Tuple16 = Tuple16(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast15(): Tuple15 = Tuple15(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast14(): Tuple14 = Tuple14(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast13(): Tuple13 = Tuple13(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast12(): Tuple12 = Tuple12(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast11(): Tuple11 = Tuple11(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast10(): Tuple10 = Tuple10(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast9(): Tuple9 = Tuple9(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast8(): Tuple8 = Tuple8(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17, T18>.takeLast7(): Tuple7 = Tuple7(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17, T18>.takeLast6(): Tuple6 = Tuple6(this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17, T18>.takeLast5(): Tuple5 = Tuple5(this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17, T18>.takeLast4(): Tuple4 = Tuple4(this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18>.takeLast3(): Tuple3 = Tuple3(this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18>.takeLast2(): Tuple2 = Tuple2(this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18>.takeLast1(): Tuple1 = Tuple1(this._18()) +fun Tuple19.takeLast19(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast18(): Tuple18 = Tuple18(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast17(): Tuple17 = Tuple17(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast16(): Tuple16 = Tuple16(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast15(): Tuple15 = Tuple15(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast14(): Tuple14 = Tuple14(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast13(): Tuple13 = Tuple13(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast12(): Tuple12 = Tuple12(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast11(): Tuple11 = Tuple11(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast10(): Tuple10 = Tuple10(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast9(): Tuple9 = Tuple9(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast8(): Tuple8 = Tuple8(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17, T18, T19>.takeLast7(): Tuple7 = Tuple7(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17, T18, T19>.takeLast6(): Tuple6 = Tuple6(this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17, T18, T19>.takeLast5(): Tuple5 = Tuple5(this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18, T19>.takeLast4(): Tuple4 = Tuple4(this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18, T19>.takeLast3(): Tuple3 = Tuple3(this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18, T19>.takeLast2(): Tuple2 = Tuple2(this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19>.takeLast1(): Tuple1 = Tuple1(this._19()) +fun Tuple20.takeLast20(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast19(): Tuple19 = Tuple19(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast18(): Tuple18 = Tuple18(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast17(): Tuple17 = Tuple17(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast16(): Tuple16 = Tuple16(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast15(): Tuple15 = Tuple15(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast14(): Tuple14 = Tuple14(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast13(): Tuple13 = Tuple13(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast12(): Tuple12 = Tuple12(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast11(): Tuple11 = Tuple11(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast10(): Tuple10 = Tuple10(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast9(): Tuple9 = Tuple9(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast8(): Tuple8 = Tuple8(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17, T18, T19, T20>.takeLast7(): Tuple7 = Tuple7(this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17, T18, T19, T20>.takeLast6(): Tuple6 = Tuple6(this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18, T19, T20>.takeLast5(): Tuple5 = Tuple5(this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18, T19, T20>.takeLast4(): Tuple4 = Tuple4(this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18, T19, T20>.takeLast3(): Tuple3 = Tuple3(this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19, T20>.takeLast2(): Tuple2 = Tuple2(this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T20>.takeLast1(): Tuple1 = Tuple1(this._20()) +fun Tuple21.takeLast21(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast20(): Tuple20 = Tuple20(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast19(): Tuple19 = Tuple19(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast18(): Tuple18 = Tuple18(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast17(): Tuple17 = Tuple17(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast16(): Tuple16 = Tuple16(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast15(): Tuple15 = Tuple15(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast14(): Tuple14 = Tuple14(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast13(): Tuple13 = Tuple13(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast12(): Tuple12 = Tuple12(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast11(): Tuple11 = Tuple11(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast10(): Tuple10 = Tuple10(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast9(): Tuple9 = Tuple9(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast8(): Tuple8 = Tuple8(this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17, T18, T19, T20, T21>.takeLast7(): Tuple7 = Tuple7(this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18, T19, T20, T21>.takeLast6(): Tuple6 = Tuple6(this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18, T19, T20, T21>.takeLast5(): Tuple5 = Tuple5(this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18, T19, T20, T21>.takeLast4(): Tuple4 = Tuple4(this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19, T20, T21>.takeLast3(): Tuple3 = Tuple3(this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T20, T21>.takeLast2(): Tuple2 = Tuple2(this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T21>.takeLast1(): Tuple1 = Tuple1(this._21()) +fun Tuple22.takeLast22(): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast21(): Tuple21 = Tuple21(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast20(): Tuple20 = Tuple20(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast19(): Tuple19 = Tuple19(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast18(): Tuple18 = Tuple18(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast17(): Tuple17 = Tuple17(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast16(): Tuple16 = Tuple16(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast15(): Tuple15 = Tuple15(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast14(): Tuple14 = Tuple14(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast13(): Tuple13 = Tuple13(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast12(): Tuple12 = Tuple12(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast11(): Tuple11 = Tuple11(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast10(): Tuple10 = Tuple10(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast9(): Tuple9 = Tuple9(this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast8(): Tuple8 = Tuple8(this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18, T19, T20, T21, T22>.takeLast7(): Tuple7 = Tuple7(this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18, T19, T20, T21, T22>.takeLast6(): Tuple6 = Tuple6(this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18, T19, T20, T21, T22>.takeLast5(): Tuple5 = Tuple5(this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19, T20, T21, T22>.takeLast4(): Tuple4 = Tuple4(this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T20, T21, T22>.takeLast3(): Tuple3 = Tuple3(this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T21, T22>.takeLast2(): Tuple2 = Tuple2(this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T22>.takeLast1(): Tuple1 = Tuple1(this._22()) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt index 903da505..fe3b73a9 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt @@ -183,7 +183,22 @@ class TuplesTest : ShouldSpec({ val c: Tuple2, List>, Tuple2, Long>, Int>> = t(1, mapOf(1 to "a")) zip t("13", 1L) zip t(listOf(null, 1), 1, 'c') + } + + should("Map tuples") { + val a = t(1, A(), 3L, 4.0, 5).map { + when (it) { + is A -> A() + else -> it.toString() + } + } + } + + should("Take n from tuples") { + t(1, 2, 3).take2() shouldBe t(1, 2) + t(1, 2, 3).takeLast2() shouldBe t(2, 3) + val a = t(1.0, 2, 3L, 4f).takeLast3() } } From 1287cab92b8df5892329d91cb479b1e2f1cde045 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 30 Mar 2022 13:25:18 +0200 Subject: [PATCH 110/213] attempt to exclude tuples from qodana --- qodana.yaml | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 qodana.yaml diff --git a/qodana.yaml b/qodana.yaml new file mode 100644 index 00000000..1dd2df11 --- /dev/null +++ b/qodana.yaml @@ -0,0 +1,4 @@ +exclude: + - name: Tuples + paths: + - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples \ No newline at end of file From a07673dc1c0eeb833a0f3befeaa5797ebf721e3a Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 30 Mar 2022 13:26:42 +0200 Subject: [PATCH 111/213] attempt to exclude tuples from qodana --- qodana.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/qodana.yaml b/qodana.yaml index 1dd2df11..ff68770d 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -1,3 +1,5 @@ +profile: + name: qodana.recommended exclude: - name: Tuples paths: From 19be2879deaedb0e68399373c4b06e7a8e224b17 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 30 Mar 2022 15:12:33 +0200 Subject: [PATCH 112/213] drop, splitAt and tests --- .../kotlinx/spark/api/tuples/TupleDrop.kt | 557 ++++++++++++++++++ .../kotlinx/spark/api/tuples/TupleSplit.kt | 279 +++++++++ .../tuples/{TupleTakeN.kt => TupleTake.kt} | 45 +- .../api/tuples/{ZipTuples.kt => TupleZip.kt} | 0 .../jetbrains/kotlinx/spark/api/TuplesTest.kt | 26 +- qodana.yaml | 4 +- 6 files changed, 902 insertions(+), 9 deletions(-) create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt rename kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/{TupleTakeN.kt => TupleTake.kt} (97%) rename kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/{ZipTuples.kt => TupleZip.kt} (100%) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt new file mode 100644 index 00000000..da532935 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt @@ -0,0 +1,557 @@ +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.* + +fun Tuple1.drop0(): Tuple1 = Tuple1(this._1()) +fun Tuple1<*>.drop1(): EmptyTuple = EmptyTuple +fun Tuple2.drop0(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple2<*, T2>.drop1(): Tuple1 = Tuple1(this._2()) +fun Tuple2<*, *>.drop2(): EmptyTuple = EmptyTuple +fun Tuple3.drop0(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple3<*, T2, T3>.drop1(): Tuple2 = Tuple2(this._2(), this._3()) +fun Tuple3<*, *, T3>.drop2(): Tuple1 = Tuple1(this._3()) +fun Tuple3<*, *, *>.drop3(): EmptyTuple = EmptyTuple +fun Tuple4.drop0(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple4<*, T2, T3, T4>.drop1(): Tuple3 = Tuple3(this._2(), this._3(), this._4()) +fun Tuple4<*, *, T3, T4>.drop2(): Tuple2 = Tuple2(this._3(), this._4()) +fun Tuple4<*, *, *, T4>.drop3(): Tuple1 = Tuple1(this._4()) +fun Tuple4<*, *, *, *>.drop4(): EmptyTuple = EmptyTuple +fun Tuple5.drop0(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple5<*, T2, T3, T4, T5>.drop1(): Tuple4 = Tuple4(this._2(), this._3(), this._4(), this._5()) +fun Tuple5<*, *, T3, T4, T5>.drop2(): Tuple3 = Tuple3(this._3(), this._4(), this._5()) +fun Tuple5<*, *, *, T4, T5>.drop3(): Tuple2 = Tuple2(this._4(), this._5()) +fun Tuple5<*, *, *, *, T5>.drop4(): Tuple1 = Tuple1(this._5()) +fun Tuple5<*, *, *, *, *>.drop5(): EmptyTuple = EmptyTuple +fun Tuple6.drop0(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple6<*, T2, T3, T4, T5, T6>.drop1(): Tuple5 = Tuple5(this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple6<*, *, T3, T4, T5, T6>.drop2(): Tuple4 = Tuple4(this._3(), this._4(), this._5(), this._6()) +fun Tuple6<*, *, *, T4, T5, T6>.drop3(): Tuple3 = Tuple3(this._4(), this._5(), this._6()) +fun Tuple6<*, *, *, *, T5, T6>.drop4(): Tuple2 = Tuple2(this._5(), this._6()) +fun Tuple6<*, *, *, *, *, T6>.drop5(): Tuple1 = Tuple1(this._6()) +fun Tuple6<*, *, *, *, *, *>.drop6(): EmptyTuple = EmptyTuple +fun Tuple7.drop0(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple7<*, T2, T3, T4, T5, T6, T7>.drop1(): Tuple6 = Tuple6(this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple7<*, *, T3, T4, T5, T6, T7>.drop2(): Tuple5 = Tuple5(this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple7<*, *, *, T4, T5, T6, T7>.drop3(): Tuple4 = Tuple4(this._4(), this._5(), this._6(), this._7()) +fun Tuple7<*, *, *, *, T5, T6, T7>.drop4(): Tuple3 = Tuple3(this._5(), this._6(), this._7()) +fun Tuple7<*, *, *, *, *, T6, T7>.drop5(): Tuple2 = Tuple2(this._6(), this._7()) +fun Tuple7<*, *, *, *, *, *, T7>.drop6(): Tuple1 = Tuple1(this._7()) +fun Tuple7<*, *, *, *, *, *, *>.drop7(): EmptyTuple = EmptyTuple +fun Tuple8.drop0(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8<*, T2, T3, T4, T5, T6, T7, T8>.drop1(): Tuple7 = Tuple7(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8<*, *, T3, T4, T5, T6, T7, T8>.drop2(): Tuple6 = Tuple6(this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8<*, *, *, T4, T5, T6, T7, T8>.drop3(): Tuple5 = Tuple5(this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8<*, *, *, *, T5, T6, T7, T8>.drop4(): Tuple4 = Tuple4(this._5(), this._6(), this._7(), this._8()) +fun Tuple8<*, *, *, *, *, T6, T7, T8>.drop5(): Tuple3 = Tuple3(this._6(), this._7(), this._8()) +fun Tuple8<*, *, *, *, *, *, T7, T8>.drop6(): Tuple2 = Tuple2(this._7(), this._8()) +fun Tuple8<*, *, *, *, *, *, *, T8>.drop7(): Tuple1 = Tuple1(this._8()) +fun Tuple8<*, *, *, *, *, *, *, *>.drop8(): EmptyTuple = EmptyTuple +fun Tuple9.drop0(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, T2, T3, T4, T5, T6, T7, T8, T9>.drop1(): Tuple8 = Tuple8(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, *, T3, T4, T5, T6, T7, T8, T9>.drop2(): Tuple7 = Tuple7(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, *, *, T4, T5, T6, T7, T8, T9>.drop3(): Tuple6 = Tuple6(this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, *, *, *, T5, T6, T7, T8, T9>.drop4(): Tuple5 = Tuple5(this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, *, *, *, *, T6, T7, T8, T9>.drop5(): Tuple4 = Tuple4(this._6(), this._7(), this._8(), this._9()) +fun Tuple9<*, *, *, *, *, *, T7, T8, T9>.drop6(): Tuple3 = Tuple3(this._7(), this._8(), this._9()) +fun Tuple9<*, *, *, *, *, *, *, T8, T9>.drop7(): Tuple2 = Tuple2(this._8(), this._9()) +fun Tuple9<*, *, *, *, *, *, *, *, T9>.drop8(): Tuple1 = Tuple1(this._9()) +fun Tuple9<*, *, *, *, *, *, *, *, *>.drop9(): EmptyTuple = EmptyTuple +fun Tuple10.drop0(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, T2, T3, T4, T5, T6, T7, T8, T9, T10>.drop1(): Tuple9 = Tuple9(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, T3, T4, T5, T6, T7, T8, T9, T10>.drop2(): Tuple8 = Tuple8(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, T4, T5, T6, T7, T8, T9, T10>.drop3(): Tuple7 = Tuple7(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, *, T5, T6, T7, T8, T9, T10>.drop4(): Tuple6 = Tuple6(this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, *, *, T6, T7, T8, T9, T10>.drop5(): Tuple5 = Tuple5(this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, *, *, *, T7, T8, T9, T10>.drop6(): Tuple4 = Tuple4(this._7(), this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, *, *, *, *, T8, T9, T10>.drop7(): Tuple3 = Tuple3(this._8(), this._9(), this._10()) +fun Tuple10<*, *, *, *, *, *, *, *, T9, T10>.drop8(): Tuple2 = Tuple2(this._9(), this._10()) +fun Tuple10<*, *, *, *, *, *, *, *, *, T10>.drop9(): Tuple1 = Tuple1(this._10()) +fun Tuple10<*, *, *, *, *, *, *, *, *, *>.drop10(): EmptyTuple = EmptyTuple +fun Tuple11.drop0(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.drop1(): Tuple10 = Tuple10(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11>.drop2(): Tuple9 = Tuple9(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11>.drop3(): Tuple8 = Tuple8(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, T5, T6, T7, T8, T9, T10, T11>.drop4(): Tuple7 = Tuple7(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, *, T6, T7, T8, T9, T10, T11>.drop5(): Tuple6 = Tuple6(this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, *, *, T7, T8, T9, T10, T11>.drop6(): Tuple5 = Tuple5(this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, *, *, *, T8, T9, T10, T11>.drop7(): Tuple4 = Tuple4(this._8(), this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, *, *, *, *, T9, T10, T11>.drop8(): Tuple3 = Tuple3(this._9(), this._10(), this._11()) +fun Tuple11<*, *, *, *, *, *, *, *, *, T10, T11>.drop9(): Tuple2 = Tuple2(this._10(), this._11()) +fun Tuple11<*, *, *, *, *, *, *, *, *, *, T11>.drop10(): Tuple1 = Tuple1(this._11()) +fun Tuple11<*, *, *, *, *, *, *, *, *, *, *>.drop11(): EmptyTuple = EmptyTuple +fun Tuple12.drop0(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.drop1(): Tuple11 = Tuple11(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.drop2(): Tuple10 = Tuple10(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12>.drop3(): Tuple9 = Tuple9(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12>.drop4(): Tuple8 = Tuple8(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12>.drop5(): Tuple7 = Tuple7(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12>.drop6(): Tuple6 = Tuple6(this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, *, T8, T9, T10, T11, T12>.drop7(): Tuple5 = Tuple5(this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, *, *, T9, T10, T11, T12>.drop8(): Tuple4 = Tuple4(this._9(), this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, *, *, *, T10, T11, T12>.drop9(): Tuple3 = Tuple3(this._10(), this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, *, *, *, *, T11, T12>.drop10(): Tuple2 = Tuple2(this._11(), this._12()) +fun Tuple12<*, *, *, *, *, *, *, *, *, *, *, T12>.drop11(): Tuple1 = Tuple1(this._12()) +fun Tuple12<*, *, *, *, *, *, *, *, *, *, *, *>.drop12(): EmptyTuple = EmptyTuple +fun Tuple13.drop0(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.drop1(): Tuple12 = Tuple12(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.drop2(): Tuple11 = Tuple11(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.drop3(): Tuple10 = Tuple10(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13>.drop4(): Tuple9 = Tuple9(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13>.drop5(): Tuple8 = Tuple8(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13>.drop6(): Tuple7 = Tuple7(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13>.drop7(): Tuple6 = Tuple6(this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13>.drop8(): Tuple5 = Tuple5(this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13>.drop9(): Tuple4 = Tuple4(this._10(), this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, *, *, T11, T12, T13>.drop10(): Tuple3 = Tuple3(this._11(), this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, T12, T13>.drop11(): Tuple2 = Tuple2(this._12(), this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, *, T13>.drop12(): Tuple1 = Tuple1(this._13()) +fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, *, *>.drop13(): EmptyTuple = EmptyTuple +fun Tuple14.drop0(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.drop1(): Tuple13 = Tuple13(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.drop2(): Tuple12 = Tuple12(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.drop3(): Tuple11 = Tuple11(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.drop4(): Tuple10 = Tuple10(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14>.drop5(): Tuple9 = Tuple9(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14>.drop6(): Tuple8 = Tuple8(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14>.drop7(): Tuple7 = Tuple7(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14>.drop8(): Tuple6 = Tuple6(this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14>.drop9(): Tuple5 = Tuple5(this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14>.drop10(): Tuple4 = Tuple4(this._11(), this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14>.drop11(): Tuple3 = Tuple3(this._12(), this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14>.drop12(): Tuple2 = Tuple2(this._13(), this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, *, T14>.drop13(): Tuple1 = Tuple1(this._14()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, *, *>.drop14(): EmptyTuple = EmptyTuple +fun Tuple15.drop0(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.drop1(): Tuple14 = Tuple14(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.drop2(): Tuple13 = Tuple13(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.drop3(): Tuple12 = Tuple12(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.drop4(): Tuple11 = Tuple11(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.drop5(): Tuple10 = Tuple10(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15>.drop6(): Tuple9 = Tuple9(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15>.drop7(): Tuple8 = Tuple8(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15>.drop8(): Tuple7 = Tuple7(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15>.drop9(): Tuple6 = Tuple6(this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15>.drop10(): Tuple5 = Tuple5(this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15>.drop11(): Tuple4 = Tuple4(this._12(), this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15>.drop12(): Tuple3 = Tuple3(this._13(), this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15>.drop13(): Tuple2 = Tuple2(this._14(), this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15>.drop14(): Tuple1 = Tuple1(this._15()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.drop15(): EmptyTuple = EmptyTuple +fun Tuple16.drop0(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.drop1(): Tuple15 = Tuple15(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.drop2(): Tuple14 = Tuple14(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.drop3(): Tuple13 = Tuple13(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.drop4(): Tuple12 = Tuple12(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.drop5(): Tuple11 = Tuple11(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.drop6(): Tuple10 = Tuple10(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16>.drop7(): Tuple9 = Tuple9(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16>.drop8(): Tuple8 = Tuple8(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16>.drop9(): Tuple7 = Tuple7(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16>.drop10(): Tuple6 = Tuple6(this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16>.drop11(): Tuple5 = Tuple5(this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16>.drop12(): Tuple4 = Tuple4(this._13(), this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16>.drop13(): Tuple3 = Tuple3(this._14(), this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16>.drop14(): Tuple2 = Tuple2(this._15(), this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16>.drop15(): Tuple1 = Tuple1(this._16()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.drop16(): EmptyTuple = EmptyTuple +fun Tuple17.drop0(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.drop1(): Tuple16 = Tuple16(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.drop2(): Tuple15 = Tuple15(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.drop3(): Tuple14 = Tuple14(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.drop4(): Tuple13 = Tuple13(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.drop5(): Tuple12 = Tuple12(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.drop6(): Tuple11 = Tuple11(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.drop7(): Tuple10 = Tuple10(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17>.drop8(): Tuple9 = Tuple9(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17>.drop9(): Tuple8 = Tuple8(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17>.drop10(): Tuple7 = Tuple7(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17>.drop11(): Tuple6 = Tuple6(this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17>.drop12(): Tuple5 = Tuple5(this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17>.drop13(): Tuple4 = Tuple4(this._14(), this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17>.drop14(): Tuple3 = Tuple3(this._15(), this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17>.drop15(): Tuple2 = Tuple2(this._16(), this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17>.drop16(): Tuple1 = Tuple1(this._17()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.drop17(): EmptyTuple = EmptyTuple +fun Tuple18.drop0(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.drop1(): Tuple17 = Tuple17(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.drop2(): Tuple16 = Tuple16(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.drop3(): Tuple15 = Tuple15(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.drop4(): Tuple14 = Tuple14(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.drop5(): Tuple13 = Tuple13(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.drop6(): Tuple12 = Tuple12(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.drop7(): Tuple11 = Tuple11(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.drop8(): Tuple10 = Tuple10(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17, T18>.drop9(): Tuple9 = Tuple9(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17, T18>.drop10(): Tuple8 = Tuple8(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17, T18>.drop11(): Tuple7 = Tuple7(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17, T18>.drop12(): Tuple6 = Tuple6(this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17, T18>.drop13(): Tuple5 = Tuple5(this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17, T18>.drop14(): Tuple4 = Tuple4(this._15(), this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18>.drop15(): Tuple3 = Tuple3(this._16(), this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18>.drop16(): Tuple2 = Tuple2(this._17(), this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18>.drop17(): Tuple1 = Tuple1(this._18()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.drop18(): EmptyTuple = EmptyTuple +fun Tuple19.drop0(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.drop1(): Tuple18 = Tuple18(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.drop2(): Tuple17 = Tuple17(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.drop3(): Tuple16 = Tuple16(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.drop4(): Tuple15 = Tuple15(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.drop5(): Tuple14 = Tuple14(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.drop6(): Tuple13 = Tuple13(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.drop7(): Tuple12 = Tuple12(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.drop8(): Tuple11 = Tuple11(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.drop9(): Tuple10 = Tuple10(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17, T18, T19>.drop10(): Tuple9 = Tuple9(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17, T18, T19>.drop11(): Tuple8 = Tuple8(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17, T18, T19>.drop12(): Tuple7 = Tuple7(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17, T18, T19>.drop13(): Tuple6 = Tuple6(this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17, T18, T19>.drop14(): Tuple5 = Tuple5(this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18, T19>.drop15(): Tuple4 = Tuple4(this._16(), this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18, T19>.drop16(): Tuple3 = Tuple3(this._17(), this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18, T19>.drop17(): Tuple2 = Tuple2(this._18(), this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19>.drop18(): Tuple1 = Tuple1(this._19()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.drop19(): EmptyTuple = EmptyTuple +fun Tuple20.drop0(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop1(): Tuple19 = Tuple19(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop2(): Tuple18 = Tuple18(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop3(): Tuple17 = Tuple17(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop4(): Tuple16 = Tuple16(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop5(): Tuple15 = Tuple15(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop6(): Tuple14 = Tuple14(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop7(): Tuple13 = Tuple13(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop8(): Tuple12 = Tuple12(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop9(): Tuple11 = Tuple11(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop10(): Tuple10 = Tuple10(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17, T18, T19, T20>.drop11(): Tuple9 = Tuple9(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17, T18, T19, T20>.drop12(): Tuple8 = Tuple8(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17, T18, T19, T20>.drop13(): Tuple7 = Tuple7(this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17, T18, T19, T20>.drop14(): Tuple6 = Tuple6(this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18, T19, T20>.drop15(): Tuple5 = Tuple5(this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18, T19, T20>.drop16(): Tuple4 = Tuple4(this._17(), this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18, T19, T20>.drop17(): Tuple3 = Tuple3(this._18(), this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19, T20>.drop18(): Tuple2 = Tuple2(this._19(), this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T20>.drop19(): Tuple1 = Tuple1(this._20()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.drop20(): EmptyTuple = EmptyTuple +fun Tuple21.drop0(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop1(): Tuple20 = Tuple20(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop2(): Tuple19 = Tuple19(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop3(): Tuple18 = Tuple18(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop4(): Tuple17 = Tuple17(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop5(): Tuple16 = Tuple16(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop6(): Tuple15 = Tuple15(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop7(): Tuple14 = Tuple14(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop8(): Tuple13 = Tuple13(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop9(): Tuple12 = Tuple12(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop10(): Tuple11 = Tuple11(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop11(): Tuple10 = Tuple10(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17, T18, T19, T20, T21>.drop12(): Tuple9 = Tuple9(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17, T18, T19, T20, T21>.drop13(): Tuple8 = Tuple8(this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17, T18, T19, T20, T21>.drop14(): Tuple7 = Tuple7(this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18, T19, T20, T21>.drop15(): Tuple6 = Tuple6(this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18, T19, T20, T21>.drop16(): Tuple5 = Tuple5(this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18, T19, T20, T21>.drop17(): Tuple4 = Tuple4(this._18(), this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19, T20, T21>.drop18(): Tuple3 = Tuple3(this._19(), this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T20, T21>.drop19(): Tuple2 = Tuple2(this._20(), this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T21>.drop20(): Tuple1 = Tuple1(this._21()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.drop21(): EmptyTuple = EmptyTuple +fun Tuple22.drop0(): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop1(): Tuple21 = Tuple21(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop2(): Tuple20 = Tuple20(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop3(): Tuple19 = Tuple19(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop4(): Tuple18 = Tuple18(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop5(): Tuple17 = Tuple17(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop6(): Tuple16 = Tuple16(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop7(): Tuple15 = Tuple15(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop8(): Tuple14 = Tuple14(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop9(): Tuple13 = Tuple13(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop10(): Tuple12 = Tuple12(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop11(): Tuple11 = Tuple11(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop12(): Tuple10 = Tuple10(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16, T17, T18, T19, T20, T21, T22>.drop13(): Tuple9 = Tuple9(this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17, T18, T19, T20, T21, T22>.drop14(): Tuple8 = Tuple8(this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18, T19, T20, T21, T22>.drop15(): Tuple7 = Tuple7(this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18, T19, T20, T21, T22>.drop16(): Tuple6 = Tuple6(this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18, T19, T20, T21, T22>.drop17(): Tuple5 = Tuple5(this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19, T20, T21, T22>.drop18(): Tuple4 = Tuple4(this._19(), this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T20, T21, T22>.drop19(): Tuple3 = Tuple3(this._20(), this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T21, T22>.drop20(): Tuple2 = Tuple2(this._21(), this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T22>.drop21(): Tuple1 = Tuple1(this._22()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.drop22(): EmptyTuple = EmptyTuple + + + +fun Tuple1.dropLast0(): Tuple1 = Tuple1(this._1()) +fun Tuple1<*>.dropLast1(): EmptyTuple = EmptyTuple +fun Tuple2.dropLast0(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple2.dropLast1(): Tuple1 = Tuple1(this._1()) +fun Tuple2<*, *>.dropLast2(): EmptyTuple = EmptyTuple +fun Tuple3.dropLast0(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple3.dropLast1(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple3.dropLast2(): Tuple1 = Tuple1(this._1()) +fun Tuple3<*, *, *>.dropLast3(): EmptyTuple = EmptyTuple +fun Tuple4.dropLast0(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple4.dropLast1(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple4.dropLast2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple4.dropLast3(): Tuple1 = Tuple1(this._1()) +fun Tuple4<*, *, *, *>.dropLast4(): EmptyTuple = EmptyTuple +fun Tuple5.dropLast0(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple5.dropLast1(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple5.dropLast2(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple5.dropLast3(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple5.dropLast4(): Tuple1 = Tuple1(this._1()) +fun Tuple5<*, *, *, *, *>.dropLast5(): EmptyTuple = EmptyTuple +fun Tuple6.dropLast0(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple6.dropLast1(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple6.dropLast2(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple6.dropLast3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple6.dropLast4(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple6.dropLast5(): Tuple1 = Tuple1(this._1()) +fun Tuple6<*, *, *, *, *, *>.dropLast6(): EmptyTuple = EmptyTuple +fun Tuple7.dropLast0(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple7.dropLast1(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple7.dropLast2(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple7.dropLast3(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple7.dropLast4(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple7.dropLast5(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple7.dropLast6(): Tuple1 = Tuple1(this._1()) +fun Tuple7<*, *, *, *, *, *, *>.dropLast7(): EmptyTuple = EmptyTuple +fun Tuple8.dropLast0(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple8.dropLast1(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple8.dropLast2(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple8.dropLast3(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple8.dropLast4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple8.dropLast5(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple8.dropLast6(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple8.dropLast7(): Tuple1 = Tuple1(this._1()) +fun Tuple8<*, *, *, *, *, *, *, *>.dropLast8(): EmptyTuple = EmptyTuple +fun Tuple9.dropLast0(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple9.dropLast1(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple9.dropLast2(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple9.dropLast3(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple9.dropLast4(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple9.dropLast5(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple9.dropLast6(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple9.dropLast7(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple9.dropLast8(): Tuple1 = Tuple1(this._1()) +fun Tuple9<*, *, *, *, *, *, *, *, *>.dropLast9(): EmptyTuple = EmptyTuple +fun Tuple10.dropLast0(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple10.dropLast1(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple10.dropLast2(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple10.dropLast3(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple10.dropLast4(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple10.dropLast5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple10.dropLast6(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple10.dropLast7(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple10.dropLast8(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple10.dropLast9(): Tuple1 = Tuple1(this._1()) +fun Tuple10<*, *, *, *, *, *, *, *, *, *>.dropLast10(): EmptyTuple = EmptyTuple +fun Tuple11.dropLast0(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple11.dropLast1(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple11.dropLast2(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple11.dropLast3(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple11.dropLast4(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple11.dropLast5(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple11.dropLast6(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple11.dropLast7(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple11.dropLast8(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple11.dropLast9(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple11.dropLast10(): Tuple1 = Tuple1(this._1()) +fun Tuple11<*, *, *, *, *, *, *, *, *, *, *>.dropLast11(): EmptyTuple = EmptyTuple +fun Tuple12.dropLast0(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple12.dropLast1(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple12.dropLast2(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple12.dropLast3(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple12.dropLast4(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple12.dropLast5(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple12.dropLast6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple12.dropLast7(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple12.dropLast8(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple12.dropLast9(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple12.dropLast10(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple12.dropLast11(): Tuple1 = Tuple1(this._1()) +fun Tuple12<*, *, *, *, *, *, *, *, *, *, *, *>.dropLast12(): EmptyTuple = EmptyTuple +fun Tuple13.dropLast0(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple13.dropLast1(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple13.dropLast2(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple13.dropLast3(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple13.dropLast4(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple13.dropLast5(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple13.dropLast6(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple13.dropLast7(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple13.dropLast8(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple13.dropLast9(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple13.dropLast10(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple13.dropLast11(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple13.dropLast12(): Tuple1 = Tuple1(this._1()) +fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, *, *>.dropLast13(): EmptyTuple = EmptyTuple +fun Tuple14.dropLast0(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple14.dropLast1(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple14.dropLast2(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple14.dropLast3(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple14.dropLast4(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple14.dropLast5(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple14.dropLast6(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple14.dropLast7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple14.dropLast8(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple14.dropLast9(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple14.dropLast10(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple14.dropLast11(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple14.dropLast12(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple14.dropLast13(): Tuple1 = Tuple1(this._1()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, *, *>.dropLast14(): EmptyTuple = EmptyTuple +fun Tuple15.dropLast0(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple15.dropLast1(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple15.dropLast2(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple15.dropLast3(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple15.dropLast4(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple15.dropLast5(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple15.dropLast6(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple15.dropLast7(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple15.dropLast8(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple15.dropLast9(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple15.dropLast10(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple15.dropLast11(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple15.dropLast12(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple15.dropLast13(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple15.dropLast14(): Tuple1 = Tuple1(this._1()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.dropLast15(): EmptyTuple = EmptyTuple +fun Tuple16.dropLast0(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple16.dropLast1(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple16.dropLast2(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple16.dropLast3(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple16.dropLast4(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple16.dropLast5(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple16.dropLast6(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple16.dropLast7(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple16.dropLast8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple16.dropLast9(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple16.dropLast10(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple16.dropLast11(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple16.dropLast12(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple16.dropLast13(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple16.dropLast14(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple16.dropLast15(): Tuple1 = Tuple1(this._1()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.dropLast16(): EmptyTuple = EmptyTuple +fun Tuple17.dropLast0(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple17.dropLast1(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple17.dropLast2(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple17.dropLast3(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple17.dropLast4(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple17.dropLast5(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple17.dropLast6(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple17.dropLast7(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple17.dropLast8(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple17.dropLast9(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple17.dropLast10(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple17.dropLast11(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple17.dropLast12(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple17.dropLast13(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple17.dropLast14(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple17.dropLast15(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple17.dropLast16(): Tuple1 = Tuple1(this._1()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.dropLast17(): EmptyTuple = EmptyTuple +fun Tuple18.dropLast0(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple18.dropLast1(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple18.dropLast2(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple18.dropLast3(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple18.dropLast4(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple18.dropLast5(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple18.dropLast6(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple18.dropLast7(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple18.dropLast8(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple18.dropLast9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple18.dropLast10(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple18.dropLast11(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple18.dropLast12(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple18.dropLast13(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple18.dropLast14(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple18.dropLast15(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple18.dropLast16(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple18.dropLast17(): Tuple1 = Tuple1(this._1()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.dropLast18(): EmptyTuple = EmptyTuple +fun Tuple19.dropLast0(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple19.dropLast1(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple19.dropLast2(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple19.dropLast3(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple19.dropLast4(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple19.dropLast5(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple19.dropLast6(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple19.dropLast7(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple19.dropLast8(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple19.dropLast9(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple19.dropLast10(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple19.dropLast11(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple19.dropLast12(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple19.dropLast13(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple19.dropLast14(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple19.dropLast15(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple19.dropLast16(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple19.dropLast17(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple19.dropLast18(): Tuple1 = Tuple1(this._1()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.dropLast19(): EmptyTuple = EmptyTuple +fun Tuple20.dropLast0(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple20.dropLast1(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple20.dropLast2(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple20.dropLast3(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple20.dropLast4(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple20.dropLast5(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple20.dropLast6(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple20.dropLast7(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple20.dropLast8(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple20.dropLast9(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple20.dropLast10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple20.dropLast11(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple20.dropLast12(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple20.dropLast13(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple20.dropLast14(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple20.dropLast15(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple20.dropLast16(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple20.dropLast17(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple20.dropLast18(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple20.dropLast19(): Tuple1 = Tuple1(this._1()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.dropLast20(): EmptyTuple = EmptyTuple +fun Tuple21.dropLast0(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple21.dropLast1(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple21.dropLast2(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple21.dropLast3(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple21.dropLast4(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple21.dropLast5(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple21.dropLast6(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple21.dropLast7(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple21.dropLast8(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple21.dropLast9(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple21.dropLast10(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple21.dropLast11(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple21.dropLast12(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple21.dropLast13(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple21.dropLast14(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple21.dropLast15(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple21.dropLast16(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple21.dropLast17(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple21.dropLast18(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple21.dropLast19(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple21.dropLast20(): Tuple1 = Tuple1(this._1()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.dropLast21(): EmptyTuple = EmptyTuple +fun Tuple22.dropLast0(): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) +fun Tuple22.dropLast1(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple22.dropLast2(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple22.dropLast3(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple22.dropLast4(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple22.dropLast5(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple22.dropLast6(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple22.dropLast7(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple22.dropLast8(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple22.dropLast9(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple22.dropLast10(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple22.dropLast11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple22.dropLast12(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple22.dropLast13(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple22.dropLast14(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple22.dropLast15(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple22.dropLast16(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple22.dropLast17(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple22.dropLast18(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple22.dropLast19(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple22.dropLast20(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple22.dropLast21(): Tuple1 = Tuple1(this._1()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.dropLast22(): EmptyTuple = EmptyTuple diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt new file mode 100644 index 00000000..c73b1263 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt @@ -0,0 +1,279 @@ +package org.jetbrains.kotlinx.spark.api.tuples + +import scala.* + +fun Tuple1.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple1(this._1())) +fun Tuple1.splitAt1(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple1(this._1()), EmptyTuple) +fun Tuple2.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple2(this._1(), this._2())) +fun Tuple2.splitAt1(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple1(this._1()), Tuple1(this._2())) +fun Tuple2.splitAt2(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple2(this._1(), this._2()), EmptyTuple) +fun Tuple3.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple3(this._1(), this._2(), this._3())) +fun Tuple3.splitAt1(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple1(this._1()), Tuple2(this._2(), this._3())) +fun Tuple3.splitAt2(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple2(this._1(), this._2()), Tuple1(this._3())) +fun Tuple3.splitAt3(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple3(this._1(), this._2(), this._3()), EmptyTuple) +fun Tuple4.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple4(this._1(), this._2(), this._3(), this._4())) +fun Tuple4.splitAt1(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple1(this._1()), Tuple3(this._2(), this._3(), this._4())) +fun Tuple4.splitAt2(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple2(this._1(), this._2()), Tuple2(this._3(), this._4())) +fun Tuple4.splitAt3(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple3(this._1(), this._2(), this._3()), Tuple1(this._4())) +fun Tuple4.splitAt4(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple4(this._1(), this._2(), this._3(), this._4()), EmptyTuple) +fun Tuple5.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple5(this._1(), this._2(), this._3(), this._4(), this._5())) +fun Tuple5.splitAt1(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple1(this._1()), Tuple4(this._2(), this._3(), this._4(), this._5())) +fun Tuple5.splitAt2(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple2(this._1(), this._2()), Tuple3(this._3(), this._4(), this._5())) +fun Tuple5.splitAt3(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple3(this._1(), this._2(), this._3()), Tuple2(this._4(), this._5())) +fun Tuple5.splitAt4(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple1(this._5())) +fun Tuple5.splitAt5(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), EmptyTuple) +fun Tuple6.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6())) +fun Tuple6.splitAt1(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple1(this._1()), Tuple5(this._2(), this._3(), this._4(), this._5(), this._6())) +fun Tuple6.splitAt2(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple2(this._1(), this._2()), Tuple4(this._3(), this._4(), this._5(), this._6())) +fun Tuple6.splitAt3(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple3(this._1(), this._2(), this._3()), Tuple3(this._4(), this._5(), this._6())) +fun Tuple6.splitAt4(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple2(this._5(), this._6())) +fun Tuple6.splitAt5(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple1(this._6())) +fun Tuple6.splitAt6(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), EmptyTuple) +fun Tuple7.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7())) +fun Tuple7.splitAt1(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple1(this._1()), Tuple6(this._2(), this._3(), this._4(), this._5(), this._6(), this._7())) +fun Tuple7.splitAt2(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple2(this._1(), this._2()), Tuple5(this._3(), this._4(), this._5(), this._6(), this._7())) +fun Tuple7.splitAt3(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple3(this._1(), this._2(), this._3()), Tuple4(this._4(), this._5(), this._6(), this._7())) +fun Tuple7.splitAt4(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple3(this._5(), this._6(), this._7())) +fun Tuple7.splitAt5(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple2(this._6(), this._7())) +fun Tuple7.splitAt6(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple1(this._7())) +fun Tuple7.splitAt7(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), EmptyTuple) +fun Tuple8.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8())) +fun Tuple8.splitAt1(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple1(this._1()), Tuple7(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8())) +fun Tuple8.splitAt2(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple2(this._1(), this._2()), Tuple6(this._3(), this._4(), this._5(), this._6(), this._7(), this._8())) +fun Tuple8.splitAt3(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple3(this._1(), this._2(), this._3()), Tuple5(this._4(), this._5(), this._6(), this._7(), this._8())) +fun Tuple8.splitAt4(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple4(this._5(), this._6(), this._7(), this._8())) +fun Tuple8.splitAt5(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple3(this._6(), this._7(), this._8())) +fun Tuple8.splitAt6(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple2(this._7(), this._8())) +fun Tuple8.splitAt7(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple1(this._8())) +fun Tuple8.splitAt8(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), EmptyTuple) +fun Tuple9.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9())) +fun Tuple9.splitAt1(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple1(this._1()), Tuple8(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9())) +fun Tuple9.splitAt2(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple2(this._1(), this._2()), Tuple7(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9())) +fun Tuple9.splitAt3(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple3(this._1(), this._2(), this._3()), Tuple6(this._4(), this._5(), this._6(), this._7(), this._8(), this._9())) +fun Tuple9.splitAt4(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple5(this._5(), this._6(), this._7(), this._8(), this._9())) +fun Tuple9.splitAt5(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple4(this._6(), this._7(), this._8(), this._9())) +fun Tuple9.splitAt6(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple3(this._7(), this._8(), this._9())) +fun Tuple9.splitAt7(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple2(this._8(), this._9())) +fun Tuple9.splitAt8(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple1(this._9())) +fun Tuple9.splitAt9(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), EmptyTuple) +fun Tuple10.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10())) +fun Tuple10.splitAt1(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple1(this._1()), Tuple9(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10())) +fun Tuple10.splitAt2(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple2(this._1(), this._2()), Tuple8(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10())) +fun Tuple10.splitAt3(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple3(this._1(), this._2(), this._3()), Tuple7(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10())) +fun Tuple10.splitAt4(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple6(this._5(), this._6(), this._7(), this._8(), this._9(), this._10())) +fun Tuple10.splitAt5(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple5(this._6(), this._7(), this._8(), this._9(), this._10())) +fun Tuple10.splitAt6(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple4(this._7(), this._8(), this._9(), this._10())) +fun Tuple10.splitAt7(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple3(this._8(), this._9(), this._10())) +fun Tuple10.splitAt8(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple2(this._9(), this._10())) +fun Tuple10.splitAt9(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple1(this._10())) +fun Tuple10.splitAt10(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), EmptyTuple) +fun Tuple11.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11())) +fun Tuple11.splitAt1(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple1(this._1()), Tuple10(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11())) +fun Tuple11.splitAt2(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple2(this._1(), this._2()), Tuple9(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11())) +fun Tuple11.splitAt3(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple3(this._1(), this._2(), this._3()), Tuple8(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11())) +fun Tuple11.splitAt4(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple7(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11())) +fun Tuple11.splitAt5(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple6(this._6(), this._7(), this._8(), this._9(), this._10(), this._11())) +fun Tuple11.splitAt6(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple5(this._7(), this._8(), this._9(), this._10(), this._11())) +fun Tuple11.splitAt7(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple4(this._8(), this._9(), this._10(), this._11())) +fun Tuple11.splitAt8(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple3(this._9(), this._10(), this._11())) +fun Tuple11.splitAt9(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple2(this._10(), this._11())) +fun Tuple11.splitAt10(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple1(this._11())) +fun Tuple11.splitAt11(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), EmptyTuple) +fun Tuple12.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12())) +fun Tuple12.splitAt1(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple1(this._1()), Tuple11(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12())) +fun Tuple12.splitAt2(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple2(this._1(), this._2()), Tuple10(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12())) +fun Tuple12.splitAt3(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple3(this._1(), this._2(), this._3()), Tuple9(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12())) +fun Tuple12.splitAt4(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple8(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12())) +fun Tuple12.splitAt5(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple7(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12())) +fun Tuple12.splitAt6(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple6(this._7(), this._8(), this._9(), this._10(), this._11(), this._12())) +fun Tuple12.splitAt7(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple5(this._8(), this._9(), this._10(), this._11(), this._12())) +fun Tuple12.splitAt8(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple4(this._9(), this._10(), this._11(), this._12())) +fun Tuple12.splitAt9(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple3(this._10(), this._11(), this._12())) +fun Tuple12.splitAt10(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple2(this._11(), this._12())) +fun Tuple12.splitAt11(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple1(this._12())) +fun Tuple12.splitAt12(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), EmptyTuple) +fun Tuple13.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13())) +fun Tuple13.splitAt1(): Tuple2, Tuple12> = Tuple2, Tuple12>(Tuple1(this._1()), Tuple12(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13())) +fun Tuple13.splitAt2(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple2(this._1(), this._2()), Tuple11(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13())) +fun Tuple13.splitAt3(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple3(this._1(), this._2(), this._3()), Tuple10(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13())) +fun Tuple13.splitAt4(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple9(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13())) +fun Tuple13.splitAt5(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple8(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13())) +fun Tuple13.splitAt6(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple7(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13())) +fun Tuple13.splitAt7(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple6(this._8(), this._9(), this._10(), this._11(), this._12(), this._13())) +fun Tuple13.splitAt8(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple5(this._9(), this._10(), this._11(), this._12(), this._13())) +fun Tuple13.splitAt9(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple4(this._10(), this._11(), this._12(), this._13())) +fun Tuple13.splitAt10(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple3(this._11(), this._12(), this._13())) +fun Tuple13.splitAt11(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple2(this._12(), this._13())) +fun Tuple13.splitAt12(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), Tuple1(this._13())) +fun Tuple13.splitAt13(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()), EmptyTuple) +fun Tuple14.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt1(): Tuple2, Tuple13> = Tuple2, Tuple13>(Tuple1(this._1()), Tuple13(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt2(): Tuple2, Tuple12> = Tuple2, Tuple12>(Tuple2(this._1(), this._2()), Tuple12(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt3(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple3(this._1(), this._2(), this._3()), Tuple11(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt4(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple10(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt5(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple9(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt6(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple8(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt7(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple7(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt8(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple6(this._9(), this._10(), this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt9(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple5(this._10(), this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt10(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple4(this._11(), this._12(), this._13(), this._14())) +fun Tuple14.splitAt11(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple3(this._12(), this._13(), this._14())) +fun Tuple14.splitAt12(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), Tuple2(this._13(), this._14())) +fun Tuple14.splitAt13(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()), Tuple1(this._14())) +fun Tuple14.splitAt14(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()), EmptyTuple) +fun Tuple15.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt1(): Tuple2, Tuple14> = Tuple2, Tuple14>(Tuple1(this._1()), Tuple14(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt2(): Tuple2, Tuple13> = Tuple2, Tuple13>(Tuple2(this._1(), this._2()), Tuple13(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt3(): Tuple2, Tuple12> = Tuple2, Tuple12>(Tuple3(this._1(), this._2(), this._3()), Tuple12(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt4(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple11(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt5(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple10(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt6(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple9(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt7(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple8(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt8(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple7(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt9(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple6(this._10(), this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt10(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple5(this._11(), this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt11(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple4(this._12(), this._13(), this._14(), this._15())) +fun Tuple15.splitAt12(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), Tuple3(this._13(), this._14(), this._15())) +fun Tuple15.splitAt13(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()), Tuple2(this._14(), this._15())) +fun Tuple15.splitAt14(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()), Tuple1(this._15())) +fun Tuple15.splitAt15(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()), EmptyTuple) +fun Tuple16.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt1(): Tuple2, Tuple15> = Tuple2, Tuple15>(Tuple1(this._1()), Tuple15(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt2(): Tuple2, Tuple14> = Tuple2, Tuple14>(Tuple2(this._1(), this._2()), Tuple14(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt3(): Tuple2, Tuple13> = Tuple2, Tuple13>(Tuple3(this._1(), this._2(), this._3()), Tuple13(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt4(): Tuple2, Tuple12> = Tuple2, Tuple12>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple12(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt5(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple11(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt6(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple10(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt7(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple9(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt8(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple8(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt9(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple7(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt10(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple6(this._11(), this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt11(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple5(this._12(), this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt12(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), Tuple4(this._13(), this._14(), this._15(), this._16())) +fun Tuple16.splitAt13(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()), Tuple3(this._14(), this._15(), this._16())) +fun Tuple16.splitAt14(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()), Tuple2(this._15(), this._16())) +fun Tuple16.splitAt15(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()), Tuple1(this._16())) +fun Tuple16.splitAt16(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()), EmptyTuple) +fun Tuple17.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt1(): Tuple2, Tuple16> = Tuple2, Tuple16>(Tuple1(this._1()), Tuple16(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt2(): Tuple2, Tuple15> = Tuple2, Tuple15>(Tuple2(this._1(), this._2()), Tuple15(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt3(): Tuple2, Tuple14> = Tuple2, Tuple14>(Tuple3(this._1(), this._2(), this._3()), Tuple14(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt4(): Tuple2, Tuple13> = Tuple2, Tuple13>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple13(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt5(): Tuple2, Tuple12> = Tuple2, Tuple12>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple12(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt6(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple11(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt7(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple10(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt8(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple9(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt9(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple8(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt10(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple7(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt11(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple6(this._12(), this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt12(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), Tuple5(this._13(), this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt13(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()), Tuple4(this._14(), this._15(), this._16(), this._17())) +fun Tuple17.splitAt14(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()), Tuple3(this._15(), this._16(), this._17())) +fun Tuple17.splitAt15(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()), Tuple2(this._16(), this._17())) +fun Tuple17.splitAt16(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()), Tuple1(this._17())) +fun Tuple17.splitAt17(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()), EmptyTuple) +fun Tuple18.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt1(): Tuple2, Tuple17> = Tuple2, Tuple17>(Tuple1(this._1()), Tuple17(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt2(): Tuple2, Tuple16> = Tuple2, Tuple16>(Tuple2(this._1(), this._2()), Tuple16(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt3(): Tuple2, Tuple15> = Tuple2, Tuple15>(Tuple3(this._1(), this._2(), this._3()), Tuple15(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt4(): Tuple2, Tuple14> = Tuple2, Tuple14>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple14(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt5(): Tuple2, Tuple13> = Tuple2, Tuple13>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple13(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt6(): Tuple2, Tuple12> = Tuple2, Tuple12>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple12(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt7(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple11(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt8(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple10(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt9(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple9(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt10(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple8(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt11(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple7(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt12(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), Tuple6(this._13(), this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt13(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()), Tuple5(this._14(), this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt14(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()), Tuple4(this._15(), this._16(), this._17(), this._18())) +fun Tuple18.splitAt15(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()), Tuple3(this._16(), this._17(), this._18())) +fun Tuple18.splitAt16(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()), Tuple2(this._17(), this._18())) +fun Tuple18.splitAt17(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()), Tuple1(this._18())) +fun Tuple18.splitAt18(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()), EmptyTuple) +fun Tuple19.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt1(): Tuple2, Tuple18> = Tuple2, Tuple18>(Tuple1(this._1()), Tuple18(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt2(): Tuple2, Tuple17> = Tuple2, Tuple17>(Tuple2(this._1(), this._2()), Tuple17(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt3(): Tuple2, Tuple16> = Tuple2, Tuple16>(Tuple3(this._1(), this._2(), this._3()), Tuple16(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt4(): Tuple2, Tuple15> = Tuple2, Tuple15>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple15(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt5(): Tuple2, Tuple14> = Tuple2, Tuple14>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple14(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt6(): Tuple2, Tuple13> = Tuple2, Tuple13>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple13(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt7(): Tuple2, Tuple12> = Tuple2, Tuple12>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple12(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt8(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple11(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt9(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple10(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt10(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple9(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt11(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple8(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt12(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), Tuple7(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt13(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()), Tuple6(this._14(), this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt14(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()), Tuple5(this._15(), this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt15(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()), Tuple4(this._16(), this._17(), this._18(), this._19())) +fun Tuple19.splitAt16(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()), Tuple3(this._17(), this._18(), this._19())) +fun Tuple19.splitAt17(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()), Tuple2(this._18(), this._19())) +fun Tuple19.splitAt18(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()), Tuple1(this._19())) +fun Tuple19.splitAt19(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()), EmptyTuple) +fun Tuple20.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt1(): Tuple2, Tuple19> = Tuple2, Tuple19>(Tuple1(this._1()), Tuple19(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt2(): Tuple2, Tuple18> = Tuple2, Tuple18>(Tuple2(this._1(), this._2()), Tuple18(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt3(): Tuple2, Tuple17> = Tuple2, Tuple17>(Tuple3(this._1(), this._2(), this._3()), Tuple17(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt4(): Tuple2, Tuple16> = Tuple2, Tuple16>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple16(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt5(): Tuple2, Tuple15> = Tuple2, Tuple15>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple15(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt6(): Tuple2, Tuple14> = Tuple2, Tuple14>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple14(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt7(): Tuple2, Tuple13> = Tuple2, Tuple13>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple13(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt8(): Tuple2, Tuple12> = Tuple2, Tuple12>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple12(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt9(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple11(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt10(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple10(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt11(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple9(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt12(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), Tuple8(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt13(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()), Tuple7(this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt14(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()), Tuple6(this._15(), this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt15(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()), Tuple5(this._16(), this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt16(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()), Tuple4(this._17(), this._18(), this._19(), this._20())) +fun Tuple20.splitAt17(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()), Tuple3(this._18(), this._19(), this._20())) +fun Tuple20.splitAt18(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()), Tuple2(this._19(), this._20())) +fun Tuple20.splitAt19(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()), Tuple1(this._20())) +fun Tuple20.splitAt20(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()), EmptyTuple) +fun Tuple21.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt1(): Tuple2, Tuple20> = Tuple2, Tuple20>(Tuple1(this._1()), Tuple20(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt2(): Tuple2, Tuple19> = Tuple2, Tuple19>(Tuple2(this._1(), this._2()), Tuple19(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt3(): Tuple2, Tuple18> = Tuple2, Tuple18>(Tuple3(this._1(), this._2(), this._3()), Tuple18(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt4(): Tuple2, Tuple17> = Tuple2, Tuple17>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple17(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt5(): Tuple2, Tuple16> = Tuple2, Tuple16>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple16(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt6(): Tuple2, Tuple15> = Tuple2, Tuple15>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple15(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt7(): Tuple2, Tuple14> = Tuple2, Tuple14>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple14(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt8(): Tuple2, Tuple13> = Tuple2, Tuple13>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple13(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt9(): Tuple2, Tuple12> = Tuple2, Tuple12>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple12(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt10(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple11(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt11(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple10(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt12(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), Tuple9(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt13(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()), Tuple8(this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt14(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()), Tuple7(this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt15(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()), Tuple6(this._16(), this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt16(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()), Tuple5(this._17(), this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt17(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()), Tuple4(this._18(), this._19(), this._20(), this._21())) +fun Tuple21.splitAt18(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()), Tuple3(this._19(), this._20(), this._21())) +fun Tuple21.splitAt19(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()), Tuple2(this._20(), this._21())) +fun Tuple21.splitAt20(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()), Tuple1(this._21())) +fun Tuple21.splitAt21(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()), EmptyTuple) +fun Tuple22.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt1(): Tuple2, Tuple21> = Tuple2, Tuple21>(Tuple1(this._1()), Tuple21(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt2(): Tuple2, Tuple20> = Tuple2, Tuple20>(Tuple2(this._1(), this._2()), Tuple20(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt3(): Tuple2, Tuple19> = Tuple2, Tuple19>(Tuple3(this._1(), this._2(), this._3()), Tuple19(this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt4(): Tuple2, Tuple18> = Tuple2, Tuple18>(Tuple4(this._1(), this._2(), this._3(), this._4()), Tuple18(this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt5(): Tuple2, Tuple17> = Tuple2, Tuple17>(Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()), Tuple17(this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt6(): Tuple2, Tuple16> = Tuple2, Tuple16>(Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()), Tuple16(this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt7(): Tuple2, Tuple15> = Tuple2, Tuple15>(Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()), Tuple15(this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt8(): Tuple2, Tuple14> = Tuple2, Tuple14>(Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()), Tuple14(this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt9(): Tuple2, Tuple13> = Tuple2, Tuple13>(Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()), Tuple13(this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt10(): Tuple2, Tuple12> = Tuple2, Tuple12>(Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()), Tuple12(this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt11(): Tuple2, Tuple11> = Tuple2, Tuple11>(Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()), Tuple11(this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt12(): Tuple2, Tuple10> = Tuple2, Tuple10>(Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()), Tuple10(this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt13(): Tuple2, Tuple9> = Tuple2, Tuple9>(Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()), Tuple9(this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt14(): Tuple2, Tuple8> = Tuple2, Tuple8>(Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()), Tuple8(this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt15(): Tuple2, Tuple7> = Tuple2, Tuple7>(Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()), Tuple7(this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt16(): Tuple2, Tuple6> = Tuple2, Tuple6>(Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()), Tuple6(this._17(), this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt17(): Tuple2, Tuple5> = Tuple2, Tuple5>(Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()), Tuple5(this._18(), this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt18(): Tuple2, Tuple4> = Tuple2, Tuple4>(Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()), Tuple4(this._19(), this._20(), this._21(), this._22())) +fun Tuple22.splitAt19(): Tuple2, Tuple3> = Tuple2, Tuple3>(Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()), Tuple3(this._20(), this._21(), this._22())) +fun Tuple22.splitAt20(): Tuple2, Tuple2> = Tuple2, Tuple2>(Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()), Tuple2(this._21(), this._22())) +fun Tuple22.splitAt21(): Tuple2, Tuple1> = Tuple2, Tuple1>(Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()), Tuple1(this._22())) +fun Tuple22.splitAt22(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()), EmptyTuple) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTakeN.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt similarity index 97% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTakeN.kt rename to kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt index 76fd0f09..82b18c30 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTakeN.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt @@ -1,28 +1,34 @@ package org.jetbrains.kotlinx.spark.api.tuples import scala.* - +fun Tuple1<*>.take0(): EmptyTuple = EmptyTuple fun Tuple1.take1(): Tuple1 = Tuple1(this._1()) +fun Tuple2<*, *>.take0(): EmptyTuple = EmptyTuple fun Tuple2.take1(): Tuple1 = Tuple1(this._1()) fun Tuple2.take2(): Tuple2 = Tuple2(this._1(), this._2()) +fun Tuple3<*, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple3.take1(): Tuple1 = Tuple1(this._1()) fun Tuple3.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple3.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) +fun Tuple4<*, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple4.take1(): Tuple1 = Tuple1(this._1()) fun Tuple4.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple4.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) fun Tuple4.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) +fun Tuple5<*, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple5.take1(): Tuple1 = Tuple1(this._1()) fun Tuple5.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple5.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) fun Tuple5.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) fun Tuple5.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) +fun Tuple6<*, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple6.take1(): Tuple1 = Tuple1(this._1()) fun Tuple6.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple6.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) fun Tuple6.take4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) fun Tuple6.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) fun Tuple6.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) +fun Tuple7<*, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple7.take1(): Tuple1 = Tuple1(this._1()) fun Tuple7.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple7.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -30,6 +36,7 @@ fun Tuple7.take4(): Tuple4 Tuple7.take5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) fun Tuple7.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) fun Tuple7.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) +fun Tuple8<*, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple8.take1(): Tuple1 = Tuple1(this._1()) fun Tuple8.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple8.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -38,6 +45,7 @@ fun Tuple8.take5(): Tuple5 Tuple8.take6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) fun Tuple8.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) fun Tuple8.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) +fun Tuple9<*, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple9.take1(): Tuple1 = Tuple1(this._1()) fun Tuple9.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple9.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -47,6 +55,7 @@ fun Tuple9.take6(): Tu fun Tuple9.take7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) fun Tuple9.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) fun Tuple9.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) +fun Tuple10<*, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple10.take1(): Tuple1 = Tuple1(this._1()) fun Tuple10.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple10.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -57,6 +66,7 @@ fun Tuple10.ta fun Tuple10.take8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) fun Tuple10.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) fun Tuple10.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) +fun Tuple11<*, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple11.take1(): Tuple1 = Tuple1(this._1()) fun Tuple11.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple11.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -68,6 +78,7 @@ fun Tuple11 Tuple11.take9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) fun Tuple11.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) fun Tuple11.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) +fun Tuple12<*, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple12.take1(): Tuple1 = Tuple1(this._1()) fun Tuple12.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple12.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -80,6 +91,7 @@ fun Tuple12 Tuple12.take10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) fun Tuple12.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) fun Tuple12.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) +fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple13.take1(): Tuple1 = Tuple1(this._1()) fun Tuple13.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple13.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -93,6 +105,7 @@ fun Tuple13 Tuple13.take11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) fun Tuple13.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) fun Tuple13.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple14.take1(): Tuple1 = Tuple1(this._1()) fun Tuple14.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple14.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -107,6 +120,7 @@ fun Tuple14 Tuple14.take12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) fun Tuple14.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) fun Tuple14.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple15.take1(): Tuple1 = Tuple1(this._1()) fun Tuple15.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple15.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -122,6 +136,7 @@ fun Tuple15 Tuple15.take13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) fun Tuple15.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) fun Tuple15.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple16.take1(): Tuple1 = Tuple1(this._1()) fun Tuple16.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple16.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -138,6 +153,7 @@ fun Tuple16 Tuple16.take14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) fun Tuple16.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) fun Tuple16.take16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple17.take1(): Tuple1 = Tuple1(this._1()) fun Tuple17.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple17.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -155,6 +171,7 @@ fun Tuple17 Tuple17.take15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) fun Tuple17.take16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) fun Tuple17.take17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple18.take1(): Tuple1 = Tuple1(this._1()) fun Tuple18.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple18.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -173,6 +190,7 @@ fun Tuple18 Tuple18.take16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) fun Tuple18.take17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) fun Tuple18.take18(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple19.take1(): Tuple1 = Tuple1(this._1()) fun Tuple19.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple19.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -192,6 +210,7 @@ fun Tupl fun Tuple19.take17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) fun Tuple19.take18(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) fun Tuple19.take19(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple20.take1(): Tuple1 = Tuple1(this._1()) fun Tuple20.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple20.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -212,6 +231,7 @@ fun fun Tuple20.take18(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) fun Tuple20.take19(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) fun Tuple20.take20(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple21.take1(): Tuple1 = Tuple1(this._1()) fun Tuple21.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple21.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -233,6 +253,7 @@ fun Tuple21.take19(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) fun Tuple21.take20(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) fun Tuple21.take21(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.take0(): EmptyTuple = EmptyTuple fun Tuple22.take1(): Tuple1 = Tuple1(this._1()) fun Tuple22.take2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple22.take3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) @@ -258,27 +279,34 @@ fun .takeLast0(): EmptyTuple = EmptyTuple fun Tuple1.takeLast1(): Tuple1 = Tuple1(this._1()) +fun Tuple2<*, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple2.takeLast2(): Tuple2 = Tuple2(this._1(), this._2()) fun Tuple2<*, T2>.takeLast1(): Tuple1 = Tuple1(this._2()) +fun Tuple3<*, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple3.takeLast3(): Tuple3 = Tuple3(this._1(), this._2(), this._3()) fun Tuple3<*, T2, T3>.takeLast2(): Tuple2 = Tuple2(this._2(), this._3()) fun Tuple3<*, *, T3>.takeLast1(): Tuple1 = Tuple1(this._3()) +fun Tuple4<*, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple4.takeLast4(): Tuple4 = Tuple4(this._1(), this._2(), this._3(), this._4()) fun Tuple4<*, T2, T3, T4>.takeLast3(): Tuple3 = Tuple3(this._2(), this._3(), this._4()) fun Tuple4<*, *, T3, T4>.takeLast2(): Tuple2 = Tuple2(this._3(), this._4()) fun Tuple4<*, *, *, T4>.takeLast1(): Tuple1 = Tuple1(this._4()) +fun Tuple5<*, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple5.takeLast5(): Tuple5 = Tuple5(this._1(), this._2(), this._3(), this._4(), this._5()) fun Tuple5<*, T2, T3, T4, T5>.takeLast4(): Tuple4 = Tuple4(this._2(), this._3(), this._4(), this._5()) fun Tuple5<*, *, T3, T4, T5>.takeLast3(): Tuple3 = Tuple3(this._3(), this._4(), this._5()) fun Tuple5<*, *, *, T4, T5>.takeLast2(): Tuple2 = Tuple2(this._4(), this._5()) fun Tuple5<*, *, *, *, T5>.takeLast1(): Tuple1 = Tuple1(this._5()) +fun Tuple6<*, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple6.takeLast6(): Tuple6 = Tuple6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) fun Tuple6<*, T2, T3, T4, T5, T6>.takeLast5(): Tuple5 = Tuple5(this._2(), this._3(), this._4(), this._5(), this._6()) fun Tuple6<*, *, T3, T4, T5, T6>.takeLast4(): Tuple4 = Tuple4(this._3(), this._4(), this._5(), this._6()) fun Tuple6<*, *, *, T4, T5, T6>.takeLast3(): Tuple3 = Tuple3(this._4(), this._5(), this._6()) fun Tuple6<*, *, *, *, T5, T6>.takeLast2(): Tuple2 = Tuple2(this._5(), this._6()) fun Tuple6<*, *, *, *, *, T6>.takeLast1(): Tuple1 = Tuple1(this._6()) +fun Tuple7<*, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple7.takeLast7(): Tuple7 = Tuple7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) fun Tuple7<*, T2, T3, T4, T5, T6, T7>.takeLast6(): Tuple6 = Tuple6(this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) fun Tuple7<*, *, T3, T4, T5, T6, T7>.takeLast5(): Tuple5 = Tuple5(this._3(), this._4(), this._5(), this._6(), this._7()) @@ -286,6 +314,7 @@ fun Tuple7<*, *, *, T4, T5, T6, T7>.takeLast4(): Tuple4 Tuple7<*, *, *, *, T5, T6, T7>.takeLast3(): Tuple3 = Tuple3(this._5(), this._6(), this._7()) fun Tuple7<*, *, *, *, *, T6, T7>.takeLast2(): Tuple2 = Tuple2(this._6(), this._7()) fun Tuple7<*, *, *, *, *, *, T7>.takeLast1(): Tuple1 = Tuple1(this._7()) +fun Tuple8<*, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple8.takeLast8(): Tuple8 = Tuple8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) fun Tuple8<*, T2, T3, T4, T5, T6, T7, T8>.takeLast7(): Tuple7 = Tuple7(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) fun Tuple8<*, *, T3, T4, T5, T6, T7, T8>.takeLast6(): Tuple6 = Tuple6(this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) @@ -294,6 +323,7 @@ fun Tuple8<*, *, *, *, T5, T6, T7, T8>.takeLast4(): Tuple4 Tuple8<*, *, *, *, *, T6, T7, T8>.takeLast3(): Tuple3 = Tuple3(this._6(), this._7(), this._8()) fun Tuple8<*, *, *, *, *, *, T7, T8>.takeLast2(): Tuple2 = Tuple2(this._7(), this._8()) fun Tuple8<*, *, *, *, *, *, *, T8>.takeLast1(): Tuple1 = Tuple1(this._8()) +fun Tuple9<*, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple9.takeLast9(): Tuple9 = Tuple9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) fun Tuple9<*, T2, T3, T4, T5, T6, T7, T8, T9>.takeLast8(): Tuple8 = Tuple8(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) fun Tuple9<*, *, T3, T4, T5, T6, T7, T8, T9>.takeLast7(): Tuple7 = Tuple7(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) @@ -303,6 +333,7 @@ fun Tuple9<*, *, *, *, *, T6, T7, T8, T9>.takeLast4(): Tuple4 Tuple9<*, *, *, *, *, *, T7, T8, T9>.takeLast3(): Tuple3 = Tuple3(this._7(), this._8(), this._9()) fun Tuple9<*, *, *, *, *, *, *, T8, T9>.takeLast2(): Tuple2 = Tuple2(this._8(), this._9()) fun Tuple9<*, *, *, *, *, *, *, *, T9>.takeLast1(): Tuple1 = Tuple1(this._9()) +fun Tuple10<*, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple10.takeLast10(): Tuple10 = Tuple10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) fun Tuple10<*, T2, T3, T4, T5, T6, T7, T8, T9, T10>.takeLast9(): Tuple9 = Tuple9(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) fun Tuple10<*, *, T3, T4, T5, T6, T7, T8, T9, T10>.takeLast8(): Tuple8 = Tuple8(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) @@ -313,6 +344,7 @@ fun Tuple10<*, *, *, *, *, *, T7, T8, T9, T10>.takeLast4(): Tu fun Tuple10<*, *, *, *, *, *, *, T8, T9, T10>.takeLast3(): Tuple3 = Tuple3(this._8(), this._9(), this._10()) fun Tuple10<*, *, *, *, *, *, *, *, T9, T10>.takeLast2(): Tuple2 = Tuple2(this._9(), this._10()) fun Tuple10<*, *, *, *, *, *, *, *, *, T10>.takeLast1(): Tuple1 = Tuple1(this._10()) +fun Tuple11<*, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple11.takeLast11(): Tuple11 = Tuple11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) fun Tuple11<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>.takeLast10(): Tuple10 = Tuple10(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) fun Tuple11<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11>.takeLast9(): Tuple9 = Tuple9(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) @@ -324,6 +356,7 @@ fun Tuple11<*, *, *, *, *, *, *, T8, T9, T10, T11>.takeLast4( fun Tuple11<*, *, *, *, *, *, *, *, T9, T10, T11>.takeLast3(): Tuple3 = Tuple3(this._9(), this._10(), this._11()) fun Tuple11<*, *, *, *, *, *, *, *, *, T10, T11>.takeLast2(): Tuple2 = Tuple2(this._10(), this._11()) fun Tuple11<*, *, *, *, *, *, *, *, *, *, T11>.takeLast1(): Tuple1 = Tuple1(this._11()) +fun Tuple12<*, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple12.takeLast12(): Tuple12 = Tuple12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) fun Tuple12<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.takeLast11(): Tuple11 = Tuple11(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) fun Tuple12<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>.takeLast10(): Tuple10 = Tuple10(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) @@ -336,6 +369,7 @@ fun Tuple12<*, *, *, *, *, *, *, *, T9, T10, T11, T12>.takeL fun Tuple12<*, *, *, *, *, *, *, *, *, T10, T11, T12>.takeLast3(): Tuple3 = Tuple3(this._10(), this._11(), this._12()) fun Tuple12<*, *, *, *, *, *, *, *, *, *, T11, T12>.takeLast2(): Tuple2 = Tuple2(this._11(), this._12()) fun Tuple12<*, *, *, *, *, *, *, *, *, *, *, T12>.takeLast1(): Tuple1 = Tuple1(this._12()) +fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple13.takeLast13(): Tuple13 = Tuple13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) fun Tuple13<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.takeLast12(): Tuple12 = Tuple12(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) fun Tuple13<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>.takeLast11(): Tuple11 = Tuple11(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) @@ -349,6 +383,7 @@ fun Tuple13<*, *, *, *, *, *, *, *, *, T10, T11, T12, T13>. fun Tuple13<*, *, *, *, *, *, *, *, *, *, T11, T12, T13>.takeLast3(): Tuple3 = Tuple3(this._11(), this._12(), this._13()) fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, T12, T13>.takeLast2(): Tuple2 = Tuple2(this._12(), this._13()) fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, *, T13>.takeLast1(): Tuple1 = Tuple1(this._13()) +fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple14.takeLast14(): Tuple14 = Tuple14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) fun Tuple14<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.takeLast13(): Tuple13 = Tuple13(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) fun Tuple14<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>.takeLast12(): Tuple12 = Tuple12(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) @@ -363,6 +398,7 @@ fun Tuple14<*, *, *, *, *, *, *, *, *, *, T11, T12, T13, T1 fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14>.takeLast3(): Tuple3 = Tuple3(this._12(), this._13(), this._14()) fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14>.takeLast2(): Tuple2 = Tuple2(this._13(), this._14()) fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, *, T14>.takeLast1(): Tuple1 = Tuple1(this._14()) +fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple15.takeLast15(): Tuple15 = Tuple15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) fun Tuple15<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.takeLast14(): Tuple14 = Tuple14(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) fun Tuple15<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>.takeLast13(): Tuple13 = Tuple13(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) @@ -378,6 +414,7 @@ fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, T12, T13, T14, fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T15>.takeLast3(): Tuple3 = Tuple3(this._13(), this._14(), this._15()) fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15>.takeLast2(): Tuple2 = Tuple2(this._14(), this._15()) fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15>.takeLast1(): Tuple1 = Tuple1(this._15()) +fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple16.takeLast16(): Tuple16 = Tuple16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) fun Tuple16<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.takeLast15(): Tuple15 = Tuple15(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) fun Tuple16<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>.takeLast14(): Tuple14 = Tuple14(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) @@ -394,6 +431,7 @@ fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, T13, T14, T fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15, T16>.takeLast3(): Tuple3 = Tuple3(this._14(), this._15(), this._16()) fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16>.takeLast2(): Tuple2 = Tuple2(this._15(), this._16()) fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16>.takeLast1(): Tuple1 = Tuple1(this._16()) +fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple17.takeLast17(): Tuple17 = Tuple17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) fun Tuple17<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast16(): Tuple16 = Tuple16(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) fun Tuple17<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>.takeLast15(): Tuple15 = Tuple15(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) @@ -411,6 +449,7 @@ fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, T14, T15 fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, T16, T17>.takeLast3(): Tuple3 = Tuple3(this._15(), this._16(), this._17()) fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17>.takeLast2(): Tuple2 = Tuple2(this._16(), this._17()) fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17>.takeLast1(): Tuple1 = Tuple1(this._17()) +fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple18.takeLast18(): Tuple18 = Tuple18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) fun Tuple18<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast17(): Tuple17 = Tuple17(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) fun Tuple18<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>.takeLast16(): Tuple16 = Tuple16(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) @@ -429,6 +468,7 @@ fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, T15, fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T16, T17, T18>.takeLast3(): Tuple3 = Tuple3(this._16(), this._17(), this._18()) fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18>.takeLast2(): Tuple2 = Tuple2(this._17(), this._18()) fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18>.takeLast1(): Tuple1 = Tuple1(this._18()) +fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple19.takeLast19(): Tuple19 = Tuple19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) fun Tuple19<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast18(): Tuple18 = Tuple18(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) fun Tuple19<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>.takeLast17(): Tuple17 = Tuple17(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) @@ -448,6 +488,7 @@ fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T1 fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T17, T18, T19>.takeLast3(): Tuple3 = Tuple3(this._17(), this._18(), this._19()) fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18, T19>.takeLast2(): Tuple2 = Tuple2(this._18(), this._19()) fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19>.takeLast1(): Tuple1 = Tuple1(this._19()) +fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple20.takeLast20(): Tuple20 = Tuple20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) fun Tuple20<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast19(): Tuple19 = Tuple19(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) fun Tuple20<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>.takeLast18(): Tuple18 = Tuple18(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) @@ -468,6 +509,7 @@ fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T18, T19, T20>.takeLast3(): Tuple3 = Tuple3(this._18(), this._19(), this._20()) fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19, T20>.takeLast2(): Tuple2 = Tuple2(this._19(), this._20()) fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T20>.takeLast1(): Tuple1 = Tuple1(this._20()) +fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple21.takeLast21(): Tuple21 = Tuple21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) fun Tuple21<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast20(): Tuple20 = Tuple20(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) fun Tuple21<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>.takeLast19(): Tuple19 = Tuple19(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) @@ -489,6 +531,7 @@ fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T19, T20, T21>.takeLast3(): Tuple3 = Tuple3(this._19(), this._20(), this._21()) fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T20, T21>.takeLast2(): Tuple2 = Tuple2(this._20(), this._21()) fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, T21>.takeLast1(): Tuple1 = Tuple1(this._21()) +fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.takeLast0(): EmptyTuple = EmptyTuple fun Tuple22.takeLast22(): Tuple22 = Tuple22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) fun Tuple22<*, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast21(): Tuple21 = Tuple21(this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) fun Tuple22<*, *, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>.takeLast20(): Tuple20 = Tuple20(this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ZipTuples.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ZipTuples.kt rename to kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt index fe3b73a9..d2bbdd7a 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt @@ -186,19 +186,31 @@ class TuplesTest : ShouldSpec({ } should("Map tuples") { - val a = t(1, A(), 3L, 4.0, 5).map { - when (it) { - is A -> A() - else -> it.toString() - } - } + t(1, 2.toShort(), 3L, 4.0, 5).map { + it.toString() + } shouldBe t("1", "2", "3", "4.0", "5") } should("Take n from tuples") { t(1, 2, 3).take2() shouldBe t(1, 2) t(1, 2, 3).takeLast2() shouldBe t(2, 3) - val a = t(1.0, 2, 3L, 4f).takeLast3() + t(1, 2, 3).take0() shouldBe t() + t(1, 2, 3).takeLast0() shouldBe t() + } + + should("Drop n from tuples") { + t(1, 2, 3).drop2() shouldBe t(3) + t(1, 2, 3).dropLast2() shouldBe t(1) + + t(1, 2, 3).drop0() shouldBe t(1, 2, 3) + t(1, 2, 3).dropLast0() shouldBe t(1, 2, 3) + } + + should("Split tuples") { + t(1, 2, 3, 4, 5).splitAt2() shouldBe t(t(1, 2), t(3, 4, 5)) + t(1, 2, 3, 4, 5).splitAt0() shouldBe t(t(), t(1, 2, 3, 4, 5)) + t(1, 2, 3, 4, 5).splitAt5() shouldBe t(t(1, 2, 3, 4, 5), t()) } } diff --git a/qodana.yaml b/qodana.yaml index ff68770d..88c0d6d2 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -1,6 +1,8 @@ +version: "1.0" +linter: jetbrains/qodana-jvm-community:2021.3 profile: name: qodana.recommended exclude: - name: Tuples paths: - - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples \ No newline at end of file + - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples From e0037a010b66c86b3a421d7b04a07fd2d36ab09e Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 30 Mar 2022 15:22:04 +0200 Subject: [PATCH 113/213] checking to see whether qodana.yaml is even working --- qodana.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/qodana.yaml b/qodana.yaml index 88c0d6d2..874d4557 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -1,8 +1,8 @@ version: "1.0" linter: jetbrains/qodana-jvm-community:2021.3 -profile: +proile name: qodana.recommended -exclude: +excde: - name: Tuples paths: - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples From b13cbd953974cd9325992b119a25db4be8b3c876 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 30 Mar 2022 15:25:37 +0200 Subject: [PATCH 114/213] checking to see whether qodana.yaml is even working --- qodana.yaml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/qodana.yaml b/qodana.yaml index 874d4557..6f7b1ae5 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -1,8 +1,6 @@ -version: "1.0" -linter: jetbrains/qodana-jvm-community:2021.3 -proile +profile: name: qodana.recommended -excde: - - name: Tuples +exclude: + - name: All paths: - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples From a7484d267f744d0ea4f45058f2c8117281501482 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 30 Mar 2022 16:17:04 +0200 Subject: [PATCH 115/213] updating docs and readme --- README.md | 9 +++- .../api/tuples/DestructuredTupleBuilders.kt | 3 ++ .../kotlinx/spark/api/tuples/DropFunctions.kt | 4 +- .../kotlinx/spark/api/tuples/MapTuples.kt | 42 +++++++++++++++++++ .../kotlinx/spark/api/tuples/TupleDrop.kt | 11 +++++ .../kotlinx/spark/api/tuples/TupleSplit.kt | 12 ++++++ .../kotlinx/spark/api/tuples/TupleTake.kt | 12 ++++++ .../kotlinx/spark/api/tuples/TupleZip.kt | 3 +- .../jetbrains/kotlinx/spark/api/TuplesTest.kt | 12 ++++-- 9 files changed, 100 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 79c25b73..8846e4c1 100644 --- a/README.md +++ b/README.md @@ -245,7 +245,7 @@ The concept of `EmptyTuple` from Scala 3 is also already present: tupleOf(1).dropLast() == tupleOf() == emptyTuple() ``` -Finally, all these helper functions are also baked in: +Finally, all these tuple helper functions are also baked in: - `componentX()` for destructuring: `val (a, b) = tuple` - `dropLast() / dropFirst()` @@ -257,10 +257,15 @@ Finally, all these helper functions are also baked in: - `getOrNull(n) / getOrNull(i..j)` - `getAs(n) / getAs(i..j)` - `getAsOrNull(n) / getAsOrNull(i..j)` -- `zip` - `copy(_1 = ..., _5 = ...)` - `first() / last()` - `_1`, `_6` etc. (instead of `_1()`, `_6()`) +- `zip` +- `dropN() / dropLastN()` +- `takeN() / takeLastN()` +- `splitAtN()` +- `map` +- `cast` ## Examples diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt index 96f395f2..a212e3aa 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt @@ -45,6 +45,9 @@ import scala.Tuple22 /** * This file provides a descriptive way to create Tuples using [X]. + * Only use [X] to create new Tuples. + * To create Tuples of Tuples, it's recommended to use [t] or [tupleOf] instead as using [X] can lead + * to unexpected results. * * For instance: * ```val yourTuple = 1 X "test" X a``` diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt index cfc730f3..f264245f 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt @@ -47,7 +47,9 @@ import scala.Tuple22 * This can be done using [dropFirst] and [dropLast]. * * For example: - * ```val yourTuple: Tuple2 = tupleOf(1, "test", a).dropLast()``` + * ```kotlin + * val yourTuple: Tuple2 = tupleOf(1, "test", a).dropLast() + * ``` * */ diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt index ee2f517b..2de50286 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt @@ -2,6 +2,25 @@ package org.jetbrains.kotlinx.spark.api.tuples import scala.* +/** + * This file provides map-functions to all Tuple variants. + * Given a tuple `t(a1, ..., an)`, returns a new tuple `t(func(a1), ..., func(an))`. + * Compared to Scala 3, no type mapping can occur in Kotlin, so to create a `TupleX` + * the user will need to explicitly [cast] the result. + * + * For example: + * ```kotlin + * val myTuple: Tuple4 = t(1, "3", 2, "4") + * val myStringTuple: Tuple4 = myTuple.map { + * when (it) { + * is Int -> it.toString() + * is String -> it.toInt() + * else -> error("") + * } + * }.cast() + * ``` + */ + fun Tuple1.map(func: (T) -> R): Tuple1 = Tuple1(func(this._1())) fun Tuple2.map(func: (T) -> R): Tuple2 = Tuple2(func(this._1()), func(this._2())) fun Tuple3.map(func: (T) -> R): Tuple3 = Tuple3(func(this._1()), func(this._2()), func(this._3())) @@ -24,3 +43,26 @@ fun Tuple20.map(func: (T) -> R): Tuple20 = Tuple20(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15()), func(this._16()), func(this._17()), func(this._18()), func(this._19()), func(this._20())) fun Tuple21.map(func: (T) -> R): Tuple21 = Tuple21(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15()), func(this._16()), func(this._17()), func(this._18()), func(this._19()), func(this._20()), func(this._21())) fun Tuple22.map(func: (T) -> R): Tuple22 = Tuple22(func(this._1()), func(this._2()), func(this._3()), func(this._4()), func(this._5()), func(this._6()), func(this._7()), func(this._8()), func(this._9()), func(this._10()), func(this._11()), func(this._12()), func(this._13()), func(this._14()), func(this._15()), func(this._16()), func(this._17()), func(this._18()), func(this._19()), func(this._20()), func(this._21()), func(this._22())) + +inline fun Tuple1<*>.cast(): Tuple1 = Tuple1(this._1() as T1) +inline fun Tuple2<*, *>.cast(): Tuple2 = Tuple2(this._1() as T1, this._2() as T2) +inline fun Tuple3<*, *, *>.cast(): Tuple3 = Tuple3(this._1() as T1, this._2() as T2, this._3() as T3) +inline fun Tuple4<*, *, *, *>.cast(): Tuple4 = Tuple4(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4) +inline fun Tuple5<*, *, *, *, *>.cast(): Tuple5 = Tuple5(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5) +inline fun Tuple6<*, *, *, *, *, *>.cast(): Tuple6 = Tuple6(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6) +inline fun Tuple7<*, *, *, *, *, *, *>.cast(): Tuple7 = Tuple7(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7) +inline fun Tuple8<*, *, *, *, *, *, *, *>.cast(): Tuple8 = Tuple8(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8) +inline fun Tuple9<*, *, *, *, *, *, *, *, *>.cast(): Tuple9 = Tuple9(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9) +inline fun Tuple10<*, *, *, *, *, *, *, *, *, *>.cast(): Tuple10 = Tuple10(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10) +inline fun Tuple11<*, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple11 = Tuple11(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11) +inline fun Tuple12<*, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple12 = Tuple12(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12) +inline fun Tuple13<*, *, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple13 = Tuple13(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12, this._13() as T13) +inline fun Tuple14<*, *, *, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple14 = Tuple14(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12, this._13() as T13, this._14() as T14) +inline fun Tuple15<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple15 = Tuple15(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12, this._13() as T13, this._14() as T14, this._15() as T15) +inline fun Tuple16<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple16 = Tuple16(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12, this._13() as T13, this._14() as T14, this._15() as T15, this._16() as T16) +inline fun Tuple17<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple17 = Tuple17(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12, this._13() as T13, this._14() as T14, this._15() as T15, this._16() as T16, this._17() as T17) +inline fun Tuple18<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple18 = Tuple18(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12, this._13() as T13, this._14() as T14, this._15() as T15, this._16() as T16, this._17() as T17, this._18() as T18) +inline fun Tuple19<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple19 = Tuple19(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12, this._13() as T13, this._14() as T14, this._15() as T15, this._16() as T16, this._17() as T17, this._18() as T18, this._19() as T19) +inline fun Tuple20<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple20 = Tuple20(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12, this._13() as T13, this._14() as T14, this._15() as T15, this._16() as T16, this._17() as T17, this._18() as T18, this._19() as T19, this._20() as T20) +inline fun Tuple21<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple21 = Tuple21(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12, this._13() as T13, this._14() as T14, this._15() as T15, this._16() as T16, this._17() as T17, this._18() as T18, this._19() as T19, this._20() as T20, this._21() as T21) +inline fun Tuple22<*, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *, *>.cast(): Tuple22 = Tuple22(this._1() as T1, this._2() as T2, this._3() as T3, this._4() as T4, this._5() as T5, this._6() as T6, this._7() as T7, this._8() as T8, this._9() as T9, this._10() as T10, this._11() as T11, this._12() as T12, this._13() as T13, this._14() as T14, this._15() as T15, this._16() as T16, this._17() as T17, this._18() as T18, this._19() as T19, this._20() as T20, this._21() as T21, this._22() as T22) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt index da532935..a0581778 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt @@ -2,6 +2,17 @@ package org.jetbrains.kotlinx.spark.api.tuples import scala.* +/** + * This file contains all functions to drop N items from the beginning or end of a Tuple. + * If all items are dropped, the result will be [EmptyTuple]. + * + * For example: + * ```kotlin + * tupleOf(1, 2, 3, 4).drop2() == tupleOf(3, 4) + * tupleOf(1, 2, 3, 4).dropLast2() == tupleOf(1, 2) + * ``` + */ + fun Tuple1.drop0(): Tuple1 = Tuple1(this._1()) fun Tuple1<*>.drop1(): EmptyTuple = EmptyTuple fun Tuple2.drop0(): Tuple2 = Tuple2(this._1(), this._2()) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt index c73b1263..8cf59726 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt @@ -2,6 +2,18 @@ package org.jetbrains.kotlinx.spark.api.tuples import scala.* +/** + * Given a tuple `t(a1, ..., am)`, returns a [Tuple2] of the tuple `t(a1, ..., an)` + * consisting of the first n elements, and the tuple `t(an+1, ..., am)` consisting + * of the remaining elements. + * Splitting at 0 or at n results in `t(t(), myTuple)` or `t(myTuple, t())` respectively. + * + * For example: + * ```kotlin + * t(1, 2, 3, 4, 5).splitAt2() == t(t(1, 2), t(3, 4, 5)) + * ``` + */ + fun Tuple1.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple1(this._1())) fun Tuple1.splitAt1(): Tuple2, EmptyTuple> = Tuple2, EmptyTuple>(Tuple1(this._1()), EmptyTuple) fun Tuple2.splitAt0(): Tuple2> = Tuple2>(EmptyTuple, Tuple2(this._1(), this._2())) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt index 82b18c30..706e1ef0 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt @@ -1,6 +1,18 @@ package org.jetbrains.kotlinx.spark.api.tuples import scala.* + +/** + * This file contains all functions to take N items from the beginning or end of a Tuple. + * If 0 items are taken, the result will be [EmptyTuple]. + * + * For example: + * ```kotlin + * tupleOf(1, 2, 3, 4).take2() == tupleOf(1, 2) + * tupleOf(1, 2, 3, 4).takeLast2() == tupleOf(3, 4) + * ``` + */ + fun Tuple1<*>.take0(): EmptyTuple = EmptyTuple fun Tuple1.take1(): Tuple1 = Tuple1(this._1()) fun Tuple2<*, *>.take0(): EmptyTuple = EmptyTuple diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt index f003e507..49c70396 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt @@ -4,11 +4,10 @@ import scala.* /** * This file provides zip-functions to all Tuple variants. - * Given two tuples, `t(a1, ..., an)` and `t(a1, ..., an)`, returns a tuple + * Given two tuples, `t(a1, ..., an) zip t(a1, ..., an)`, returns a tuple * `t(t(a1, b1), ..., t(an, bn))`. If the two tuples have different sizes, * the extra elements of the larger tuple will be disregarded. * The result is typed as `TupleX, ..., Tuple2>`. - * */ infix fun Tuple1<*>.zip(other: EmptyTuple): EmptyTuple = EmptyTuple diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt index d2bbdd7a..a13f9c58 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt @@ -186,9 +186,15 @@ class TuplesTest : ShouldSpec({ } should("Map tuples") { - t(1, 2.toShort(), 3L, 4.0, 5).map { - it.toString() - } shouldBe t("1", "2", "3", "4.0", "5") + t(1, 2.toShort(), 3L, 4.0, 5) + .map { it.toString() } + .shouldBe( + t("1", "2", "3", "4.0", "5") + ) + + shouldThrow { + t(1, "2", 3L).cast() + } } should("Take n from tuples") { From 2002f95c7c64af9e3a48af2291555a82b6fd7a36 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 30 Mar 2022 18:22:24 +0200 Subject: [PATCH 116/213] adding error to qodana.yaml on purpose --- qodana.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/qodana.yaml b/qodana.yaml index 6f7b1ae5..2c71dc3f 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -1,6 +1,6 @@ -profile: +proile: name: qodana.recommended -exclude: +exclue: - name: All - paths: + pat: - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples From 5683b09788e036302a066d3d86dc5f20c7b827d7 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 31 Mar 2022 12:29:03 +0200 Subject: [PATCH 117/213] adding qodana version? --- qodana.yaml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/qodana.yaml b/qodana.yaml index 2c71dc3f..8b94758c 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -1,6 +1,8 @@ -proile: +version: "1.0" +linter: jetbrains/qodana-jvm-community:2021.3 +profile: name: qodana.recommended -exclue: +exclude: - name: All - pat: + paths: - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples From 7862da24afc1c9cbd28086e17e6c4ed398f61ac8 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 31 Mar 2022 15:16:39 +0200 Subject: [PATCH 118/213] excluding arities from qodana --- qodana.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/qodana.yaml b/qodana.yaml index 8b94758c..54b82040 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -6,3 +6,4 @@ exclude: - name: All paths: - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples + - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt \ No newline at end of file From 08f825935a882712a5677ace6ab69420fcecf459 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 31 Mar 2022 15:47:55 +0200 Subject: [PATCH 119/213] updating examples and deprecations for qodana --- .../spark/examples/CachedOperations.kt | 4 +- .../jetbrains/kotlinx/spark/examples/Group.kt | 5 +- .../jetbrains/kotlinx/spark/examples/Main.kt | 2 +- .../spark/examples/MapAndListOperations.kt | 2 +- .../kotlinx/spark/api/Conversions.kt | 92 +++++++++---------- .../jetbrains/kotlinx/spark/api/Dataset.kt | 8 +- 6 files changed, 56 insertions(+), 57 deletions(-) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt index 81190361..1904de74 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/CachedOperations.kt @@ -31,9 +31,7 @@ fun main() { filter { it._1 % 2 == 0 }.showDS() } - .map { (first, second) -> - first X second X (first + second) * 2 - } + .map { it.appendedBy(it._1 + it._2 * 2) } .show() } } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt index f4d35097..7fde698e 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Group.kt @@ -21,7 +21,6 @@ package org.jetbrains.kotlinx.spark.examples import org.jetbrains.kotlinx.spark.api.* import org.jetbrains.kotlinx.spark.api.tuples.* -import org.jetbrains.kotlinx.spark.api.tuples.t fun main() { withSpark { @@ -31,7 +30,9 @@ fun main() { 2 X "c", ) .groupByKey { it._1 } - .reduceGroupsK { a, b -> tupleOf(_1 = a._1 + b._1, _2 = a._2 + b._2) } + .reduceGroupsK { a, b -> + tupleOf(a._1 + b._1, a._2 + b._2) + } .show() } } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt index 920716bf..de44b412 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt @@ -44,7 +44,7 @@ object Main { Q(3, 3 X "333"), ) ) - .map { (a, b) -> t(_1 = a + b._1, _2 = b._2?.length) } + .map { (a, b) -> t(a + b._1, b._2?.length) } .map { it: Tuple2 -> it + 1 } // add counter val pairs = spark diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt index 15892cfd..8d36017d 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/MapAndListOperations.kt @@ -30,7 +30,7 @@ fun main() { ) .flatMap { it.toList() - .map { (first, tuple) -> listOf(first, tuple._1, tuple._2, tuple._3) } + .map { (first, tuple) -> (first + tuple).toList() } .iterator() } .flatten() diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index 6db44e42..62e9f221 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -186,7 +186,7 @@ fun Tuple2.toPair(): Pair = Pair(_1(), _2()) /** * Returns a new [Pair] based on the arguments in the current [Arity2]. */ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity2.toPair(): Pair = Pair(_1, _2) @@ -209,270 +209,270 @@ fun Tuple3.toTriple(): Triple = Triple Arity3.toTriple(): Triple = Triple(_1, _2, _3) /** * Returns a new Arity1 based on this Tuple1. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple1.toArity(): Arity1 = Arity1(this._1()) /** * Returns a new Arity2 based on this Tuple2. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple2.toArity(): Arity2 = Arity2(this._1(), this._2()) /** * Returns a new Arity3 based on this Tuple3. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple3.toArity(): Arity3 = Arity3(this._1(), this._2(), this._3()) /** * Returns a new Arity4 based on this Tuple4. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple4.toArity(): Arity4 = Arity4(this._1(), this._2(), this._3(), this._4()) /** * Returns a new Arity5 based on this Tuple5. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple5.toArity(): Arity5 = Arity5(this._1(), this._2(), this._3(), this._4(), this._5()) /** * Returns a new Arity6 based on this Tuple6. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple6.toArity(): Arity6 = Arity6(this._1(), this._2(), this._3(), this._4(), this._5(), this._6()) /** * Returns a new Arity7 based on this Tuple7. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple7.toArity(): Arity7 = Arity7(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7()) /** * Returns a new Arity8 based on this Tuple8. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple8.toArity(): Arity8 = Arity8(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8()) /** * Returns a new Arity9 based on this Tuple9. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple9.toArity(): Arity9 = Arity9(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9()) /** * Returns a new Arity10 based on this Tuple10. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple10.toArity(): Arity10 = Arity10(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10()) /** * Returns a new Arity11 based on this Tuple11. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple11.toArity(): Arity11 = Arity11(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11()) /** * Returns a new Arity12 based on this Tuple12. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple12.toArity(): Arity12 = Arity12(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12()) /** * Returns a new Arity13 based on this Tuple13. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple13.toArity(): Arity13 = Arity13(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13()) /** * Returns a new Arity14 based on this Tuple14. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple14.toArity(): Arity14 = Arity14(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14()) /** * Returns a new Arity15 based on this Tuple15. - **/@Deprecated("Use Scala tuples instead.") + **/@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple15.toArity(): Arity15 = Arity15(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15()) /** * Returns a new Arity16 based on this Tuple16. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple16.toArity(): Arity16 = Arity16(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16()) /** * Returns a new Arity17 based on this Tuple17. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple17.toArity(): Arity17 = Arity17(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17()) /** * Returns a new Arity18 based on this Tuple18. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple18.toArity(): Arity18 = Arity18(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18()) /** * Returns a new Arity19 based on this Tuple19. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple19.toArity(): Arity19 = Arity19(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19()) /** * Returns a new Arity20 based on this Tuple20. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple20.toArity(): Arity20 = Arity20(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20()) /** * Returns a new Arity21 based on this Tuple21. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple21.toArity(): Arity21 = Arity21(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21()) /** * Returns a new Arity22 based on this Tuple22. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Tuple22.toArity(): Arity22 = Arity22(this._1(), this._2(), this._3(), this._4(), this._5(), this._6(), this._7(), this._8(), this._9(), this._10(), this._11(), this._12(), this._13(), this._14(), this._15(), this._16(), this._17(), this._18(), this._19(), this._20(), this._21(), this._22()) /** * Returns a new Tuple1 based on this Arity1. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity1.toTuple(): Tuple1 = Tuple1(this._1) /** * Returns a new Tuple2 based on this Arity2. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity2.toTuple(): Tuple2 = Tuple2(this._1, this._2) /** * Returns a new Tuple3 based on this Arity3. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity3.toTuple(): Tuple3 = Tuple3(this._1, this._2, this._3) /** * Returns a new Tuple4 based on this Arity4. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity4.toTuple(): Tuple4 = Tuple4(this._1, this._2, this._3, this._4) /** * Returns a new Tuple5 based on this Arity5. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity5.toTuple(): Tuple5 = Tuple5(this._1, this._2, this._3, this._4, this._5) /** * Returns a new Tuple6 based on this Arity6. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity6.toTuple(): Tuple6 = Tuple6(this._1, this._2, this._3, this._4, this._5, this._6) /** * Returns a new Tuple7 based on this Arity7. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity7.toTuple(): Tuple7 = Tuple7(this._1, this._2, this._3, this._4, this._5, this._6, this._7) /** * Returns a new Tuple8 based on this Arity8. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity8.toTuple(): Tuple8 = Tuple8(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8) /** * Returns a new Tuple9 based on this Arity9. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity9.toTuple(): Tuple9 = Tuple9(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9) /** * Returns a new Tuple10 based on this Arity10. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity10.toTuple(): Tuple10 = Tuple10(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10) /** * Returns a new Tuple11 based on this Arity11. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity11.toTuple(): Tuple11 = Tuple11(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11) /** * Returns a new Tuple12 based on this Arity12. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity12.toTuple(): Tuple12 = Tuple12(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12) /** * Returns a new Tuple13 based on this Arity13. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity13.toTuple(): Tuple13 = Tuple13(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13) /** * Returns a new Tuple14 based on this Arity14. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity14.toTuple(): Tuple14 = Tuple14(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14) /** * Returns a new Tuple15 based on this Arity15. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity15.toTuple(): Tuple15 = Tuple15(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15) /** * Returns a new Tuple16 based on this Arity16. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity16.toTuple(): Tuple16 = Tuple16(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16) /** * Returns a new Tuple17 based on this Arity17. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity17.toTuple(): Tuple17 = Tuple17(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17) /** * Returns a new Tuple18 based on this Arity18. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity18.toTuple(): Tuple18 = Tuple18(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18) /** * Returns a new Tuple19 based on this Arity19. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity19.toTuple(): Tuple19 = Tuple19(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19) /** * Returns a new Tuple20 based on this Arity20. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity20.toTuple(): Tuple20 = Tuple20(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20) /** * Returns a new Tuple21 based on this Arity21. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity21.toTuple(): Tuple21 = Tuple21(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21) /** * Returns a new Tuple22 based on this Arity22. **/ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity22.toTuple(): Tuple22 = Tuple22(this._1, this._2, this._3, this._4, this._5, this._6, this._7, this._8, this._9, this._10, this._11, this._12, this._13, this._14, this._15, this._16, this._17, this._18, this._19, this._20, this._21, this._22) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt index 467a3078..0fc52936 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt @@ -147,7 +147,7 @@ inline fun Dataset>.takeKeys(): Dataset = map * Maps the Dataset to only retain the "keys" or [Arity2._1] values. */ @JvmName("takeKeysArity2") -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) inline fun Dataset>.takeKeys(): Dataset = map { it._1 } /** @@ -168,7 +168,7 @@ inline fun Dataset>.takeValues(): Dataset = ma * Maps the Dataset to only retain the "values" or [Arity2._2] values. */ @JvmName("takeValuesArity2") -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) inline fun Dataset>.takeValues(): Dataset = map { it._2 } /** DEPRECATED: Use [as] or [to] for this. */ @@ -318,12 +318,12 @@ fun Dataset>.sortByKey(): Dataset> = sort fun Dataset>.sortByValue(): Dataset> = sort("_2") /** Returns a dataset sorted by the first (`_1`) value of each [Arity2] inside. */ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) @JvmName("sortByArity2Key") fun Dataset>.sortByKey(): Dataset> = sort("_1") /** Returns a dataset sorted by the second (`_2`) value of each [Arity2] inside. */ -@Deprecated("Use Scala tuples instead.") +@Deprecated("Use Scala tuples instead.", ReplaceWith("")) @JvmName("sortByArity2Value") fun Dataset>.sortByValue(): Dataset> = sort("_2") From 2de07324d01ff6d0097db9772e466b610859e6fe Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 31 Mar 2022 15:58:10 +0200 Subject: [PATCH 120/213] pleasing qodana --- .../main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt | 1 + .../src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt | 2 ++ 2 files changed, 3 insertions(+) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt index de44b412..0fc2517f 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt @@ -26,6 +26,7 @@ import org.jetbrains.kotlinx.spark.api.tuples.* import scala.* data class Q(val id: Int, val text: T) +@Suppress("RedundantLambdaArrow", "UsePropertyAccessSyntax") object Main { @JvmStatic diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt index 0fc52936..9173bf8d 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt @@ -25,6 +25,8 @@ * possible/easier. */ +@file:Suppress("unused") + package org.jetbrains.kotlinx.spark.api import org.apache.spark.api.java.JavaRDDLike From ea3e7d89858fff090bfcd3f802460ca51a832782 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 31 Mar 2022 22:41:37 +0200 Subject: [PATCH 121/213] removed commented out code, fixed checkpointPath (needs cleaning up code), new notation for entering KSparkSession scope inside streaming scope --- ...t => KotlinRecoverableNetworkWordCount.kt} | 148 +-- .../kotlinx/spark/examples/Streaming.kt | 13 +- .../kotlinx/spark/api/SparkSession.kt | 126 +- .../jetbrains/kotlinx/spark/api/Streaming.kt | 1170 ----------------- 4 files changed, 142 insertions(+), 1315 deletions(-) rename examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/{JavaRecoverableNetworkWordCount.kt => KotlinRecoverableNetworkWordCount.kt} (60%) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinRecoverableNetworkWordCount.kt similarity index 60% rename from examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt rename to examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinRecoverableNetworkWordCount.kt index 9b64149e..10f57a28 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JavaRecoverableNetworkWordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinRecoverableNetworkWordCount.kt @@ -22,21 +22,13 @@ package org.jetbrains.kotlinx.spark.examples import com.google.common.io.Files -import org.apache.spark.api.java.JavaPairRDD import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.broadcast.Broadcast -import org.apache.spark.streaming.Duration import org.apache.spark.streaming.Durations import org.apache.spark.streaming.Time -import org.apache.spark.streaming.api.java.JavaDStream -import org.apache.spark.streaming.api.java.JavaPairDStream -import org.apache.spark.streaming.dstream.DStream -import org.apache.spark.streaming.dstream.PairDStreamFunctions import org.apache.spark.util.LongAccumulator import org.jetbrains.kotlinx.spark.api.* -import scala.Tuple1 import scala.Tuple2 -import scala.reflect.ClassTag import java.io.File import java.nio.charset.Charset import java.util.regex.Pattern @@ -49,13 +41,13 @@ import kotlin.time.measureTimedValue /** * Use this singleton to get or register a Broadcast variable. */ -internal object JavaWordExcludeList { +internal object KotlinWordExcludeList { @Volatile private var instance: Broadcast>? = null fun getInstance(sc: JavaSparkContext): Broadcast> { - if (instance == null) synchronized(JavaWordExcludeList::class.java) { + if (instance == null) synchronized(KotlinWordExcludeList::class) { if (instance == null) { val wordExcludeList = listOf("a", "b", "c") instance = sc.broadcast(wordExcludeList) @@ -68,13 +60,13 @@ internal object JavaWordExcludeList { /** * Use this singleton to get or register an Accumulator. */ -internal object JavaDroppedWordsCounter { +internal object KotlinDroppedWordsCounter { @Volatile private var instance: LongAccumulator? = null fun getInstance(sc: JavaSparkContext): LongAccumulator { - if (instance == null) synchronized(JavaDroppedWordsCounter::class.java) { + if (instance == null) synchronized(KotlinDroppedWordsCounter::class) { if (instance == null) instance = sc.sc().longAccumulator("DroppedWordsCounter") } @@ -87,7 +79,7 @@ internal object JavaDroppedWordsCounter { * shows how to use lazily instantiated singleton instances for Accumulator and Broadcast so that * they can be registered on driver failures. * - * Usage: JavaRecoverableNetworkWordCount + * Usage: KotlinRecoverableNetworkWordCount * and describe the TCP server that Spark Streaming would connect to receive * data. directory to HDFS-compatible file system which checkpoint data * file to which the word counts will be appended @@ -100,7 +92,7 @@ internal object JavaDroppedWordsCounter { * * and run the example as * - * `$ ./bin/run-example org.apache.spark.examples.streaming.JavaRecoverableNetworkWordCount \ + * `$ ./bin/run-example org.apache.spark.examples.streaming.KotlinRecoverableNetworkWordCount \ * localhost 9999 ~/checkpoint/ ~/out` * * If the directory ~/checkpoint/ does not exist (e.g. running for the first time), it will create @@ -110,7 +102,7 @@ internal object JavaDroppedWordsCounter { * * Refer to the online documentation for more details. */ -object JavaRecoverableNetworkWordCount { +object KotlinRecoverableNetworkWordCount { private val SPACE = Pattern.compile(" ") @@ -125,7 +117,7 @@ object JavaRecoverableNetworkWordCount { if (args.size != 4 && args.isNotEmpty()) { System.err.println("You arguments were " + listOf(*args)) System.err.println( - """Usage: JavaRecoverableNetworkWordCount + """Usage: KotlinRecoverableNetworkWordCount . and describe the TCP server that Spark Streaming would connect to receive data. directory to HDFS-compatible file system which checkpoint data file to which @@ -141,12 +133,13 @@ object JavaRecoverableNetworkWordCount { val checkpointDirectory = args.getOrElse(2) { DEFAULT_CHECKPOINT_DIRECTORY } val outputPath = args.getOrElse(3) { DEFAULT_OUTPUT_PATH } + // (used to detect the new context) // Create the context with a 1 second batch size or load from checkpointDirectory withSparkStreaming( -// checkpointPath = checkpointDirectory, TODO + checkpointPath = checkpointDirectory, batchDuration = Durations.seconds(1), - appName = "JavaRecoverableNetworkWordCount", + appName = "KotlinRecoverableNetworkWordCount", ) { createContext( ip = ip, @@ -167,8 +160,12 @@ object JavaRecoverableNetworkWordCount { // If you do not see this printed, that means the StreamingContext has been loaded // from the new checkpoint println("Creating new context") - val outputFile = File(outputPath) - if (outputFile.exists()) outputFile.delete() + val outputFile = File(outputPath).apply { + if (exists()) delete() + parentFile.mkdirs() + createNewFile() + } + // Create a socket stream on target ip:port and count the // words in input stream of \n delimited text (e.g. generated by 'nc') @@ -176,86 +173,55 @@ object JavaRecoverableNetworkWordCount { val words = lines.flatMap { it.split(SPACE).iterator() } -// val wordCounts = words -// .map { c(it, 1) } -// .reduceByKey { a, b -> a + b } -// .reduceByKey { a, b -> a + b } -// .reduceByKey { a, b -> a + b } -// .reduceByKey { a, b -> a + b } -// .reduceByKey { a, b -> a + b } -// .reduceByKey { a, b -> a + b } -// .reduceByKey { a, b -> a + b } -// .reduceByKey { a, b -> a + b } - - val wordCounts4 = words - .mapToPair { Tuple2(it, 1) } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - - -// val wordCounts2 = words -// .map { it to 1 } -// .reduceByKey { a, b -> a + b } - val wordCounts3 = words .map { Tuple2(it, 1) } .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } - .reduceByKey { a, b -> a + b } -// val wordCounts5 = words -// .dstream() -// .map({ Tuple2(it, 1) }, fakeClassTag()) -// .let { DStream.toPairDStreamFunctions(it, fakeClassTag(), fakeClassTag(), null) } -// .reduceByKey { a, b -> a + b } -// .let { JavaDStream(it, fakeClassTag()) } + // in normal streaming context we can create a SparkSession from ssc: JavaStreamingContext + // normally `ssc.sparkContext().conf` + withSpark(ssc) { + listOf(1, 2, 3).toDS().show() + } wordCounts3.foreachRDD { rdd, time: Time -> - val sc = JavaSparkContext(rdd.context()) - - // Get or register the excludeList Broadcast - val excludeList = JavaWordExcludeList.getInstance(sc) - - // Get or register the droppedWordsCounter Accumulator - val droppedWordsCounter = JavaDroppedWordsCounter.getInstance(sc) - - // Use excludeList to drop words and use droppedWordsCounter to count them - val (counts, duration) = measureTimedValue { - rdd.filter { wordCount -> - if (excludeList.value().contains(wordCount._1)) { - droppedWordsCounter.add(wordCount._2.toLong()) - false - } else { - true - } - }.collect() - } - - println("Debug: ${rdd.toDebugString()}") - - val output = "Counts at time $time $counts\n$duration" - println(output) - println("Dropped ${droppedWordsCounter.value()} word(s) totally") - println("Appending to " + outputFile.absolutePath) - Files.append( - """ + // but in foreachRDD we must obtain this conf from the RDD + // like `rdd.context().conf` + withSpark(rdd) { + + rdd.toDS().show() + + // Get or register the excludeList Broadcast + val excludeList = KotlinWordExcludeList.getInstance(sc) + + // Get or register the droppedWordsCounter Accumulator + val droppedWordsCounter = KotlinDroppedWordsCounter.getInstance(sc) + + // Use excludeList to drop words and use droppedWordsCounter to count them + val (counts, duration) = measureTimedValue { + rdd.filter { wordCount -> + if (excludeList.value().contains(wordCount._1)) { + droppedWordsCounter.add(wordCount._2.toLong()) + false + } else { + true + } + }.collect() + } + + + val output = "Counts at time $time $counts\n$duration" + println(output) + println("Dropped ${droppedWordsCounter.value()} word(s) totally") + println("Appending to " + outputFile.absolutePath) + Files.append( + """ $output """.trimIndent(), - outputFile, - Charset.defaultCharset(), - ) + outputFile, + Charset.defaultCharset(), + ) + } } } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt index de77c96f..07e06af9 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt @@ -37,12 +37,15 @@ fun main() = withSparkStreaming(Durations.seconds(1), timeout = 10_000) { words.foreachRDD { rdd, time -> - val dataframe: Dataset = rdd.map { TestRow(it) }.toDS() + withSpark(rdd) { - dataframe - .groupByKey { it.word } - .count() - .show() + val dataframe: Dataset = rdd.map { TestRow(it) }.toDS() + + dataframe + .groupByKey { it.word } + .count() + .show() + } } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index d4481f1d..53d6a62e 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -22,6 +22,8 @@ * This file contains the main entry points and wrappers for the Kotlin Spark API. */ +@file:Suppress("UsePropertyAccessSyntax") + package org.jetbrains.kotlinx.spark.api import org.apache.spark.SparkConf @@ -38,13 +40,14 @@ import org.apache.spark.streaming.Durations import org.apache.spark.streaming.api.java.JavaStreamingContext import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions +import scala.Tuple2 /** * This wrapper over [SparkSession] which provides several additional methods to create [org.apache.spark.sql.Dataset]. * * @param spark The current [SparkSession] to wrap */ -open class KSparkSession(val spark: SparkSession) { +class KSparkSession(val spark: SparkSession) { /** Lazy instance of [JavaSparkContext] wrapper around [sparkContext]. */ val sc: JavaSparkContext by lazy { JavaSparkContext(spark.sparkContext) } @@ -82,13 +85,44 @@ open class KSparkSession(val spark: SparkSession) { /** * This wrapper over [SparkSession] and [JavaStreamingContext] provides several additional methods to create [org.apache.spark.sql.Dataset] */ -class KSparkStreamingSession(spark: SparkSession, val ssc: JavaStreamingContext) : KSparkSession(spark) { +class KSparkStreamingSession(val ssc: JavaStreamingContext) { /** Can be overwritten to be run after the streaming session has started and before it's terminated. */ var runAfterStart: KSparkStreamingSession.() -> Unit = {} -} + + fun invokeRunAfterStart(): Unit = runAfterStart() + + + fun withSpark(sc: SparkConf, func: KSparkSession.() -> Unit) { + val spark = SparkSession.builder().config(sc).getOrCreate() + KSparkSession(spark).apply(func) + } + + /** + * Helper function to enter Spark scope from [ssc] like + * ```kotlin + * ssc.withSpark { // this: KSparkSession + * + * } + * ``` + */ + fun withSpark(ssc: JavaStreamingContext, func: KSparkSession.() -> Unit) = withSpark(ssc.sparkContext().conf, func) + + + /** + * Helper function to enter Spark scope from a provided like + * when using the `foreachRDD` function. + * ```kotlin + * withSpark(rdd) { // this: KSparkSession + * + * } + * ``` + */ + fun withSpark(rdd: JavaRDDLike<*, *>, func: KSparkSession.() -> Unit) = withSpark(rdd.context().conf, func) +} + /** * The entry point to programming Spark with the Dataset and DataFrame API. @@ -160,7 +194,7 @@ inline fun withSpark( * @param logLevel Control our logLevel. This overrides any user-defined log settings. * @param func function which will be executed in context of [KSparkSession] (it means that `this` inside block will point to [KSparkSession]) */ -@Suppress("UsePropertyAccessSyntax") + @JvmOverloads inline fun withSpark(builder: Builder, logLevel: SparkLogLevel = ERROR, func: KSparkSession.() -> Unit) { builder @@ -212,7 +246,6 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func * @param logLevel Control our logLevel. This overrides any user-defined log settings. * @param timeout The time in milliseconds to wait for the stream to terminate without input. -1 by default, this means no timeout. * @param func function which will be executed in context of [KSparkStreamingSession] (it means that `this` inside block will point to [KSparkStreamingSession]) - * todo: provide alternatives with path instead of batchDuration etc */ @JvmOverloads inline fun withSparkStreaming( @@ -221,61 +254,56 @@ inline fun withSparkStreaming( props: Map = emptyMap(), master: String = SparkConf().get("spark.master", "local[*]"), appName: String = "Kotlin Spark Sample", - logLevel: SparkLogLevel = SparkLogLevel.ERROR, timeout: Long = -1L, - func: KSparkStreamingSession.() -> Unit, + crossinline func: KSparkStreamingSession.() -> Unit, ) { if (checkpointPath != null) { - TODO() -// var kSparkStreamingSession: KSparkStreamingSession? = null -// val ssc = JavaStreamingContext.getOrCreate(checkpointPath) { -// val jssc = JavaStreamingContext( -// SparkConf() -// .setAppName(appName) -// .setMaster(master) -// .setAll(props.map { (key, value) -> -// c(key, value.toString()).toTuple() -// }.asScalaIterable()), -// batchDuration, -// ) -// jssc.sparkContext().sc().setLogLevel(logLevel) -// jssc.checkpoint(checkpointPath) -// kSparkStreamingSession = KSparkStreamingSession( -// spark = SparkSession -// .builder() -// .sparkContext(jssc.sparkContext().sc()) -// .getOrCreate(), -// ssc = jssc, -// ).apply { func() } -// -// jssc -// } -// ssc.start() -// kSparkStreamingSession?.apply { runAfterStart() } -// ssc.awaitTerminationOrTimeout(timeout) -// ssc.stop() - } else { + var kSparkStreamingSession: KSparkStreamingSession? = null + val ssc = JavaStreamingContext.getOrCreate(checkpointPath) { + val sc = SparkConf() + .setAppName(appName) + .setMaster(master) + .setAll( + props + .map { (key, value) -> Tuple2(key, value.toString()) } + .asScalaIterable() + ) - withSpark( - props = props, - master = master, - appName = appName, - logLevel = logLevel, - ) { val ssc = JavaStreamingContext(sc, batchDuration) - KSparkStreamingSession(spark = spark, ssc = ssc).apply { - func() - ssc.start() - runAfterStart() - } + ssc.checkpoint(checkpointPath) - ssc.awaitTerminationOrTimeout(timeout) - ssc.stop() + kSparkStreamingSession = KSparkStreamingSession(ssc) + func(kSparkStreamingSession!!) + + ssc } + ssc.start() + kSparkStreamingSession?.invokeRunAfterStart() + ssc.awaitTerminationOrTimeout(timeout) + ssc.stop() + } else { + val sc = SparkConf() + .setAppName(appName) + .setMaster(master) + .setAll( + props + .map { (key, value) -> Tuple2(key, value.toString()) } + .asScalaIterable() + ) + val ssc = JavaStreamingContext(sc, batchDuration) + val kSparkStreamingSession = KSparkStreamingSession(ssc) + + func(kSparkStreamingSession) + ssc.start() + kSparkStreamingSession.invokeRunAfterStart() + + ssc.awaitTerminationOrTimeout(timeout) + ssc.stop() } } + /** * Broadcast a read-only variable to the cluster, returning a * [org.apache.spark.broadcast.Broadcast] object for reading it in distributed functions. diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt index 74aaa520..ccba0e01 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt @@ -47,1176 +47,6 @@ fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream< fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = mapToPair(Pair::toTuple) -///** -// * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to -// * generate the RDDs with `numPartitions` partitions. -// */ -//@JvmName("groupByKeyArity2") -//fun JavaDStreamLike, *, *>.groupByKey( -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .groupByKey(numPartitions) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying `groupByKey` on each RDD. The supplied -// * org.apache.spark.Partitioner is used to control the partitioning of each RDD. -// */ -//@JvmName("groupByKeyArity2") -//fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = -// mapToPair { it.toTuple() } -// .groupByKey(partitioner) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are -// * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs -// * with `numPartitions` partitions. -// */ -//@JvmName("reduceByKeyArity2") -//fun JavaDStreamLike, *, *>.reduceByKey( -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKey(reduceFunc, numPartitions) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are -// * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control -// * the partitioning of each RDD. -// */ -//@JvmName("reduceByKeyArity2") -//fun JavaDStreamLike, *, *>.reduceByKey( -// partitioner: Partitioner, -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKey(reduceFunc, partitioner) -// .map { it.toArity() } -// -///** -// * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the -// * combineByKey for RDDs. Please refer to combineByKey in -// * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. -// */ -//@JvmName("combineByKeyArity2") -//fun JavaDStreamLike, *, *>.combineByKey( -// createCombiner: (V) -> C, -// mergeValue: (C, V) -> C, -// mergeCombiner: (C, C) -> C, -// partitioner: Partitioner, -// mapSideCombine: Boolean = true, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. -// * Similar to `DStream.groupByKey()`, but applies it over a sliding window. -// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param numPartitions number of partitions of each RDD in the new DStream; if not specified -// * then Spark's default number of partitions will be used -// */ -//@JvmName("groupByKeyAndWindowArity2") -//fun JavaDStreamLike, *, *>.groupByKeyAndWindow( -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) -// .map { it.toArity() } -// -///** -// * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. -// * Similar to `DStream.groupByKey()`, but applies it over a sliding window. -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param partitioner partitioner for controlling the partitioning of each RDD in the new -// * DStream. -// */ -//@JvmName("groupByKeyAndWindowArity2") -//fun JavaDStreamLike, *, *>.groupByKeyAndWindow( -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// partitioner: Partitioner, -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to -// * `DStream.reduceByKey()` but applies it over a sliding window. Hash partitioning is used to -// * generate the RDDs with `numPartitions` partitions. -// * @param reduceFunc associative and commutative reduce function -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param numPartitions number of partitions of each RDD in the new DStream. -// */ -//@JvmName("reduceByKeyAndWindowArity2") -//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to -// * `DStream.reduceByKey()`, but applies it over a sliding window. -// * @param reduceFunc associative and commutative reduce function -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param partitioner partitioner for controlling the partitioning of each RDD -// * in the new DStream. -// */ -//@JvmName("reduceByKeyAndWindowArity2") -//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// partitioner: Partitioner, -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying incremental `reduceByKey` over a sliding window. -// * The reduced value of over a new window is calculated using the old window's reduced value : -// * 1. reduce the new values that entered the window (e.g., adding new counts) -// * -// * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) -// * -// * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. -// * However, it is applicable to only "invertible reduce functions". -// * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. -// * @param reduceFunc associative and commutative reduce function -// * @param invReduceFunc inverse reduce function; such that for all y, invertible x: -// * `invReduceFunc(reduceFunc(x, y), x) = y` -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param filterFunc Optional function to filter expired key-value pairs; -// * only pairs that satisfy the function are retained -// */ -//@JvmName("reduceByKeyAndWindowArity2") -//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( -// invReduceFunc: (V, V) -> V, -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -// filterFunc: ((Arity2) -> Boolean)? = null, -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKeyAndWindow( -// reduceFunc, -// invReduceFunc, -// windowDuration, -// slideDuration, -// numPartitions, -// filterFunc?.let { -// { tuple -> -// filterFunc(tuple.toArity()) -// } -// } -// ) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying incremental `reduceByKey` over a sliding window. -// * The reduced value of over a new window is calculated using the old window's reduced value : -// * 1. reduce the new values that entered the window (e.g., adding new counts) -// * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) -// * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. -// * However, it is applicable to only "invertible reduce functions". -// * @param reduceFunc associative and commutative reduce function -// * @param invReduceFunc inverse reduce function -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param partitioner partitioner for controlling the partitioning of each RDD in the new -// * DStream. -// * @param filterFunc Optional function to filter expired key-value pairs; -// * only pairs that satisfy the function are retained -// */ -//@JvmName("reduceByKeyAndWindowArity2") -//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( -// invReduceFunc: (V, V) -> V, -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// partitioner: Partitioner, -// filterFunc: ((Arity2) -> Boolean)? = null, -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKeyAndWindow( -// reduceFunc, -// invReduceFunc, -// windowDuration, -// slideDuration, -// partitioner, -// filterFunc?.let { -// { tuple -> -// filterFunc(tuple.toArity()) -// } -// } -// ) -// .map { it.toArity() } -// -///** -// * Return a [MapWithStateDStream] by applying a function to every key-value element of -// * `this` stream, while maintaining some state data for each unique key. The mapping function -// * and other specification (e.g. partitioners, timeouts, initial state data, etc.) of this -// * transformation can be specified using `StateSpec` class. The state data is accessible in -// * as a parameter of type `State` in the mapping function. -// * -// * Example of using `mapWithState`: -// * {{{ -// * // A mapping function that maintains an integer state and return a String -// * def mappingFunction(key: String, value: Option[Int], state: State[Int]): Option[String] = { -// * // Use state.exists(), state.get(), state.update() and state.remove() -// * // to manage state, and return the necessary string -// * } -// * -// * val spec = StateSpec.function(mappingFunction).numPartitions(10) -// * -// * val mapWithStateDStream = keyValueDStream.mapWithState[StateType, MappedType](spec) -// * }}} -// * -// * @param spec Specification of this transformation -// * @tparam StateType Class type of the state data -// * @tparam MappedType Class type of the mapped data -// */ -//@JvmName("mapWithStateArity2") -//fun JavaDStreamLike, *, *>.mapWithState( -// spec: StateSpec, -//): JavaMapWithStateDStream = -// mapToPair { it.toTuple() } -// .mapWithState(spec) -// -///** -// * Return a new "state" DStream where the state for each key is updated by applying -// * the given function on the previous state of the key and the new values of each key. -// * In every batch the updateFunc will be called for each state even if there are no new values. -// * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. -// * @param updateFunc State update function. If `this` function returns None, then -// * corresponding state key-value pair will be eliminated. -// * @tparam S State type -// */ -//@JvmName("updateStateByKeyArity2") -//fun JavaDStreamLike, *, *>.updateStateByKey( -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -// updateFunc: (List, S?) -> S?, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .updateStateByKey( -// { list: List, s: Optional -> -// updateFunc(list, s.getOrNull()).asOptional() -// }, -// numPartitions, -// ) -// .map { it.toArity() } -// -///** -// * Return a new "state" DStream where the state for each key is updated by applying -// * the given function on the previous state of the key and the new values of each key. -// * In every batch the updateFunc will be called for each state even if there are no new values. -// * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. -// * @param updateFunc State update function. Note, that this function may generate a different -// * tuple with a different key than the input key. Therefore keys may be removed -// * or added in this way. It is up to the developer to decide whether to -// * remember the partitioner despite the key being changed. -// * @param partitioner Partitioner for controlling the partitioning of each RDD in the new -// * DStream -// * @tparam S State type -// */ -//@JvmName("updateStateByKeyArity2") -//fun JavaDStreamLike, *, *>.updateStateByKey( -// partitioner: Partitioner, -// updateFunc: (List, S?) -> S?, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .updateStateByKey( -// { list: List, s: Optional -> -// updateFunc(list, s.getOrNull()).asOptional() -// }, -// partitioner, -// ) -// .map { it.toArity() } -// -///** -// * Return a new "state" DStream where the state for each key is updated by applying -// * the given function on the previous state of the key and the new values of the key. -// * org.apache.spark.Partitioner is used to control the partitioning of each RDD. -// * @param updateFunc State update function. If `this` function returns None, then -// * corresponding state key-value pair will be eliminated. -// * @param partitioner Partitioner for controlling the partitioning of each RDD in the new -// * DStream. -// * @param initialRDD initial state value of each key. -// * @tparam S State type -// */ -//@JvmName("updateStateByKeyArity2") -//fun JavaDStreamLike, *, *>.updateStateByKey( -// partitioner: Partitioner, -// initialRDD: JavaRDD>, -// updateFunc: (List, S?) -> S?, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .updateStateByKey( -// { list: List, s: Optional -> -// updateFunc(list, s.getOrNull()).asOptional() -// }, -// partitioner, -// initialRDD.mapToPair { it.toTuple() }, -// ) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying a map function to the value of each key-value pairs in -// * 'this' DStream without changing the key. -// */ -//@JvmName("mapValuesArity2") -//fun JavaDStreamLike, *, *>.mapValues( -// mapValuesFunc: (V) -> U, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .mapValues(mapValuesFunc) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying a flatmap function to the value of each key-value pairs in -// * 'this' DStream without changing the key. -// */ -//@JvmName("flatMapValuesArity2") -//fun JavaDStreamLike, *, *>.flatMapValues( -// flatMapValuesFunc: (V) -> Iterator, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .flatMapValues(flatMapValuesFunc) -// .map { it.toArity() } -// -///** -// * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. -// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. -// */ -//@JvmName("cogroupArity2") -//fun JavaDStreamLike, *, *>.cogroup( -// other: JavaDStreamLike, *, *>, -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream, Iterable>>> = -// mapToPair { it.toTuple() } -// .cogroup( -// other.mapToPair { it.toTuple() }, -// numPartitions, -// ) -// .map { -// c(it._1, it._2.toArity()) -// } -// -///** -// * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. -// * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. -// */ -//@JvmName("cogroupArity2") -//fun JavaDStreamLike, *, *>.cogroup( -// other: JavaDStreamLike, *, *>, -// partitioner: Partitioner, -//): JavaDStream, Iterable>>> = -// mapToPair { it.toTuple() } -// .cogroup( -// other.mapToPair { it.toTuple() }, -// partitioner, -// ) -// .map { -// c(it._1, it._2.toArity()) -// } -// -///** -// * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. -// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. -// */ -//@JvmName("joinArity2") -//fun JavaDStreamLike, *, *>.join( -// other: JavaDStreamLike, *, *>, -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .join( -// other.mapToPair { it.toTuple() }, -// numPartitions, -// ) -// .map { -// c(it._1, it._2.toArity()) -// } -// -///** -// * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. -// * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. -// */ -//@JvmName("joinArity2") -//fun JavaDStreamLike, *, *>.join( -// other: JavaDStreamLike, *, *>, -// partitioner: Partitioner, -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .join( -// other.mapToPair { it.toTuple() }, -// partitioner, -// ) -// .map { -// c(it._1, it._2.toArity()) -// } -// -///** -// * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and -// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` -// * partitions. -// */ -//@JvmName("leftOuterJoinArity2") -//fun JavaDStreamLike, *, *>.leftOuterJoin( -// other: JavaDStreamLike, *, *>, -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .leftOuterJoin( -// other.mapToPair { it.toTuple() }, -// numPartitions, -// ) -// .map { -// c(it._1, c(it._2._1, it._2._2.getOrNull())) -// } -// -///** -// * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and -// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control -// * the partitioning of each RDD. -// */ -//@JvmName("leftOuterJoinArity2") -//fun JavaDStreamLike, *, *>.leftOuterJoin( -// other: JavaDStreamLike, *, *>, -// partitioner: Partitioner, -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .leftOuterJoin( -// other.mapToPair { it.toTuple() }, -// partitioner, -// ) -// .map { -// c(it._1, c(it._2._1, it._2._2.getOrNull())) -// } -// -///** -// * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and -// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` -// * partitions. -// */ -//@JvmName("rightOuterJoinArity2") -//fun JavaDStreamLike, *, *>.rightOuterJoin( -// other: JavaDStreamLike, *, *>, -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .rightOuterJoin( -// other.mapToPair { it.toTuple() }, -// numPartitions, -// ) -// .map { -// c(it._1, c(it._2._1.getOrNull(), it._2._2)) -// } -// -///** -// * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and -// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control -// * the partitioning of each RDD. -// */ -//@JvmName("rightOuterJoinArity2") -//fun JavaDStreamLike, *, *>.rightOuterJoin( -// other: JavaDStreamLike, *, *>, -// partitioner: Partitioner, -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .rightOuterJoin( -// other.mapToPair { it.toTuple() }, -// partitioner, -// ) -// .map { -// c(it._1, c(it._2._1.getOrNull(), it._2._2)) -// } -// -///** -// * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and -// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` -// * partitions. -// */ -//@JvmName("fullOuterJoinArity2") -//fun JavaDStreamLike, *, *>.fullOuterJoin( -// other: JavaDStreamLike, *, *>, -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .fullOuterJoin( -// other.mapToPair { it.toTuple() }, -// numPartitions, -// ) -// .map { -// c(it._1, c(it._2._1.getOrNull(), it._2._2.getOrNull())) -// } -// -///** -// * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and -// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control -// * the partitioning of each RDD. -// */ -//@JvmName("fullOuterJoinArity2") -//fun JavaDStreamLike, *, *>.fullOuterJoin( -// other: JavaDStreamLike, *, *>, -// partitioner: Partitioner, -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .fullOuterJoin( -// other.mapToPair { it.toTuple() }, -// partitioner, -// ) -// .map { -// c(it._1, c(it._2._1.getOrNull(), it._2._2.getOrNull())) -// } -// -///** -// * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is -// * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". -// */ -//@JvmName("saveAsHadoopFilesArity2") -//fun JavaDStreamLike, *, *>.saveAsHadoopFiles( -// prefix: String, suffix: String, -//): Unit = -// mapToPair { it.toTuple() } -// .saveAsHadoopFiles(prefix, suffix) -// -///** -// * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is -// * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". -// */ -//@JvmName("saveAsNewAPIHadoopFilesArity2") -//fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( -// prefix: String, suffix: String, -//): Unit = -// mapToPair { it.toTuple() } -// .saveAsNewAPIHadoopFiles(prefix, suffix) -// -// -///** -// * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to -// * generate the RDDs with `numPartitions` partitions. -// */ -//@JvmName("groupByKeyPair") -//fun JavaDStreamLike, *, *>.groupByKey( -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .groupByKey(numPartitions) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying `groupByKey` on each RDD. The supplied -// * org.apache.spark.Partitioner is used to control the partitioning of each RDD. -// */ -//@JvmName("groupByKeyPair") -//fun JavaDStreamLike, *, *>.groupByKey(partitioner: Partitioner): JavaDStream>> = -// mapToPair { it.toTuple() } -// .groupByKey(partitioner) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are -// * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs -// * with `numPartitions` partitions. -// */ -//@JvmName("reduceByKeyPair") -//fun JavaDStreamLike, *, *>.reduceByKey( -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKey(reduceFunc, numPartitions) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are -// * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control -// * the partitioning of each RDD. -// */ -//@JvmName("reduceByKeyPair") -//fun JavaDStreamLike, *, *>.reduceByKey( -// partitioner: Partitioner, -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKey(reduceFunc, partitioner) -// .map { it.toPair() } -// -///** -// * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the -// * combineByKey for RDDs. Please refer to combineByKey in -// * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. -// */ -//@JvmName("combineByKeyPair") -//fun JavaDStreamLike, *, *>.combineByKey( -// createCombiner: (V) -> C, -// mergeValue: (C, V) -> C, -// mergeCombiner: (C, C) -> C, -// partitioner: Partitioner, -// mapSideCombine: Boolean = true, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. -// * Similar to `DStream.groupByKey()`, but applies it over a sliding window. -// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param numPartitions number of partitions of each RDD in the new DStream; if not specified -// * then Spark's default number of partitions will be used -// */ -//@JvmName("groupByKeyAndWindowPair") -//fun JavaDStreamLike, *, *>.groupByKeyAndWindow( -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) -// .map { it.toPair() } -// -///** -// * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. -// * Similar to `DStream.groupByKey()`, but applies it over a sliding window. -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param partitioner partitioner for controlling the partitioning of each RDD in the new -// * DStream. -// */ -//@JvmName("groupByKeyAndWindowPair") -//fun JavaDStreamLike, *, *>.groupByKeyAndWindow( -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// partitioner: Partitioner, -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to -// * `DStream.reduceByKey()` but applies it over a sliding window. Hash partitioning is used to -// * generate the RDDs with `numPartitions` partitions. -// * @param reduceFunc associative and commutative reduce function -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param numPartitions number of partitions of each RDD in the new DStream. -// */ -//@JvmName("reduceByKeyAndWindowPair") -//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to -// * `DStream.reduceByKey()`, but applies it over a sliding window. -// * @param reduceFunc associative and commutative reduce function -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param partitioner partitioner for controlling the partitioning of each RDD -// * in the new DStream. -// */ -//@JvmName("reduceByKeyAndWindowPair") -//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// partitioner: Partitioner, -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying incremental `reduceByKey` over a sliding window. -// * The reduced value of over a new window is calculated using the old window's reduced value : -// * 1. reduce the new values that entered the window (e.g., adding new counts) -// * -// * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) -// * -// * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. -// * However, it is applicable to only "invertible reduce functions". -// * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. -// * @param reduceFunc associative and commutative reduce function -// * @param invReduceFunc inverse reduce function; such that for all y, invertible x: -// * `invReduceFunc(reduceFunc(x, y), x) = y` -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param filterFunc Optional function to filter expired key-value pairs; -// * only pairs that satisfy the function are retained -// */ -//@JvmName("reduceByKeyAndWindowPair") -//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( -// invReduceFunc: (V, V) -> V, -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -// filterFunc: ((Pair) -> Boolean)? = null, -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKeyAndWindow( -// reduceFunc, -// invReduceFunc, -// windowDuration, -// slideDuration, -// numPartitions, -// filterFunc?.let { -// { tuple -> -// filterFunc(tuple.toPair()) -// } -// } -// ) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying incremental `reduceByKey` over a sliding window. -// * The reduced value of over a new window is calculated using the old window's reduced value : -// * 1. reduce the new values that entered the window (e.g., adding new counts) -// * 2. "inverse reduce" the old values that left the window (e.g., subtracting old counts) -// * This is more efficient than reduceByKeyAndWindow without "inverse reduce" function. -// * However, it is applicable to only "invertible reduce functions". -// * @param reduceFunc associative and commutative reduce function -// * @param invReduceFunc inverse reduce function -// * @param windowDuration width of the window; must be a multiple of this DStream's -// * batching interval -// * @param slideDuration sliding interval of the window (i.e., the interval after which -// * the new DStream will generate RDDs); must be a multiple of this -// * DStream's batching interval -// * @param partitioner partitioner for controlling the partitioning of each RDD in the new -// * DStream. -// * @param filterFunc Optional function to filter expired key-value pairs; -// * only pairs that satisfy the function are retained -// */ -//@JvmName("reduceByKeyAndWindowPair") -//fun JavaDStreamLike, *, *>.reduceByKeyAndWindow( -// invReduceFunc: (V, V) -> V, -// windowDuration: Duration, -// slideDuration: Duration = dstream().slideDuration(), -// partitioner: Partitioner, -// filterFunc: ((Pair) -> Boolean)? = null, -// reduceFunc: (V, V) -> V, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .reduceByKeyAndWindow( -// reduceFunc, -// invReduceFunc, -// windowDuration, -// slideDuration, -// partitioner, -// filterFunc?.let { -// { tuple -> -// filterFunc(tuple.toPair()) -// } -// } -// ) -// .map { it.toPair() } -// -///** -// * Return a [MapWithStateDStream] by applying a function to every key-value element of -// * `this` stream, while maintaining some state data for each unique key. The mapping function -// * and other specification (e.g. partitioners, timeouts, initial state data, etc.) of this -// * transformation can be specified using `StateSpec` class. The state data is accessible in -// * as a parameter of type `State` in the mapping function. -// * -// * Example of using `mapWithState`: -// * {{{ -// * // A mapping function that maintains an integer state and return a String -// * def mappingFunction(key: String, value: Option[Int], state: State[Int]): Option[String] = { -// * // Use state.exists(), state.get(), state.update() and state.remove() -// * // to manage state, and return the necessary string -// * } -// * -// * val spec = StateSpec.function(mappingFunction).numPartitions(10) -// * -// * val mapWithStateDStream = keyValueDStream.mapWithState[StateType, MappedType](spec) -// * }}} -// * -// * @param spec Specification of this transformation -// * @tparam StateType Class type of the state data -// * @tparam MappedType Class type of the mapped data -// */ -//@JvmName("mapWithStatePair") -//fun JavaDStreamLike, *, *>.mapWithState( -// spec: StateSpec, -//): JavaMapWithStateDStream = -// mapToPair { it.toTuple() } -// .mapWithState(spec) -// -///** -// * Return a new "state" DStream where the state for each key is updated by applying -// * the given function on the previous state of the key and the new values of each key. -// * In every batch the updateFunc will be called for each state even if there are no new values. -// * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. -// * @param updateFunc State update function. If `this` function returns None, then -// * corresponding state key-value pair will be eliminated. -// * @tparam S State type -// */ -//@JvmName("updateStateByKeyPair") -//fun JavaDStreamLike, *, *>.updateStateByKey( -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -// updateFunc: (List, S?) -> S?, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .updateStateByKey( -// { list: List, s: Optional -> -// updateFunc(list, s.getOrNull()).asOptional() -// }, -// numPartitions, -// ) -// .map { it.toPair() } -// -///** -// * Return a new "state" DStream where the state for each key is updated by applying -// * the given function on the previous state of the key and the new values of each key. -// * In every batch the updateFunc will be called for each state even if there are no new values. -// * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. -// * @param updateFunc State update function. Note, that this function may generate a different -// * tuple with a different key than the input key. Therefore keys may be removed -// * or added in this way. It is up to the developer to decide whether to -// * remember the partitioner despite the key being changed. -// * @param partitioner Partitioner for controlling the partitioning of each RDD in the new -// * DStream -// * @tparam S State type -// */ -//@JvmName("updateStateByKeyPair") -//fun JavaDStreamLike, *, *>.updateStateByKey( -// partitioner: Partitioner, -// updateFunc: (List, S?) -> S?, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .updateStateByKey( -// { list: List, s: Optional -> -// updateFunc(list, s.getOrNull()).asOptional() -// }, -// partitioner, -// ) -// .map { it.toPair() } -// -///** -// * Return a new "state" DStream where the state for each key is updated by applying -// * the given function on the previous state of the key and the new values of the key. -// * org.apache.spark.Partitioner is used to control the partitioning of each RDD. -// * @param updateFunc State update function. If `this` function returns None, then -// * corresponding state key-value pair will be eliminated. -// * @param partitioner Partitioner for controlling the partitioning of each RDD in the new -// * DStream. -// * @param initialRDD initial state value of each key. -// * @tparam S State type -// */ -//@JvmName("updateStateByKeyPair") -//fun JavaDStreamLike, *, *>.updateStateByKey( -// partitioner: Partitioner, -// initialRDD: JavaRDD>, -// updateFunc: (List, S?) -> S?, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .updateStateByKey( -// { list: List, s: Optional -> -// updateFunc(list, s.getOrNull()).asOptional() -// }, -// partitioner, -// initialRDD.mapToPair { it.toTuple() }, -// ) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying a map function to the value of each key-value pairs in -// * 'this' DStream without changing the key. -// */ -//@JvmName("mapValuesPair") -//fun JavaDStreamLike, *, *>.mapValues( -// mapValuesFunc: (V) -> U, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .mapValues(mapValuesFunc) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying a flatmap function to the value of each key-value pairs in -// * 'this' DStream without changing the key. -// */ -//@JvmName("flatMapValuesPair") -//fun JavaDStreamLike, *, *>.flatMapValues( -// flatMapValuesFunc: (V) -> Iterator, -//): JavaDStream> = -// mapToPair { it.toTuple() } -// .flatMapValues(flatMapValuesFunc) -// .map { it.toPair() } -// -///** -// * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. -// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. -// */ -//@JvmName("cogroupPair") -//fun JavaDStreamLike, *, *>.cogroup( -// other: JavaDStreamLike, *, *>, -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream, Iterable>>> = -// mapToPair { it.toTuple() } -// .cogroup( -// other.mapToPair { it.toTuple() }, -// numPartitions, -// ) -// .map { -// Pair(it._1, it._2.toPair()) -// } -// -///** -// * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. -// * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. -// */ -//@JvmName("cogroupPair") -//fun JavaDStreamLike, *, *>.cogroup( -// other: JavaDStreamLike, *, *>, -// partitioner: Partitioner, -//): JavaDStream, Iterable>>> = -// mapToPair { it.toTuple() } -// .cogroup( -// other.mapToPair { it.toTuple() }, -// partitioner, -// ) -// .map { -// Pair(it._1, it._2.toPair()) -// } -// -///** -// * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. -// * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. -// */ -//@JvmName("joinPair") -//fun JavaDStreamLike, *, *>.join( -// other: JavaDStreamLike, *, *>, -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .join( -// other.mapToPair { it.toTuple() }, -// numPartitions, -// ) -// .map { -// Pair(it._1, it._2.toPair()) -// } -// -///** -// * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. -// * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. -// */ -//@JvmName("joinPair") -//fun JavaDStreamLike, *, *>.join( -// other: JavaDStreamLike, *, *>, -// partitioner: Partitioner, -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .join( -// other.mapToPair { it.toTuple() }, -// partitioner, -// ) -// .map { -// Pair(it._1, it._2.toPair()) -// } -// -///** -// * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and -// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` -// * partitions. -// */ -//@JvmName("leftOuterJoinPair") -//fun JavaDStreamLike, *, *>.leftOuterJoin( -// other: JavaDStreamLike, *, *>, -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .leftOuterJoin( -// other.mapToPair { it.toTuple() }, -// numPartitions, -// ) -// .map { -// Pair(it._1, Pair(it._2._1, it._2._2.getOrNull())) -// } -// -///** -// * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and -// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control -// * the partitioning of each RDD. -// */ -//@JvmName("leftOuterJoinPair") -//fun JavaDStreamLike, *, *>.leftOuterJoin( -// other: JavaDStreamLike, *, *>, -// partitioner: Partitioner, -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .leftOuterJoin( -// other.mapToPair { it.toTuple() }, -// partitioner, -// ) -// .map { -// Pair(it._1, Pair(it._2._1, it._2._2.getOrNull())) -// } -// -///** -// * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and -// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` -// * partitions. -// */ -//@JvmName("rightOuterJoinPair") -//fun JavaDStreamLike, *, *>.rightOuterJoin( -// other: JavaDStreamLike, *, *>, -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .rightOuterJoin( -// other.mapToPair { it.toTuple() }, -// numPartitions, -// ) -// .map { -// Pair(it._1, Pair(it._2._1.getOrNull(), it._2._2)) -// } -// -///** -// * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and -// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control -// * the partitioning of each RDD. -// */ -//@JvmName("rightOuterJoinPair") -//fun JavaDStreamLike, *, *>.rightOuterJoin( -// other: JavaDStreamLike, *, *>, -// partitioner: Partitioner, -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .rightOuterJoin( -// other.mapToPair { it.toTuple() }, -// partitioner, -// ) -// .map { -// Pair(it._1, Pair(it._2._1.getOrNull(), it._2._2)) -// } -// -///** -// * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and -// * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` -// * partitions. -// */ -//@JvmName("fullOuterJoinPair") -//fun JavaDStreamLike, *, *>.fullOuterJoin( -// other: JavaDStreamLike, *, *>, -// numPartitions: Int = dstream().ssc().sc().defaultParallelism(), -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .fullOuterJoin( -// other.mapToPair { it.toTuple() }, -// numPartitions, -// ) -// .map { -// Pair(it._1, Pair(it._2._1.getOrNull(), it._2._2.getOrNull())) -// } -// -///** -// * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and -// * `other` DStream. The supplied org.apache.spark.Partitioner is used to control -// * the partitioning of each RDD. -// */ -//@JvmName("fullOuterJoinPair") -//fun JavaDStreamLike, *, *>.fullOuterJoin( -// other: JavaDStreamLike, *, *>, -// partitioner: Partitioner, -//): JavaDStream>> = -// mapToPair { it.toTuple() } -// .fullOuterJoin( -// other.mapToPair { it.toTuple() }, -// partitioner, -// ) -// .map { -// Pair(it._1, Pair(it._2._1.getOrNull(), it._2._2.getOrNull())) -// } -// -///** -// * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is -// * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". -// */ -//@JvmName("saveAsHadoopFilesPair") -//fun JavaDStreamLike, *, *>.saveAsHadoopFiles( -// prefix: String, suffix: String, -//): Unit = -// mapToPair { it.toTuple() } -// .saveAsHadoopFiles(prefix, suffix) -// -///** -// * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is -// * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". -// */ -//@JvmName("saveAsNewAPIHadoopFilesPair") -//fun JavaDStreamLike, *, *>.saveAsNewAPIHadoopFiles( -// prefix: String, suffix: String, -//): Unit = -// mapToPair { it.toTuple() } -// .saveAsNewAPIHadoopFiles(prefix, suffix) - - /** * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to * generate the RDDs with `numPartitions` partitions. From ec78459d7002e3eee9d51a460c3bfd4ee3557f9b Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 1 Apr 2022 14:19:21 +0200 Subject: [PATCH 122/213] moved tuples to separate module --- kotlin-spark-api/3.2/pom_2.12.xml | 10 ++ .../kotlinx/spark/api/Conversions.kt | 21 --- .../kotlinx/spark/api/DatasetFunctionTest.kt | 4 +- pom_2.12.xml | 6 + scala-tuples-in-kotlin/pom_2.12.xml | 159 ++++++++++++++++++ .../kotlinx/spark/api/Conversions.kt | 51 ++++++ .../api/tuples/DestructuredTupleBuilders.kt | 0 .../kotlinx/spark/api/tuples/DropFunctions.kt | 0 .../kotlinx/spark/api/tuples/EmptyTuple.kt | 0 .../kotlinx/spark/api/tuples/MapTuples.kt | 19 +++ .../spark/api/tuples/ProductDestructuring.kt | 0 .../spark/api/tuples/ProductExtensions.kt | 0 .../api/tuples/ProductTextualAccessors.kt | 0 .../kotlinx/spark/api/tuples/TupleBuilders.kt | 0 .../spark/api/tuples/TupleConcatenation.kt | 0 .../kotlinx/spark/api/tuples/TupleCopy.kt | 0 .../kotlinx/spark/api/tuples/TupleDrop.kt | 19 +++ .../spark/api/tuples/TupleExtending.kt | 0 .../kotlinx/spark/api/tuples/TupleSplit.kt | 19 +++ .../kotlinx/spark/api/tuples/TupleTake.kt | 19 +++ .../kotlinx/spark/api/tuples/TupleZip.kt | 19 +++ .../api/tuples/TypedProductExtensions.kt | 0 .../kotlinx/spark/api/tuples}/TuplesTest.kt | 23 ++- 23 files changed, 344 insertions(+), 25 deletions(-) create mode 100644 scala-tuples-in-kotlin/pom_2.12.xml create mode 100644 scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt (100%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt (100%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt (100%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt (96%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductDestructuring.kt (100%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt (100%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt (100%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt (100%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt (100%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt (100%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt (99%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt (100%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt (99%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt (99%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt (99%) rename {kotlin-spark-api/3.2 => scala-tuples-in-kotlin}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TypedProductExtensions.kt (100%) rename {kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api => scala-tuples-in-kotlin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/tuples}/TuplesTest.kt (91%) diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 826547d2..e07de9d9 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -27,6 +27,10 @@ org.jetbrains.kotlinx.spark core-3.2_${scala.compat.version} + + org.jetbrains.kotlinx.spark + scala-tuples-in-kotlin + @@ -75,6 +79,7 @@ src/test/kotlin target/${scala.compat.version} + org.jetbrains.kotlin kotlin-maven-plugin @@ -93,10 +98,12 @@ + org.apache.maven.plugins maven-surefire-plugin + org.jetbrains.dokka dokka-maven-plugin @@ -121,6 +128,7 @@ + io.qameta.allure allure-maven @@ -128,10 +136,12 @@ ${project.basedir}/allure-results/${scala.compat.version} + org.jacoco jacoco-maven-plugin + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index 62e9f221..fb6b4d29 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -167,45 +167,24 @@ fun ScalaConcurrentMap.asKotlinConcurrentMap(): ConcurrentMap JavaConverters.mapAsJavaConcurrentMap(this) -/** - * Returns a new [Tuple2] based on the arguments in the current [Pair]. - */ -fun Pair.toTuple(): Tuple2 = Tuple2(first, second) - /** * Returns a new [Arity2] based on the arguments in the current [Pair]. */ @Deprecated("Use Scala tuples instead.", ReplaceWith("this.toTuple()", "scala.Tuple2")) fun Pair.toArity(): Arity2 = Arity2(first, second) -/** - * Returns a new [Pair] based on the arguments in the current [Tuple2]. - */ -fun Tuple2.toPair(): Pair = Pair(_1(), _2()) - /** * Returns a new [Pair] based on the arguments in the current [Arity2]. */ @Deprecated("Use Scala tuples instead.", ReplaceWith("")) fun Arity2.toPair(): Pair = Pair(_1, _2) - -/** - * Returns a new [Tuple3] based on the arguments in the current [Triple]. - */ -fun Triple.toTuple(): Tuple3 = Tuple3(first, second, third) - /** * Returns a new [Arity3] based on the arguments in the current [Triple]. */ @Deprecated("Use Scala tuples instead.", ReplaceWith("this.toTuple()", "scala.Tuple3")) fun Triple.toArity(): Arity3 = Arity3(first, second, third) -/** - * Returns a new [Triple] based on the arguments in the current [Tuple3]. - */ -fun Tuple3.toTriple(): Triple = Triple(_1(), _2(), _3()) - /** * Returns a new [Triple] based on the arguments in the current [Arity3]. */ diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt index 2459d5e7..26dcceaf 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt @@ -215,11 +215,11 @@ class DatasetFunctionTest : ShouldSpec({ } should("be able to cogroup grouped datasets") { - val groupedDataset1 = listOf(1 to "a", 1 to "b", 2 to "c").map { it.toTuple() } + val groupedDataset1 = listOf(1 X "a", 1 X "b", 2 X "c") .toDS() .groupByKey { it._1 } - val groupedDataset2 = listOf(1 to "d", 5 to "e", 3 to "f").map { it.toTuple() } + val groupedDataset2 = listOf(1 X "d", 5 X "e", 3 X "f") .toDS() .groupByKey { it._1 } diff --git a/pom_2.12.xml b/pom_2.12.xml index 87e64994..29b854c1 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -20,6 +20,7 @@ core/3.2/pom_2.12.xml + scala-tuples-in-kotlin/pom_2.12.xml kotlin-spark-api/3.2/pom_2.12.xml examples/pom-3.2_2.12.xml @@ -31,6 +32,11 @@ core-3.2_${scala.compat.version} ${project.version} + + org.jetbrains.kotlinx.spark + scala-tuples-in-kotlin + ${project.version} + diff --git a/scala-tuples-in-kotlin/pom_2.12.xml b/scala-tuples-in-kotlin/pom_2.12.xml new file mode 100644 index 00000000..c06aa7ee --- /dev/null +++ b/scala-tuples-in-kotlin/pom_2.12.xml @@ -0,0 +1,159 @@ + + + 4.0.0 + + Kotlin Spark API: Scala Tuples in Kotlin + Scala Tuple helper functions for kotlin + scala-tuples-in-kotlin + + org.jetbrains.kotlinx.spark + kotlin-spark-api-parent_2.12 + 1.0.4-SNAPSHOT + ../pom_2.12.xml + + + + + org.scala-lang + scala-library + ${scala.version} + + + org.jetbrains.kotlin + kotlin-stdlib-jdk8 + ${kotlin.version} + + + + + io.kotest + kotest-runner-junit5-jvm + ${kotest.version} + test + + + io.kotest.extensions + kotest-extensions-allure + ${kotest-extension-allure.version} + test + + + com.beust + klaxon + ${klaxon.version} + test + + + ch.tutteli.atrium + atrium-fluent-en_GB + ${atrium.version} + test + + + + org.jetbrains.kotlin + kotlin-test + ${kotlin.version} + test + + + + + src/main/kotlin + src/test/kotlin + target/${scala.compat.version} + + + + org.jetbrains.dokka + dokka-maven-plugin + ${dokka.version} + + 8 + + + + dokka + + dokka + + pre-site + + + javadocjar + + javadocJar + + pre-integration-test + + + + + + org.jetbrains.kotlin + kotlin-maven-plugin + ${kotlin.version} + + + compile + compile + + compile + + + + test-compile + test-compile + + test-compile + + + + + 1.8 + + + + + org.apache.maven.plugins + maven-assembly-plugin + ${maven-assembly-plugin.version} + + + jar-with-dependencies + + + + org.jetbrains.spark.api.examples.WordCountKt + + + + + + + org.apache.maven.plugins + maven-site-plugin + + true + + + + + org.apache.maven.plugins + maven-deploy-plugin + + true + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + + true + + + + + + diff --git a/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt new file mode 100644 index 00000000..ce4f7e83 --- /dev/null +++ b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -0,0 +1,51 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.0+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2021 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ + +/** + * This files contains conversions of Tuples between the Scala- + * and Kotlin/Java variants. + */ + +@file:Suppress("NOTHING_TO_INLINE", "RemoveExplicitTypeArguments", "unused") + +package org.jetbrains.kotlinx.spark.api + +import scala.* + + +/** + * Returns a new [Tuple2] based on the arguments in the current [Pair]. + */ +fun Pair.toTuple(): Tuple2 = Tuple2(first, second) + +/** + * Returns a new [Pair] based on the arguments in the current [Tuple2]. + */ +fun Tuple2.toPair(): Pair = Pair(_1(), _2()) + +/** + * Returns a new [Tuple3] based on the arguments in the current [Triple]. + */ +fun Triple.toTuple(): Tuple3 = Tuple3(first, second, third) + +/** + * Returns a new [Triple] based on the arguments in the current [Tuple3]. + */ +fun Tuple3.toTriple(): Triple = Triple(_1(), _2(), _3()) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DestructuredTupleBuilders.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/DropFunctions.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/EmptyTuple.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt similarity index 96% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt index 2de50286..164ac92a 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt +++ b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/MapTuples.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Scala Tuples in Kotlin + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api.tuples import scala.* diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductDestructuring.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductDestructuring.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductDestructuring.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductDestructuring.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductExtensions.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/ProductTextualAccessors.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleBuilders.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleConcatenation.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleCopy.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt similarity index 99% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt index a0581778..e8ed8706 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt +++ b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleDrop.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Scala Tuples in Kotlin + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api.tuples import scala.* diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleExtending.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt similarity index 99% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt index 8cf59726..984bd213 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt +++ b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleSplit.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Scala Tuples in Kotlin + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api.tuples import scala.* diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt similarity index 99% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt index 706e1ef0..ffe7f18d 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt +++ b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleTake.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Scala Tuples in Kotlin + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api.tuples import scala.* diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt similarity index 99% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt index 49c70396..c4930996 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt +++ b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TupleZip.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Scala Tuples in Kotlin + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api.tuples import scala.* diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TypedProductExtensions.kt b/scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TypedProductExtensions.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TypedProductExtensions.kt rename to scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TypedProductExtensions.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt b/scala-tuples-in-kotlin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TuplesTest.kt similarity index 91% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt rename to scala-tuples-in-kotlin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TuplesTest.kt index a13f9c58..75d6f49a 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TuplesTest.kt +++ b/scala-tuples-in-kotlin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TuplesTest.kt @@ -1,4 +1,23 @@ -package org.jetbrains.kotlinx.spark.api +/*- + * =LICENSE= + * Kotlin Spark API: Scala Tuples in Kotlin + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api.tuples import io.kotest.assertions.throwables.shouldThrow import io.kotest.core.spec.style.ShouldSpec @@ -225,4 +244,4 @@ class TuplesTest : ShouldSpec({ interface Super class A : Super -class B : Super \ No newline at end of file +class B : Super From f523d9d1bd24f4deb6af67cb42f1a571a1a79024 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 1 Apr 2022 14:39:12 +0200 Subject: [PATCH 123/213] small streaming updates --- .../kotlinx/spark/api/SparkSession.kt | 9 +-- .../jetbrains/kotlinx/spark/api/Streaming.kt | 67 +++++++++---------- .../kotlinx/spark/api/StreamingTest.kt | 20 ++++-- 3 files changed, 51 insertions(+), 45 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 53d6a62e..85f43841 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -93,9 +93,10 @@ class KSparkStreamingSession(val ssc: JavaStreamingContext) { fun invokeRunAfterStart(): Unit = runAfterStart() - fun withSpark(sc: SparkConf, func: KSparkSession.() -> Unit) { + fun withSpark(sc: SparkConf, func: KSparkSession.() -> T): T { val spark = SparkSession.builder().config(sc).getOrCreate() - KSparkSession(spark).apply(func) + + return with(KSparkSession(spark), func) } /** @@ -106,7 +107,7 @@ class KSparkStreamingSession(val ssc: JavaStreamingContext) { * } * ``` */ - fun withSpark(ssc: JavaStreamingContext, func: KSparkSession.() -> Unit) = withSpark(ssc.sparkContext().conf, func) + fun withSpark(ssc: JavaStreamingContext, func: KSparkSession.() -> T): T = withSpark(ssc.sparkContext().conf, func) /** @@ -118,7 +119,7 @@ class KSparkStreamingSession(val ssc: JavaStreamingContext) { * } * ``` */ - fun withSpark(rdd: JavaRDDLike<*, *>, func: KSparkSession.() -> Unit) = withSpark(rdd.context().conf, func) + fun withSpark(rdd: JavaRDDLike<*, *>, func: KSparkSession.() -> T): T = withSpark(rdd.context().conf, func) } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt index ccba0e01..ea9b4df4 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt @@ -33,19 +33,18 @@ import org.apache.spark.streaming.dstream.DStream import scala.Tuple2 -@JvmName("tuple2ToPairDStream") fun JavaDStream>.toPairDStream(): JavaPairDStream = JavaPairDStream.fromJavaDStream(this) -fun JavaRDD>.toPairRDD(): JavaPairRDD = JavaPairRDD.fromJavaRDD(this) +fun JavaPairDStream.toTupleDStream(): JavaDStream> = + toJavaDStream() -@JvmName("arity2ToPairDStream") -fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = - mapToPair(Arity2::toTuple) +fun JavaRDD>.toPairRDD(): JavaPairRDD = + JavaPairRDD.fromJavaRDD(this) + +fun JavaPairRDD.toTupleRDD(): JavaRDD> = + JavaPairRDD.toRDD(this).toJavaRDD() -@JvmName("pairToPairDStream") -fun JavaDStreamLike, *, *>.toPairDStream(): JavaPairDStream = - mapToPair(Pair::toTuple) /** * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to @@ -57,7 +56,7 @@ fun JavaDStream>.groupByKey( ): JavaDStream>> = toPairDStream() .groupByKey(numPartitions) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying `groupByKey` on each RDD. The supplied @@ -67,7 +66,7 @@ fun JavaDStream>.groupByKey( fun JavaDStream>.groupByKey(partitioner: Partitioner): JavaDStream>> = toPairDStream() .groupByKey(partitioner) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are @@ -81,7 +80,7 @@ fun JavaDStream>.reduceByKey( ): JavaDStream> = toPairDStream() .reduceByKey(reduceFunc, numPartitions) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying `reduceByKey` to each RDD. The values for each key are @@ -95,7 +94,7 @@ fun JavaDStream>.reduceByKey( ): JavaDStream> = toPairDStream() .reduceByKey(reduceFunc, partitioner) - .toJavaDStream() + .toTupleDStream() /** * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the @@ -112,7 +111,7 @@ fun JavaDStream>.combineByKey( ): JavaDStream> = toPairDStream() .combineByKey(createCombiner, mergeValue, mergeCombiner, partitioner, mapSideCombine) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying `groupByKey` over a sliding window on `this` DStream. @@ -134,7 +133,7 @@ fun JavaDStream>.groupByKeyAndWindow( ): JavaDStream>> = toPairDStream() .groupByKeyAndWindow(windowDuration, slideDuration, numPartitions) - .toJavaDStream() + .toTupleDStream() /** * Create a new DStream by applying `groupByKey` over a sliding window on `this` DStream. @@ -155,7 +154,7 @@ fun JavaDStream>.groupByKeyAndWindow( ): JavaDStream>> = toPairDStream() .groupByKeyAndWindow(windowDuration, slideDuration, partitioner) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying `reduceByKey` over a sliding window. This is similar to @@ -178,7 +177,7 @@ fun JavaDStream>.reduceByKeyAndWindow( ): JavaDStream> = toPairDStream() .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, numPartitions) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying `reduceByKey` over a sliding window. Similar to @@ -201,7 +200,7 @@ fun JavaDStream>.reduceByKeyAndWindow( ): JavaDStream> = toPairDStream() .reduceByKeyAndWindow(reduceFunc, windowDuration, slideDuration, partitioner) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying incremental `reduceByKey` over a sliding window. @@ -246,7 +245,7 @@ fun JavaDStream>.reduceByKeyAndWindow( } } ) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying incremental `reduceByKey` over a sliding window. @@ -289,7 +288,7 @@ fun JavaDStream>.reduceByKeyAndWindow( } } ) - .toJavaDStream() + .toTupleDStream() /** * Return a [MapWithStateDStream] by applying a function to every key-value element of @@ -342,7 +341,7 @@ fun JavaDStream>.updateStateByKey( }, numPartitions, ) - .toJavaDStream() + .toTupleDStream() /** * Return a new "state" DStream where the state for each key is updated by applying @@ -369,7 +368,7 @@ fun JavaDStream>.updateStateByKey( }, partitioner, ) - .toJavaDStream() + .toTupleDStream() /** * Return a new "state" DStream where the state for each key is updated by applying @@ -396,7 +395,7 @@ fun JavaDStream>.updateStateByKey( partitioner, initialRDD.toPairRDD(), ) - .toJavaDStream() + .toTupleDStream() /** @@ -409,7 +408,7 @@ fun JavaDStream>.mapValues( ): JavaDStream> = toPairDStream() .mapValues(mapValuesFunc) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying a flatmap function to the value of each key-value pairs in @@ -421,7 +420,7 @@ fun JavaDStream>.flatMapValues( ): JavaDStream> = toPairDStream() .flatMapValues(flatMapValuesFunc) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. @@ -437,7 +436,7 @@ fun JavaDStream>.cogroup( other.toPairDStream(), numPartitions, ) - .toJavaDStream() + .toTupleDStream() /** @@ -454,7 +453,7 @@ fun JavaDStream>.cogroup( other.toPairDStream(), partitioner, ) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. @@ -470,7 +469,7 @@ fun JavaDStream>.join( other.toPairDStream(), numPartitions, ) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. @@ -486,7 +485,7 @@ fun JavaDStream>.join( other.toPairDStream(), partitioner, ) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and @@ -503,7 +502,7 @@ fun JavaDStream>.leftOuterJoin( other.toPairDStream(), numPartitions, ) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying 'left outer join' between RDDs of `this` DStream and @@ -520,7 +519,7 @@ fun JavaDStream>.leftOuterJoin( other.toPairDStream(), partitioner, ) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and @@ -537,7 +536,7 @@ fun JavaDStream>.rightOuterJoin( other.toPairDStream(), numPartitions, ) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying 'right outer join' between RDDs of `this` DStream and @@ -554,7 +553,7 @@ fun JavaDStream>.rightOuterJoin( other.toPairDStream(), partitioner, ) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and @@ -571,7 +570,7 @@ fun JavaDStream>.fullOuterJoin( other.toPairDStream(), numPartitions, ) - .toJavaDStream() + .toTupleDStream() /** * Return a new DStream by applying 'full outer join' between RDDs of `this` DStream and @@ -588,7 +587,7 @@ fun JavaDStream>.fullOuterJoin( other.toPairDStream(), partitioner, ) - .toJavaDStream() + .toTupleDStream() /** * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index d93c7d23..47d7c408 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -22,9 +22,11 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.shouldBe +import org.apache.spark.api.java.JavaRDD import org.apache.spark.streaming.Duration import java.io.Serializable import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.* import java.util.LinkedList @@ -40,19 +42,23 @@ class StreamingTest : ShouldSpec({ } withSparkStreaming(Duration(10), timeout = 1000) { - val resultsBroadcast = spark.broadcast(results) - - val rdd = sc.parallelize(input) - val queue = LinkedList(listOf(rdd)) + val (resultsBroadcast, queue) = withSpark(ssc) { + val resultsBroadcast = spark.broadcast(results) + val rdd = sc.parallelize(input) + resultsBroadcast X LinkedList(listOf(rdd)) + } val inputStream = ssc.queueStream(queue) inputStream.foreachRDD { rdd, _ -> - rdd.toDS().forEach { - it shouldBeIn input - resultsBroadcast.value.counter++ + withSpark(rdd) { + rdd.toDS().forEach { + it shouldBeIn input + resultsBroadcast.value.counter++ + } } } + } results.counter shouldBe input.size From 4a73acaf01889b4b891aa5a299e9687f42575844 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 1 Apr 2022 14:40:41 +0200 Subject: [PATCH 124/213] forgot qodana --- qodana.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qodana.yaml b/qodana.yaml index 54b82040..51272398 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -5,5 +5,5 @@ profile: exclude: - name: All paths: - - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples + - scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt \ No newline at end of file From d79d61a25c01e3247528b912d2a324caae8bd007 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 1 Apr 2022 17:01:22 +0200 Subject: [PATCH 125/213] qodana works locally, come on github --- qodana.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qodana.yaml b/qodana.yaml index 51272398..ff38d36f 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -6,4 +6,4 @@ exclude: - name: All paths: - scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples - - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt \ No newline at end of file + - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt From 2708a5c2054e07225c2b1a351fbf51ba66d7cb0b Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 1 Apr 2022 17:41:30 +0200 Subject: [PATCH 126/213] created temporary branch merging tuples with streaming. Will merge into normal "exploring-streaming" after vacation --- .../examples/KotlinDirectKafkaWordCount.kt | 8 +++++-- .../kotlinx/spark/api/SparkSession.kt | 21 ++++++++++------- .../jetbrains/kotlinx/spark/api/ApiTest.kt | 23 ------------------- .../kotlinx/spark/api/StreamingTest.kt | 14 ++++++----- 4 files changed, 27 insertions(+), 39 deletions(-) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt index eea40720..897a9176 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt @@ -30,8 +30,11 @@ import org.apache.spark.streaming.kafka010.ConsumerStrategies import org.apache.spark.streaming.kafka010.KafkaUtils import org.apache.spark.streaming.kafka010.LocationStrategies import org.jetbrains.kotlinx.spark.api.c +import org.jetbrains.kotlinx.spark.api.reduceByKey import org.jetbrains.kotlinx.spark.api.toTuple +import org.jetbrains.kotlinx.spark.api.tuples.* import org.jetbrains.kotlinx.spark.api.withSparkStreaming +import scala.Tuple2 import java.io.Serializable import java.util.regex.Pattern import kotlin.system.exitProcess @@ -56,6 +59,7 @@ import kotlin.system.exitProcess * consumer-group topic1,topic2 */ object KotlinDirectKafkaWordCount { + private val SPACE = Pattern.compile(" ") private const val DEFAULT_BROKER = "localhost:9092" @@ -102,8 +106,8 @@ object KotlinDirectKafkaWordCount { val lines: JavaDStream = messages.map { it.value() } val words: JavaDStream = lines.flatMap { it.split(SPACE).iterator() } - val wordCounts: JavaPairDStream = words - .mapToPair { c(it, 1).toTuple() } + val wordCounts: JavaDStream> = words + .map { it X 1 } .reduceByKey { a: Int, b: Int -> a + b } wordCounts.print() diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 85f43841..18d88fea 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -92,22 +92,26 @@ class KSparkStreamingSession(val ssc: JavaStreamingContext) { fun invokeRunAfterStart(): Unit = runAfterStart() + fun getSpark(sc: SparkConf): SparkSession = + SparkSession + .builder() + .config(sc) + .getOrCreate() - fun withSpark(sc: SparkConf, func: KSparkSession.() -> T): T { - val spark = SparkSession.builder().config(sc).getOrCreate() + fun withSpark(sc: SparkConf, func: KSparkSession.() -> T): T = + KSparkSession(getSpark(sc)).func() - return with(KSparkSession(spark), func) - } /** - * Helper function to enter Spark scope from [ssc] like + * Helper function to enter Spark scope from [sscForConf] like * ```kotlin - * ssc.withSpark { // this: KSparkSession + * withSpark(ssc) { // this: KSparkSession * * } * ``` */ - fun withSpark(ssc: JavaStreamingContext, func: KSparkSession.() -> T): T = withSpark(ssc.sparkContext().conf, func) + fun withSpark(sscForConf: JavaStreamingContext, func: KSparkSession.() -> T): T = + withSpark(sscForConf.sparkContext().conf, func) /** @@ -119,7 +123,8 @@ class KSparkStreamingSession(val ssc: JavaStreamingContext) { * } * ``` */ - fun withSpark(rdd: JavaRDDLike<*, *>, func: KSparkSession.() -> T): T = withSpark(rdd.context().conf, func) + fun withSpark(rddForConf: JavaRDDLike<*, *>, func: KSparkSession.() -> T): T = + withSpark(rddForConf.context().conf, func) } diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt index b4e08216..e95a65f7 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt @@ -20,32 +20,9 @@ package org.jetbrains.kotlinx.spark.api/*- import ch.tutteli.atrium.api.fluent.en_GB.* import ch.tutteli.atrium.api.verbs.expect import io.kotest.core.spec.style.ShouldSpec -import io.kotest.matchers.should import io.kotest.matchers.shouldBe -import org.apache.spark.api.java.JavaDoubleRDD -import org.apache.spark.api.java.JavaPairRDD -import org.apache.spark.api.java.JavaRDD -import org.apache.spark.api.java.JavaSparkContext -import org.apache.spark.rdd.RDD -import org.apache.spark.sql.Dataset -import org.apache.spark.sql.functions.* -import org.apache.spark.sql.streaming.GroupState -import org.apache.spark.sql.streaming.GroupStateTimeout -import org.apache.spark.sql.types.Decimal -import org.apache.spark.unsafe.types.CalendarInterval -import scala.Product -import scala.Tuple1 -import scala.Tuple2 -import scala.Tuple3 import scala.collection.Seq import java.io.Serializable -import java.math.BigDecimal -import java.sql.Date -import java.sql.Timestamp -import java.time.Duration -import java.time.Instant -import java.time.LocalDate -import java.time.Period import kotlin.collections.Iterator import scala.collection.Iterator as ScalaIterator import scala.collection.Map as ScalaMap diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 47d7c408..405181ce 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -42,16 +42,18 @@ class StreamingTest : ShouldSpec({ } withSparkStreaming(Duration(10), timeout = 1000) { - val (resultsBroadcast, queue) = withSpark(ssc) { - val resultsBroadcast = spark.broadcast(results) - val rdd = sc.parallelize(input) + val (resultsBroadcast, queue) = + withSpark(sscForConf = ssc) { + val resultsBroadcast = spark.broadcast(results) + val rdd = sc.parallelize(input) + + resultsBroadcast X LinkedList(listOf(rdd)) + } - resultsBroadcast X LinkedList(listOf(rdd)) - } val inputStream = ssc.queueStream(queue) inputStream.foreachRDD { rdd, _ -> - withSpark(rdd) { + withSpark(rddForConf = rdd) { rdd.toDS().forEach { it shouldBeIn input resultsBroadcast.value.counter++ From b4bb8ce237ad375de928fa21b203ccc3bcfb7690 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 11 Apr 2022 13:24:25 +0200 Subject: [PATCH 127/213] Update README.md Updated cached sample using tuples --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 8846e4c1..cd6266f3 100644 --- a/README.md +++ b/README.md @@ -122,13 +122,13 @@ To solve these problems we've added `withCached` function ```kotlin withSpark { dsOf(1, 2, 3, 4, 5) - .map { it to (it + 2) } + .map { tupleOf(it, it + 2) } .withCached { showDS() - filter { it.first % 2 == 0 }.showDS() + filter { it._1 % 2 == 0 }.showDS() } - .map { c(it.first, it.second, (it.first + it.second) * 2) } + .map { tupleOf(it._1, it._2, (it._1 + it._2) * 2) } .show() } ``` From d62e3afe115c943bacd386fba28d0a13af660582 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 11 Apr 2022 13:26:44 +0200 Subject: [PATCH 128/213] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index cd6266f3..bd86267f 100644 --- a/README.md +++ b/README.md @@ -87,7 +87,7 @@ val spark = SparkSession ```kotlin spark.toDS("a" to 1, "b" to 2) ``` -The example above produces `Dataset>`. +The example above produces `Dataset>`. While Kotlin Pairs and Triples are supported, Scala Tuples are reccomended for better support. ### Null safety There are several aliases in API, like `leftJoin`, `rightJoin` etc. These are null-safe by design. @@ -104,7 +104,7 @@ After work block ends, `spark.stop()` is called automatically. ```kotlin withSpark { dsOf(1, 2) - .map { it to it } + .map { it X it } // creates Tuple2 .show() } ``` From c70c0f3705385531c39fc837c05b625044e4f4d4 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 12 Apr 2022 17:23:58 +0200 Subject: [PATCH 129/213] working on testing, not yet finished --- .../KotlinRecoverableNetworkWordCount.kt | 32 +-- kotlin-spark-api/3.2/pom_2.12.xml | 27 +++ .../kotlinx/spark/api/SparkSession.kt | 140 +++++++----- .../kotlinx/spark/api/StreamingTest.kt | 208 ++++++++++++++++-- pom.xml | 1 + 5 files changed, 318 insertions(+), 90 deletions(-) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinRecoverableNetworkWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinRecoverableNetworkWordCount.kt index 10f57a28..7c4873f0 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinRecoverableNetworkWordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinRecoverableNetworkWordCount.kt @@ -28,8 +28,10 @@ import org.apache.spark.streaming.Durations import org.apache.spark.streaming.Time import org.apache.spark.util.LongAccumulator import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.* import scala.Tuple2 import java.io.File +import java.io.Serializable import java.nio.charset.Charset import java.util.regex.Pattern import kotlin.experimental.ExperimentalTypeInference @@ -174,7 +176,7 @@ object KotlinRecoverableNetworkWordCount { val words = lines.flatMap { it.split(SPACE).iterator() } val wordCounts3 = words - .map { Tuple2(it, 1) } + .map { t(it, 1) } .reduceByKey { a, b -> a + b } // in normal streaming context we can create a SparkSession from ssc: JavaStreamingContext @@ -183,6 +185,10 @@ object KotlinRecoverableNetworkWordCount { listOf(1, 2, 3).toDS().show() } + setRunAfterStart { + println("Context is created and started running!") + } + wordCounts3.foreachRDD { rdd, time: Time -> // but in foreachRDD we must obtain this conf from the RDD // like `rdd.context().conf` @@ -197,19 +203,17 @@ object KotlinRecoverableNetworkWordCount { val droppedWordsCounter = KotlinDroppedWordsCounter.getInstance(sc) // Use excludeList to drop words and use droppedWordsCounter to count them - val (counts, duration) = measureTimedValue { - rdd.filter { wordCount -> - if (excludeList.value().contains(wordCount._1)) { - droppedWordsCounter.add(wordCount._2.toLong()) - false - } else { - true - } - }.collect() - } - - - val output = "Counts at time $time $counts\n$duration" + val counts = rdd.filter { (word, count) -> + if (excludeList.value().contains(word)) { + droppedWordsCounter.add(count.toLong()) + false + } else { + true + } + }.collect() + + + val output = "Counts at time $time $counts" println(output) println("Dropped ${droppedWordsCounter.value()} word(s) totally") println("Appending to " + outputFile.absolutePath) diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index cf0a68c4..586a89cc 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -63,6 +63,25 @@ provided + + + + + + + + + + + + + + org.apache.hadoop + hadoop-client + ${hadoop.version} + provided + + io.kotest @@ -88,6 +107,14 @@ ${atrium.version} test + + org.apache.spark + spark-streaming_${scala.compat.version} + ${spark3.version} + tests + test + + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 18d88fea..465f5780 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -26,11 +26,14 @@ package org.jetbrains.kotlinx.spark.api + +import org.apache.hadoop.conf.Configuration import org.apache.spark.SparkConf import org.apache.spark.SparkContext import org.apache.spark.api.java.JavaRDDLike import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.broadcast.Broadcast +import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset import org.apache.spark.sql.SparkSession.Builder @@ -39,8 +42,9 @@ import org.apache.spark.streaming.Duration import org.apache.spark.streaming.Durations import org.apache.spark.streaming.api.java.JavaStreamingContext import org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR +import org.jetbrains.kotlinx.spark.api.tuples.* import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions -import scala.Tuple2 +import java.io.Serializable /** * This wrapper over [SparkSession] which provides several additional methods to create [org.apache.spark.sql.Dataset]. @@ -85,51 +89,58 @@ class KSparkSession(val spark: SparkSession) { /** * This wrapper over [SparkSession] and [JavaStreamingContext] provides several additional methods to create [org.apache.spark.sql.Dataset] */ -class KSparkStreamingSession(val ssc: JavaStreamingContext) { - - /** Can be overwritten to be run after the streaming session has started and before it's terminated. */ - var runAfterStart: KSparkStreamingSession.() -> Unit = {} +class KSparkStreamingSession(@Transient val ssc: JavaStreamingContext) : Serializable { + // Serializable and Transient to that [withSpark] works inside [foreachRDD] and other Spark functions that serialize - fun invokeRunAfterStart(): Unit = runAfterStart() + private var runAfterStart: KSparkStreamingSession.() -> Unit = {} - fun getSpark(sc: SparkConf): SparkSession = - SparkSession - .builder() - .config(sc) - .getOrCreate() + /** Will be run after the streaming session has started and before it's terminated. */ + fun setRunAfterStart(block: KSparkStreamingSession.() -> Unit) { + runAfterStart = block + } - fun withSpark(sc: SparkConf, func: KSparkSession.() -> T): T = - KSparkSession(getSpark(sc)).func() + internal fun invokeRunAfterStart(): Unit = runAfterStart() /** - * Helper function to enter Spark scope from [sscForConf] like + * Helper function to enter Spark scope from a provided like + * when using the `foreachRDD` function. * ```kotlin - * withSpark(ssc) { // this: KSparkSession + * withSpark(rdd) { // this: KSparkSession * * } * ``` */ - fun withSpark(sscForConf: JavaStreamingContext, func: KSparkSession.() -> T): T = - withSpark(sscForConf.sparkContext().conf, func) + fun withSpark(rddForConf: JavaRDDLike<*, *>, func: KSparkSession.() -> T): T = + withSpark(rddForConf.context().conf, func) + + + fun getSpark(sc: SparkConf): SparkSession = SparkSession + .builder() + .config(sc) + .getOrCreate() + + fun withSpark(sc: SparkConf, func: KSparkSession.() -> T): T = + KSparkSession(getSpark(sc)).func() /** - * Helper function to enter Spark scope from a provided like - * when using the `foreachRDD` function. + * Helper function to enter Spark scope from [sscForConf] like * ```kotlin - * withSpark(rdd) { // this: KSparkSession + * withSpark(ssc) { // this: KSparkSession * * } * ``` */ - fun withSpark(rddForConf: JavaRDDLike<*, *>, func: KSparkSession.() -> T): T = - withSpark(rddForConf.context().conf, func) + fun withSpark(sscForConf: JavaStreamingContext, func: KSparkSession.() -> T): T = + withSpark(sscForConf.sparkContext().conf, func) } + + /** * The entry point to programming Spark with the Dataset and DataFrame API. * @@ -239,52 +250,67 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func * recreated from the checkpoint data. If the data does not exist, then the provided factory * will be used to create a JavaStreamingContext. * - * @param batchDuration The time interval at which streaming data will be divided into batches. Defaults to 1 second. - * @param checkpointPath If checkpoint data exists in the provided `checkpointPath`, then StreamingContext will be - * recreated from the checkpoint data. If the data does not exist (or `null` is provided), then the streaming context will be built using - * the other provided parameters. - * @param props spark options, value types are runtime-checked for type-correctness - * @param master Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]" to - * run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster. By default, it - * tries to get the system value "spark.master", otherwise it uses "local[*]" - * @param appName Sets a name for the application, which will be shown in the Spark web UI. - * If no application name is set, a randomly generated name will be used. - * @param logLevel Control our logLevel. This overrides any user-defined log settings. - * @param timeout The time in milliseconds to wait for the stream to terminate without input. -1 by default, this means no timeout. - * @param func function which will be executed in context of [KSparkStreamingSession] (it means that `this` inside block will point to [KSparkStreamingSession]) + * @param batchDuration The time interval at which streaming data will be divided into batches. Defaults to 1 + * second. + * @param checkpointPath If checkpoint data exists in the provided `checkpointPath`, then StreamingContext will be + * recreated from the checkpoint data. If the data does not exist (or `null` is provided), + * then the streaming context will be built using the other provided parameters. + * @param hadoopConf Only used if [checkpointPath] is given. Hadoop configuration if necessary for reading from + * any HDFS compatible file system. + * @param createOnError Only used if [checkpointPath] is given. Whether to create a new JavaStreamingContext if + * there is an error in reading checkpoint data. + * @param props Spark options, value types are runtime-checked for type-correctness. + * @param master Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]" to + * run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster. + * By default, it tries to get the system value "spark.master", otherwise it uses "local[*]". + * @param appName Sets a name for the application, which will be shown in the Spark web UI. + * If no application name is set, a randomly generated name will be used. + * @param timeout The time in milliseconds to wait for the stream to terminate without input. -1 by default, + * this means no timeout. + * @param func Function which will be executed in context of [KSparkStreamingSession] (it means that + * `this` inside block will point to [KSparkStreamingSession]) */ @JvmOverloads -inline fun withSparkStreaming( +fun withSparkStreaming( batchDuration: Duration = Durations.seconds(1L), checkpointPath: String? = null, + hadoopConf: Configuration = SparkHadoopUtil.get().conf(), + createOnError: Boolean = false, props: Map = emptyMap(), master: String = SparkConf().get("spark.master", "local[*]"), appName: String = "Kotlin Spark Sample", timeout: Long = -1L, - crossinline func: KSparkStreamingSession.() -> Unit, + startStreamingContext: Boolean = true, + func: KSparkStreamingSession.() -> Unit, ) { if (checkpointPath != null) { - var kSparkStreamingSession: KSparkStreamingSession? = null - val ssc = JavaStreamingContext.getOrCreate(checkpointPath) { - val sc = SparkConf() - .setAppName(appName) - .setMaster(master) - .setAll( - props - .map { (key, value) -> Tuple2(key, value.toString()) } - .asScalaIterable() - ) - val ssc = JavaStreamingContext(sc, batchDuration) - ssc.checkpoint(checkpointPath) - - kSparkStreamingSession = KSparkStreamingSession(ssc) - - func(kSparkStreamingSession!!) + var kSparkStreamingSession: KSparkStreamingSession? = null - ssc - } - ssc.start() + val ssc = JavaStreamingContext.getOrCreate( + /* checkpointPath = */ checkpointPath, + /* creatingFunc = */ { + val sc = SparkConf() + .setAppName(appName) + .setMaster(master) + .setAll( + props + .map { (key, value) -> key X value.toString() } + .asScalaIterable() + ) + + val ssc = JavaStreamingContext(sc, batchDuration) + ssc.checkpoint(checkpointPath) + + kSparkStreamingSession = KSparkStreamingSession(ssc) + func(kSparkStreamingSession!!) + + ssc + }, + /* hadoopConf = */ hadoopConf, + /* createOnError = */ createOnError + ) + if (startStreamingContext) ssc.start() kSparkStreamingSession?.invokeRunAfterStart() ssc.awaitTerminationOrTimeout(timeout) ssc.stop() @@ -294,7 +320,7 @@ inline fun withSparkStreaming( .setMaster(master) .setAll( props - .map { (key, value) -> Tuple2(key, value.toString()) } + .map { (key, value) -> key X value.toString() } .asScalaIterable() ) val ssc = JavaStreamingContext(sc, batchDuration) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 405181ce..3f3515ec 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -22,12 +22,22 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.shouldBe -import org.apache.spark.api.java.JavaRDD -import org.apache.spark.streaming.Duration +import org.apache.commons.io.FileUtils +import org.apache.hadoop.fs.FileSystem +import org.apache.spark.SparkConf +import org.apache.spark.api.java.JavaSparkContext +import org.apache.spark.streaming.* +import org.apache.spark.streaming.api.java.JavaStreamingContext +import org.apache.spark.util.Utils +import org.jetbrains.kotlinx.spark.api.tuples.X +import org.jetbrains.kotlinx.spark.api.tuples.component1 +import org.jetbrains.kotlinx.spark.api.tuples.component2 +import java.io.File import java.io.Serializable -import org.jetbrains.kotlinx.spark.api.* -import org.jetbrains.kotlinx.spark.api.tuples.* -import java.util.LinkedList +import java.net.ConnectException +import java.nio.charset.StandardCharsets +import java.util.* +import java.util.concurrent.atomic.AtomicBoolean class StreamingTest : ShouldSpec({ @@ -35,35 +45,195 @@ class StreamingTest : ShouldSpec({ should("stream") { val input = listOf("aaa", "bbb", "aaa", "ccc") - - val results = object : Serializable { - @Volatile - var counter = 0 - } + val counter = Counter(0) withSparkStreaming(Duration(10), timeout = 1000) { - val (resultsBroadcast, queue) = - withSpark(sscForConf = ssc) { - val resultsBroadcast = spark.broadcast(results) - val rdd = sc.parallelize(input) - resultsBroadcast X LinkedList(listOf(rdd)) - } + val (counterBroadcast, queue) = withSpark(ssc) { + spark.broadcast(counter) X LinkedList(listOf(sc.parallelize(input))) + } val inputStream = ssc.queueStream(queue) inputStream.foreachRDD { rdd, _ -> - withSpark(rddForConf = rdd) { + withSpark(rdd) { rdd.toDS().forEach { it shouldBeIn input - resultsBroadcast.value.counter++ + counterBroadcast.value.value++ } } } + } + + counter.value shouldBe input.size + + } + +// should("checkpoint") { +// +// val emptyDir: File = Files.createTempDir() +// emptyDir.deleteOnExit() +// val contextSuite = StreamingContextSuite() +// val corruptedCheckpointDir: String = contextSuite.createCorruptedCheckpoint() +// val checkpointDir: String = contextSuite.createValidCheckpoint() +// +// // Function to create JavaStreamingContext without any output operations +// // (used to detect the new context) +// +// // Function to create JavaStreamingContext without any output operations +// // (used to detect the new context) +// val newContextCreated = AtomicBoolean(false) +// val creatingFunc: Function0 = { +// newContextCreated.set(true) +// JavaStreamingContext(conf, Seconds.apply(1)) +// } +// +// newContextCreated.set(false) +// ssc = JavaStreamingContext.getOrCreate(emptyDir.absolutePath, creatingFunc) +// Assert.assertTrue("new context not created", newContextCreated.get()) +// ssc.stop() +// +// newContextCreated.set(false) +// ssc = JavaStreamingContext.getOrCreate( +// corruptedCheckpointDir, creatingFunc, +// Configuration(), true +// ) +// Assert.assertTrue("new context not created", newContextCreated.get()) +// ssc.stop() +// +// newContextCreated.set(false) +// ssc = JavaStreamingContext.getOrCreate( +// checkpointDir, creatingFunc, +// Configuration() +// ) +// Assert.assertTrue("old context not recovered", !newContextCreated.get()) +// ssc.stop() +// +// newContextCreated.set(false) +// val sc = JavaSparkContext(conf) +// ssc = JavaStreamingContext.getOrCreate( +// checkpointDir, creatingFunc, +// Configuration() +// ) +// Assert.assertTrue("old context not recovered", !newContextCreated.get()) +// ssc.stop() +// } + + should("Work with checkpointpath") { + + + val conf = SparkConf() + .setMaster("local[*]") + .setAppName("Kotlin Spark Sample") + .set("newContext", "true") + + val emptyDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark") + emptyDir.deleteOnExit() + + val batchDuration = Durations.seconds(1) + val timeout = Durations.seconds(1).milliseconds() + + val corruptedCheckpointDir = createCorruptedCheckpoint() + val checkpointDir = createValidCheckpoint(conf, batchDuration) + + val newContextCreated = AtomicBoolean(false) + + val creatingFun: KSparkStreamingSession.() -> Unit = { +// if (conf == null) conf = ssc.sparkContext().conf + + println("created new context") + newContextCreated.set(true) +// setRunAfterStart { +// ssc.stop() +// } } - results.counter shouldBe input.size + + + newContextCreated.set(false) + withSparkStreaming( + batchDuration = batchDuration, + checkpointPath = emptyDir.absolutePath, + props = mapOf("newContext" to true), + timeout = timeout, + func = creatingFun, + startStreamingContext = false, + ) + newContextCreated.get() shouldBe true + + + + newContextCreated.set(false) + withSparkStreaming( + batchDuration = batchDuration, + checkpointPath = corruptedCheckpointDir, + props = mapOf("newContext" to true), + timeout = timeout, + func = creatingFun, + startStreamingContext = false, + createOnError = true, + ) + newContextCreated.get() shouldBe true + + newContextCreated.set(false) + withSparkStreaming( + batchDuration = batchDuration, + checkpointPath = checkpointDir, + props = mapOf("newContext" to true), + timeout = timeout, + func = creatingFun, + startStreamingContext = false, + ) + newContextCreated.get() shouldBe false + + + newContextCreated.set(false) +// val sc = JavaSparkContext( +// SparkConf() +// .setAppName("test") +// .setMaster("local[*]") +// ) + withSparkStreaming( + batchDuration = batchDuration, + checkpointPath = checkpointDir, + props = mapOf("newContext" to true), + timeout = timeout, + func = creatingFun, + startStreamingContext = false, + ) + newContextCreated.get() shouldBe false + // todo do something with checkpoint again, check that it doesn't create new instance + + + // TODO clean up checkpoint } } }) + +private fun createCorruptedCheckpoint(): String { + val checkpointDirectory = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark").absolutePath + val fakeCheckpointFile = Checkpoint.checkpointFile(checkpointDirectory, Time(1000)) + FileUtils.write(File(fakeCheckpointFile.toString()), "blablabla", StandardCharsets.UTF_8) + assert(Checkpoint.getCheckpointFiles(checkpointDirectory, (null as FileSystem?).asOption()).nonEmpty()) + return checkpointDirectory +} + +private fun createValidCheckpoint(conf: SparkConf, batchDuration: Duration): String { + val testDirectory = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark").absolutePath + val checkpointDirectory = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark").absolutePath + val ssc = JavaStreamingContext( + conf.clone().set("someKey", "someValue"), + batchDuration, + ) + + ssc.checkpoint(checkpointDirectory) + ssc.textFileStream(testDirectory).foreachRDD { rdd, _ -> rdd.count() } + ssc.start() + ssc.stop() + + return checkpointDirectory +} + +class Counter(@Volatile var value: Int) : Serializable + diff --git a/pom.xml b/pom.xml index 0df3adac..5d76c6df 100644 --- a/pom.xml +++ b/pom.xml @@ -16,6 +16,7 @@ 4.6.0 1.0.1 3.2.1 + 3.3.1 2.10.0 From 04b5ba6d83ce5b76a4df0c387a21e1a002549676 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 13 Apr 2022 13:47:06 +0200 Subject: [PATCH 130/213] rewrote product encoding to support scala case classes --- .../org/jetbrains/kotlinx/spark/api/Encoding.kt | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt index fb3ac0a4..eafd460f 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt @@ -271,8 +271,18 @@ fun schema(type: KType, map: Map = mapOf()): DataType { KDataTypeWrapper(structType, klass.java, true) } klass.isSubclassOf(Product::class) -> { - val params = type.arguments.mapIndexed { i, it -> - "_${i + 1}" to it.type!! + + // create map from T1, T2 to Int, String etc. + val typeMap = klass.constructors.first().typeParameters.map { it.name } + .zip( + type.arguments.map { it.type } + ) + .toMap() + + // collect params by name and actual type + val params = klass.constructors.first().parameters.map { + val typeName = it.type.toString().replace("!", "") + it.name to (typeMap[typeName] ?: it.type) } val structType = DataTypes.createStructType( From 02d50ccf1d869b43339b92c9e6bf1f41a76aa31c Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 13 Apr 2022 16:16:17 +0200 Subject: [PATCH 131/213] updated to kotlin 1.6.20, refactored withSparkStreaming and fixed tests --- .../kotlinx/spark/api/SparkSession.kt | 148 +++++++++--------- .../kotlinx/spark/api/StreamingTest.kt | 146 +++++------------ pom.xml | 2 +- 3 files changed, 110 insertions(+), 186 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 465f5780..1e6e5025 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -90,11 +90,12 @@ class KSparkSession(val spark: SparkSession) { * This wrapper over [SparkSession] and [JavaStreamingContext] provides several additional methods to create [org.apache.spark.sql.Dataset] */ class KSparkStreamingSession(@Transient val ssc: JavaStreamingContext) : Serializable { - // Serializable and Transient to that [withSpark] works inside [foreachRDD] and other Spark functions that serialize + // Serializable and Transient so that [withSpark] works inside [foreachRDD] and other Spark functions that serialize private var runAfterStart: KSparkStreamingSession.() -> Unit = {} - /** Will be run after the streaming session has started and before it's terminated. */ + /** [block] will be run after the streaming session has started from a new context (so not when loading from a checkpoint) + * and before it's terminated. */ fun setRunAfterStart(block: KSparkStreamingSession.() -> Unit) { runAfterStart = block } @@ -102,6 +103,30 @@ class KSparkStreamingSession(@Transient val ssc: JavaStreamingContext) : Seriali internal fun invokeRunAfterStart(): Unit = runAfterStart() + /** Creates new spark session from given [sc]. */ + fun getSpark(sc: SparkConf): SparkSession = + SparkSession + .builder() + .config(sc) + .getOrCreate() + + /** Creates new spark session from context of given JavaRDD, [rddForConf]. */ + fun getSpark(rddForConf: JavaRDDLike<*, *>): SparkSession = getSpark(rddForConf.context().conf) + + /** Creates new spark session from context of given JavaStreamingContext, [sscForConf] */ + fun getSpark(sscForConf: JavaStreamingContext): SparkSession = getSpark(sscForConf.sparkContext().conf) + + /** + * Helper function to enter Spark scope from [sc] like + * ```kotlin + * withSpark(sc) { // this: KSparkSession + * + * } + * ``` + */ + fun withSpark(sc: SparkConf, func: KSparkSession.() -> T): T = + KSparkSession(getSpark(sc)).func() + /** * Helper function to enter Spark scope from a provided like * when using the `foreachRDD` function. @@ -112,17 +137,7 @@ class KSparkStreamingSession(@Transient val ssc: JavaStreamingContext) : Seriali * ``` */ fun withSpark(rddForConf: JavaRDDLike<*, *>, func: KSparkSession.() -> T): T = - withSpark(rddForConf.context().conf, func) - - - fun getSpark(sc: SparkConf): SparkSession = SparkSession - .builder() - .config(sc) - .getOrCreate() - - fun withSpark(sc: SparkConf, func: KSparkSession.() -> T): T = - KSparkSession(getSpark(sc)).func() - + KSparkSession(getSpark(rddForConf)).func() /** * Helper function to enter Spark scope from [sscForConf] like @@ -133,14 +148,10 @@ class KSparkStreamingSession(@Transient val ssc: JavaStreamingContext) : Seriali * ``` */ fun withSpark(sscForConf: JavaStreamingContext, func: KSparkSession.() -> T): T = - withSpark(sscForConf.sparkContext().conf, func) - - + KSparkSession(getSpark(sscForConf)).func() } - - /** * The entry point to programming Spark with the Dataset and DataFrame API. * @@ -250,25 +261,26 @@ inline fun withSpark(sparkConf: SparkConf, logLevel: SparkLogLevel = ERROR, func * recreated from the checkpoint data. If the data does not exist, then the provided factory * will be used to create a JavaStreamingContext. * - * @param batchDuration The time interval at which streaming data will be divided into batches. Defaults to 1 - * second. - * @param checkpointPath If checkpoint data exists in the provided `checkpointPath`, then StreamingContext will be - * recreated from the checkpoint data. If the data does not exist (or `null` is provided), - * then the streaming context will be built using the other provided parameters. - * @param hadoopConf Only used if [checkpointPath] is given. Hadoop configuration if necessary for reading from - * any HDFS compatible file system. - * @param createOnError Only used if [checkpointPath] is given. Whether to create a new JavaStreamingContext if - * there is an error in reading checkpoint data. - * @param props Spark options, value types are runtime-checked for type-correctness. - * @param master Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]" to - * run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster. - * By default, it tries to get the system value "spark.master", otherwise it uses "local[*]". - * @param appName Sets a name for the application, which will be shown in the Spark web UI. - * If no application name is set, a randomly generated name will be used. - * @param timeout The time in milliseconds to wait for the stream to terminate without input. -1 by default, - * this means no timeout. - * @param func Function which will be executed in context of [KSparkStreamingSession] (it means that - * `this` inside block will point to [KSparkStreamingSession]) + * @param batchDuration The time interval at which streaming data will be divided into batches. Defaults to 1 + * second. + * @param checkpointPath If checkpoint data exists in the provided `checkpointPath`, then StreamingContext will be + * recreated from the checkpoint data. If the data does not exist (or `null` is provided), + * then the streaming context will be built using the other provided parameters. + * @param hadoopConf Only used if [checkpointPath] is given. Hadoop configuration if necessary for reading from + * any HDFS compatible file system. + * @param createOnError Only used if [checkpointPath] is given. Whether to create a new JavaStreamingContext if + * there is an error in reading checkpoint data. + * @param props Spark options, value types are runtime-checked for type-correctness. + * @param master Sets the Spark master URL to connect to, such as "local" to run locally, "local[4]" to + * run locally with 4 cores, or "spark://master:7077" to run on a Spark standalone cluster. + * By default, it tries to get the system value "spark.master", otherwise it uses "local[*]". + * @param appName Sets a name for the application, which will be shown in the Spark web UI. + * If no application name is set, a randomly generated name will be used. + * @param timeout The time in milliseconds to wait for the stream to terminate without input. -1 by default, + * this means no timeout. + * @param startStreamingContext Defaults to `true`. If set to `false`, then the streaming context will not be started. + * @param func Function which will be executed in context of [KSparkStreamingSession] (it means that + * `this` inside block will point to [KSparkStreamingSession]) */ @JvmOverloads fun withSparkStreaming( @@ -283,38 +295,11 @@ fun withSparkStreaming( startStreamingContext: Boolean = true, func: KSparkStreamingSession.() -> Unit, ) { - if (checkpointPath != null) { - - var kSparkStreamingSession: KSparkStreamingSession? = null - - val ssc = JavaStreamingContext.getOrCreate( - /* checkpointPath = */ checkpointPath, - /* creatingFunc = */ { - val sc = SparkConf() - .setAppName(appName) - .setMaster(master) - .setAll( - props - .map { (key, value) -> key X value.toString() } - .asScalaIterable() - ) - - val ssc = JavaStreamingContext(sc, batchDuration) - ssc.checkpoint(checkpointPath) - - kSparkStreamingSession = KSparkStreamingSession(ssc) - func(kSparkStreamingSession!!) - - ssc - }, - /* hadoopConf = */ hadoopConf, - /* createOnError = */ createOnError - ) - if (startStreamingContext) ssc.start() - kSparkStreamingSession?.invokeRunAfterStart() - ssc.awaitTerminationOrTimeout(timeout) - ssc.stop() - } else { + + // will only be set when a new context is created + var kSparkStreamingSession: KSparkStreamingSession? = null + + val creatingFunc = { val sc = SparkConf() .setAppName(appName) .setMaster(master) @@ -323,16 +308,29 @@ fun withSparkStreaming( .map { (key, value) -> key X value.toString() } .asScalaIterable() ) + val ssc = JavaStreamingContext(sc, batchDuration) - val kSparkStreamingSession = KSparkStreamingSession(ssc) + ssc.checkpoint(checkpointPath) - func(kSparkStreamingSession) - ssc.start() - kSparkStreamingSession.invokeRunAfterStart() + kSparkStreamingSession = KSparkStreamingSession(ssc) + func(kSparkStreamingSession!!) + + ssc + } + + val ssc: JavaStreamingContext = when { + checkpointPath != null -> + JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) - ssc.awaitTerminationOrTimeout(timeout) - ssc.stop() + else -> creatingFunc() + } + + if (startStreamingContext) { + ssc.start() + kSparkStreamingSession?.invokeRunAfterStart() } + ssc.awaitTerminationOrTimeout(timeout) + ssc.stop() } diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 3f3515ec..686f414f 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -19,12 +19,16 @@ */ package org.jetbrains.kotlinx.spark.api +import io.kotest.assertions.throwables.shouldThrow +import io.kotest.assertions.timing.eventually import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.shouldBe +import kotlinx.coroutines.runBlocking import org.apache.commons.io.FileUtils import org.apache.hadoop.fs.FileSystem import org.apache.spark.SparkConf +import org.apache.spark.SparkException import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.streaming.* import org.apache.spark.streaming.api.java.JavaStreamingContext @@ -32,12 +36,15 @@ import org.apache.spark.util.Utils import org.jetbrains.kotlinx.spark.api.tuples.X import org.jetbrains.kotlinx.spark.api.tuples.component1 import org.jetbrains.kotlinx.spark.api.tuples.component2 +import scala.Option import java.io.File import java.io.Serializable import java.net.ConnectException import java.nio.charset.StandardCharsets import java.util.* import java.util.concurrent.atomic.AtomicBoolean +import kotlin.time.Duration.Companion.seconds +import kotlin.time.ExperimentalTime class StreamingTest : ShouldSpec({ @@ -69,88 +76,30 @@ class StreamingTest : ShouldSpec({ } -// should("checkpoint") { -// -// val emptyDir: File = Files.createTempDir() -// emptyDir.deleteOnExit() -// val contextSuite = StreamingContextSuite() -// val corruptedCheckpointDir: String = contextSuite.createCorruptedCheckpoint() -// val checkpointDir: String = contextSuite.createValidCheckpoint() -// -// // Function to create JavaStreamingContext without any output operations -// // (used to detect the new context) -// -// // Function to create JavaStreamingContext without any output operations -// // (used to detect the new context) -// val newContextCreated = AtomicBoolean(false) -// val creatingFunc: Function0 = { -// newContextCreated.set(true) -// JavaStreamingContext(conf, Seconds.apply(1)) -// } -// -// newContextCreated.set(false) -// ssc = JavaStreamingContext.getOrCreate(emptyDir.absolutePath, creatingFunc) -// Assert.assertTrue("new context not created", newContextCreated.get()) -// ssc.stop() -// -// newContextCreated.set(false) -// ssc = JavaStreamingContext.getOrCreate( -// corruptedCheckpointDir, creatingFunc, -// Configuration(), true -// ) -// Assert.assertTrue("new context not created", newContextCreated.get()) -// ssc.stop() -// -// newContextCreated.set(false) -// ssc = JavaStreamingContext.getOrCreate( -// checkpointDir, creatingFunc, -// Configuration() -// ) -// Assert.assertTrue("old context not recovered", !newContextCreated.get()) -// ssc.stop() -// -// newContextCreated.set(false) -// val sc = JavaSparkContext(conf) -// ssc = JavaStreamingContext.getOrCreate( -// checkpointDir, creatingFunc, -// Configuration() -// ) -// Assert.assertTrue("old context not recovered", !newContextCreated.get()) -// ssc.stop() -// } - should("Work with checkpointpath") { - - - val conf = SparkConf() - .setMaster("local[*]") - .setAppName("Kotlin Spark Sample") - .set("newContext", "true") - - val emptyDir = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark") + val emptyDir = createTempDir() emptyDir.deleteOnExit() val batchDuration = Durations.seconds(1) val timeout = Durations.seconds(1).milliseconds() + val testDirectory = createTempDir() + testDirectory.deleteOnExit() + val corruptedCheckpointDir = createCorruptedCheckpoint() - val checkpointDir = createValidCheckpoint(conf, batchDuration) val newContextCreated = AtomicBoolean(false) val creatingFun: KSparkStreamingSession.() -> Unit = { -// if (conf == null) conf = ssc.sparkContext().conf - println("created new context") newContextCreated.set(true) -// setRunAfterStart { -// ssc.stop() -// } - } - + // closing statement + ssc.textFileStream(testDirectory.absolutePath).foreachRDD { rdd, _ -> rdd.count() } + } + // fill emptyDir with checkpoint newContextCreated.set(false) withSparkStreaming( batchDuration = batchDuration, @@ -158,82 +107,59 @@ class StreamingTest : ShouldSpec({ props = mapOf("newContext" to true), timeout = timeout, func = creatingFun, - startStreamingContext = false, ) newContextCreated.get() shouldBe true - - + // check that creatingFun isn't executed when checkpoint is present newContextCreated.set(false) withSparkStreaming( batchDuration = batchDuration, - checkpointPath = corruptedCheckpointDir, + checkpointPath = emptyDir.absolutePath, props = mapOf("newContext" to true), timeout = timeout, func = creatingFun, - startStreamingContext = false, - createOnError = true, ) - newContextCreated.get() shouldBe true + newContextCreated.get() shouldBe false + // check that creatingFun is not executed when createOnError = false using corrupted checkpoint newContextCreated.set(false) - withSparkStreaming( - batchDuration = batchDuration, - checkpointPath = checkpointDir, - props = mapOf("newContext" to true), - timeout = timeout, - func = creatingFun, - startStreamingContext = false, - ) + shouldThrow { + withSparkStreaming( + batchDuration = batchDuration, + checkpointPath = corruptedCheckpointDir, + props = mapOf("newContext" to true), + timeout = timeout, + func = creatingFun, + createOnError = false, + ) + } newContextCreated.get() shouldBe false - + // check that creatingFun is executed when createOnError = true using corrupted checkpoint newContextCreated.set(false) -// val sc = JavaSparkContext( -// SparkConf() -// .setAppName("test") -// .setMaster("local[*]") -// ) withSparkStreaming( batchDuration = batchDuration, - checkpointPath = checkpointDir, + checkpointPath = corruptedCheckpointDir, props = mapOf("newContext" to true), timeout = timeout, func = creatingFun, - startStreamingContext = false, + createOnError = true, ) - newContextCreated.get() shouldBe false - // todo do something with checkpoint again, check that it doesn't create new instance - - - // TODO clean up checkpoint + newContextCreated.get() shouldBe true } } }) +private fun createTempDir() = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark") + private fun createCorruptedCheckpoint(): String { - val checkpointDirectory = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark").absolutePath + val checkpointDirectory = createTempDir().absolutePath val fakeCheckpointFile = Checkpoint.checkpointFile(checkpointDirectory, Time(1000)) FileUtils.write(File(fakeCheckpointFile.toString()), "blablabla", StandardCharsets.UTF_8) assert(Checkpoint.getCheckpointFiles(checkpointDirectory, (null as FileSystem?).asOption()).nonEmpty()) return checkpointDirectory } -private fun createValidCheckpoint(conf: SparkConf, batchDuration: Duration): String { - val testDirectory = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark").absolutePath - val checkpointDirectory = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark").absolutePath - val ssc = JavaStreamingContext( - conf.clone().set("someKey", "someValue"), - batchDuration, - ) - - ssc.checkpoint(checkpointDirectory) - ssc.textFileStream(testDirectory).foreachRDD { rdd, _ -> rdd.count() } - ssc.start() - ssc.stop() - - return checkpointDirectory -} class Counter(@Volatile var value: Int) : Serializable diff --git a/pom.xml b/pom.xml index 5d76c6df..acee0ac5 100644 --- a/pom.xml +++ b/pom.xml @@ -10,7 +10,7 @@ pom - 1.5.30 + 1.6.20 1.6.10 0.16.0 4.6.0 From 219b949dba1fd498adacab5a60752c23bb896493 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 14 Apr 2022 13:03:40 +0200 Subject: [PATCH 132/213] added conversions for Option(al), State. Added DataStreamWriter forEachBatch helper added KotlinStatefulNetworkCount.kt cleaning up --- examples/pom-3.2_2.12.xml | 8 ++ .../KotlinDirectKafkaWordCount.kt | 7 +- .../KotlinRecoverableNetworkWordCount.kt | 20 ++--- .../streaming/KotlinStatefulNetworkCount.kt | 88 +++++++++++++++++++ .../examples/{ => streaming}/Streaming.kt | 10 ++- kotlin-spark-api/3.2/pom_2.12.xml | 8 ++ .../kotlinx/spark/api/Conversions.kt | 24 ++++- .../kotlinx/spark/api/DataStreamWriter.kt | 23 +++++ .../kotlinx/spark/api/SparkSession.kt | 2 +- .../{Streaming.kt => StreamingKeyValues.kt} | 47 ++-------- .../kotlinx/spark/api/StreamingTest.kt | 20 +---- 11 files changed, 176 insertions(+), 81 deletions(-) rename examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/{ => streaming}/KotlinDirectKafkaWordCount.kt (95%) rename examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/{ => streaming}/KotlinRecoverableNetworkWordCount.kt (95%) create mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt rename examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/{ => streaming}/Streaming.kt (88%) create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt rename kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/{Streaming.kt => StreamingKeyValues.kt} (93%) diff --git a/examples/pom-3.2_2.12.xml b/examples/pom-3.2_2.12.xml index 5f214b69..58d9856c 100644 --- a/examples/pom-3.2_2.12.xml +++ b/examples/pom-3.2_2.12.xml @@ -90,6 +90,14 @@ true + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinDirectKafkaWordCount.kt similarity index 95% rename from examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt rename to examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinDirectKafkaWordCount.kt index 897a9176..476815e2 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinDirectKafkaWordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinDirectKafkaWordCount.kt @@ -17,7 +17,7 @@ * limitations under the License. * =LICENSEEND= */ -package org.jetbrains.kotlinx.spark.examples +package org.jetbrains.kotlinx.spark.examples.streaming import org.apache.kafka.clients.consumer.ConsumerConfig.* import org.apache.kafka.clients.consumer.ConsumerRecord @@ -25,13 +25,10 @@ import org.apache.kafka.common.serialization.StringDeserializer import org.apache.spark.streaming.Durations import org.apache.spark.streaming.api.java.JavaDStream import org.apache.spark.streaming.api.java.JavaInputDStream -import org.apache.spark.streaming.api.java.JavaPairDStream import org.apache.spark.streaming.kafka010.ConsumerStrategies import org.apache.spark.streaming.kafka010.KafkaUtils import org.apache.spark.streaming.kafka010.LocationStrategies -import org.jetbrains.kotlinx.spark.api.c import org.jetbrains.kotlinx.spark.api.reduceByKey -import org.jetbrains.kotlinx.spark.api.toTuple import org.jetbrains.kotlinx.spark.api.tuples.* import org.jetbrains.kotlinx.spark.api.withSparkStreaming import scala.Tuple2 @@ -84,7 +81,7 @@ object KotlinDirectKafkaWordCount { val topics: String = args.getOrElse(2) { DEFAULT_TOPIC } // Create context with a 2 seconds batch interval - withSparkStreaming(batchDuration = Durations.seconds(2), appName = "JavaDirectKafkaWordCount") { + withSparkStreaming(batchDuration = Durations.seconds(2), appName = "KotlinDirectKafkaWordCount") { val topicsSet: Set = topics.split(',').toSet() diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinRecoverableNetworkWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinRecoverableNetworkWordCount.kt similarity index 95% rename from examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinRecoverableNetworkWordCount.kt rename to examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinRecoverableNetworkWordCount.kt index 7c4873f0..f25571b0 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/KotlinRecoverableNetworkWordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinRecoverableNetworkWordCount.kt @@ -19,7 +19,7 @@ */ @file:OptIn(ExperimentalTime::class) -package org.jetbrains.kotlinx.spark.examples +package org.jetbrains.kotlinx.spark.examples.streaming import com.google.common.io.Files import org.apache.spark.api.java.JavaSparkContext @@ -29,15 +29,11 @@ import org.apache.spark.streaming.Time import org.apache.spark.util.LongAccumulator import org.jetbrains.kotlinx.spark.api.* import org.jetbrains.kotlinx.spark.api.tuples.* -import scala.Tuple2 import java.io.File -import java.io.Serializable import java.nio.charset.Charset import java.util.regex.Pattern -import kotlin.experimental.ExperimentalTypeInference import kotlin.system.exitProcess import kotlin.time.ExperimentalTime -import kotlin.time.measureTimedValue /** @@ -77,6 +73,8 @@ internal object KotlinDroppedWordsCounter { } /** + * Src: https://github.com/apache/spark/blob/master/examples/src/main/java/org/apache/spark/examples/streaming/JavaRecoverableNetworkWordCount.java + * * Counts words in text encoded with UTF8 received from the network every second. This example also * shows how to use lazily instantiated singleton instances for Accumulator and Broadcast so that * they can be registered on driver failures. @@ -151,9 +149,6 @@ object KotlinRecoverableNetworkWordCount { } } - - @OptIn(ExperimentalTypeInference::class) - @Suppress("UnstableApiUsage") private fun KSparkStreamingSession.createContext( ip: String, port: Int, @@ -168,14 +163,11 @@ object KotlinRecoverableNetworkWordCount { createNewFile() } - // Create a socket stream on target ip:port and count the // words in input stream of \n delimited text (e.g. generated by 'nc') val lines = ssc.socketTextStream(ip, port) - val words = lines.flatMap { it.split(SPACE).iterator() } - - val wordCounts3 = words + val wordCounts = words .map { t(it, 1) } .reduceByKey { a, b -> a + b } @@ -189,7 +181,7 @@ object KotlinRecoverableNetworkWordCount { println("Context is created and started running!") } - wordCounts3.foreachRDD { rdd, time: Time -> + wordCounts.foreachRDD { rdd, time: Time -> // but in foreachRDD we must obtain this conf from the RDD // like `rdd.context().conf` withSpark(rdd) { @@ -228,6 +220,4 @@ object KotlinRecoverableNetworkWordCount { } } } - - } diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt new file mode 100644 index 00000000..e247a873 --- /dev/null +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt @@ -0,0 +1,88 @@ +package org.jetbrains.kotlinx.spark.examples.streaming + +import org.apache.spark.SparkConf +import org.apache.spark.api.java.Optional +import org.apache.spark.api.java.StorageLevels +import org.apache.spark.api.java.function.Function3 +import org.apache.spark.streaming.Durations +import org.apache.spark.streaming.State +import org.apache.spark.streaming.StateSpec +import org.apache.spark.streaming.api.java.* +import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.tuples.X +import org.jetbrains.kotlinx.spark.api.tuples.t +import scala.Tuple2 +import java.util.* +import java.util.regex.Pattern +import kotlin.system.exitProcess + + +/** + * Src: https://github.com/apache/spark/blob/master/examples/src/main/java/org/apache/spark/examples/streaming/JavaStatefulNetworkWordCount.java + * + * Counts words cumulatively in UTF8 encoded, '\n' delimited text received from the network every + * second starting with initial value of word count. + * Usage: JavaStatefulNetworkWordCount + * and describe the TCP server that Spark Streaming would connect to receive + * data. + * + * + * To run this on your local machine, you need to first run a Netcat server + * `$ nc -lk 9999` + * and then run the example + * `$ bin/run-example + * org.apache.spark.examples.streaming.JavaStatefulNetworkWordCount localhost 9999` */ +object KotlinStatefulNetworkCount { + + private val SPACE = Pattern.compile(" ") + + private const val DEFAULT_HOSTNAME = "localhost" + private const val DEFAULT_PORT = "9999" + + @Throws(Exception::class) + @JvmStatic + fun main(args: Array) { + if (args.size < 2 && args.isNotEmpty()) { + System.err.println("Usage: JavaStatefulNetworkWordCount ") + exitProcess(1) + } + + // Create the context with a 1 second batch size + withSparkStreaming( + batchDuration = Durations.seconds(1), + checkpointPath = ".", + appName = "JavaStatefulNetworkWordCount", + ) { + + // Initial state RDD input to mapWithState + val tuples = listOf("hello" X 1, "world" X 1) + val initialRDD = ssc.sparkContext().parallelize(tuples) + + val lines = ssc.socketTextStream( + args.getOrElse(0) { DEFAULT_HOSTNAME }, + args.getOrElse(1) { DEFAULT_PORT }.toInt(), + StorageLevels.MEMORY_AND_DISK_SER_2, + ) + val words = lines.flatMap { it.split(SPACE).iterator() } + + val wordsDstream = words.map { it X 1 } + + // Update the cumulative count function + val mappingFunc = { word: String, one: Optional, state: State -> + val sum = one.getOrElse(0) + state.getOrElse(0) + val output = word X sum + state.update(sum) + output + } + + // DStream made of get cumulative counts that get updated in every batch + val stateDstream = wordsDstream.mapWithState( + StateSpec + .function(mappingFunc) + .initialState(initialRDD.toPairRDD()) + ) + + stateDstream.print() + } + } +} \ No newline at end of file diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt similarity index 88% rename from examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt rename to examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt index 07e06af9..e0befd04 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Streaming.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt @@ -17,7 +17,7 @@ * limitations under the License. * =LICENSEEND= */ -package org.jetbrains.kotlinx.spark.examples +package org.jetbrains.kotlinx.spark.examples.streaming import org.apache.spark.SparkConf import org.apache.spark.sql.Dataset @@ -30,13 +30,17 @@ data class TestRow( val word: String, ) +/** + * To run this on your local machine, you need to first run a Netcat server + * + * `$ nc -lk 9999` + */ fun main() = withSparkStreaming(Durations.seconds(1), timeout = 10_000) { val lines = ssc.socketTextStream("localhost", 9999) val words = lines.flatMap { it.split(" ").iterator() } - - words.foreachRDD { rdd, time -> + words.foreachRDD { rdd, _ -> withSpark(rdd) { val dataframe: Dataset = rdd.map { TestRow(it) }.toDS() diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 586a89cc..5e421162 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -184,6 +184,14 @@ org.jacoco jacoco-maven-plugin + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index 135a75ec..35bb8ee8 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -44,19 +44,39 @@ import scala.collection.mutable.Buffer as ScalaMutableBuffer import scala.collection.mutable.Map as ScalaMutableMap import scala.collection.mutable.Seq as ScalaMutableSeq import scala.collection.mutable.Set as ScalaMutableSet +import org.apache.spark.streaming.State + +/** Returns state value if it exists, else `null`. */ +fun State.getOrNull(): T? = if (exists()) get() else null + +/** Returns state value if it exists, else [other]. */ +fun State.getOrElse(other: T): T = if (exists()) get() else other /** Converts Scala [Option] to Kotlin nullable. */ fun Option.getOrNull(): T? = getOrElse(null) +/** Get if available else [other]. */ +fun Option.getOrElse(other: T): T = getOrElse { other } + /** Converts nullable value to Scala [Option]. */ -fun T?.asOption(): Option = Option.apply(this) +fun T?.toOption(): Option = Option.apply(this) + +/** Converts Scala [Option] to Java [Optional]. */ +fun Option.toOptional(): Optional = Optional.ofNullable(getOrNull()) + /** Converts [Optional] to Kotlin nullable. */ fun Optional.getOrNull(): T? = orNull() +/** Get if available else [other]. */ +fun Optional.getOrElse(other: T): T = orElse(other) + /** Converts nullable value to [Optional]. */ -fun T?.asOptional(): Optional = Optional.ofNullable(this) +fun T?.toOptional(): Optional = Optional.ofNullable(this) + +/** Converts Java [Optional] to Scala [Option]. */ +fun Optional.toOption(): Option = Option.apply(getOrNull()) /** * TODO test diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt new file mode 100644 index 00000000..5b6884ba --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt @@ -0,0 +1,23 @@ +package org.jetbrains.kotlinx.spark.api + +import org.apache.spark.api.java.function.VoidFunction2 +import org.apache.spark.sql.Dataset +import org.apache.spark.sql.streaming.DataStreamWriter + +/** + * :: Experimental :: + * + * (Scala-specific) Sets the output of the streaming query to be processed using the provided + * function. This is supported only in the micro-batch execution modes (that is, when the + * trigger is not continuous). In every micro-batch, the provided function will be called in + * every micro-batch with (i) the output rows as a Dataset and (ii) the batch identifier. + * The batchId can be used to deduplicate and transactionally write the output + * (that is, the provided Dataset) to external systems. The output Dataset is guaranteed + * to be exactly the same for the same batchId (assuming all operations are deterministic + * in the query). + * + * @since 2.4.0 + */ +fun DataStreamWriter.forEachBatch( + func: (batch: Dataset, batchId: Long) -> Unit, +): DataStreamWriter = foreachBatch(VoidFunction2(func)) \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 1e6e5025..f089b6e5 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -318,7 +318,7 @@ fun withSparkStreaming( ssc } - val ssc: JavaStreamingContext = when { + val ssc = when { checkpointPath != null -> JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt similarity index 93% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt rename to kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt index ea9b4df4..96f6dadf 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Streaming.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt @@ -26,10 +26,8 @@ import org.apache.spark.api.java.Optional import org.apache.spark.streaming.Duration import org.apache.spark.streaming.StateSpec import org.apache.spark.streaming.api.java.JavaDStream -import org.apache.spark.streaming.api.java.JavaDStreamLike import org.apache.spark.streaming.api.java.JavaMapWithStateDStream import org.apache.spark.streaming.api.java.JavaPairDStream -import org.apache.spark.streaming.dstream.DStream import scala.Tuple2 @@ -50,7 +48,6 @@ fun JavaPairRDD.toTupleRDD(): JavaRDD> = * Return a new DStream by applying `groupByKey` to each RDD. Hash partitioning is used to * generate the RDDs with `numPartitions` partitions. */ -@JvmName("groupByKeyTuple2") fun JavaDStream>.groupByKey( numPartitions: Int = dstream().ssc().sc().defaultParallelism(), ): JavaDStream>> = @@ -62,7 +59,6 @@ fun JavaDStream>.groupByKey( * Return a new DStream by applying `groupByKey` on each RDD. The supplied * org.apache.spark.Partitioner is used to control the partitioning of each RDD. */ -@JvmName("groupByKeyTuple2") fun JavaDStream>.groupByKey(partitioner: Partitioner): JavaDStream>> = toPairDStream() .groupByKey(partitioner) @@ -73,7 +69,6 @@ fun JavaDStream>.groupByKey(partitioner: Partitioner): JavaD * merged using the supplied reduce function. Hash partitioning is used to generate the RDDs * with `numPartitions` partitions. */ -@JvmName("reduceByKeyTuple2") fun JavaDStream>.reduceByKey( numPartitions: Int = dstream().ssc().sc().defaultParallelism(), reduceFunc: (V, V) -> V, @@ -87,7 +82,6 @@ fun JavaDStream>.reduceByKey( * merged using the supplied reduce function. org.apache.spark.Partitioner is used to control * the partitioning of each RDD. */ -@JvmName("reduceByKeyTuple2") fun JavaDStream>.reduceByKey( partitioner: Partitioner, reduceFunc: (V, V) -> V, @@ -101,7 +95,6 @@ fun JavaDStream>.reduceByKey( * combineByKey for RDDs. Please refer to combineByKey in * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. */ -@JvmName("combineByKeyTuple2") fun JavaDStream>.combineByKey( createCombiner: (V) -> C, mergeValue: (C, V) -> C, @@ -125,7 +118,6 @@ fun JavaDStream>.combineByKey( * @param numPartitions number of partitions of each RDD in the new DStream; if not specified * then Spark's default number of partitions will be used */ -@JvmName("groupByKeyAndWindowTuple2") fun JavaDStream>.groupByKeyAndWindow( windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), @@ -146,7 +138,6 @@ fun JavaDStream>.groupByKeyAndWindow( * @param partitioner partitioner for controlling the partitioning of each RDD in the new * DStream. */ -@JvmName("groupByKeyAndWindowTuple2") fun JavaDStream>.groupByKeyAndWindow( windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), @@ -168,7 +159,6 @@ fun JavaDStream>.groupByKeyAndWindow( * DStream's batching interval * @param numPartitions number of partitions of each RDD in the new DStream. */ -@JvmName("reduceByKeyAndWindowTuple2") fun JavaDStream>.reduceByKeyAndWindow( windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), @@ -191,7 +181,6 @@ fun JavaDStream>.reduceByKeyAndWindow( * @param partitioner partitioner for controlling the partitioning of each RDD * in the new DStream. */ -@JvmName("reduceByKeyAndWindowTuple2") fun JavaDStream>.reduceByKeyAndWindow( windowDuration: Duration, slideDuration: Duration = dstream().slideDuration(), @@ -223,7 +212,6 @@ fun JavaDStream>.reduceByKeyAndWindow( * @param filterFunc Optional function to filter expired key-value pairs; * only pairs that satisfy the function are retained */ -@JvmName("reduceByKeyAndWindowTuple2") fun JavaDStream>.reduceByKeyAndWindow( invReduceFunc: (V, V) -> V, windowDuration: Duration, @@ -266,7 +254,6 @@ fun JavaDStream>.reduceByKeyAndWindow( * @param filterFunc Optional function to filter expired key-value pairs; * only pairs that satisfy the function are retained */ -@JvmName("reduceByKeyAndWindowTuple2") fun JavaDStream>.reduceByKeyAndWindow( invReduceFunc: (V, V) -> V, windowDuration: Duration, @@ -298,23 +285,22 @@ fun JavaDStream>.reduceByKeyAndWindow( * as a parameter of type `State` in the mapping function. * * Example of using `mapWithState`: - * {{{ + * ```kotlin * // A mapping function that maintains an integer state and return a String - * def mappingFunction(key: String, value: Option[Int], state: State[Int]): Option[String] = { + * fun mappingFunction(key: String, value: Optional, state: State): Optional { * // Use state.exists(), state.get(), state.update() and state.remove() * // to manage state, and return the necessary string * } * - * val spec = StateSpec.function(mappingFunction).numPartitions(10) + * val spec = StateSpec.function(::mappingFunction).numPartitions(10) * - * val mapWithStateDStream = keyValueDStream.mapWithState[StateType, MappedType](spec) - * }}} + * val mapWithStateDStream = keyValueDStream.mapWithState(spec) + * ``` * * @param spec Specification of this transformation * @tparam StateType Class type of the state data * @tparam MappedType Class type of the mapped data */ -@JvmName("mapWithStateTuple2") fun JavaDStream>.mapWithState( spec: StateSpec, ): JavaMapWithStateDStream = @@ -329,7 +315,6 @@ fun JavaDStream>.mapWithState( * corresponding state key-value pair will be eliminated. * @tparam S State type */ -@JvmName("updateStateByKeyTuple2") fun JavaDStream>.updateStateByKey( numPartitions: Int = dstream().ssc().sc().defaultParallelism(), updateFunc: (List, S?) -> S?, @@ -337,7 +322,7 @@ fun JavaDStream>.updateStateByKey( toPairDStream() .updateStateByKey( { list: List, s: Optional -> - updateFunc(list, s.getOrNull()).asOptional() + updateFunc(list, s.getOrNull()).toOptional() }, numPartitions, ) @@ -356,7 +341,6 @@ fun JavaDStream>.updateStateByKey( * DStream * @tparam S State type */ -@JvmName("updateStateByKeyTuple2") fun JavaDStream>.updateStateByKey( partitioner: Partitioner, updateFunc: (List, S?) -> S?, @@ -364,7 +348,7 @@ fun JavaDStream>.updateStateByKey( toPairDStream() .updateStateByKey( { list: List, s: Optional -> - updateFunc(list, s.getOrNull()).asOptional() + updateFunc(list, s.getOrNull()).toOptional() }, partitioner, ) @@ -381,7 +365,6 @@ fun JavaDStream>.updateStateByKey( * @param initialRDD initial state value of each key. * @tparam S State type */ -@JvmName("updateStateByKeyTuple2") fun JavaDStream>.updateStateByKey( partitioner: Partitioner, initialRDD: JavaRDD>, @@ -390,7 +373,7 @@ fun JavaDStream>.updateStateByKey( toPairDStream() .updateStateByKey( { list: List, s: Optional -> - updateFunc(list, s.getOrNull()).asOptional() + updateFunc(list, s.getOrNull()).toOptional() }, partitioner, initialRDD.toPairRDD(), @@ -402,7 +385,6 @@ fun JavaDStream>.updateStateByKey( * Return a new DStream by applying a map function to the value of each key-value pairs in * 'this' DStream without changing the key. */ -@JvmName("mapValuesTuple2") fun JavaDStream>.mapValues( mapValuesFunc: (V) -> U, ): JavaDStream> = @@ -414,7 +396,6 @@ fun JavaDStream>.mapValues( * Return a new DStream by applying a flatmap function to the value of each key-value pairs in * 'this' DStream without changing the key. */ -@JvmName("flatMapValuesTuple2") fun JavaDStream>.flatMapValues( flatMapValuesFunc: (V) -> Iterator, ): JavaDStream> = @@ -426,7 +407,6 @@ fun JavaDStream>.flatMapValues( * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. */ -@JvmName("cogroupTuple2") fun JavaDStream>.cogroup( other: JavaDStream>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), @@ -443,7 +423,6 @@ fun JavaDStream>.cogroup( * Return a new DStream by applying 'cogroup' between RDDs of `this` DStream and `other` DStream. * The supplied org.apache.spark.Partitioner is used to partition the generated RDDs. */ -@JvmName("cogroupTuple2") fun JavaDStream>.cogroup( other: JavaDStream>, partitioner: Partitioner, @@ -459,7 +438,6 @@ fun JavaDStream>.cogroup( * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. * Hash partitioning is used to generate the RDDs with `numPartitions` partitions. */ -@JvmName("joinTuple2") fun JavaDStream>.join( other: JavaDStream>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), @@ -475,7 +453,6 @@ fun JavaDStream>.join( * Return a new DStream by applying 'join' between RDDs of `this` DStream and `other` DStream. * The supplied org.apache.spark.Partitioner is used to control the partitioning of each RDD. */ -@JvmName("joinTuple2") fun JavaDStream>.join( other: JavaDStream>, partitioner: Partitioner, @@ -492,7 +469,6 @@ fun JavaDStream>.join( * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` * partitions. */ -@JvmName("leftOuterJoinTuple2") fun JavaDStream>.leftOuterJoin( other: JavaDStream>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), @@ -509,7 +485,6 @@ fun JavaDStream>.leftOuterJoin( * `other` DStream. The supplied org.apache.spark.Partitioner is used to control * the partitioning of each RDD. */ -@JvmName("leftOuterJoinTuple2") fun JavaDStream>.leftOuterJoin( other: JavaDStream>, partitioner: Partitioner, @@ -526,7 +501,6 @@ fun JavaDStream>.leftOuterJoin( * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` * partitions. */ -@JvmName("rightOuterJoinTuple2") fun JavaDStream>.rightOuterJoin( other: JavaDStream>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), @@ -543,7 +517,6 @@ fun JavaDStream>.rightOuterJoin( * `other` DStream. The supplied org.apache.spark.Partitioner is used to control * the partitioning of each RDD. */ -@JvmName("rightOuterJoinTuple2") fun JavaDStream>.rightOuterJoin( other: JavaDStream>, partitioner: Partitioner, @@ -560,7 +533,6 @@ fun JavaDStream>.rightOuterJoin( * `other` DStream. Hash partitioning is used to generate the RDDs with `numPartitions` * partitions. */ -@JvmName("fullOuterJoinTuple2") fun JavaDStream>.fullOuterJoin( other: JavaDStream>, numPartitions: Int = dstream().ssc().sc().defaultParallelism(), @@ -577,7 +549,6 @@ fun JavaDStream>.fullOuterJoin( * `other` DStream. The supplied org.apache.spark.Partitioner is used to control * the partitioning of each RDD. */ -@JvmName("fullOuterJoinTuple2") fun JavaDStream>.fullOuterJoin( other: JavaDStream>, partitioner: Partitioner, @@ -593,7 +564,6 @@ fun JavaDStream>.fullOuterJoin( * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". */ -@JvmName("saveAsHadoopFilesTuple2") fun JavaDStream>.saveAsHadoopFiles( prefix: String, suffix: String, @@ -603,7 +573,6 @@ fun JavaDStream>.saveAsHadoopFiles( * Save each RDD in `this` DStream as a Hadoop file. The file name at each batch interval is * generated based on `prefix` and `suffix`: "prefix-TIME_IN_MS.suffix". */ -@JvmName("saveAsNewAPIHadoopFilesTuple2") fun JavaDStream>.saveAsNewAPIHadoopFiles( prefix: String, suffix: String, diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 686f414f..d1639e85 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -20,31 +20,22 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.assertions.throwables.shouldThrow -import io.kotest.assertions.timing.eventually import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.shouldBe -import kotlinx.coroutines.runBlocking import org.apache.commons.io.FileUtils import org.apache.hadoop.fs.FileSystem -import org.apache.spark.SparkConf import org.apache.spark.SparkException -import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.streaming.* -import org.apache.spark.streaming.api.java.JavaStreamingContext import org.apache.spark.util.Utils import org.jetbrains.kotlinx.spark.api.tuples.X import org.jetbrains.kotlinx.spark.api.tuples.component1 import org.jetbrains.kotlinx.spark.api.tuples.component2 -import scala.Option import java.io.File import java.io.Serializable -import java.net.ConnectException import java.nio.charset.StandardCharsets import java.util.* import java.util.concurrent.atomic.AtomicBoolean -import kotlin.time.Duration.Companion.seconds -import kotlin.time.ExperimentalTime class StreamingTest : ShouldSpec({ @@ -78,19 +69,15 @@ class StreamingTest : ShouldSpec({ should("Work with checkpointpath") { val emptyDir = createTempDir() - emptyDir.deleteOnExit() + val testDirectory = createTempDir() + val corruptedCheckpointDir = createCorruptedCheckpoint() val batchDuration = Durations.seconds(1) val timeout = Durations.seconds(1).milliseconds() - val testDirectory = createTempDir() - testDirectory.deleteOnExit() - - val corruptedCheckpointDir = createCorruptedCheckpoint() val newContextCreated = AtomicBoolean(false) - val creatingFun: KSparkStreamingSession.() -> Unit = { println("created new context") newContextCreated.set(true) @@ -151,12 +138,13 @@ class StreamingTest : ShouldSpec({ }) private fun createTempDir() = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "spark") + .apply { deleteOnExit() } private fun createCorruptedCheckpoint(): String { val checkpointDirectory = createTempDir().absolutePath val fakeCheckpointFile = Checkpoint.checkpointFile(checkpointDirectory, Time(1000)) FileUtils.write(File(fakeCheckpointFile.toString()), "blablabla", StandardCharsets.UTF_8) - assert(Checkpoint.getCheckpointFiles(checkpointDirectory, (null as FileSystem?).asOption()).nonEmpty()) + assert(Checkpoint.getCheckpointFiles(checkpointDirectory, (null as FileSystem?).toOption()).nonEmpty()) return checkpointDirectory } From 59f658e080d6d9ccfc98c3d7739d5cc65873904b Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 14 Apr 2022 13:45:06 +0200 Subject: [PATCH 133/213] added toDataFrame conversions for RDDs added KotlinSqlNetworkWordCount.kt --- .../streaming/KotlinSqlNetworkWordCount.kt | 80 +++++++++++++++++++ .../jetbrains/kotlinx/spark/api/Dataset.kt | 20 ++++- .../kotlinx/spark/api/SparkSession.kt | 13 +++ 3 files changed, 109 insertions(+), 4 deletions(-) create mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinSqlNetworkWordCount.kt diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinSqlNetworkWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinSqlNetworkWordCount.kt new file mode 100644 index 00000000..2f6aacf8 --- /dev/null +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinSqlNetworkWordCount.kt @@ -0,0 +1,80 @@ +package org.jetbrains.kotlinx.spark.examples.streaming + +import org.apache.spark.api.java.JavaRDD +import org.apache.spark.api.java.StorageLevels +import org.apache.spark.streaming.Durations +import org.apache.spark.streaming.Time +import org.jetbrains.kotlinx.spark.api.withSparkStreaming +import java.io.Serializable +import java.util.regex.Pattern +import kotlin.system.exitProcess + + +/** + * Use DataFrames and SQL to count words in UTF8 encoded, '\n' delimited text received from the + * network every second. + * + * Usage: KotlinSqlNetworkWordCount + * and describe the TCP server that Spark Streaming would connect to receive data. + * + * To run this on your local machine, you need to first run a Netcat server + * `$ nc -lk 9999` + * and then run the example + * `$ bin/run-example org.apache.spark.examples.streaming.KotlinSqlNetworkWordCount localhost 9999` + */ +object KotlinSqlNetworkWordCount { + + private val SPACE = Pattern.compile(" ") + + private const val DEFAULT_IP = "localhost" + private const val DEFAULT_PORT = "9999" + + @Throws(Exception::class) + @JvmStatic + fun main(args: Array) { + if (args.size < 2 && args.isNotEmpty()) { + System.err.println("Usage: KotlinNetworkWordCount ") + exitProcess(1) + } + + // Create the context with a 1 second batch size + withSparkStreaming( + batchDuration = Durations.seconds(1), + appName = "KotlinSqlNetworkWordCount", + ) { + + + // Create a KotlinReceiverInputDStream on target ip:port and count the + // words in input stream of \n delimited text (e.g. generated by 'nc') + // Note that no duplication in storage level only for running locally. + // Replication necessary in distributed scenario for fault tolerance. + val lines = ssc.socketTextStream( + args.getOrElse(0) { DEFAULT_IP }, + args.getOrElse(1) { DEFAULT_PORT }.toInt(), + StorageLevels.MEMORY_AND_DISK_SER, + ) + val words = lines.flatMap { it.split(SPACE).iterator() } + + // Convert RDDs of the words DStream to DataFrame and run SQL query + words.foreachRDD { rdd: JavaRDD, time: Time -> + withSpark(rdd) { + + // Convert JavaRDD to JavaRDD to DataFrame (Dataset) + val rowRDD = rdd.map(::KotlinRecord) + val wordsDataFrame = rowRDD.toDF() + + // Creates a temporary view using the DataFrame + wordsDataFrame.createOrReplaceTempView("words") + + // Do word count on table using SQL and print it + val wordCountsDataFrame = + spark.sql("select word, count(*) as total from words group by word") + println("========= $time=========") + wordCountsDataFrame.show() + } + } + } + } +} + +data class KotlinRecord(val word: String): Serializable diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt index 9173bf8d..71abb1ee 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt @@ -36,10 +36,7 @@ import org.apache.spark.api.java.function.ForeachPartitionFunction import org.apache.spark.api.java.function.MapFunction import org.apache.spark.api.java.function.ReduceFunction import org.apache.spark.rdd.RDD -import org.apache.spark.sql.Column -import org.apache.spark.sql.Dataset -import org.apache.spark.sql.KeyValueGroupedDataset -import org.apache.spark.sql.TypedColumn +import org.apache.spark.sql.* import org.jetbrains.kotlinx.spark.extensions.KSparkExtensions import scala.Tuple2 import scala.Tuple3 @@ -78,6 +75,21 @@ inline fun RDD.toDS(spark: SparkSession): Dataset = inline fun JavaRDDLike.toDS(spark: SparkSession): Dataset = spark.createDataset(this.rdd(), encoder()) +/** + * Utility method to create Dataset (Dataframe) from JavaRDD. + * NOTE: [T] must be [Serializable]. + */ +inline fun JavaRDDLike.toDF(spark: SparkSession): Dataset = + toDS(spark).toDF() + +/** + * Utility method to create Dataset (Dataframe) from RDD. + * NOTE: [T] must be [Serializable]. + */ +inline fun RDD.toDF(spark: SparkSession): Dataset = + toDS(spark).toDF() + + /** * (Kotlin-specific) * Returns a new Dataset that contains the result of applying [func] to each element. diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index f089b6e5..27513ffc 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -36,6 +36,7 @@ import org.apache.spark.broadcast.Broadcast import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset +import org.apache.spark.sql.Row import org.apache.spark.sql.SparkSession.Builder import org.apache.spark.sql.UDFRegistration import org.apache.spark.streaming.Duration @@ -71,6 +72,18 @@ class KSparkSession(val spark: SparkSession) { /** Utility method to create dataset from [JavaRDDLike]. */ inline fun JavaRDDLike.toDS(): Dataset = toDS(spark) + /** + * Utility method to create Dataset (Dataframe) from RDD. + * NOTE: [T] must be [Serializable]. + */ + inline fun RDD.toDF(): Dataset = toDF(spark) + + /** + * Utility method to create Dataset (Dataframe) from JavaRDD. + * NOTE: [T] must be [Serializable]. + */ + inline fun JavaRDDLike.toDF(): Dataset = toDF(spark) + /** * A collection of methods for registering user-defined functions (UDF). * From 547ab1433caccfe34a7eb12a2446ea69f9def0cc Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 14 Apr 2022 13:52:28 +0200 Subject: [PATCH 134/213] added toDataFrame conversions for RDDs added KotlinSqlNetworkWordCount.kt --- .../examples/streaming/KotlinStatefulNetworkCount.kt | 11 ++++------- .../kotlinx/spark/examples/streaming/Streaming.kt | 3 --- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt index e247a873..64ad0392 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt @@ -1,18 +1,15 @@ package org.jetbrains.kotlinx.spark.examples.streaming -import org.apache.spark.SparkConf import org.apache.spark.api.java.Optional import org.apache.spark.api.java.StorageLevels -import org.apache.spark.api.java.function.Function3 import org.apache.spark.streaming.Durations import org.apache.spark.streaming.State import org.apache.spark.streaming.StateSpec -import org.apache.spark.streaming.api.java.* -import org.jetbrains.kotlinx.spark.api.* +import org.jetbrains.kotlinx.spark.api.getOrElse +import org.jetbrains.kotlinx.spark.api.mapWithState +import org.jetbrains.kotlinx.spark.api.toPairRDD import org.jetbrains.kotlinx.spark.api.tuples.X -import org.jetbrains.kotlinx.spark.api.tuples.t -import scala.Tuple2 -import java.util.* +import org.jetbrains.kotlinx.spark.api.withSparkStreaming import java.util.regex.Pattern import kotlin.system.exitProcess diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt index e0befd04..fb576cbd 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt @@ -19,11 +19,8 @@ */ package org.jetbrains.kotlinx.spark.examples.streaming -import org.apache.spark.SparkConf import org.apache.spark.sql.Dataset -import org.apache.spark.streaming.Duration import org.apache.spark.streaming.Durations -import org.apache.spark.streaming.api.java.JavaStreamingContext import org.jetbrains.kotlinx.spark.api.* data class TestRow( From 6a5fce452497e27f37d8b469d730517ed02c318d Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 14 Apr 2022 15:51:36 +0200 Subject: [PATCH 135/213] more tests --- .../kotlinx/spark/api/StreamingKeyValues.kt | 20 ++++++ .../kotlinx/spark/api/StreamingTest.kt | 63 ++++++++++++++++++- 2 files changed, 80 insertions(+), 3 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt index 96f6dadf..75c2c23f 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt @@ -19,6 +19,7 @@ */ package org.jetbrains.kotlinx.spark.api +import org.apache.spark.HashPartitioner import org.apache.spark.Partitioner import org.apache.spark.api.java.JavaPairRDD import org.apache.spark.api.java.JavaRDD @@ -90,6 +91,22 @@ fun JavaDStream>.reduceByKey( .reduceByKey(reduceFunc, partitioner) .toTupleDStream() +/** + * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the + * combineByKey for RDDs. Please refer to combineByKey in + * org.apache.spark.rdd.PairRDDFunctions in the Spark core documentation for more information. + */ +fun JavaDStream>.combineByKey( + createCombiner: (V) -> C, + mergeValue: (C, V) -> C, + mergeCombiner: (C, C) -> C, + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + mapSideCombine: Boolean = true, +): JavaDStream> = + toPairDStream() + .combineByKey(createCombiner, mergeValue, mergeCombiner, HashPartitioner(numPartitions), mapSideCombine) + .toTupleDStream() + /** * Combine elements of each key in DStream's RDDs using custom functions. This is similar to the * combineByKey for RDDs. Please refer to combineByKey in @@ -311,6 +328,7 @@ fun JavaDStream>.mapWithState( * the given function on the previous state of the key and the new values of each key. * In every batch the updateFunc will be called for each state even if there are no new values. * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. + * Note: Needs checkpoint directory to be set. * @param updateFunc State update function. If `this` function returns `null`, then * corresponding state key-value pair will be eliminated. * @tparam S State type @@ -333,6 +351,7 @@ fun JavaDStream>.updateStateByKey( * the given function on the previous state of the key and the new values of each key. * In every batch the updateFunc will be called for each state even if there are no new values. * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. + * Note: Needs checkpoint directory to be set. * @param updateFunc State update function. Note, that this function may generate a different * tuple with a different key than the input key. Therefore keys may be removed * or added in this way. It is up to the developer to decide whether to @@ -358,6 +377,7 @@ fun JavaDStream>.updateStateByKey( * Return a new "state" DStream where the state for each key is updated by applying * the given function on the previous state of the key and the new values of the key. * org.apache.spark.Partitioner is used to control the partitioning of each RDD. + * Note: Needs checkpoint directory to be set. * @param updateFunc State update function. If `this` function returns `null`, then * corresponding state key-value pair will be eliminated. * @param partitioner Partitioner for controlling the partitioning of each RDD in the new diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index d1639e85..0a88586d 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -22,15 +22,15 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.assertions.throwables.shouldThrow import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.collections.shouldBeIn +import io.kotest.matchers.collections.shouldContainAll import io.kotest.matchers.shouldBe import org.apache.commons.io.FileUtils import org.apache.hadoop.fs.FileSystem import org.apache.spark.SparkException import org.apache.spark.streaming.* import org.apache.spark.util.Utils -import org.jetbrains.kotlinx.spark.api.tuples.X -import org.jetbrains.kotlinx.spark.api.tuples.component1 -import org.jetbrains.kotlinx.spark.api.tuples.component2 +import org.jetbrains.kotlinx.spark.api.tuples.* +import scala.Tuple2 import java.io.File import java.io.Serializable import java.nio.charset.StandardCharsets @@ -134,6 +134,62 @@ class StreamingTest : ShouldSpec({ ) newContextCreated.get() shouldBe true } + + should("Have handy tuple2 functions") { + val input = listOf("aaa", "bbb", "aaa", "ccc") + val result = Result() + + withSparkStreaming(Duration(10), timeout = 1000, checkpointPath = createTempDir().absolutePath) { + + val (resultBroadcast, queue) = withSpark(ssc) { + spark.broadcast(result) X LinkedList(listOf(sc.parallelize(input))) + } + + val inputStream = ssc + + .queueStream(queue) // "aaa", "bbb", "aaa", "ccc" + + .map { it X 1 } // ("aaa", 1), ("bbb", 1), ("aaa", 1), ("ccc", 1) + + .reduceByKey(reduceFunc = Int::plus) // ("aaa", 2), ("bbb", 1), ("ccc", 1) + + .flatMapValues { iterator { yield(it); yield(it) } } // ("aaa", 2), ("aaa", 2), ("bbb", 1), ("bbb", 1), ("ccc", 1), ("ccc", 1) + + .groupByKey() // ("aaa", [2, 2]), ("bbb", [1, 1]), ("ccc", [1, 1]) + + .flatMap { (key, values) -> + values.mapIndexed { i, it -> key X it + i }.iterator() + } // ("aaa", 2), ("aaa", 3), ("bbb", 1), ("bbb", 2), ("ccc", 1), ("ccc", 2) + + .combineByKey( + createCombiner = { listOf(it) }, + mergeValue = { list, int -> + list + int + }, + mergeCombiner = { list1, list2 -> + list1 + list2 + }, + ) // ("aaa", [2, 3]), ("bbb", [1, 2]), ("ccc", [1, 2]) + + + // Note: this will update state inside the checkpoint, which we won't test here for now + .updateStateByKey(numPartitions = 3) { lists, s: Int? -> + (s ?: 0) + lists.sumOf { it.sum() } + } // ("aaa", 5), ("bbb", 3), ("ccc", 3) + + inputStream.foreachRDD { rdd, _ -> + withSpark(rdd) { + rdd.toDS().forEach { + it._1 shouldBeIn input + + resultBroadcast.value.list = resultBroadcast.value.list.plusElement(it) + } + } + } + } + + result.list.shouldContainAll(t("aaa", 5), t("bbb", 3), t("ccc", 3)) + } } }) @@ -151,3 +207,4 @@ private fun createCorruptedCheckpoint(): String { class Counter(@Volatile var value: Int) : Serializable +class Result(@Volatile var list: List> = listOf()) : Serializable \ No newline at end of file From 9449b9f0527584588f874e166e945d5db62dc401 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 14 Apr 2022 17:44:33 +0200 Subject: [PATCH 136/213] added kafka test, but issue with kotest extension --- kotlin-spark-api/3.2/pom_2.12.xml | 6 ++ .../kotlinx/spark/api/StreamingTest.kt | 87 +++++++++++++++++++ pom.xml | 1 + 3 files changed, 94 insertions(+) diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 5e421162..8a468e34 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -95,6 +95,12 @@ ${kotest-extension-allure.version} test + + io.kotest.extensions + kotest-extensions-embedded-kafka + ${kotest-extensions-embedded-kafka.version} + test + com.beust klaxon diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 0a88586d..70b5d8a9 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -20,14 +20,26 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.assertions.throwables.shouldThrow +import io.kotest.assertions.timing.eventually import io.kotest.core.spec.style.ShouldSpec +import io.kotest.extensions.embedded.kafka.EmbeddedKafkaListener +import io.kotest.extensions.embedded.kafka.embeddedKafkaListener import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.collections.shouldContainAll import io.kotest.matchers.shouldBe import org.apache.commons.io.FileUtils import org.apache.hadoop.fs.FileSystem +import org.apache.kafka.clients.consumer.ConsumerConfig +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.clients.producer.ProducerRecord +import org.apache.kafka.common.serialization.StringDeserializer import org.apache.spark.SparkException import org.apache.spark.streaming.* +import org.apache.spark.streaming.api.java.JavaDStream +import org.apache.spark.streaming.api.java.JavaInputDStream +import org.apache.spark.streaming.kafka010.ConsumerStrategies +import org.apache.spark.streaming.kafka010.KafkaUtils +import org.apache.spark.streaming.kafka010.LocationStrategies import org.apache.spark.util.Utils import org.jetbrains.kotlinx.spark.api.tuples.* import scala.Tuple2 @@ -36,10 +48,85 @@ import java.io.Serializable import java.nio.charset.StandardCharsets import java.util.* import java.util.concurrent.atomic.AtomicBoolean +import kotlin.time.Duration.Companion.seconds +import java.time.Duration class StreamingTest : ShouldSpec({ + context("streaming") { + + context("kafka") { + val port = 9092 + val broker = "localhost:$port" + val topic1 = "test1" + val topic2 = "test2" + val kafkaListener = EmbeddedKafkaListener(port) + + listener(kafkaListener) + + val producer = kafkaListener.stringStringProducer() + producer.send(ProducerRecord(topic1, "Hello this is a test test test")) + producer.send(ProducerRecord(topic2, "This is also also a test test something")) + producer.close() + + withSparkStreaming( + batchDuration = Durations.seconds(2), + appName = "KotlinDirectKafkaWordCount", + timeout = 1000L, + ) { + + val kafkaParams: Map = mapOf( + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to broker, + ConsumerConfig.GROUP_ID_CONFIG to "consumer-group", + ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, + ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, + ) + + // Create direct kafka stream with brokers and topics + val messages: JavaInputDStream> = KafkaUtils.createDirectStream( + ssc, + LocationStrategies.PreferConsistent(), + ConsumerStrategies.Subscribe(setOf(topic1, topic2), kafkaParams), + ) + + // Get the lines, split them into words, count the words and print + val lines = messages.map { it.topic() X it.value() } + val words = lines.flatMapValues { it.split(" ").iterator() } + + val wordCounts = words + .map { t(it, 1) } + .reduceByKey { a: Int, b: Int -> a + b } + .map { (tup, counter) -> tup + counter } + + val resultLists = mapOf( + topic1 to listOf( + "Hello" X 1, + "this" X 1, + "is" X 1, + "a" X 1, + "test" X 3, + ), + topic2 to listOf( + "This" X 1, + "is" X 1, + "also" X 2, + "a" X 1, + "test" X 2, + "something" X 1, + ) + ) + + wordCounts.foreachRDD { rdd, _ -> + rdd.foreach { (topic, word, count) -> + t(word, count).shouldBeIn(collection = resultLists[topic]!!) + } + } + + wordCounts.print() + } + } + should("stream") { val input = listOf("aaa", "bbb", "aaa", "ccc") diff --git a/pom.xml b/pom.xml index acee0ac5..33ee181e 100644 --- a/pom.xml +++ b/pom.xml @@ -15,6 +15,7 @@ 0.16.0 4.6.0 1.0.1 + 1.0.6 3.2.1 3.3.1 From 70949ef3846b4fc9e40951870bea3c0f23f15108 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 19 Apr 2022 13:40:53 +0200 Subject: [PATCH 137/213] added embedded kafka so kafka tests work --- .../streaming/KotlinSqlNetworkWordCount.kt | 19 +++ .../streaming/KotlinStatefulNetworkCount.kt | 21 ++- kotlin-spark-api/3.2/pom_2.12.xml | 34 ++--- .../kotlinx/spark/api/DataStreamWriter.kt | 21 ++- .../kotlinx/spark/api/StreamingTest.kt | 2 - .../kotlinx/spark/api/kafkaHelper.kt | 128 ++++++++++++++++++ pom.xml | 2 +- 7 files changed, 198 insertions(+), 29 deletions(-) create mode 100644 kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinSqlNetworkWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinSqlNetworkWordCount.kt index 2f6aacf8..51060a20 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinSqlNetworkWordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinSqlNetworkWordCount.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.examples.streaming import org.apache.spark.api.java.JavaRDD diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt index 64ad0392..2c938ead 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinStatefulNetworkCount.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.examples.streaming import org.apache.spark.api.java.Optional @@ -82,4 +101,4 @@ object KotlinStatefulNetworkCount { stateDstream.print() } } -} \ No newline at end of file +} diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 8a468e34..eeea1bff 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -31,7 +31,6 @@ org.jetbrains.kotlinx.spark scala-tuples-in-kotlin - org.apache.spark spark-sql-kafka-0-10_${scala.compat.version} @@ -42,12 +41,7 @@ spark-streaming-kafka-0-10_${scala.compat.version} ${spark3.version} - - org.apache.kafka - kafka-streams-test-utils - 3.1.0 - test - + @@ -62,19 +56,6 @@ ${spark3.version} provided - - - - - - - - - - - - - org.apache.hadoop hadoop-client @@ -96,9 +77,9 @@ test - io.kotest.extensions - kotest-extensions-embedded-kafka - ${kotest-extensions-embedded-kafka.version} + io.github.embeddedkafka + embedded-kafka_${scala.compat.version} + ${embedded-kafka.version} test @@ -119,7 +100,12 @@ ${spark3.version} tests test - + + + org.apache.kafka + kafka-streams-test-utils + 3.1.0 + test diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt index 5b6884ba..d0c1ece1 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ package org.jetbrains.kotlinx.spark.api import org.apache.spark.api.java.function.VoidFunction2 @@ -20,4 +39,4 @@ import org.apache.spark.sql.streaming.DataStreamWriter */ fun DataStreamWriter.forEachBatch( func: (batch: Dataset, batchId: Long) -> Unit, -): DataStreamWriter = foreachBatch(VoidFunction2(func)) \ No newline at end of file +): DataStreamWriter = foreachBatch(VoidFunction2(func)) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 70b5d8a9..8d9bb818 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -22,8 +22,6 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.assertions.throwables.shouldThrow import io.kotest.assertions.timing.eventually import io.kotest.core.spec.style.ShouldSpec -import io.kotest.extensions.embedded.kafka.EmbeddedKafkaListener -import io.kotest.extensions.embedded.kafka.embeddedKafkaListener import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.collections.shouldContainAll import io.kotest.matchers.shouldBe diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt new file mode 100644 index 00000000..a9e32aaf --- /dev/null +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt @@ -0,0 +1,128 @@ +@file:Suppress("MemberVisibilityCanBePrivate", "BlockingMethodInNonBlockingContext") + +package org.jetbrains.kotlinx.spark.api + +/** + * Source: https://github.com/kotest/kotest-extensions-embedded-kafka + * + */ + +import io.github.embeddedkafka.EmbeddedKafka +import io.github.embeddedkafka.EmbeddedKafkaConfig +import io.kotest.core.listeners.TestListener +import io.kotest.core.spec.Spec +import org.apache.kafka.clients.CommonClientConfigs +import org.apache.kafka.clients.consumer.ConsumerConfig +import org.apache.kafka.clients.consumer.KafkaConsumer +import org.apache.kafka.clients.producer.KafkaProducer +import org.apache.kafka.common.serialization.BytesDeserializer +import org.apache.kafka.common.serialization.BytesSerializer +import org.apache.kafka.common.serialization.StringDeserializer +import org.apache.kafka.common.serialization.StringSerializer +import org.apache.kafka.common.utils.Bytes +import scala.Predef +import java.util.Properties + +val embeddedKafkaListener: EmbeddedKafkaListener = EmbeddedKafkaListener(EmbeddedKafkaConfig.defaultConfig()) + +class EmbeddedKafkaListener( + private val config: EmbeddedKafkaConfig, +) : TestListener { + + constructor(port: Int) : this( + EmbeddedKafkaConfig.apply( + port, + EmbeddedKafkaConfig.defaultZookeeperPort(), + Predef.Map().empty(), + Predef.Map().empty(), + Predef.Map().empty(), + ) + ) + + constructor(kafkaPort: Int, zookeeperPort: Int) : this( + EmbeddedKafkaConfig.apply( + kafkaPort, + zookeeperPort, + Predef.Map().empty(), + Predef.Map().empty(), + Predef.Map().empty(), + ) + ) + + val port: Int = config.kafkaPort() + + val host: String = "127.0.0.1" + + val bootstrapServer = "$host:$port" + + override suspend fun beforeSpec(spec: Spec) { + EmbeddedKafka.start(config) + while (!EmbeddedKafka.isRunning()) { + Thread.sleep(100) + } + } + + override suspend fun afterSpec(spec: Spec) { + EmbeddedKafka.stop() + while (EmbeddedKafka.isRunning()) { + Thread.sleep(100) + } + } + + /** + * Returns a kafka consumer configured with the details of the embedded broker. + */ + fun stringStringConsumer(configure: Properties.() -> Unit = {}): KafkaConsumer { + val props = Properties() + props[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = "$host:$port" + props[ConsumerConfig.GROUP_ID_CONFIG] = "test_consumer_group_" + System.currentTimeMillis() + props[ConsumerConfig.AUTO_OFFSET_RESET_CONFIG] = "earliest" + props.configure() + return KafkaConsumer(props, StringDeserializer(), StringDeserializer()) + } + + /** + * Returns a kafka consumer subscribed to the given topic on the embedded broker. + */ + fun stringStringConsumer(topic: String, configure: Properties.() -> Unit = {}): KafkaConsumer { + val consumer = stringStringConsumer(configure) + consumer.subscribe(listOf(topic)) + return consumer + } + + /** + * Returns a kafka consumer configured with the details of the embedded broker. + */ + fun bytesBytesConsumer(configure: Properties.() -> Unit = {}): KafkaConsumer { + val props = Properties() + props[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = "$host:$port" + props[ConsumerConfig.GROUP_ID_CONFIG] = "test_consumer_group_" + System.currentTimeMillis() + props[ConsumerConfig.AUTO_OFFSET_RESET_CONFIG] = "earliest" + props.configure() + return KafkaConsumer(props, BytesDeserializer(), BytesDeserializer()) + } + + /** + * Returns a kafka consumer subscribed to the given topic on the embedded broker. + */ + fun bytesBytesConsumer(topic: String, configure: Properties.() -> Unit = {}): KafkaConsumer { + val consumer = bytesBytesConsumer(configure) + consumer.subscribe(listOf(topic)) + return consumer + } + + fun bytesBytesProducer(configure: Properties.() -> Unit = {}): KafkaProducer { + val props = Properties() + props[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = "$host:$port" + props.configure() + return KafkaProducer(props, BytesSerializer(), BytesSerializer()) + } + + fun stringStringProducer(configure: Properties.() -> Unit = {}): KafkaProducer { + val props = Properties() + props[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = "$host:$port" + props.configure() + return KafkaProducer(props, StringSerializer(), StringSerializer()) + } +} + diff --git a/pom.xml b/pom.xml index 33ee181e..dd6f3844 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ 0.16.0 4.6.0 1.0.1 - 1.0.6 + 3.1.0 3.2.1 3.3.1 From 92aee4d5209878cec7b593e239c218a80e53d90c Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 19 Apr 2022 14:10:45 +0200 Subject: [PATCH 138/213] found some more breaking test cases --- .../kotlinx/spark/api/EncodingTest.kt | 41 +++++++++++++++++-- 1 file changed, 37 insertions(+), 4 deletions(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt index e053e05b..62681956 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt @@ -27,10 +27,7 @@ import org.apache.spark.sql.Dataset import org.apache.spark.sql.types.Decimal import org.apache.spark.unsafe.types.CalendarInterval import org.jetbrains.kotlinx.spark.api.tuples.* -import scala.Product -import scala.Tuple1 -import scala.Tuple2 -import scala.Tuple3 +import scala.* import java.math.BigDecimal import java.sql.Date import java.sql.Timestamp @@ -180,6 +177,42 @@ class EncodingTest : ShouldSpec({ context("schema") { withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { + should("handle Scala case class datasets") { + val caseClasses = listOf(Some(1), Some(2), Some(3)) + val dataset = caseClasses.toDS() + dataset.collectAsList() shouldBe caseClasses + } + + should("handle Scala case class case class datasets") { + val caseClasses = listOf( + Some(Some(1)), + Some(Some(2)), + Some(Some(3)), + ) + val dataset = caseClasses.toDS() + dataset.collectAsList() shouldBe caseClasses + } + + should("handle data class Scala case class datasets") { + val caseClasses = listOf( + Some(1) to Some(2), + Some(3) to Some(4), + Some(5) to Some(6), + ) + val dataset = caseClasses.toDS() + dataset.collectAsList() shouldBe caseClasses + } + + should("handle Scala case class data class datasets") { + val caseClasses = listOf( + Some(1 to 2), + Some(3 to 4), + Some(5 to 6), + ) + val dataset = caseClasses.toDS() + dataset.collectAsList() shouldBe caseClasses + } + should("collect data classes with doubles correctly") { val ll1 = LonLat(1.0, 2.0) val ll2 = LonLat(3.0, 4.0) From 07692f4fcad43c73dfe9b46984755c3c5a513b25 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 19 Apr 2022 15:44:18 +0200 Subject: [PATCH 139/213] adding more working cases, found option cases not to work --- .../apache/spark/sql/KotlinReflection.scala | 16 +++++- .../spark/extensions/DemoCaseClass.scala | 3 + .../kotlinx/spark/api/EncodingTest.kt | 55 +++++++++++++++++-- 3 files changed, 67 insertions(+), 7 deletions(-) create mode 100644 core/3.2/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala index 05ff330b..cbc30be3 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala +++ b/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala @@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal import org.apache.spark.sql.catalyst.expressions.objects._ import org.apache.spark.sql.catalyst.expressions.{Expression, _} import org.apache.spark.sql.catalyst.util.ArrayBasedMapData -import org.apache.spark.sql.catalyst.{InternalRow, ScalaReflection, WalkedTypePath} +import org.apache.spark.sql.catalyst.{DefinedByConstructorParams, InternalRow, ScalaReflection, WalkedTypePath} import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String} import org.apache.spark.util.Utils @@ -42,11 +42,12 @@ import java.lang.Exception * for classes whose fields are entirely defined by constructor params but should not be * case classes. */ -trait DefinedByConstructorParams +//trait DefinedByConstructorParams /** * KotlinReflection is heavily inspired by ScalaReflection and even extends it just to add several methods */ +//noinspection RedundantBlock object KotlinReflection extends KotlinReflection { /** * Returns the Spark SQL DataType for a given java class. Where this is not an exact mapping @@ -916,9 +917,18 @@ object KotlinReflection extends KotlinReflection { } // - case _ if predefinedDt.isDefined => { + // Kotlin specific cases + case t if predefinedDt.isDefined => { + +// if (seenTypeSet.contains(t)) { +// throw new UnsupportedOperationException( +// s"cannot have circular references in class, but got the circular reference of class $t" +// ) +// } + predefinedDt.get match { + // Kotlin data class case dataType: KDataTypeWrapper => { val cls = dataType.cls val properties = getJavaBeanReadableProperties(cls) diff --git a/core/3.2/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala b/core/3.2/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala new file mode 100644 index 00000000..eb5a1a47 --- /dev/null +++ b/core/3.2/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala @@ -0,0 +1,3 @@ +package org.jetbrains.kotlinx.spark.extensions + +case class DemoCaseClass[T](a: Int, b: T) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt index 62681956..f39ab769 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt @@ -27,6 +27,7 @@ import org.apache.spark.sql.Dataset import org.apache.spark.sql.types.Decimal import org.apache.spark.unsafe.types.CalendarInterval import org.jetbrains.kotlinx.spark.api.tuples.* +import org.jetbrains.kotlinx.spark.extensions.DemoCaseClass import scala.* import java.math.BigDecimal import java.sql.Date @@ -177,13 +178,59 @@ class EncodingTest : ShouldSpec({ context("schema") { withSpark(props = mapOf("spark.sql.codegen.comments" to true)) { - should("handle Scala case class datasets") { + should("handle Scala Case class datasets") { + val caseClasses = listOf( + DemoCaseClass(1, "1"), + DemoCaseClass(2, "2"), + DemoCaseClass(3, "3"), + ) + val dataset = caseClasses.toDS() + dataset.show() + dataset.collectAsList() shouldBe caseClasses + } + + should("handle Scala Case class with data class datasets") { + val caseClasses = listOf( + DemoCaseClass(1, "1" to 1L), + DemoCaseClass(2, "2" to 2L), + DemoCaseClass(3, "3" to 3L), + ) + val dataset = caseClasses.toDS() + dataset.show() + dataset.collectAsList() shouldBe caseClasses + } + + should("handle data class with Scala Case class datasets") { + val caseClasses = listOf( + 1 to DemoCaseClass(1, "1"), + 2 to DemoCaseClass(2, "2"), + 3 to DemoCaseClass(3, "3"), + ) + val dataset = caseClasses.toDS() + dataset.show() + dataset.collectAsList() shouldBe caseClasses + } + + should("handle data class with Scala Case class & deeper datasets") { + val caseClasses = listOf( + 1 to DemoCaseClass(1, "1" to DemoCaseClass(1, 1.0)), + 2 to DemoCaseClass(2, "2" to DemoCaseClass(2, 2.0)), + 3 to DemoCaseClass(3, "3" to DemoCaseClass(3, 3.0)), + ) + val dataset = caseClasses.toDS() + dataset.show() + dataset.collectAsList() shouldBe caseClasses + } + + + should("handle Scala Option datasets") { val caseClasses = listOf(Some(1), Some(2), Some(3)) val dataset = caseClasses.toDS() + dataset.show() dataset.collectAsList() shouldBe caseClasses } - should("handle Scala case class case class datasets") { + should("handle Scala Option Option datasets") { val caseClasses = listOf( Some(Some(1)), Some(Some(2)), @@ -193,7 +240,7 @@ class EncodingTest : ShouldSpec({ dataset.collectAsList() shouldBe caseClasses } - should("handle data class Scala case class datasets") { + should("handle data class Scala Option datasets") { val caseClasses = listOf( Some(1) to Some(2), Some(3) to Some(4), @@ -203,7 +250,7 @@ class EncodingTest : ShouldSpec({ dataset.collectAsList() shouldBe caseClasses } - should("handle Scala case class data class datasets") { + should("handle Scala Option data class datasets") { val caseClasses = listOf( Some(1 to 2), Some(3 to 4), From eaf13cea02b184d18cd3aaa3ff45948fdac3525e Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 19 Apr 2022 16:26:37 +0200 Subject: [PATCH 140/213] Optional and nullable options for updateStateByKey, changing port for kafka test --- .../kotlinx/spark/api/StreamingKeyValues.kt | 75 +++++++++++++++++++ .../kotlinx/spark/api/StreamingTest.kt | 2 +- .../kotlinx/spark/api/kafkaHelper.kt | 19 +++++ 3 files changed, 95 insertions(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt index 75c2c23f..4a0c6195 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt @@ -333,6 +333,7 @@ fun JavaDStream>.mapWithState( * corresponding state key-value pair will be eliminated. * @tparam S State type */ +@JvmName("updateStateByKeyNullable") fun JavaDStream>.updateStateByKey( numPartitions: Int = dstream().ssc().sc().defaultParallelism(), updateFunc: (List, S?) -> S?, @@ -346,6 +347,28 @@ fun JavaDStream>.updateStateByKey( ) .toTupleDStream() +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of each key. + * In every batch the updateFunc will be called for each state even if there are no new values. + * Hash partitioning is used to generate the RDDs with Spark's default number of partitions. + * Note: Needs checkpoint directory to be set. + * @param updateFunc State update function. If `this` function returns `null`, then + * corresponding state key-value pair will be eliminated. + * @tparam S State type + */ +@JvmName("updateStateByKey") +fun JavaDStream>.updateStateByKey( + numPartitions: Int = dstream().ssc().sc().defaultParallelism(), + updateFunc: (List, Optional) -> Optional, +): JavaDStream> = + toPairDStream() + .updateStateByKey( + updateFunc, + numPartitions, + ) + .toTupleDStream() + /** * Return a new "state" DStream where the state for each key is updated by applying * the given function on the previous state of the key and the new values of each key. @@ -360,6 +383,7 @@ fun JavaDStream>.updateStateByKey( * DStream * @tparam S State type */ +@JvmName("updateStateByKeyNullable") fun JavaDStream>.updateStateByKey( partitioner: Partitioner, updateFunc: (List, S?) -> S?, @@ -373,6 +397,31 @@ fun JavaDStream>.updateStateByKey( ) .toTupleDStream() +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of each key. + * In every batch the updateFunc will be called for each state even if there are no new values. + * [[org.apache.spark.Partitioner]] is used to control the partitioning of each RDD. + * Note: Needs checkpoint directory to be set. + * @param updateFunc State update function. Note, that this function may generate a different + * tuple with a different key than the input key. Therefore keys may be removed + * or added in this way. It is up to the developer to decide whether to + * remember the partitioner despite the key being changed. + * @param partitioner Partitioner for controlling the partitioning of each RDD in the new + * DStream + * @tparam S State type + */ +fun JavaDStream>.updateStateByKey( + partitioner: Partitioner, + updateFunc: (List, Optional) -> Optional, +): JavaDStream> = + toPairDStream() + .updateStateByKey( + updateFunc, + partitioner, + ) + .toTupleDStream() + /** * Return a new "state" DStream where the state for each key is updated by applying * the given function on the previous state of the key and the new values of the key. @@ -385,6 +434,7 @@ fun JavaDStream>.updateStateByKey( * @param initialRDD initial state value of each key. * @tparam S State type */ +@JvmName("updateStateByKeyNullable") fun JavaDStream>.updateStateByKey( partitioner: Partitioner, initialRDD: JavaRDD>, @@ -400,6 +450,31 @@ fun JavaDStream>.updateStateByKey( ) .toTupleDStream() +/** + * Return a new "state" DStream where the state for each key is updated by applying + * the given function on the previous state of the key and the new values of the key. + * org.apache.spark.Partitioner is used to control the partitioning of each RDD. + * Note: Needs checkpoint directory to be set. + * @param updateFunc State update function. If `this` function returns `null`, then + * corresponding state key-value pair will be eliminated. + * @param partitioner Partitioner for controlling the partitioning of each RDD in the new + * DStream. + * @param initialRDD initial state value of each key. + * @tparam S State type + */ +fun JavaDStream>.updateStateByKey( + partitioner: Partitioner, + initialRDD: JavaRDD>, + updateFunc: (List, Optional) -> Optional, +): JavaDStream> = + toPairDStream() + .updateStateByKey( + updateFunc, + partitioner, + initialRDD.toPairRDD(), + ) + .toTupleDStream() + /** * Return a new DStream by applying a map function to the value of each key-value pairs in diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 8d9bb818..054628d2 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -55,7 +55,7 @@ class StreamingTest : ShouldSpec({ context("streaming") { context("kafka") { - val port = 9092 + val port = 6001 val broker = "localhost:$port" val topic1 = "test1" val topic2 = "test2" diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt index a9e32aaf..a9e5c9f9 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt @@ -1,3 +1,22 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ @file:Suppress("MemberVisibilityCanBePrivate", "BlockingMethodInNonBlockingContext") package org.jetbrains.kotlinx.spark.api From 2a409c60089cb41156c907e245f3fcd00e24757b Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 19 Apr 2022 16:34:37 +0200 Subject: [PATCH 141/213] added tests for case classes, noticed option classes don't work as expected --- .../org/jetbrains/kotlinx/spark/api/EncodingTest.kt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt index f39ab769..29a073ad 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt @@ -223,14 +223,14 @@ class EncodingTest : ShouldSpec({ } - should("handle Scala Option datasets") { + xshould("handle Scala Option datasets") { val caseClasses = listOf(Some(1), Some(2), Some(3)) val dataset = caseClasses.toDS() dataset.show() dataset.collectAsList() shouldBe caseClasses } - should("handle Scala Option Option datasets") { + xshould("handle Scala Option Option datasets") { val caseClasses = listOf( Some(Some(1)), Some(Some(2)), @@ -240,7 +240,7 @@ class EncodingTest : ShouldSpec({ dataset.collectAsList() shouldBe caseClasses } - should("handle data class Scala Option datasets") { + xshould("handle data class Scala Option datasets") { val caseClasses = listOf( Some(1) to Some(2), Some(3) to Some(4), @@ -250,7 +250,7 @@ class EncodingTest : ShouldSpec({ dataset.collectAsList() shouldBe caseClasses } - should("handle Scala Option data class datasets") { + xshould("handle Scala Option data class datasets") { val caseClasses = listOf( Some(1 to 2), Some(3 to 4), From 8ccf5ea5d9b84c7b64bcc9b7c5834308f27c51af Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 19 Apr 2022 16:44:22 +0200 Subject: [PATCH 142/213] qodana suggestions --- .../examples/streaming/KotlinRecoverableNetworkWordCount.kt | 2 ++ .../org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinRecoverableNetworkWordCount.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinRecoverableNetworkWordCount.kt index f25571b0..32db58f7 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinRecoverableNetworkWordCount.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/KotlinRecoverableNetworkWordCount.kt @@ -209,6 +209,8 @@ object KotlinRecoverableNetworkWordCount { println(output) println("Dropped ${droppedWordsCounter.value()} word(s) totally") println("Appending to " + outputFile.absolutePath) + + @Suppress("UnstableApiUsage") Files.append( """ $output diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt index 4a0c6195..8664081b 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt @@ -17,6 +17,8 @@ * limitations under the License. * =LICENSEEND= */ +@file:Suppress("unused") + package org.jetbrains.kotlinx.spark.api import org.apache.spark.HashPartitioner @@ -295,7 +297,7 @@ fun JavaDStream>.reduceByKeyAndWindow( .toTupleDStream() /** - * Return a [MapWithStateDStream] by applying a function to every key-value element of + * Return a [JavaMapWithStateDStream] by applying a function to every key-value element of * `this` stream, while maintaining some state data for each unique key. The mapping function * and other specification (e.g. partitioners, timeouts, initial state data, etc.) of this * transformation can be specified using `StateSpec` class. The state data is accessible in From 8e3b952d063f40fdfa17f59929543e1ab6f1b880 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 19 Apr 2022 17:34:24 +0200 Subject: [PATCH 143/213] qodana suggestions --- kotlin-spark-api/3.2/pom_2.12.xml | 5 ----- .../kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index eeea1bff..f9dc7332 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -31,11 +31,6 @@ org.jetbrains.kotlinx.spark scala-tuples-in-kotlin - - org.apache.spark - spark-sql-kafka-0-10_${scala.compat.version} - ${spark3.version} - org.apache.spark spark-streaming-kafka-0-10_${scala.compat.version} diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 054628d2..8d9bb818 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -55,7 +55,7 @@ class StreamingTest : ShouldSpec({ context("streaming") { context("kafka") { - val port = 6001 + val port = 9092 val broker = "localhost:$port" val topic1 = "test1" val topic2 = "test2" From 1af4c044a4d6f1e5f415dc40adfae5a546e223f3 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 19 Apr 2022 21:41:53 +0200 Subject: [PATCH 144/213] let's see if adding a container does anything --- .github/workflows/build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e8024b45..d0d201d4 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -8,6 +8,7 @@ on: jobs: build-scala-12: + container: node:14.16 runs-on: ubuntu-latest steps: From e4d6e2aea155e46c295ed1862a8f8a38489b78aa Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 20 Apr 2022 14:44:59 +0200 Subject: [PATCH 145/213] changing to ip 0.0.0.0 --- .github/workflows/build.yml | 1 - kotlin-spark-api/3.2/pom_2.12.xml | 8 +- .../kotlinx/spark/api/StreamingTest.kt | 150 +++++++++++------- .../kotlinx/spark/api/kafkaHelper.kt | 3 +- pom.xml | 7 +- scala-tuples-in-kotlin/pom_2.12.xml | 2 +- 6 files changed, 111 insertions(+), 60 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d0d201d4..e8024b45 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -8,7 +8,6 @@ on: jobs: build-scala-12: - container: node:14.16 runs-on: ubuntu-latest steps: diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index f9dc7332..99172895 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -68,7 +68,7 @@ io.kotest.extensions kotest-extensions-allure - ${kotest-extension-allure.version} + ${kotest-extensions-allure.version} test @@ -77,6 +77,12 @@ ${embedded-kafka.version} test + + io.kotest.extensions + kotest-extensions-testcontainers + ${kotest-extensions-testcontainers.version} + test + com.beust klaxon diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 8d9bb818..9b293efa 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -19,16 +19,24 @@ */ package org.jetbrains.kotlinx.spark.api +import io.kotest.assertions.print.print import io.kotest.assertions.throwables.shouldThrow import io.kotest.assertions.timing.eventually +import io.kotest.core.extensions.install import io.kotest.core.spec.style.ShouldSpec +import io.kotest.extensions.testcontainers.TestContainerExtension +import io.kotest.extensions.testcontainers.kafka.createStringStringConsumer +import io.kotest.extensions.testcontainers.kafka.createStringStringProducer +import io.kotest.extensions.testcontainers.perTest import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.collections.shouldContainAll +import io.kotest.matchers.collections.shouldHaveSize import io.kotest.matchers.shouldBe import org.apache.commons.io.FileUtils import org.apache.hadoop.fs.FileSystem import org.apache.kafka.clients.consumer.ConsumerConfig import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.clients.producer.ProducerConfig import org.apache.kafka.clients.producer.ProducerRecord import org.apache.kafka.common.serialization.StringDeserializer import org.apache.spark.SparkException @@ -40,6 +48,8 @@ import org.apache.spark.streaming.kafka010.KafkaUtils import org.apache.spark.streaming.kafka010.LocationStrategies import org.apache.spark.util.Utils import org.jetbrains.kotlinx.spark.api.tuples.* +import org.testcontainers.containers.KafkaContainer +import org.testcontainers.utility.DockerImageName import scala.Tuple2 import java.io.File import java.io.Serializable @@ -55,76 +65,110 @@ class StreamingTest : ShouldSpec({ context("streaming") { context("kafka") { + +// val kafka = install( +// TestContainerExtension(KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.1"))) +// ) { +// withEmbeddedZookeeper() +// } +// +// should("support kafka streams") { +// val topic1 = "test1" +// val topic2 = "test2" +// +// val producer = kafka.createStringStringProducer() +// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) +// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) +// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) +// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) +// producer.send(ProducerRecord(topic2, "This is also also a test test something")) +// producer.send(ProducerRecord(topic2, "This is also also a test test something")) +// producer.send(ProducerRecord(topic2, "This is also also a test test something")) +// producer.send(ProducerRecord(topic2, "This is also also a test test something")) +// producer.close() +// +// val consumer = kafka.createStringStringConsumer { +// this[ConsumerConfig.MAX_POLL_RECORDS_CONFIG] = 1 +// } +// +// consumer.subscribe(listOf(topic1)) +// val records = consumer.poll(Duration.ofSeconds(100)) +// records.shouldHaveSize(4) +// records.print() + val port = 9092 val broker = "localhost:$port" val topic1 = "test1" val topic2 = "test2" val kafkaListener = EmbeddedKafkaListener(port) - listener(kafkaListener) - val producer = kafkaListener.stringStringProducer() - producer.send(ProducerRecord(topic1, "Hello this is a test test test")) - producer.send(ProducerRecord(topic2, "This is also also a test test something")) - producer.close() + should("support kafka streams") { + val producer = kafkaListener.stringStringProducer() + producer.send(ProducerRecord(topic1, "Hello this is a test test test")) + producer.send(ProducerRecord(topic2, "This is also also a test test something")) + producer.close() - withSparkStreaming( - batchDuration = Durations.seconds(2), - appName = "KotlinDirectKafkaWordCount", - timeout = 1000L, - ) { - - val kafkaParams: Map = mapOf( - ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to broker, - ConsumerConfig.GROUP_ID_CONFIG to "consumer-group", - ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, - ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, - ) + withSparkStreaming( + batchDuration = Durations.seconds(2), + appName = "KotlinDirectKafkaWordCount", + timeout = 1000L, + ) { + + val kafkaParams: Map = mapOf( + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to broker, + ConsumerConfig.GROUP_ID_CONFIG to "consumer-group", + ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, + ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, + ) - // Create direct kafka stream with brokers and topics - val messages: JavaInputDStream> = KafkaUtils.createDirectStream( - ssc, - LocationStrategies.PreferConsistent(), - ConsumerStrategies.Subscribe(setOf(topic1, topic2), kafkaParams), - ) + // Create direct kafka stream with brokers and topics + val messages: JavaInputDStream> = KafkaUtils.createDirectStream( + ssc, + LocationStrategies.PreferConsistent(), + ConsumerStrategies.Subscribe(setOf(topic1, topic2), kafkaParams), + ) - // Get the lines, split them into words, count the words and print - val lines = messages.map { it.topic() X it.value() } - val words = lines.flatMapValues { it.split(" ").iterator() } - - val wordCounts = words - .map { t(it, 1) } - .reduceByKey { a: Int, b: Int -> a + b } - .map { (tup, counter) -> tup + counter } - - val resultLists = mapOf( - topic1 to listOf( - "Hello" X 1, - "this" X 1, - "is" X 1, - "a" X 1, - "test" X 3, - ), - topic2 to listOf( - "This" X 1, - "is" X 1, - "also" X 2, - "a" X 1, - "test" X 2, - "something" X 1, + // Get the lines, split them into words, count the words and print + val lines = messages.map { it.topic() X it.value() } + val words = lines.flatMapValues { it.split(" ").iterator() } + + val wordCounts = words + .map { t(it, 1) } + .reduceByKey { a: Int, b: Int -> a + b } + .map { (tup, counter) -> tup + counter } + + val resultLists = mapOf( + topic1 to listOf( + "Hello" X 1, + "this" X 1, + "is" X 1, + "a" X 1, + "test" X 3, + ), + topic2 to listOf( + "This" X 1, + "is" X 1, + "also" X 2, + "a" X 1, + "test" X 2, + "something" X 1, + ) ) - ) - wordCounts.foreachRDD { rdd, _ -> - rdd.foreach { (topic, word, count) -> - t(word, count).shouldBeIn(collection = resultLists[topic]!!) + wordCounts.foreachRDD { rdd, _ -> + rdd.foreach { (topic, word, count) -> + t(word, count).shouldBeIn(collection = resultLists[topic]!!) + } } - } - wordCounts.print() + wordCounts.print() + } } + } + should("stream") { val input = listOf("aaa", "bbb", "aaa", "ccc") diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt index a9e5c9f9..e6e14228 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt @@ -70,7 +70,8 @@ class EmbeddedKafkaListener( val port: Int = config.kafkaPort() - val host: String = "127.0.0.1" +// val host: String = "127.0.0.1" + val host: String = "0.0.0.0" val bootstrapServer = "$host:$port" diff --git a/pom.xml b/pom.xml index dd6f3844..8c99ec41 100644 --- a/pom.xml +++ b/pom.xml @@ -12,9 +12,10 @@ 1.6.20 1.6.10 - 0.16.0 - 4.6.0 - 1.0.1 + 0.17.0 + 5.2.3 + 1.1.0 + 1.3.1 3.1.0 3.2.1 3.3.1 diff --git a/scala-tuples-in-kotlin/pom_2.12.xml b/scala-tuples-in-kotlin/pom_2.12.xml index c06aa7ee..cf67af41 100644 --- a/scala-tuples-in-kotlin/pom_2.12.xml +++ b/scala-tuples-in-kotlin/pom_2.12.xml @@ -34,7 +34,7 @@ io.kotest.extensions kotest-extensions-allure - ${kotest-extension-allure.version} + ${kotest-extensions-allure.version} test From 25464d8a9935f489e3723c13da3d7b4d9b8e58b0 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 20 Apr 2022 15:13:13 +0200 Subject: [PATCH 146/213] changing to ip localhost --- .../test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt index e6e14228..20b7eca6 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt @@ -71,7 +71,8 @@ class EmbeddedKafkaListener( val port: Int = config.kafkaPort() // val host: String = "127.0.0.1" - val host: String = "0.0.0.0" +// val host: String = "0.0.0.0" + val host: String = "localhost" val bootstrapServer = "$host:$port" From 9901c5bbed372d690dd36334b04e77225fc0776d Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 20 Apr 2022 16:35:55 +0200 Subject: [PATCH 147/213] attempt to add exclusion for kafka streaming test for github --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e8024b45..fd1587ac 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -25,5 +25,5 @@ jobs: key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 - name: Build with Maven - run: ./mvnw -B package --file pom.xml -Pscala-2.12 + run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotlin-spark-api-3.2.test.excludes="**/*support kafka streams" # vim: ts=2:sts=2:sw=2:expandtab From 1090e2b2d58be6de9fb6eee8f681ad960bc30aae Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 20 Apr 2022 16:49:23 +0200 Subject: [PATCH 148/213] attempting to exclude entire file --- .github/workflows/build.yml | 2 +- .../kotlinx/spark/api/KafkaStreamingTest.kt | 140 ++++++++++++++++++ .../kotlinx/spark/api/StreamingTest.kt | 105 ------------- 3 files changed, 141 insertions(+), 106 deletions(-) create mode 100644 kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index fd1587ac..909c7d16 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -25,5 +25,5 @@ jobs: key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 - name: Build with Maven - run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotlin-spark-api-3.2.test.excludes="**/*support kafka streams" + run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotlin-spark-api-3.2.test.excludes="**/*KafkaStreamingTest.*" # vim: ts=2:sts=2:sw=2:expandtab diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt new file mode 100644 index 00000000..19bfc17b --- /dev/null +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt @@ -0,0 +1,140 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api + +import io.kotest.core.spec.style.ShouldSpec +import io.kotest.matchers.collections.shouldBeIn +import org.apache.kafka.clients.consumer.ConsumerConfig +import org.apache.kafka.clients.consumer.ConsumerRecord +import org.apache.kafka.clients.producer.ProducerRecord +import org.apache.kafka.common.serialization.StringDeserializer +import org.apache.spark.streaming.Durations +import org.apache.spark.streaming.api.java.JavaInputDStream +import org.apache.spark.streaming.kafka010.ConsumerStrategies +import org.apache.spark.streaming.kafka010.KafkaUtils +import org.apache.spark.streaming.kafka010.LocationStrategies +import org.jetbrains.kotlinx.spark.api.tuples.* +import java.io.Serializable + +class KafkaStreamingTest : ShouldSpec({ + context("kafka") { + +// val kafka = install( +// TestContainerExtension(KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.1"))) +// ) { +// withEmbeddedZookeeper() +// } +// +// should("support kafka streams") { +// val topic1 = "test1" +// val topic2 = "test2" +// +// val producer = kafka.createStringStringProducer() +// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) +// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) +// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) +// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) +// producer.send(ProducerRecord(topic2, "This is also also a test test something")) +// producer.send(ProducerRecord(topic2, "This is also also a test test something")) +// producer.send(ProducerRecord(topic2, "This is also also a test test something")) +// producer.send(ProducerRecord(topic2, "This is also also a test test something")) +// producer.close() +// +// val consumer = kafka.createStringStringConsumer { +// this[ConsumerConfig.MAX_POLL_RECORDS_CONFIG] = 1 +// } +// +// consumer.subscribe(listOf(topic1)) +// val records = consumer.poll(Duration.ofSeconds(100)) +// records.shouldHaveSize(4) +// records.print() + + val port = 9092 + val broker = "localhost:$port" + val topic1 = "test1" + val topic2 = "test2" + val kafkaListener = EmbeddedKafkaListener(port) + listener(kafkaListener) + + should("support kafka streams") { + val producer = kafkaListener.stringStringProducer() + producer.send(ProducerRecord(topic1, "Hello this is a test test test")) + producer.send(ProducerRecord(topic2, "This is also also a test test something")) + producer.close() + + withSparkStreaming( + batchDuration = Durations.seconds(2), + appName = "KotlinDirectKafkaWordCount", + timeout = 1000L, + ) { + + val kafkaParams: Map = mapOf( + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to broker, + ConsumerConfig.GROUP_ID_CONFIG to "consumer-group", + ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, + ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, + ) + + // Create direct kafka stream with brokers and topics + val messages: JavaInputDStream> = KafkaUtils.createDirectStream( + ssc, + LocationStrategies.PreferConsistent(), + ConsumerStrategies.Subscribe(setOf(topic1, topic2), kafkaParams), + ) + + // Get the lines, split them into words, count the words and print + val lines = messages.map { it.topic() X it.value() } + val words = lines.flatMapValues { it.split(" ").iterator() } + + val wordCounts = words + .map { t(it, 1) } + .reduceByKey { a: Int, b: Int -> a + b } + .map { (tup, counter) -> tup + counter } + + val resultLists = mapOf( + topic1 to listOf( + "Hello" X 1, + "this" X 1, + "is" X 1, + "a" X 1, + "test" X 3, + ), + topic2 to listOf( + "This" X 1, + "is" X 1, + "also" X 2, + "a" X 1, + "test" X 2, + "something" X 1, + ) + ) + + wordCounts.foreachRDD { rdd, _ -> + rdd.foreach { (topic, word, count) -> + t(word, count).shouldBeIn(collection = resultLists[topic]!!) + } + } + + wordCounts.print() + } + } + + } +}) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 9b293efa..64f82365 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -64,111 +64,6 @@ class StreamingTest : ShouldSpec({ context("streaming") { - context("kafka") { - -// val kafka = install( -// TestContainerExtension(KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.1"))) -// ) { -// withEmbeddedZookeeper() -// } -// -// should("support kafka streams") { -// val topic1 = "test1" -// val topic2 = "test2" -// -// val producer = kafka.createStringStringProducer() -// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) -// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) -// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) -// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) -// producer.send(ProducerRecord(topic2, "This is also also a test test something")) -// producer.send(ProducerRecord(topic2, "This is also also a test test something")) -// producer.send(ProducerRecord(topic2, "This is also also a test test something")) -// producer.send(ProducerRecord(topic2, "This is also also a test test something")) -// producer.close() -// -// val consumer = kafka.createStringStringConsumer { -// this[ConsumerConfig.MAX_POLL_RECORDS_CONFIG] = 1 -// } -// -// consumer.subscribe(listOf(topic1)) -// val records = consumer.poll(Duration.ofSeconds(100)) -// records.shouldHaveSize(4) -// records.print() - - val port = 9092 - val broker = "localhost:$port" - val topic1 = "test1" - val topic2 = "test2" - val kafkaListener = EmbeddedKafkaListener(port) - listener(kafkaListener) - - should("support kafka streams") { - val producer = kafkaListener.stringStringProducer() - producer.send(ProducerRecord(topic1, "Hello this is a test test test")) - producer.send(ProducerRecord(topic2, "This is also also a test test something")) - producer.close() - - withSparkStreaming( - batchDuration = Durations.seconds(2), - appName = "KotlinDirectKafkaWordCount", - timeout = 1000L, - ) { - - val kafkaParams: Map = mapOf( - ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to broker, - ConsumerConfig.GROUP_ID_CONFIG to "consumer-group", - ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, - ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, - ) - - // Create direct kafka stream with brokers and topics - val messages: JavaInputDStream> = KafkaUtils.createDirectStream( - ssc, - LocationStrategies.PreferConsistent(), - ConsumerStrategies.Subscribe(setOf(topic1, topic2), kafkaParams), - ) - - // Get the lines, split them into words, count the words and print - val lines = messages.map { it.topic() X it.value() } - val words = lines.flatMapValues { it.split(" ").iterator() } - - val wordCounts = words - .map { t(it, 1) } - .reduceByKey { a: Int, b: Int -> a + b } - .map { (tup, counter) -> tup + counter } - - val resultLists = mapOf( - topic1 to listOf( - "Hello" X 1, - "this" X 1, - "is" X 1, - "a" X 1, - "test" X 3, - ), - topic2 to listOf( - "This" X 1, - "is" X 1, - "also" X 2, - "a" X 1, - "test" X 2, - "something" X 1, - ) - ) - - wordCounts.foreachRDD { rdd, _ -> - rdd.foreach { (topic, word, count) -> - t(word, count).shouldBeIn(collection = resultLists[topic]!!) - } - } - - wordCounts.print() - } - } - - } - - should("stream") { val input = listOf("aaa", "bbb", "aaa", "ccc") From 729279e4e70fdb39c3ec860aaaead49bef908b09 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 20 Apr 2022 17:02:29 +0200 Subject: [PATCH 149/213] attempting to exclude entire file --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 909c7d16..6b3cbe49 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -25,5 +25,5 @@ jobs: key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 - name: Build with Maven - run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotlin-spark-api-3.2.test.excludes="**/*KafkaStreamingTest.*" + run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotlin-spark-api-3.2.test.excludes="**/*KafkaStreamingTest*" # vim: ts=2:sts=2:sw=2:expandtab From 9b5c1fb6c2a79e93d9d953183b61c11d6c7224ff Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 20 Apr 2022 17:14:38 +0200 Subject: [PATCH 150/213] attempting to exclude entire file --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6b3cbe49..db88061f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -25,5 +25,5 @@ jobs: key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 - name: Build with Maven - run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotlin-spark-api-3.2.test.excludes="**/*KafkaStreamingTest*" + run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotlin-spark-api-3.2.test.excludes="**KafkaStreamingTest" # vim: ts=2:sts=2:sw=2:expandtab From 290fc9e1842c46932858dde03c14d6485151b02a Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 20 Apr 2022 17:42:59 +0200 Subject: [PATCH 151/213] attempting to exclude entire file --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index db88061f..25cc1bb1 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -25,5 +25,5 @@ jobs: key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 - name: Build with Maven - run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotlin-spark-api-3.2.test.excludes="**KafkaStreamingTest" + run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotlin-spark-api-3.2.test.excludes="org.jetbrains.kotlinx.spark.api.KafkaStreamingTest" # vim: ts=2:sts=2:sw=2:expandtab From fdee3a2e0cd39446205054a703fe4e6ba6a755d7 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 21 Apr 2022 11:56:46 +0200 Subject: [PATCH 152/213] exclusion kafka works! --- .github/workflows/build.yml | 2 +- .../kotlinx/spark/api/KafkaStreamingTest.kt | 36 ++++--------------- 2 files changed, 7 insertions(+), 31 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 25cc1bb1..5288b2c3 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -25,5 +25,5 @@ jobs: key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 - name: Build with Maven - run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotlin-spark-api-3.2.test.excludes="org.jetbrains.kotlinx.spark.api.KafkaStreamingTest" + run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotest.tags="!Kafka" # vim: ts=2:sts=2:sw=2:expandtab diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt index 19bfc17b..fe414cc8 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt @@ -19,6 +19,7 @@ */ package org.jetbrains.kotlinx.spark.api +import io.kotest.core.Tag import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.collections.shouldBeIn import org.apache.kafka.clients.consumer.ConsumerConfig @@ -33,39 +34,14 @@ import org.apache.spark.streaming.kafka010.LocationStrategies import org.jetbrains.kotlinx.spark.api.tuples.* import java.io.Serializable +object Kafka : Tag() + class KafkaStreamingTest : ShouldSpec({ - context("kafka") { -// val kafka = install( -// TestContainerExtension(KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.1"))) -// ) { -// withEmbeddedZookeeper() -// } -// -// should("support kafka streams") { -// val topic1 = "test1" -// val topic2 = "test2" -// -// val producer = kafka.createStringStringProducer() -// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) -// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) -// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) -// producer.send(ProducerRecord(topic1, "Hello this is a test test test")) -// producer.send(ProducerRecord(topic2, "This is also also a test test something")) -// producer.send(ProducerRecord(topic2, "This is also also a test test something")) -// producer.send(ProducerRecord(topic2, "This is also also a test test something")) -// producer.send(ProducerRecord(topic2, "This is also also a test test something")) -// producer.close() -// -// val consumer = kafka.createStringStringConsumer { -// this[ConsumerConfig.MAX_POLL_RECORDS_CONFIG] = 1 -// } -// -// consumer.subscribe(listOf(topic1)) -// val records = consumer.poll(Duration.ofSeconds(100)) -// records.shouldHaveSize(4) -// records.print() + // making sure it can be skipped on github actions since it times out + tags(Kafka) + context("kafka") { val port = 9092 val broker = "localhost:$port" val topic1 = "test1" From 4dfb747764622ccc661add0acae17d0008642686 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 21 Apr 2022 12:11:39 +0200 Subject: [PATCH 153/213] exclusion kafka works! --- examples/pom-3.2_2.12.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/examples/pom-3.2_2.12.xml b/examples/pom-3.2_2.12.xml index 58d9856c..c3b95b0e 100644 --- a/examples/pom-3.2_2.12.xml +++ b/examples/pom-3.2_2.12.xml @@ -29,6 +29,11 @@ spark-streaming_${scala.compat.version} ${spark3.version} + + org.apache.spark + spark-streaming-kafka-0-10_${scala.compat.version} + ${spark3.version} + From 54b9d1066e5ad75819a52d56ddd73b6c790d8032 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 21 Apr 2022 13:18:51 +0200 Subject: [PATCH 154/213] updating readme and example --- README.md | 43 +++++++++++++++++++ .../spark/examples/streaming/Streaming.kt | 18 ++++---- 2 files changed, 52 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index bd86267f..bd227403 100644 --- a/README.md +++ b/README.md @@ -23,6 +23,7 @@ We have opened a Spark Project Improvement Proposal: [Kotlin support for Apache - [Column infix/operator functions](#column-infixoperator-functions) - [Overload Resolution Ambiguity](#overload-resolution-ambiguity) - [Tuples](#tuples) + - [Streaming](#streaming) - [Examples](#examples) - [Reporting issues/Support](#reporting-issuessupport) - [Code of Conduct](#code-of-conduct) @@ -267,6 +268,48 @@ Finally, all these tuple helper functions are also baked in: - `map` - `cast` +### Streaming + +A popular Spark extension is [Spark Streaming](https://spark.apache.org/docs/latest/streaming-programming-guide.html). +Of course the Kotlin Spark API also introduces a more Kotlin-esque approach to write your streaming programs. +There are examples for use with a checkpoint, Kafka and SQL in the [examples module](examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming). + +We shall also provide a quick example below: +```kotlin +// Automatically provides ssc: JavaStreamingContext which starts and awaits termination or timeout +withSparkStreaming(batchDuration = Durations.seconds(1), timeout = 10_000) { // this: KSparkStreamingSession + + // create input stream for, for instance, Netcat: `$ nc -lk 9999` + val lines: JavaReceiverInputDStream = ssc.socketTextStream("localhost", 9999) + + // split input stream on space + val words: JavaDStream = lines.flatMap { it.split(" ").iterator() } + + // perform action on each formed RDD in the stream + words.foreachRDD { rdd: JavaRDD, _: Time -> + + // to convert the JavaRDD to a Dataset, we need a spark session using the RDD context + withSpark(rdd) { // this: KSparkSession + val dataframe: Dataset = rdd.map { TestRow(word = it) }.toDS() + dataframe + .groupByKey { it.word } + .count() + .show() + // +-----+--------+ + // | key|count(1)| + // +-----+--------+ + // |hello| 1| + // | is| 1| + // | a| 1| + // | this| 1| + // | test| 3| + // +-----+--------+ + } + } +} +``` + + ## Examples For more, check out [examples](https://github.com/JetBrains/kotlin-spark-api/tree/master/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples) module. diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt index fb576cbd..85db9775 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/Streaming.kt @@ -19,8 +19,12 @@ */ package org.jetbrains.kotlinx.spark.examples.streaming +import org.apache.spark.api.java.JavaRDD import org.apache.spark.sql.Dataset import org.apache.spark.streaming.Durations +import org.apache.spark.streaming.Time +import org.apache.spark.streaming.api.java.JavaDStream +import org.apache.spark.streaming.api.java.JavaReceiverInputDStream import org.jetbrains.kotlinx.spark.api.* data class TestRow( @@ -32,22 +36,18 @@ data class TestRow( * * `$ nc -lk 9999` */ -fun main() = withSparkStreaming(Durations.seconds(1), timeout = 10_000) { +fun main() = withSparkStreaming(batchDuration = Durations.seconds(1), timeout = 10_000) { // this: KSparkStreamingSession - val lines = ssc.socketTextStream("localhost", 9999) - val words = lines.flatMap { it.split(" ").iterator() } - - words.foreachRDD { rdd, _ -> - withSpark(rdd) { + val lines: JavaReceiverInputDStream = ssc.socketTextStream("localhost", 9999) + val words: JavaDStream = lines.flatMap { it.split(" ").iterator() } + words.foreachRDD { rdd: JavaRDD, _: Time -> + withSpark(rdd) { // this: KSparkSession val dataframe: Dataset = rdd.map { TestRow(it) }.toDS() - dataframe .groupByKey { it.word } .count() .show() } - } - } \ No newline at end of file From f694d07a0f185cf553c3993e192bce426cb09c02 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 21 Apr 2022 16:28:38 +0200 Subject: [PATCH 155/213] attempt to add qodana scan action to github actions --- .github/workflows/build.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5288b2c3..6e86848d 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -26,4 +26,10 @@ jobs: restore-keys: ${{ runner.os }}-m2 - name: Build with Maven run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotest.tags="!Kafka" + qodana: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: 'Qodana Scan' + uses: JetBrains/qodana-action@v5.0.2 # vim: ts=2:sts=2:sw=2:expandtab From 299fb7509c6195665f983c59171898d32df74010 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 21 Apr 2022 17:07:02 +0200 Subject: [PATCH 156/213] removed qodana app, let's try --- .github/workflows/build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6e86848d..63d49c25 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -32,4 +32,5 @@ jobs: - uses: actions/checkout@v3 - name: 'Qodana Scan' uses: JetBrains/qodana-action@v5.0.2 + # vim: ts=2:sts=2:sw=2:expandtab From 717501a2c6cf0455c7fd90bfc69c6bc0466a24a9 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 21 Apr 2022 17:12:13 +0200 Subject: [PATCH 157/213] adding qodana scan to github actions. Improved tuple and rdd render support, updated kotlin version --- .github/workflows/build.yml | 7 +++++++ .../kotlinx/spark/api/jupyter/Integration.kt | 18 +++++++++++------- .../kotlinx/spark/api/JupyterTests.kt | 16 ++++++++-------- pom.xml | 4 ++-- 4 files changed, 28 insertions(+), 17 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index e8024b45..a22106c0 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -26,4 +26,11 @@ jobs: restore-keys: ${{ runner.os }}-m2 - name: Build with Maven run: ./mvnw -B package --file pom.xml -Pscala-2.12 + qodana: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: 'Qodana Scan' + uses: JetBrains/qodana-action@v5.0.2 + # vim: ts=2:sts=2:sw=2:expandtab diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 7083182d..eaa3094d 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -44,12 +44,15 @@ import org.apache.spark.sql.SparkSession.Builder import scala.collection.* import org.apache.spark.rdd.* import org.jetbrains.kotlinx.spark.api.SparkSession +import scala.Product import java.io.Serializable +import scala.collection.Iterable as ScalaIterable +import scala.collection.Iterator as ScalaIterator @OptIn(ExperimentalStdlibApi::class) internal class Integration : JupyterIntegration() { - private val kotlinVersion = "1.5.30" + private val kotlinVersion = "1.6.20" private val scalaCompatVersion = "2.12" private val scalaVersion = "2.12.15" private val spark3Version = "3.2.1" @@ -77,24 +80,22 @@ internal class Integration : JupyterIntegration() { ) import("org.jetbrains.kotlinx.spark.api.*") + import("org.jetbrains.kotlinx.spark.api.tuples.*") + import(*(1..22).map { "scala.Tuple$it" }.toTypedArray()) import("org.apache.spark.sql.functions.*") import("org.apache.spark.*") import("org.apache.spark.sql.*") import("org.apache.spark.api.java.*") - import("org.apache.spark.sql.SparkSession.Builder") import("scala.collection.Seq") import("org.apache.spark.rdd.*") import("java.io.Serializable") - var spark: SparkSession? = null - - val a: Map = mapOf() // starting spark and unwrapping KSparkContext functions onLoaded { @Language("kts") - val sparkField = execute( + val spark = execute( """ val spark = org.jetbrains.kotlinx.spark.api.SparkSession .builder() @@ -104,7 +105,6 @@ internal class Integration : JupyterIntegration() { spark """.trimIndent() ).value!! as SparkSession - spark = sparkField @Language("kts") val logLevel = execute("""spark.sparkContext.setLogLevel(SparkLogLevel.ERROR)""") @@ -174,10 +174,14 @@ private fun JavaRDDLike.toHtml(limit: Int = 20, truncate: Int = 30): S is BooleanArray -> row.iterator().asSequence().toList().toString() is Array<*> -> row.iterator().asSequence().toList().toString() is Iterable<*> -> row.iterator().asSequence().toList().toString() + is ScalaIterable<*> -> row.asKotlinIterable().iterator().asSequence().toList().toString() is Iterator<*> -> row.asSequence().toList().toString() + is ScalaIterator<*> -> row.asKotlinIterator().asSequence().toList().toString() + is Product -> row.productIterator().asKotlinIterator().asSequence().toList().toString() is Serializable -> row.toString() // maybe others? + is Any? -> row.toString() else -> row.toString() } diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index e37028c2..ca58d200 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -94,8 +94,8 @@ class JupyterTests : ShouldSpec({ ) println(html) - html shouldContain "[1, 2, 3]" - html shouldContain "[4, 5, 6]" + html shouldContain "1, 2, 3" + html shouldContain "4, 5, 6" } } @@ -113,8 +113,8 @@ class JupyterTests : ShouldSpec({ ) println(html) - html shouldContain "[1, 2, 3]" - html shouldContain "[4, 5, 6]" + html shouldContain "1, 2, 3" + html shouldContain "4, 5, 6" } } @@ -157,8 +157,8 @@ class JupyterTests : ShouldSpec({ ) println(html) - html shouldContain "(1,2)" - html shouldContain "(3,4)" + html shouldContain "1, 2" + html shouldContain "3, 4" } } @@ -196,8 +196,8 @@ class JupyterTests : ShouldSpec({ ) println(html) - html shouldContain "[1, 2, 3]" - html shouldContain "[4, 5, 6]" + html shouldContain "1, 2, 3" + html shouldContain "4, 5, 6" } } } diff --git a/pom.xml b/pom.xml index fa31ff5a..6d588834 100644 --- a/pom.xml +++ b/pom.xml @@ -10,14 +10,14 @@ pom - 1.5.30 + 1.6.20 1.6.10 0.16.0 4.6.0 5.8.2 1.0.1 3.2.1 - 0.11.0-62 + 0.11.0-76 0.7.3 From f83727d9a66b171a5ee1a58bd8f5d8c0e95e7f2d Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 21 Apr 2022 17:33:23 +0200 Subject: [PATCH 158/213] removed todo test and updated unused imports --- .../kotlinx/spark/examples/Broadcasting.kt | 1 - .../org/jetbrains/kotlinx/spark/examples/Main.kt | 5 +++-- .../org/jetbrains/kotlinx/spark/api/Conversions.kt | 14 -------------- 3 files changed, 3 insertions(+), 17 deletions(-) diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Broadcasting.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Broadcasting.kt index 2e5914e3..9612b350 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Broadcasting.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Broadcasting.kt @@ -21,7 +21,6 @@ package org.jetbrains.kotlinx.spark.examples import org.jetbrains.kotlinx.spark.api.broadcast import org.jetbrains.kotlinx.spark.api.map -import org.jetbrains.kotlinx.spark.api.sparkContext import org.jetbrains.kotlinx.spark.api.withSpark import java.io.Serializable diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt index 0fc2517f..fc0a2888 100644 --- a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/Main.kt @@ -19,13 +19,14 @@ */ package org.jetbrains.kotlinx.spark.examples -import org.apache.spark.api.java.function.ReduceFunction import org.apache.spark.sql.Dataset import org.jetbrains.kotlinx.spark.api.* import org.jetbrains.kotlinx.spark.api.tuples.* -import scala.* +import scala.Tuple2 +import scala.Tuple3 data class Q(val id: Int, val text: T) + @Suppress("RedundantLambdaArrow", "UsePropertyAccessSyntax") object Main { diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index 35bb8ee8..034109f2 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -78,20 +78,6 @@ fun T?.toOptional(): Optional = Optional.ofNullable(this) /** Converts Java [Optional] to Scala [Option]. */ fun Optional.toOption(): Option = Option.apply(getOrNull()) -/** - * TODO test - * Produces a ClassTag[T], which is actually just a casted ClassTag[AnyRef]. - * - * This method is used to keep ClassTags out of the external Java API, as the Java compiler - * cannot produce them automatically. While this ClassTag-faking does please the compiler, - * it can cause problems at runtime if the Scala API relies on ClassTags for correctness. - * - * Often, though, a ClassTag[AnyRef] will not lead to incorrect behavior, just worse performance - * or security issues. For instance, an Array[AnyRef] can hold any type T, but may lose primitive - * specialization. - */ -fun fakeClassTag(): ClassTag = ClassTag.AnyRef() as ClassTag - /** * @see JavaConverters.asScalaIterator for more information. */ From 4ece47eafc2aa74a4051afb8fdaecd575d3f0f3f Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 21 Apr 2022 17:58:53 +0200 Subject: [PATCH 159/213] last cleanups --- .../kotlinx/spark/api/Conversions.kt | 1 - .../api/{kafkaHelper.kt => KafkaHelper.kt} | 3 +- .../kotlinx/spark/api/KafkaStreamingTest.kt | 2 +- .../kotlinx/spark/api/ProjectConfig.kt | 2 ++ .../kotlinx/spark/api/StreamingTest.kt | 32 +++++-------------- 5 files changed, 12 insertions(+), 28 deletions(-) rename kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/{kafkaHelper.kt => KafkaHelper.kt} (99%) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt index 034109f2..f8d90fa3 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt @@ -30,7 +30,6 @@ package org.jetbrains.kotlinx.spark.api import org.apache.spark.api.java.Optional import scala.* import scala.collection.JavaConverters -import scala.reflect.ClassTag import java.util.* import java.util.Enumeration import java.util.concurrent.ConcurrentMap diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt similarity index 99% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt rename to kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt index 20b7eca6..6ec5924c 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/kafkaHelper.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt @@ -23,7 +23,6 @@ package org.jetbrains.kotlinx.spark.api /** * Source: https://github.com/kotest/kotest-extensions-embedded-kafka - * */ import io.github.embeddedkafka.EmbeddedKafka @@ -40,7 +39,7 @@ import org.apache.kafka.common.serialization.StringDeserializer import org.apache.kafka.common.serialization.StringSerializer import org.apache.kafka.common.utils.Bytes import scala.Predef -import java.util.Properties +import java.util.* val embeddedKafkaListener: EmbeddedKafkaListener = EmbeddedKafkaListener(EmbeddedKafkaConfig.defaultConfig()) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt index fe414cc8..b755b1e6 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt @@ -38,7 +38,7 @@ object Kafka : Tag() class KafkaStreamingTest : ShouldSpec({ - // making sure it can be skipped on github actions since it times out + // making sure it can be skipped on Github actions since it times out tags(Kafka) context("kafka") { diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt index 8516ae62..4238cd78 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt @@ -25,4 +25,6 @@ import io.kotest.extensions.allure.AllureTestReporter @Suppress("unused") object ProjectConfig : AbstractProjectConfig() { override fun listeners() = super.listeners() + AllureTestReporter(true) + + override fun extensions() = super.extensions() + AllureTestReporter(true) } diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 64f82365..8ae7f5c2 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -19,45 +19,29 @@ */ package org.jetbrains.kotlinx.spark.api -import io.kotest.assertions.print.print import io.kotest.assertions.throwables.shouldThrow -import io.kotest.assertions.timing.eventually -import io.kotest.core.extensions.install import io.kotest.core.spec.style.ShouldSpec -import io.kotest.extensions.testcontainers.TestContainerExtension -import io.kotest.extensions.testcontainers.kafka.createStringStringConsumer -import io.kotest.extensions.testcontainers.kafka.createStringStringProducer -import io.kotest.extensions.testcontainers.perTest import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.collections.shouldContainAll -import io.kotest.matchers.collections.shouldHaveSize import io.kotest.matchers.shouldBe import org.apache.commons.io.FileUtils import org.apache.hadoop.fs.FileSystem -import org.apache.kafka.clients.consumer.ConsumerConfig -import org.apache.kafka.clients.consumer.ConsumerRecord -import org.apache.kafka.clients.producer.ProducerConfig -import org.apache.kafka.clients.producer.ProducerRecord -import org.apache.kafka.common.serialization.StringDeserializer import org.apache.spark.SparkException -import org.apache.spark.streaming.* -import org.apache.spark.streaming.api.java.JavaDStream -import org.apache.spark.streaming.api.java.JavaInputDStream -import org.apache.spark.streaming.kafka010.ConsumerStrategies -import org.apache.spark.streaming.kafka010.KafkaUtils -import org.apache.spark.streaming.kafka010.LocationStrategies +import org.apache.spark.streaming.Checkpoint +import org.apache.spark.streaming.Duration +import org.apache.spark.streaming.Durations +import org.apache.spark.streaming.Time import org.apache.spark.util.Utils -import org.jetbrains.kotlinx.spark.api.tuples.* -import org.testcontainers.containers.KafkaContainer -import org.testcontainers.utility.DockerImageName +import org.jetbrains.kotlinx.spark.api.tuples.X +import org.jetbrains.kotlinx.spark.api.tuples.component1 +import org.jetbrains.kotlinx.spark.api.tuples.component2 +import org.jetbrains.kotlinx.spark.api.tuples.t import scala.Tuple2 import java.io.File import java.io.Serializable import java.nio.charset.StandardCharsets import java.util.* import java.util.concurrent.atomic.AtomicBoolean -import kotlin.time.Duration.Companion.seconds -import java.time.Duration class StreamingTest : ShouldSpec({ From 33ad506194f6856be3ef321718959e1fce125535 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 21 Apr 2022 19:37:06 +0200 Subject: [PATCH 160/213] updated from main branch --- pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/pom.xml b/pom.xml index d0f9931c..f4c888f2 100644 --- a/pom.xml +++ b/pom.xml @@ -21,6 +21,7 @@ 0.11.0-76 0.7.3 3.3.1 + 5.8.2 2.10.0 From af420943d9bc14ba05a25679605e0d6b1eaea261 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Fri, 22 Apr 2022 11:47:10 +0200 Subject: [PATCH 161/213] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index bd227403..322ae8d8 100644 --- a/README.md +++ b/README.md @@ -312,7 +312,7 @@ withSparkStreaming(batchDuration = Durations.seconds(1), timeout = 10_000) { // ## Examples -For more, check out [examples](https://github.com/JetBrains/kotlin-spark-api/tree/master/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples) module. +For more, check out [examples](https://github.com/JetBrains/kotlin-spark-api/tree/spark-3.2/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming) module. To get up and running quickly, check out this [tutorial](https://github.com/JetBrains/kotlin-spark-api/wiki/Quick-Start-Guide). ## Reporting issues/Support From 5a1fa5a038b2b8f6835510bcdf477ec74a4d79c9 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 22 Apr 2022 11:48:02 +0200 Subject: [PATCH 162/213] Revert "Update README.md" This reverts commit af420943d9bc14ba05a25679605e0d6b1eaea261. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 322ae8d8..bd227403 100644 --- a/README.md +++ b/README.md @@ -312,7 +312,7 @@ withSparkStreaming(batchDuration = Durations.seconds(1), timeout = 10_000) { // ## Examples -For more, check out [examples](https://github.com/JetBrains/kotlin-spark-api/tree/spark-3.2/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming) module. +For more, check out [examples](https://github.com/JetBrains/kotlin-spark-api/tree/master/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples) module. To get up and running quickly, check out this [tutorial](https://github.com/JetBrains/kotlin-spark-api/wiki/Quick-Start-Guide). ## Reporting issues/Support From ea59d6640cec75ff14287564531c44a42fba38ad Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 22 Apr 2022 13:28:20 +0200 Subject: [PATCH 163/213] maybe fixed kotest? --- kotlin-spark-api/3.2/pom_2.12.xml | 20 ++++++++--------- pom.xml | 22 +++++++++---------- scala-tuples-in-kotlin/pom_2.12.xml | 5 +++++ .../kotlinx/spark/api/tuples/TuplesTest.kt | 5 ++--- 4 files changed, 28 insertions(+), 24 deletions(-) diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index ff7fbe16..99da641c 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -100,10 +100,9 @@ test - io.kotest.extensions - kotest-extensions-testcontainers - ${kotest-extensions-testcontainers.version} - test + io.kotest + kotest-assertions-shared-jvm + ${kotest.version} com.beust @@ -117,12 +116,12 @@ ${atrium.version} test - - org.testng - testng - RELEASE - test - + + + + + + org.apache.spark spark-streaming_${scala.compat.version} @@ -208,6 +207,7 @@ org.apache.maven.plugins maven-compiler-plugin + ${maven-compiler-plugin.version} 8 8 diff --git a/pom.xml b/pom.xml index f4c888f2..e4020b53 100644 --- a/pom.xml +++ b/pom.xml @@ -10,18 +10,17 @@ pom - 1.6.20 + 1.6.21 1.6.10 0.17.0 5.2.3 1.1.0 - 1.3.1 3.1.0 3.2.1 0.11.0-76 0.7.3 3.3.1 - 5.8.2 + 2.10.0 @@ -35,7 +34,8 @@ 3.2.0 3.9.1 3.2.1 - 3.0.0-M5 + 3.10.1 + 3.0.0-M6 1.6.8 4.5.6 official @@ -67,13 +67,13 @@ kotlin-jupyter-test-kit ${kotlin-jupyter-api.version} - - org.junit - junit-bom - ${junit.version} - pom - import - + + + + + + + diff --git a/scala-tuples-in-kotlin/pom_2.12.xml b/scala-tuples-in-kotlin/pom_2.12.xml index cf67af41..3d892860 100644 --- a/scala-tuples-in-kotlin/pom_2.12.xml +++ b/scala-tuples-in-kotlin/pom_2.12.xml @@ -31,6 +31,11 @@ ${kotest.version} test + + io.kotest + kotest-assertions-shared-jvm + ${kotest.version} + io.kotest.extensions kotest-extensions-allure diff --git a/scala-tuples-in-kotlin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TuplesTest.kt b/scala-tuples-in-kotlin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TuplesTest.kt index 75d6f49a..5cbf6c72 100644 --- a/scala-tuples-in-kotlin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TuplesTest.kt +++ b/scala-tuples-in-kotlin/src/test/kotlin/org/jetbrains/kotlinx/spark/api/tuples/TuplesTest.kt @@ -21,15 +21,14 @@ package org.jetbrains.kotlinx.spark.api.tuples import io.kotest.assertions.throwables.shouldThrow import io.kotest.core.spec.style.ShouldSpec -import io.kotest.matchers.collections.shouldNotBeIn import io.kotest.matchers.shouldBe import io.kotest.matchers.shouldNotBe import org.jetbrains.kotlinx.spark.api.tuples.* import org.jetbrains.kotlinx.spark.api.* import scala.Tuple3 -import io.kotest.matchers.types.shouldBeInstanceOf import scala.Tuple1 import scala.Tuple2 +import kotlin.reflect.typeOf @Suppress("ShouldBeInstanceOfInspection", "RedundantLambdaArrow", "USELESS_IS_CHECK") class TuplesTest : ShouldSpec({ @@ -159,7 +158,7 @@ class TuplesTest : ShouldSpec({ tupleOf(1, 2, 3).toTriple() shouldBe Triple(1, 2, 3) tupleOf(1, 2, 3, 4, 5, 6, 7)[1..3].let { - it.shouldBeInstanceOf>() + (it is List) shouldBe true it.containsAll(listOf(2, 3, 4)) shouldBe true } tupleOf(1, 1, 2)[1..2] shouldBe tupleOf(1, 2, 2)[0..1] From 41e959ea008abc9b3d908e82474693dbe7a92ba2 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 26 Apr 2022 12:32:06 +0200 Subject: [PATCH 164/213] adding test for filter function of dataset of local data class and other stuff --- examples/pom-3.2_2.12.xml | 4 + kotlin-spark-api-streaming.json | 14 ++ kotlin-spark-api.json | 23 +++- kotlin-spark-api/3.2/pom_2.12.xml | 21 +-- .../{Integration.kt => HtmlRendering.kt} | 121 ++---------------- .../spark/api/jupyter/SparkIntegration.kt | 104 +++++++++++++++ .../kotlin-jupyter-libraries/libraries.json | 2 +- .../kotlinx/spark/api/JupyterTests.kt | 83 +++++++----- pom_2.12.xml | 1 + scala-tuples-in-kotlin/pom_2.12.xml | 5 - 10 files changed, 211 insertions(+), 167 deletions(-) create mode 100644 kotlin-spark-api-streaming.json rename kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/{Integration.kt => HtmlRendering.kt} (52%) create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt diff --git a/examples/pom-3.2_2.12.xml b/examples/pom-3.2_2.12.xml index c3b95b0e..7ad28cb7 100644 --- a/examples/pom-3.2_2.12.xml +++ b/examples/pom-3.2_2.12.xml @@ -14,6 +14,10 @@ + + org.jetbrains.kotlin + kotlin-reflect + org.jetbrains.kotlinx.spark kotlin-spark-api-3.2 diff --git a/kotlin-spark-api-streaming.json b/kotlin-spark-api-streaming.json new file mode 100644 index 00000000..fb3709d3 --- /dev/null +++ b/kotlin-spark-api-streaming.json @@ -0,0 +1,14 @@ +{ + "description": "Kotlin for Apache® Spark™", + "properties": { + "spark": "3.2", + "v": "2.0.0" + }, + "link": "https://github.com/JetBrains/kotlin-spark-api", + "dependencies": [ + "org.jetbrains.kotlinx.spark:kotlin-spark-api-$spark:$v" + ], + "init": [ + "%dumpClassesForSpark" + ] +} \ No newline at end of file diff --git a/kotlin-spark-api.json b/kotlin-spark-api.json index 1809fbb5..1aef9797 100644 --- a/kotlin-spark-api.json +++ b/kotlin-spark-api.json @@ -2,10 +2,29 @@ "description": "Kotlin for Apache® Spark™", "properties": { "spark": "3.2", - "version": "1.0.4" + "v": "2.0.0" }, "link": "https://github.com/JetBrains/kotlin-spark-api", "dependencies": [ - "org.jetbrains.kotlinx.spark:kotlin-spark-api-$spark:$version" + "org.jetbrains.kotlinx.spark:kotlin-spark-api-$spark:$v" + ], + "init": [ + "%dumpClassesForSpark", + "val spark = org.jetbrains.kotlinx.spark.api.SparkSession.builder().master(SparkConf().get(\"spark.master\", \"local[*]\")).appName(\"Jupyter\").getOrCreate()", + "spark.sparkContext.setLogLevel(org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR)", + "val sc by lazy { org.apache.spark.api.java.JavaSparkContext(spark.sparkContext) }", + "println(\"Spark session has been started and is running. No `withSpark { }` necessary, you can access `spark` and `sc` directly. To use Spark streaming, use `%use kotlin-spark-api-streaming` instead.\")", + + "inline fun List.toDS(): Dataset = toDS(spark)", + "inline fun Array.toDS(): Dataset = spark.dsOf(*this)", + "inline fun dsOf(vararg arg: T): Dataset = spark.dsOf(*arg)", + "inline fun RDD.toDS(): Dataset = toDS(spark)", + "inline fun JavaRDDLike.toDS(): Dataset = toDS(spark)", + "inline fun RDD.toDF(): Dataset = toDF(spark)", + "inline fun JavaRDDLike.toDF(): Dataset = toDF(spark)", + "val udf: UDFRegistration get() = spark.udf()" + ], + "shutdown": [ + "spark.stop()" ] } \ No newline at end of file diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 99da641c..95e6761d 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -76,11 +76,6 @@ - - org.jetbrains.kotlinx - kotlin-jupyter-test-kit - test - io.kotest kotest-runner-junit5-jvm @@ -99,11 +94,6 @@ ${embedded-kafka.version} test - - io.kotest - kotest-assertions-shared-jvm - ${kotest.version} - com.beust klaxon @@ -116,12 +106,6 @@ ${atrium.version} test - - - - - - org.apache.spark spark-streaming_${scala.compat.version} @@ -135,6 +119,11 @@ 3.1.0 test + + org.jetbrains.kotlinx + kotlin-jupyter-test-kit + test + diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt similarity index 52% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt rename to kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt index eaa3094d..5abf4d3c 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt @@ -21,124 +21,23 @@ package org.jetbrains.kotlinx.spark.api.jupyter import kotlinx.html.* import kotlinx.html.stream.appendHTML +import org.apache.spark.SparkException import org.apache.spark.api.java.JavaRDDLike -import org.apache.spark.api.java.function.MapGroupsFunction -import org.apache.spark.rdd.RDD -import org.apache.spark.sql.functions.* import org.apache.spark.sql.Dataset -import org.apache.spark.sql.KeyValueGroupedDataset import org.apache.spark.unsafe.array.ByteArrayMethods -import org.intellij.lang.annotations.Language -import org.jetbrains.kotlinx.jupyter.api.HTML -import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration -import org.jetbrains.kotlinx.spark.api.* -import java.io.InputStreamReader - - -import org.jetbrains.kotlinx.spark.api.* -import org.apache.spark.sql.functions.* -import org.apache.spark.* -import org.apache.spark.sql.* -import org.apache.spark.api.java.* -import org.apache.spark.sql.SparkSession.Builder -import scala.collection.* -import org.apache.spark.rdd.* -import org.jetbrains.kotlinx.spark.api.SparkSession +import org.jetbrains.kotlinx.spark.api.asKotlinIterable +import org.jetbrains.kotlinx.spark.api.asKotlinIterator +import org.jetbrains.kotlinx.spark.api.asKotlinList import scala.Product +import java.io.InputStreamReader import java.io.Serializable -import scala.collection.Iterable as ScalaIterable -import scala.collection.Iterator as ScalaIterator - -@OptIn(ExperimentalStdlibApi::class) -internal class Integration : JupyterIntegration() { - - private val kotlinVersion = "1.6.20" - private val scalaCompatVersion = "2.12" - private val scalaVersion = "2.12.15" - private val spark3Version = "3.2.1" - - override fun Builder.onLoaded() { - - dependencies( - "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion", - "org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion", - "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-mllib_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-repl_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-graphx_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-launcher_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-catalyst_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-core_$scalaCompatVersion:$spark3Version", - "org.scala-lang:scala-library:$scalaVersion", - "org.scala-lang.modules:scala-xml_$scalaCompatVersion:2.0.1", - "org.scala-lang:scala-reflect:$scalaVersion", - "org.scala-lang:scala-compiler:$scalaVersion", - "commons-io:commons-io:2.11.0", - ) - - import("org.jetbrains.kotlinx.spark.api.*") - import("org.jetbrains.kotlinx.spark.api.tuples.*") - import(*(1..22).map { "scala.Tuple$it" }.toTypedArray()) - import("org.apache.spark.sql.functions.*") - import("org.apache.spark.*") - import("org.apache.spark.sql.*") - import("org.apache.spark.api.java.*") - import("scala.collection.Seq") - import("org.apache.spark.rdd.*") - import("java.io.Serializable") - - - // starting spark and unwrapping KSparkContext functions - onLoaded { - - @Language("kts") - val spark = execute( - """ - val spark = org.jetbrains.kotlinx.spark.api.SparkSession - .builder() - .master(SparkConf().get("spark.master", "local[*]")) - .appName("Jupyter") - .getOrCreate() - spark - """.trimIndent() - ).value!! as SparkSession - - @Language("kts") - val logLevel = execute("""spark.sparkContext.setLogLevel(SparkLogLevel.ERROR)""") - - @Language("kts") - val sc = execute("""val sc = org.apache.spark.api.java.JavaSparkContext(spark.sparkContext)""") - } - - - // Render Dataset - render> { - HTML(it.toHtml()) - } - - render> { - HTML(it.toJavaRDD().toHtml()) - } - - render> { - HTML(it.toHtml()) - } - -// render> { -// HTML(it.toHtml(spark!!)) -// } - } -} private fun createHtmlTable(fillTable: TABLE.() -> Unit): String = buildString { appendHTML().head { style("text/css") { unsafe { val resource = "/table.css" - val res = Integration::class.java + val res = SparkIntegration::class.java .getResourceAsStream(resource) ?: error("Resource '$resource' not found") val readRes = InputStreamReader(res).readText() raw("\n" + readRes) @@ -150,7 +49,7 @@ private fun createHtmlTable(fillTable: TABLE.() -> Unit): String = buildString { } -private fun JavaRDDLike.toHtml(limit: Int = 20, truncate: Int = 30): String = try { +internal fun JavaRDDLike.toHtml(limit: Int = 20, truncate: Int = 30): String = try { createHtmlTable { val numRows = limit.coerceIn(0 until ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) val tmpRows = take(numRows).toList() @@ -174,9 +73,9 @@ private fun JavaRDDLike.toHtml(limit: Int = 20, truncate: Int = 30): S is BooleanArray -> row.iterator().asSequence().toList().toString() is Array<*> -> row.iterator().asSequence().toList().toString() is Iterable<*> -> row.iterator().asSequence().toList().toString() - is ScalaIterable<*> -> row.asKotlinIterable().iterator().asSequence().toList().toString() + is scala.collection.Iterable<*> -> row.asKotlinIterable().iterator().asSequence().toList().toString() is Iterator<*> -> row.asSequence().toList().toString() - is ScalaIterator<*> -> row.asKotlinIterator().asSequence().toList().toString() + is scala.collection.Iterator<*> -> row.asKotlinIterator().asSequence().toList().toString() is Product -> row.productIterator().asKotlinIterator().asSequence().toList().toString() is Serializable -> row.toString() // maybe others? @@ -207,7 +106,7 @@ private fun JavaRDDLike.toHtml(limit: Int = 20, truncate: Int = 30): S |Cannot render this RDD of this class.""".trimMargin() } -private fun Dataset.toHtml(limit: Int = 20, truncate: Int = 30): String = createHtmlTable { +internal fun Dataset.toHtml(limit: Int = 20, truncate: Int = 30): String = createHtmlTable { val numRows = limit.coerceIn(0 until ByteArrayMethods.MAX_ROUNDED_ARRAY_LENGTH) val tmpRows = getRows(numRows, truncate).asKotlinList().map { it.asKotlinList() } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt new file mode 100644 index 00000000..2dca6ac9 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -0,0 +1,104 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api.jupyter + +import kotlinx.html.* +import kotlinx.html.stream.appendHTML +import org.apache.spark.api.java.JavaRDDLike +import org.apache.spark.rdd.RDD +import org.apache.spark.sql.Dataset +import org.apache.spark.unsafe.array.ByteArrayMethods +import org.intellij.lang.annotations.Language +import org.jetbrains.kotlinx.jupyter.api.HTML +import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration +import org.jetbrains.kotlinx.spark.api.* +import java.io.InputStreamReader + + +import org.apache.spark.* +import scala.collection.* +import org.jetbrains.kotlinx.spark.api.SparkSession +import scala.Product +import java.io.Serializable +import scala.collection.Iterable as ScalaIterable +import scala.collection.Iterator as ScalaIterator + +@OptIn(ExperimentalStdlibApi::class) +internal class SparkIntegration : JupyterIntegration() { + + private val kotlinVersion = "1.6.21" + private val scalaCompatVersion = "2.12" + private val scalaVersion = "2.12.15" + private val spark3Version = "3.2.1" + + override fun Builder.onLoaded() { + + dependencies( + "org.apache.spark:spark-repl_$scalaCompatVersion:$spark3Version", + "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion", + "org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion", + "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-mllib_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-graphx_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-launcher_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-catalyst_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-core_$scalaCompatVersion:$spark3Version", + "org.scala-lang:scala-library:$scalaVersion", + "org.scala-lang.modules:scala-xml_$scalaCompatVersion:2.0.1", + "org.scala-lang:scala-reflect:$scalaVersion", + "org.scala-lang:scala-compiler:$scalaVersion", + "commons-io:commons-io:2.11.0", + ) + + println("SparkIntegration loaded") + + import("org.jetbrains.kotlinx.spark.api.*") + import("org.jetbrains.kotlinx.spark.api.tuples.*") + import(*(1..22).map { "scala.Tuple$it" }.toTypedArray()) + import("org.apache.spark.sql.functions.*") + import("org.apache.spark.*") + import("org.apache.spark.sql.*") + import("org.apache.spark.api.java.*") + import("scala.collection.Seq") + import("org.apache.spark.rdd.*") + import("java.io.Serializable") + import("org.apache.spark.streaming.api.java.*") + import("org.apache.spark.streaming.api.*") + import("org.apache.spark.streaming.*") + + // onLoaded is only done for the non-streaming variant of kotlin-spark-api in the json file + + // Render Dataset + render> { + HTML(it.toHtml()) + } + + render> { + HTML(it.toJavaRDD().toHtml()) + } + + render> { + HTML(it.toHtml()) + } + } +} diff --git a/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json b/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json index 2041fce8..d751cf79 100644 --- a/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json +++ b/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json @@ -2,7 +2,7 @@ "definitions": [], "producers": [ { - "fqn": "org.jetbrains.kotlinx.spark.api.jupyter.Integration" + "fqn": "org.jetbrains.kotlinx.spark.api.jupyter.SparkIntegration" } ] } diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index ca58d200..9d40d688 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -31,6 +31,7 @@ import org.jetbrains.kotlinx.jupyter.EvalRequestData import org.jetbrains.kotlinx.jupyter.ReplForJupyter import org.jetbrains.kotlinx.jupyter.api.Code import org.jetbrains.kotlinx.jupyter.api.MimeTypedResult +import org.jetbrains.kotlinx.jupyter.libraries.buildDependenciesInitCode import org.jetbrains.kotlinx.jupyter.repl.EvalResultEx import org.jetbrains.kotlinx.jupyter.testkit.ReplProvider import kotlin.script.experimental.jvm.util.classpathFromClassloader @@ -41,29 +42,61 @@ class JupyterTests : ShouldSpec({ val scriptClasspath = classpathFromClassloader(currentClassLoader).orEmpty() fun createRepl(): ReplForJupyter = replProvider(scriptClasspath) - fun withRepl(action: ReplForJupyter.() -> Unit): Unit = createRepl().action() + suspend fun withRepl(action: suspend ReplForJupyter.() -> Unit): Unit = createRepl().action() + context("Jupyter") { - should("Have spark instance") { - withRepl { + withRepl { + + println(currentClasspath.filter { "repl" in it }) + + // init + val _a = exec( + """%dumpClassesForSpark""" + ) + + @Language("kts") + val _b = exec( + """val spark = org.jetbrains.kotlinx.spark.api.SparkSession.builder().master(SparkConf().get("spark.master", "local[*]")).appName("Jupyter").getOrCreate()""" + ) + + @Language("kts") + val _c = exec( + """spark.sparkContext.setLogLevel(org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR)""" + ) + + @Language("kts") + val _d = exec( + """val sc = org.apache.spark.api.java.JavaSparkContext(spark.sparkContext)""" + ) + + should("Allow functions on local data classes") { @Language("kts") - val spark = exec("""spark""") + val klass = exec("""data class Test(val a: Int, val b: String)""") + + @Language("kts") + val ds = exec("""val ds = spark.dsOf(Test(1, "hi"), Test(2, "something"))""") + + @Language("kts") + val filtered = exec("""val filtered = ds.filter { it.a > 1 }""") + + @Language("kts") + val filteredShow = exec("""filtered.show()""") + } + should("Have spark instance") { + @Language("kts") + val spark = exec("""spark""") spark as? SparkSession shouldNotBe null } - } - should("Have JavaSparkContext instance") { - withRepl { + should("Have JavaSparkContext instance") { @Language("kts") val sc = exec("""sc""") - sc as? JavaSparkContext shouldNotBe null } - } - should("render Datasets") { - withRepl { + should("render Datasets") { @Language("kts") val html = execHtml( """ @@ -78,10 +111,8 @@ class JupyterTests : ShouldSpec({ html shouldContain "2" html shouldContain "3" } - } - should("render JavaRDDs") { - withRepl { + should("render JavaRDDs") { @Language("kts") val html = execHtml( """ @@ -97,10 +128,8 @@ class JupyterTests : ShouldSpec({ html shouldContain "1, 2, 3" html shouldContain "4, 5, 6" } - } - should("render JavaRDDs with Arrays") { - withRepl { + should("render JavaRDDs with Arrays") { @Language("kts") val html = execHtml( """ @@ -116,10 +145,8 @@ class JupyterTests : ShouldSpec({ html shouldContain "1, 2, 3" html shouldContain "4, 5, 6" } - } - should("not render JavaRDDs with custom class") { - withRepl { + should("not render JavaRDDs with custom class") { @Language("kts") val html = execHtml( """ @@ -139,12 +166,10 @@ class JupyterTests : ShouldSpec({ """.trimIndent() ) html shouldContain "Cannot render this RDD of this class." - } - } - should("render JavaPairRDDs") { - withRepl { + + should("render JavaPairRDDs") { @Language("kts") val html = execHtml( """ @@ -159,12 +184,9 @@ class JupyterTests : ShouldSpec({ html shouldContain "1, 2" html shouldContain "3, 4" - } - } - should("render JavaDoubleRDD") { - withRepl { + should("render JavaDoubleRDD") { @Language("kts") val html = execHtml( """ @@ -178,12 +200,9 @@ class JupyterTests : ShouldSpec({ html shouldContain "2.0" html shouldContain "3.0" html shouldContain "4.0" - } - } - should("render Scala RDD") { - withRepl { + should("render Scala RDD") { @Language("kts") val html = execHtml( """ diff --git a/pom_2.12.xml b/pom_2.12.xml index 29b854c1..6be7e614 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -14,6 +14,7 @@ pom + true 2.12.15 2.12 diff --git a/scala-tuples-in-kotlin/pom_2.12.xml b/scala-tuples-in-kotlin/pom_2.12.xml index 3d892860..cf67af41 100644 --- a/scala-tuples-in-kotlin/pom_2.12.xml +++ b/scala-tuples-in-kotlin/pom_2.12.xml @@ -31,11 +31,6 @@ ${kotest.version} test - - io.kotest - kotest-assertions-shared-jvm - ${kotest.version} - io.kotest.extensions kotest-extensions-allure From 90df422d7f38e11db4a938b2b100614101f22161 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 26 Apr 2022 12:52:38 +0200 Subject: [PATCH 165/213] now able to debug right test --- .../test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index 9d40d688..db05449b 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -48,6 +48,8 @@ class JupyterTests : ShouldSpec({ context("Jupyter") { withRepl { + exec("""@file:DependsOn("org.apache.spark:spark-repl_2.12:3.2.1")""") + println(currentClasspath.filter { "repl" in it }) // init From dfb48dfff5a34e918faff1c922b57f32c96ce9e4 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 26 Apr 2022 12:54:13 +0200 Subject: [PATCH 166/213] now able to debug right test --- .../test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index db05449b..eeaa2dae 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -148,7 +148,7 @@ class JupyterTests : ShouldSpec({ html shouldContain "4, 5, 6" } - should("not render JavaRDDs with custom class") { + xshould("not render JavaRDDs with custom class") { @Language("kts") val html = execHtml( """ From 3a7adaf2e9acbd4db0fa30d339e9574e6c1e8b55 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 26 Apr 2022 14:49:37 +0200 Subject: [PATCH 167/213] spark 3.2 -> spark 3.1 --- core/{3.2 => 3.1}/pom_2.12.xml | 14 ++++++-- .../apache/spark/sql/KotlinReflection.scala | 21 ++++++------ .../org/apache/spark/sql/KotlinWrappers.scala | 2 +- .../sql/catalyst/CatalystTypeConverters.scala | 0 .../spark/extensions/DemoCaseClass.scala | 0 .../spark/extensions/KSparkExtensions.scala | 0 dummy/pom.xml | 2 +- .../{pom-3.2_2.12.xml => pom-3.1_2.12.xml} | 8 ++--- kotlin-spark-api/{3.2 => 3.1}/pom_2.12.xml | 8 ++--- .../jetbrains/kotlinx/spark/api/Arities.kt | 0 .../org/jetbrains/kotlinx/spark/api/Column.kt | 0 .../kotlinx/spark/api/Conversions.kt | 0 .../kotlinx/spark/api/DataStreamWriter.kt | 0 .../jetbrains/kotlinx/spark/api/Dataset.kt | 0 .../jetbrains/kotlinx/spark/api/Encoding.kt | 28 +++++++++------- .../jetbrains/kotlinx/spark/api/GroupState.kt | 0 .../jetbrains/kotlinx/spark/api/Iterators.kt | 0 .../spark/api/KeyValueGroupedDataset.kt | 0 .../kotlinx/spark/api/SparkSession.kt | 0 .../kotlinx/spark/api/StreamingKeyValues.kt | 0 .../kotlinx/spark/api/UDFRegister.kt | 0 .../jetbrains/kotlinx/spark/api/ApiTest.kt | 0 .../kotlinx/spark/api/DatasetFunctionTest.kt | 0 .../kotlinx/spark/api/EncodingTest.kt | 32 ++++++++++++------- .../kotlinx/spark/api/KafkaHelper.kt | 0 .../kotlinx/spark/api/KafkaStreamingTest.kt | 0 .../kotlinx/spark/api/ProjectConfig.kt | 0 .../kotlinx/spark/api/StreamingTest.kt | 0 .../kotlinx/spark/api/TypeInferenceTest.kt | 0 .../kotlinx/spark/api/UDFRegisterTest.kt | 0 .../kotlinx/spark/api/struct/model/models.kt | 0 pom.xml | 2 +- pom_2.12.xml | 8 ++--- 33 files changed, 73 insertions(+), 52 deletions(-) rename core/{3.2 => 3.1}/pom_2.12.xml (85%) rename core/{3.2 => 3.1}/src/main/scala/org/apache/spark/sql/KotlinReflection.scala (98%) rename core/{3.2 => 3.1}/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala (98%) rename core/{3.2 => 3.1}/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala (100%) rename core/{3.2 => 3.1}/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala (100%) rename core/{3.2 => 3.1}/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala (100%) rename examples/{pom-3.2_2.12.xml => pom-3.1_2.12.xml} (93%) rename kotlin-spark-api/{3.2 => 3.1}/pom_2.12.xml (96%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt (96%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt (95%) rename kotlin-spark-api/{3.2 => 3.1}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt (100%) rename kotlin-spark-api/{3.2 => 3.1}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt (100%) diff --git a/core/3.2/pom_2.12.xml b/core/3.1/pom_2.12.xml similarity index 85% rename from core/3.2/pom_2.12.xml rename to core/3.1/pom_2.12.xml index 8ed1eb4f..2bd0dbb7 100644 --- a/core/3.2/pom_2.12.xml +++ b/core/3.1/pom_2.12.xml @@ -2,9 +2,9 @@ 4.0.0 - Kotlin Spark API: Scala core for Spark 3.2+ (Scala 2.12) - Scala-Spark 3.2+ compatibility layer for Kotlin for Apache Spark - core-3.2_2.12 + Kotlin Spark API: Scala core for Spark 3.1+ (Scala 2.12) + Scala-Spark 3.1+ compatibility layer for Kotlin for Apache Spark + core-3.1_2.12 org.jetbrains.kotlinx.spark kotlin-spark-api-parent_2.12 @@ -70,6 +70,14 @@ true + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.1/src/main/scala/org/apache/spark/sql/KotlinReflection.scala similarity index 98% rename from core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala rename to core/3.1/src/main/scala/org/apache/spark/sql/KotlinReflection.scala index cbc30be3..7f1190d8 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinReflection.scala +++ b/core/3.1/src/main/scala/org/apache/spark/sql/KotlinReflection.scala @@ -49,6 +49,7 @@ import java.lang.Exception */ //noinspection RedundantBlock object KotlinReflection extends KotlinReflection { + ScalaReflection /** * Returns the Spark SQL DataType for a given java class. Where this is not an exact mapping * to a native type, an ObjectType is returned. @@ -290,13 +291,13 @@ object KotlinReflection extends KotlinReflection { createDeserializerForSqlTimestamp(path) } case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) => { - createDeserializerForLocalDateTime(path) + throw new IllegalArgumentException("java.time.LocalDateTime is supported in Spark 3.2+") } case t if isSubtype(t, localTypeOf[java.time.Duration]) => { - createDeserializerForDuration(path) + throw new IllegalArgumentException("java.time.Duration is supported in Spark 3.2+") } case t if isSubtype(t, localTypeOf[java.time.Period]) => { - createDeserializerForPeriod(path) + throw new IllegalArgumentException("java.time.Period is supported in Spark 3.2+") } case t if isSubtype(t, localTypeOf[java.lang.String]) => { createDeserializerForString(path, returnNullable = false) @@ -828,7 +829,7 @@ object KotlinReflection extends KotlinReflection { createSerializerForSqlTimestamp(inputObject) } case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) => { - createSerializerForLocalDateTime(inputObject) + throw new IllegalArgumentException("java.time.LocalDateTime is supported in Spark 3.2+") } case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => { createSerializerForJavaLocalDate(inputObject) @@ -837,10 +838,10 @@ object KotlinReflection extends KotlinReflection { createSerializerForSqlDate(inputObject) } case t if isSubtype(t, localTypeOf[java.time.Duration]) => { - createSerializerForJavaDuration(inputObject) + throw new IllegalArgumentException("java.time.Duration is supported in Spark 3.2+") } case t if isSubtype(t, localTypeOf[java.time.Period]) => { - createSerializerForJavaPeriod(inputObject) + throw new IllegalArgumentException("java.time.Period is supported in Spark 3.2+") } case t if isSubtype(t, localTypeOf[BigDecimal]) => { createSerializerForScalaBigDecimal(inputObject) @@ -1178,7 +1179,7 @@ object KotlinReflection extends KotlinReflection { } // SPARK-36227: Remove TimestampNTZ type support in Spark 3.2 with minimal code changes. case t if isSubtype(t, localTypeOf[java.time.LocalDateTime]) && Utils.isTesting => { - Schema(TimestampNTZType, nullable = true) + throw new IllegalArgumentException("TimestampNTZType is supported in spark 3.2+") } case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => { Schema(DateType, nullable = true) @@ -1190,10 +1191,10 @@ object KotlinReflection extends KotlinReflection { Schema(CalendarIntervalType, nullable = true) } case t if isSubtype(t, localTypeOf[java.time.Duration]) => { - Schema(DayTimeIntervalType(), nullable = true) + throw new IllegalArgumentException("DayTimeIntervalType for java.time.Duration is supported in spark 3.2+") } case t if isSubtype(t, localTypeOf[java.time.Period]) => { - Schema(YearMonthIntervalType(), nullable = true) + throw new IllegalArgumentException("YearMonthIntervalType for java.time.Period is supported in spark 3.2+") } case t if isSubtype(t, localTypeOf[BigDecimal]) => { Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) @@ -1268,8 +1269,6 @@ object KotlinReflection extends KotlinReflection { @scala.annotation.tailrec def javaBoxedType(dt: DataType): Class[_] = dt match { case _: DecimalType => classOf[Decimal] - case _: DayTimeIntervalType => classOf[java.lang.Long] - case _: YearMonthIntervalType => classOf[java.lang.Integer] case BinaryType => classOf[Array[Byte]] case StringType => classOf[UTF8String] case CalendarIntervalType => classOf[CalendarInterval] diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala b/core/3.1/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala similarity index 98% rename from core/3.2/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala rename to core/3.1/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala index 9395019c..f9b5a52f 100644 --- a/core/3.2/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala +++ b/core/3.1/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala @@ -77,7 +77,7 @@ class KDataTypeWrapper( override private[ sql ] def getFieldIndex(name: String) = dt.getFieldIndex(name) - private[ sql ] def findNestedField(fieldNames: Seq[ String ], includeCollections: Boolean, resolver: Resolver) = + override private[ sql ] def findNestedField(fieldNames: Seq[ String ], includeCollections: Boolean, resolver: Resolver) = dt.findNestedField(fieldNames, includeCollections, resolver) override private[ sql ] def buildFormattedString(prefix: String, stringConcat: StringUtils.StringConcat, maxDepth: Int): Unit = diff --git a/core/3.2/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala b/core/3.1/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala similarity index 100% rename from core/3.2/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala rename to core/3.1/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala diff --git a/core/3.2/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala b/core/3.1/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala similarity index 100% rename from core/3.2/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala rename to core/3.1/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala diff --git a/core/3.2/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala b/core/3.1/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala similarity index 100% rename from core/3.2/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala rename to core/3.1/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala diff --git a/dummy/pom.xml b/dummy/pom.xml index 1fd6ab08..988b5e42 100644 --- a/dummy/pom.xml +++ b/dummy/pom.xml @@ -16,7 +16,7 @@ org.jetbrains.kotlinx.spark - examples-3.2_2.12 + examples-3.1_2.12 ${project.parent.version} diff --git a/examples/pom-3.2_2.12.xml b/examples/pom-3.1_2.12.xml similarity index 93% rename from examples/pom-3.2_2.12.xml rename to examples/pom-3.1_2.12.xml index c3b95b0e..f4021872 100644 --- a/examples/pom-3.2_2.12.xml +++ b/examples/pom-3.1_2.12.xml @@ -3,9 +3,9 @@ 4.0.0 - Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12) + Kotlin Spark API: Examples for Spark 3.1+ (Scala 2.12) Example of usage - examples-3.2_2.12 + examples-3.1_2.12 org.jetbrains.kotlinx.spark kotlin-spark-api-parent_2.12 @@ -16,7 +16,7 @@ org.jetbrains.kotlinx.spark - kotlin-spark-api-3.2 + kotlin-spark-api-3.1 ${project.version} @@ -39,7 +39,7 @@ src/main/kotlin src/test/kotlin - target/3.2/${scala.compat.version} + target/3.1/${scala.compat.version} org.jetbrains.kotlin diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.1/pom_2.12.xml similarity index 96% rename from kotlin-spark-api/3.2/pom_2.12.xml rename to kotlin-spark-api/3.1/pom_2.12.xml index 99172895..5334d74a 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.1/pom_2.12.xml @@ -3,9 +3,9 @@ 4.0.0 - Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) - kotlin-spark-api-3.2 - Kotlin API compatible with spark 3.2.0 Kotlin for Apache Spark + Kotlin Spark API: API for Spark 3.1+ (Scala 2.12) + kotlin-spark-api-3.1 + Kotlin API compatible with spark 3.1.3 Kotlin for Apache Spark org.jetbrains.kotlinx.spark kotlin-spark-api-parent_2.12 @@ -25,7 +25,7 @@ org.jetbrains.kotlinx.spark - core-3.2_${scala.compat.version} + core-3.1_${scala.compat.version} org.jetbrains.kotlinx.spark diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt similarity index 96% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt index eafd460f..643237ec 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt +++ b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt @@ -47,32 +47,27 @@ import java.time.LocalDate import java.time.Period import java.util.* import java.util.concurrent.ConcurrentHashMap -import kotlin.Any -import kotlin.Array import kotlin.Boolean import kotlin.BooleanArray import kotlin.Byte import kotlin.ByteArray import kotlin.Double import kotlin.DoubleArray -import kotlin.ExperimentalStdlibApi import kotlin.Float import kotlin.FloatArray -import kotlin.IllegalArgumentException import kotlin.Int import kotlin.IntArray import kotlin.Long import kotlin.LongArray -import kotlin.OptIn import kotlin.Short import kotlin.ShortArray import kotlin.String -import kotlin.Suppress -import kotlin.reflect.* +import kotlin.reflect.KClass +import kotlin.reflect.KType import kotlin.reflect.full.findAnnotation import kotlin.reflect.full.isSubclassOf import kotlin.reflect.full.primaryConstructor -import kotlin.to +import kotlin.reflect.typeOf @JvmField val ENCODERS: Map, Encoder<*>> = mapOf( @@ -90,10 +85,17 @@ val ENCODERS: Map, Encoder<*>> = mapOf( Timestamp::class to TIMESTAMP(), Instant::class to INSTANT(), // 3.0+ ByteArray::class to BINARY(), - Duration::class to DURATION(), // 3.2+ - Period::class to PERIOD(), // 3.2+ +// Duration::class to DURATION(), // 3.2+ +// Period::class to PERIOD(), // 3.2+ ) +private fun checkIfEncoderRequiresNewerVersion(klass: KClass<*>) { + when (klass) { + Duration::class, Period::class -> throw IllegalArgumentException("$klass is supported in Spark 3.2") + } +} + + private val knownDataTypes: Map, DataType> = mapOf( Byte::class to DataTypes.ByteType, Short::class to DataTypes.ShortType, @@ -129,11 +131,13 @@ inline fun encoder(): Encoder = generateEncoder(typeOf(), T::c * @see encoder */ @Suppress("UNCHECKED_CAST") -fun generateEncoder(type: KType, cls: KClass<*>): Encoder = - when { +fun generateEncoder(type: KType, cls: KClass<*>): Encoder { + checkIfEncoderRequiresNewerVersion(cls) + return when { isSupportedByKotlinClassEncoder(cls) -> kotlinClassEncoder(schema = memoizedSchema(type), kClass = cls) else -> ENCODERS[cls] as? Encoder? ?: bean(cls.java) } as Encoder +} private fun isSupportedByKotlinClassEncoder(cls: KClass<*>): Boolean = when { diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt b/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt rename to kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt similarity index 100% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt rename to kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt similarity index 100% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt rename to kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt similarity index 95% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt rename to kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt index 29a073ad..17364072 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt +++ b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt @@ -21,14 +21,20 @@ package org.jetbrains.kotlinx.spark.api import ch.tutteli.atrium.api.fluent.en_GB.* import ch.tutteli.atrium.api.verbs.expect +import io.kotest.assertions.throwables.shouldThrow import io.kotest.core.spec.style.ShouldSpec import io.kotest.matchers.shouldBe import org.apache.spark.sql.Dataset import org.apache.spark.sql.types.Decimal import org.apache.spark.unsafe.types.CalendarInterval -import org.jetbrains.kotlinx.spark.api.tuples.* +import org.jetbrains.kotlinx.spark.api.tuples.component1 +import org.jetbrains.kotlinx.spark.api.tuples.component2 +import org.jetbrains.kotlinx.spark.api.tuples.component3 +import org.jetbrains.kotlinx.spark.api.tuples.t import org.jetbrains.kotlinx.spark.extensions.DemoCaseClass -import scala.* +import scala.Product +import scala.Some +import scala.Tuple2 import java.math.BigDecimal import java.sql.Date import java.sql.Timestamp @@ -61,21 +67,25 @@ class EncodingTest : ShouldSpec({ } should("handle Duration Datasets") { - val dataset = dsOf(Duration.ZERO) - dataset.collectAsList() shouldBe listOf(Duration.ZERO) + shouldThrow { + val dataset = dsOf(Duration.ZERO) + dataset.collectAsList() shouldBe listOf(Duration.ZERO) + } } should("handle Period Datasets") { - val periods = listOf(Period.ZERO, Period.ofDays(2)) - val dataset = periods.toDS() + shouldThrow { + val periods = listOf(Period.ZERO, Period.ofDays(2)) + val dataset = periods.toDS() - dataset.show(false) + dataset.show(false) - dataset.collectAsList().let { - it[0] shouldBe Period.ZERO + dataset.collectAsList().let { + it[0] shouldBe Period.ZERO - // NOTE Spark truncates java.time.Period to months. - it[1] shouldBe Period.ofDays(0) + // NOTE Spark truncates java.time.Period to months. + it[1] shouldBe Period.ofDays(0) + } } } diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt similarity index 100% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt rename to kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt similarity index 100% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt rename to kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt similarity index 100% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt rename to kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt similarity index 100% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt rename to kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt similarity index 100% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt rename to kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt similarity index 100% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt rename to kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt b/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt similarity index 100% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt rename to kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt diff --git a/pom.xml b/pom.xml index 8c99ec41..4af9613c 100644 --- a/pom.xml +++ b/pom.xml @@ -17,7 +17,7 @@ 1.1.0 1.3.1 3.1.0 - 3.2.1 + 3.1.3 3.3.1 diff --git a/pom_2.12.xml b/pom_2.12.xml index 29b854c1..a3f46c09 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -19,17 +19,17 @@ - core/3.2/pom_2.12.xml + core/3.1/pom_2.12.xml scala-tuples-in-kotlin/pom_2.12.xml - kotlin-spark-api/3.2/pom_2.12.xml - examples/pom-3.2_2.12.xml + kotlin-spark-api/3.1/pom_2.12.xml + examples/pom-3.1_2.12.xml org.jetbrains.kotlinx.spark - core-3.2_${scala.compat.version} + core-3.1_${scala.compat.version} ${project.version} From 0cd9b615a0110ee087c22c383796d2e1acdd396a Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 26 Apr 2022 16:01:27 +0200 Subject: [PATCH 168/213] spark 3.2 -> spark 3.1 --- qodana.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/qodana.yaml b/qodana.yaml index ff38d36f..00dce9ad 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -6,4 +6,4 @@ exclude: - name: All paths: - scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples - - kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt + - kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt From faf3512e98ed8f40863262bde47b5f01d5240e3b Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 26 Apr 2022 16:44:54 +0200 Subject: [PATCH 169/213] spark 3.1 -> spark 3.0 --- core/{3.1 => 3.0}/pom_2.12.xml | 6 +++--- .../scala/org/apache/spark/sql/KotlinReflection.scala | 0 .../main/scala/org/apache/spark/sql/KotlinWrappers.scala | 0 .../spark/sql/catalyst/CatalystTypeConverters.scala | 0 .../kotlinx/spark/extensions/DemoCaseClass.scala | 0 .../kotlinx/spark/extensions/KSparkExtensions.scala | 0 dummy/pom.xml | 2 +- examples/{pom-3.1_2.12.xml => pom-3.0_2.12.xml} | 8 ++++---- kotlin-spark-api/{3.1 => 3.0}/pom_2.12.xml | 8 ++++---- .../kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt | 0 .../main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt | 0 .../kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt | 0 .../org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt | 0 .../kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt | 0 .../kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt | 0 .../kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt | 0 .../kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt | 0 .../jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt | 0 .../org/jetbrains/kotlinx/spark/api/SparkSession.kt | 0 .../org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt | 0 .../kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt | 0 .../kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt | 0 .../jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt | 0 .../org/jetbrains/kotlinx/spark/api/EncodingTest.kt | 0 .../kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt | 0 .../org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt | 2 +- .../org/jetbrains/kotlinx/spark/api/ProjectConfig.kt | 0 .../org/jetbrains/kotlinx/spark/api/StreamingTest.kt | 0 .../org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt | 0 .../org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt | 0 .../jetbrains/kotlinx/spark/api/struct/model/models.kt | 0 pom.xml | 2 +- pom_2.12.xml | 8 ++++---- qodana.yaml | 2 +- 34 files changed, 19 insertions(+), 19 deletions(-) rename core/{3.1 => 3.0}/pom_2.12.xml (93%) rename core/{3.1 => 3.0}/src/main/scala/org/apache/spark/sql/KotlinReflection.scala (100%) rename core/{3.1 => 3.0}/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala (100%) rename core/{3.1 => 3.0}/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala (100%) rename core/{3.1 => 3.0}/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala (100%) rename core/{3.1 => 3.0}/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala (100%) rename examples/{pom-3.1_2.12.xml => pom-3.0_2.12.xml} (93%) rename kotlin-spark-api/{3.1 => 3.0}/pom_2.12.xml (96%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt (99%) rename kotlin-spark-api/{3.1 => 3.0}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt (100%) rename kotlin-spark-api/{3.1 => 3.0}/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt (100%) diff --git a/core/3.1/pom_2.12.xml b/core/3.0/pom_2.12.xml similarity index 93% rename from core/3.1/pom_2.12.xml rename to core/3.0/pom_2.12.xml index 2bd0dbb7..77b21c92 100644 --- a/core/3.1/pom_2.12.xml +++ b/core/3.0/pom_2.12.xml @@ -2,9 +2,9 @@ 4.0.0 - Kotlin Spark API: Scala core for Spark 3.1+ (Scala 2.12) - Scala-Spark 3.1+ compatibility layer for Kotlin for Apache Spark - core-3.1_2.12 + Kotlin Spark API: Scala core for Spark 3.0+ (Scala 2.12) + Scala-Spark 3.0+ compatibility layer for Kotlin for Apache Spark + core-3.0_2.12 org.jetbrains.kotlinx.spark kotlin-spark-api-parent_2.12 diff --git a/core/3.1/src/main/scala/org/apache/spark/sql/KotlinReflection.scala b/core/3.0/src/main/scala/org/apache/spark/sql/KotlinReflection.scala similarity index 100% rename from core/3.1/src/main/scala/org/apache/spark/sql/KotlinReflection.scala rename to core/3.0/src/main/scala/org/apache/spark/sql/KotlinReflection.scala diff --git a/core/3.1/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala b/core/3.0/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala similarity index 100% rename from core/3.1/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala rename to core/3.0/src/main/scala/org/apache/spark/sql/KotlinWrappers.scala diff --git a/core/3.1/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala b/core/3.0/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala similarity index 100% rename from core/3.1/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala rename to core/3.0/src/main/scala/org/apache/spark/sql/catalyst/CatalystTypeConverters.scala diff --git a/core/3.1/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala b/core/3.0/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala similarity index 100% rename from core/3.1/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala rename to core/3.0/src/main/scala/org/jetbrains/kotlinx/spark/extensions/DemoCaseClass.scala diff --git a/core/3.1/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala b/core/3.0/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala similarity index 100% rename from core/3.1/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala rename to core/3.0/src/main/scala/org/jetbrains/kotlinx/spark/extensions/KSparkExtensions.scala diff --git a/dummy/pom.xml b/dummy/pom.xml index 988b5e42..51de1db9 100644 --- a/dummy/pom.xml +++ b/dummy/pom.xml @@ -16,7 +16,7 @@ org.jetbrains.kotlinx.spark - examples-3.1_2.12 + examples-3.0_2.12 ${project.parent.version} diff --git a/examples/pom-3.1_2.12.xml b/examples/pom-3.0_2.12.xml similarity index 93% rename from examples/pom-3.1_2.12.xml rename to examples/pom-3.0_2.12.xml index f4021872..bd6acc66 100644 --- a/examples/pom-3.1_2.12.xml +++ b/examples/pom-3.0_2.12.xml @@ -3,9 +3,9 @@ 4.0.0 - Kotlin Spark API: Examples for Spark 3.1+ (Scala 2.12) + Kotlin Spark API: Examples for Spark 3.0+ (Scala 2.12) Example of usage - examples-3.1_2.12 + examples-3.0_2.12 org.jetbrains.kotlinx.spark kotlin-spark-api-parent_2.12 @@ -16,7 +16,7 @@ org.jetbrains.kotlinx.spark - kotlin-spark-api-3.1 + kotlin-spark-api-3.0 ${project.version} @@ -39,7 +39,7 @@ src/main/kotlin src/test/kotlin - target/3.1/${scala.compat.version} + target/3.0/${scala.compat.version} org.jetbrains.kotlin diff --git a/kotlin-spark-api/3.1/pom_2.12.xml b/kotlin-spark-api/3.0/pom_2.12.xml similarity index 96% rename from kotlin-spark-api/3.1/pom_2.12.xml rename to kotlin-spark-api/3.0/pom_2.12.xml index 5334d74a..b06946d9 100644 --- a/kotlin-spark-api/3.1/pom_2.12.xml +++ b/kotlin-spark-api/3.0/pom_2.12.xml @@ -3,9 +3,9 @@ 4.0.0 - Kotlin Spark API: API for Spark 3.1+ (Scala 2.12) - kotlin-spark-api-3.1 - Kotlin API compatible with spark 3.1.3 Kotlin for Apache Spark + Kotlin Spark API: API for Spark 3.0+ (Scala 2.12) + kotlin-spark-api-3.0 + Kotlin API compatible with spark 3.0.3 Kotlin for Apache Spark org.jetbrains.kotlinx.spark kotlin-spark-api-parent_2.12 @@ -25,7 +25,7 @@ org.jetbrains.kotlinx.spark - core-3.1_${scala.compat.version} + core-3.0_${scala.compat.version} org.jetbrains.kotlinx.spark diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Column.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Conversions.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/DataStreamWriter.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Dataset.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Encoding.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/GroupState.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Iterators.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/KeyValueGroupedDataset.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt diff --git a/kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt b/kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt similarity index 100% rename from kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt rename to kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegister.kt diff --git a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt similarity index 100% rename from kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt rename to kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ApiTest.kt diff --git a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt similarity index 100% rename from kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt rename to kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/DatasetFunctionTest.kt diff --git a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt similarity index 100% rename from kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt rename to kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/EncodingTest.kt diff --git a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt similarity index 100% rename from kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt rename to kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt diff --git a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt similarity index 99% rename from kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt rename to kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt index b755b1e6..4b4edaff 100644 --- a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt +++ b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt @@ -41,7 +41,7 @@ class KafkaStreamingTest : ShouldSpec({ // making sure it can be skipped on Github actions since it times out tags(Kafka) - context("kafka") { + xcontext("kafka") { val port = 9092 val broker = "localhost:$port" val topic1 = "test1" diff --git a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt similarity index 100% rename from kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt rename to kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt diff --git a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt similarity index 100% rename from kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt rename to kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt diff --git a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt similarity index 100% rename from kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt rename to kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/TypeInferenceTest.kt diff --git a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt similarity index 100% rename from kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt rename to kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/UDFRegisterTest.kt diff --git a/kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt similarity index 100% rename from kotlin-spark-api/3.1/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt rename to kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/struct/model/models.kt diff --git a/pom.xml b/pom.xml index 4af9613c..1293db7c 100644 --- a/pom.xml +++ b/pom.xml @@ -17,7 +17,7 @@ 1.1.0 1.3.1 3.1.0 - 3.1.3 + 3.0.3 3.3.1 diff --git a/pom_2.12.xml b/pom_2.12.xml index a3f46c09..e81f99e2 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -19,17 +19,17 @@ - core/3.1/pom_2.12.xml + core/3.0/pom_2.12.xml scala-tuples-in-kotlin/pom_2.12.xml - kotlin-spark-api/3.1/pom_2.12.xml - examples/pom-3.1_2.12.xml + kotlin-spark-api/3.0/pom_2.12.xml + examples/pom-3.0_2.12.xml org.jetbrains.kotlinx.spark - core-3.1_${scala.compat.version} + core-3.0_${scala.compat.version} ${project.version} diff --git a/qodana.yaml b/qodana.yaml index 00dce9ad..3c9f0225 100644 --- a/qodana.yaml +++ b/qodana.yaml @@ -6,4 +6,4 @@ exclude: - name: All paths: - scala-tuples-in-kotlin/src/main/kotlin/org/jetbrains/kotlinx/spark/api/tuples - - kotlin-spark-api/3.1/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt + - kotlin-spark-api/3.0/src/main/kotlin/org/jetbrains/kotlinx/spark/api/Arities.kt From 390defc7ddaa96dba43585ae1521eaa19fd587b7 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 28 Apr 2022 13:12:14 +0200 Subject: [PATCH 170/213] added functions to integration since json will support different integration classes --- kotlin-spark-api/3.2/pom_2.12.xml | 5 +++ .../spark/api/jupyter/SparkIntegration.kt | 18 ++++++++++ .../kotlinx/spark/api/JupyterTests.kt | 33 ++++--------------- pom.xml | 2 +- 4 files changed, 30 insertions(+), 28 deletions(-) diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 95e6761d..0bc4c74b 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -63,6 +63,11 @@ spark-sql_${scala.compat.version} ${spark3.version} + + org.apache.spark + spark-repl_${scala.compat.version} + ${spark3.version} + org.apache.spark spark-streaming_${scala.compat.version} diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index 2dca6ac9..3b8479af 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -87,6 +87,24 @@ internal class SparkIntegration : JupyterIntegration() { import("org.apache.spark.streaming.*") // onLoaded is only done for the non-streaming variant of kotlin-spark-api in the json file + onLoaded { + execute("%dumpClassesForSpark") + + execute("val spark = org.jetbrains.kotlinx.spark.api.SparkSession.builder().master(SparkConf().get(\"spark.master\", \"local[*]\")).appName(\"Jupyter\").config(\"spark.sql.codegen.wholeStage\", false).getOrCreate()") + execute("spark.sparkContext.setLogLevel(org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR)") + execute("val sc by lazy { org.apache.spark.api.java.JavaSparkContext(spark.sparkContext) }") + execute("println(\"Spark session has been started and is running. No `withSpark { }` necessary, you can access `spark` and `sc` directly. To use Spark streaming, use `%use kotlin-spark-api-streaming` instead.\")") + + execute("inline fun List.toDS(): Dataset = toDS(spark)") + execute("inline fun Array.toDS(): Dataset = spark.dsOf(*this)") + execute("inline fun dsOf(vararg arg: T): Dataset = spark.dsOf(*arg)") + execute("inline fun RDD.toDS(): Dataset = toDS(spark)") + execute("inline fun JavaRDDLike.toDS(): Dataset = toDS(spark)") + execute("inline fun RDD.toDF(): Dataset = toDF(spark)") + execute("inline fun JavaRDDLike.toDF(): Dataset = toDF(spark)") + execute("val udf: UDFRegistration get() = spark.udf()") + } + // Render Dataset render> { diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index eeaa2dae..32287a4f 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -25,6 +25,7 @@ import io.kotest.matchers.shouldNotBe import io.kotest.matchers.string.shouldContain import io.kotest.matchers.types.shouldBeInstanceOf import jupyter.kotlin.DependsOn +import org.apache.spark.SparkConf import org.apache.spark.api.java.JavaSparkContext import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.EvalRequestData @@ -48,36 +49,12 @@ class JupyterTests : ShouldSpec({ context("Jupyter") { withRepl { - exec("""@file:DependsOn("org.apache.spark:spark-repl_2.12:3.2.1")""") - - println(currentClasspath.filter { "repl" in it }) - - // init - val _a = exec( - """%dumpClassesForSpark""" - ) - - @Language("kts") - val _b = exec( - """val spark = org.jetbrains.kotlinx.spark.api.SparkSession.builder().master(SparkConf().get("spark.master", "local[*]")).appName("Jupyter").getOrCreate()""" - ) - - @Language("kts") - val _c = exec( - """spark.sparkContext.setLogLevel(org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR)""" - ) - - @Language("kts") - val _d = exec( - """val sc = org.apache.spark.api.java.JavaSparkContext(spark.sparkContext)""" - ) - should("Allow functions on local data classes") { @Language("kts") val klass = exec("""data class Test(val a: Int, val b: String)""") @Language("kts") - val ds = exec("""val ds = spark.dsOf(Test(1, "hi"), Test(2, "something"))""") + val ds = exec("""val ds = dsOf(Test(1, "hi"), Test(2, "something"))""") @Language("kts") val filtered = exec("""val filtered = ds.filter { it.a > 1 }""") @@ -102,7 +79,7 @@ class JupyterTests : ShouldSpec({ @Language("kts") val html = execHtml( """ - val ds = listOf(1, 2, 3).toDS(spark) + val ds = listOf(1, 2, 3).toDS() ds """.trimIndent() ) @@ -170,7 +147,6 @@ class JupyterTests : ShouldSpec({ html shouldContain "Cannot render this RDD of this class." } - should("render JavaPairRDDs") { @Language("kts") val html = execHtml( @@ -220,6 +196,9 @@ class JupyterTests : ShouldSpec({ html shouldContain "1, 2, 3" html shouldContain "4, 5, 6" } + + @Language("kts") + val _stop = exec("""spark.stop()""") } } }) diff --git a/pom.xml b/pom.xml index e4020b53..817e30bd 100644 --- a/pom.xml +++ b/pom.xml @@ -17,7 +17,7 @@ 1.1.0 3.1.0 3.2.1 - 0.11.0-76 + 0.11.0-77 0.7.3 3.3.1 From 32a54f8515061620fffcd8dc6c44c323819141aa Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 28 Apr 2022 13:56:15 +0200 Subject: [PATCH 171/213] added functions to integration since json will support different integration classes --- .../spark/api/jupyter/SparkIntegration.kt | 87 +++++++++++++------ 1 file changed, 59 insertions(+), 28 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index 3b8479af..c77ef111 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -40,6 +40,7 @@ import java.io.Serializable import scala.collection.Iterable as ScalaIterable import scala.collection.Iterator as ScalaIterator +@Suppress("UNUSED_VARIABLE", "LocalVariableName") @OptIn(ExperimentalStdlibApi::class) internal class SparkIntegration : JupyterIntegration() { @@ -72,37 +73,67 @@ internal class SparkIntegration : JupyterIntegration() { println("SparkIntegration loaded") - import("org.jetbrains.kotlinx.spark.api.*") - import("org.jetbrains.kotlinx.spark.api.tuples.*") - import(*(1..22).map { "scala.Tuple$it" }.toTypedArray()) - import("org.apache.spark.sql.functions.*") - import("org.apache.spark.*") - import("org.apache.spark.sql.*") - import("org.apache.spark.api.java.*") - import("scala.collection.Seq") - import("org.apache.spark.rdd.*") - import("java.io.Serializable") - import("org.apache.spark.streaming.api.java.*") - import("org.apache.spark.streaming.api.*") - import("org.apache.spark.streaming.*") + import( + "org.jetbrains.kotlinx.spark.api.*", + "org.jetbrains.kotlinx.spark.api.tuples.*", + *(1..22).map { "scala.Tuple$it" }.toTypedArray(), + "org.apache.spark.sql.functions.*", + "org.apache.spark.*", + "org.apache.spark.sql.*", + "org.apache.spark.api.java.*", + "scala.collection.Seq", + "org.apache.spark.rdd.*", + "java.io.Serializable", + "org.apache.spark.streaming.api.java.*", + "org.apache.spark.streaming.api.*", + "org.apache.spark.streaming.*", + ) // onLoaded is only done for the non-streaming variant of kotlin-spark-api in the json file onLoaded { - execute("%dumpClassesForSpark") - - execute("val spark = org.jetbrains.kotlinx.spark.api.SparkSession.builder().master(SparkConf().get(\"spark.master\", \"local[*]\")).appName(\"Jupyter\").config(\"spark.sql.codegen.wholeStage\", false).getOrCreate()") - execute("spark.sparkContext.setLogLevel(org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR)") - execute("val sc by lazy { org.apache.spark.api.java.JavaSparkContext(spark.sparkContext) }") - execute("println(\"Spark session has been started and is running. No `withSpark { }` necessary, you can access `spark` and `sc` directly. To use Spark streaming, use `%use kotlin-spark-api-streaming` instead.\")") - - execute("inline fun List.toDS(): Dataset = toDS(spark)") - execute("inline fun Array.toDS(): Dataset = spark.dsOf(*this)") - execute("inline fun dsOf(vararg arg: T): Dataset = spark.dsOf(*arg)") - execute("inline fun RDD.toDS(): Dataset = toDS(spark)") - execute("inline fun JavaRDDLike.toDS(): Dataset = toDS(spark)") - execute("inline fun RDD.toDF(): Dataset = toDF(spark)") - execute("inline fun JavaRDDLike.toDF(): Dataset = toDF(spark)") - execute("val udf: UDFRegistration get() = spark.udf()") + val _0 = execute("""%dumpClassesForSpark""") + + @Language("kts") + val _1 = listOf( + """ + val spark = org.jetbrains.kotlinx.spark.api.SparkSession + .builder() + .master(SparkConf().get("spark.master", "local[*]")) + .appName("Jupyter") + .config("spark.sql.codegen.wholeStage", false) + .getOrCreate()""".trimIndent(), + """ + spark.sparkContext.setLogLevel(org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR)""".trimIndent(), + """ + val sc by lazy { + org.apache.spark.api.java.JavaSparkContext(spark.sparkContext) + }""".trimIndent(), + """ + println("Spark session has been started and is running. No `withSpark { }` necessary, you can access `spark` and `sc` directly. To use Spark streaming, use `%use kotlin-spark-api-streaming` instead.")""".trimIndent(), + """ + inline fun List.toDS(): Dataset = toDS(spark)""".trimIndent(), + """ + inline fun Array.toDS(): Dataset = spark.dsOf(*this)""".trimIndent(), + """ + inline fun dsOf(vararg arg: T): Dataset = spark.dsOf(*arg)""".trimIndent(), + """ + inline fun RDD.toDS(): Dataset = toDS(spark)""".trimIndent(), + """ + inline fun JavaRDDLike.toDS(): Dataset = toDS(spark)""".trimIndent(), + """ + inline fun RDD.toDF(): Dataset = toDF(spark)""".trimIndent(), + """ + inline fun JavaRDDLike.toDF(): Dataset = toDF(spark)""".trimIndent(), + """ + val udf: UDFRegistration get() = spark.udf()""".trimIndent(), + ).map(::execute) + } + + onShutdown { + @Language("kts") + val _0 = execute(""" + spark.stop()""".trimIndent() + ) } From a0cf372a02d4f0eb3f9fb4f2a1d8fe5808608829 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 28 Apr 2022 16:06:20 +0200 Subject: [PATCH 172/213] temp test to see if we can publish to gh packages from a branch --- .github/workflows/publish_dev_version.yml | 33 +++++++++++++++++++ .../kotlinx/spark/api/JupyterTests.kt | 8 +++-- pom.xml | 7 ++-- 3 files changed, 43 insertions(+), 5 deletions(-) create mode 100644 .github/workflows/publish_dev_version.yml diff --git a/.github/workflows/publish_dev_version.yml b/.github/workflows/publish_dev_version.yml new file mode 100644 index 00000000..b1492fe2 --- /dev/null +++ b/.github/workflows/publish_dev_version.yml @@ -0,0 +1,33 @@ +name: Generate and publish docs + +on: + push: + branches: + - "jupyter-test" # TODO change to spark-3.2 + +jobs: + build-and-deploy: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up JDK 11 + uses: actions/setup-java@v1 + with: + distributions: adopt + java-version: 11 + check-latest: true + - name: Cache Maven packages + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: ${{ runner.os }}-m2 + - name: Build with Maven + run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotest.tags="!Kafka" + - name: Deploy to GH Packages + run: mvn --batch-mode deploy + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index 32287a4f..d70d1bba 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -45,7 +45,6 @@ class JupyterTests : ShouldSpec({ fun createRepl(): ReplForJupyter = replProvider(scriptClasspath) suspend fun withRepl(action: suspend ReplForJupyter.() -> Unit): Unit = createRepl().action() - context("Jupyter") { withRepl { @@ -126,15 +125,18 @@ class JupyterTests : ShouldSpec({ } xshould("not render JavaRDDs with custom class") { + @Language("kts") - val html = execHtml( - """ + val klass = exec(""" data class Test( val longFirstName: String, val second: LongArray, val somethingSpecial: Map, ): Serializable + """.trimIndent()) + @Language("kts") + val html = execHtml(""" val rdd = sc.parallelize( listOf( Test("aaaaaaaaa", longArrayOf(1L, 100000L, 24L), mapOf(1 to "one", 2 to "two")), diff --git a/pom.xml b/pom.xml index 817e30bd..192ddb6f 100644 --- a/pom.xml +++ b/pom.xml @@ -311,8 +311,11 @@ https://oss.sonatype.org/service/local/staging/deploy/maven2/ - ossrh - https://oss.sonatype.org/content/repositories/snapshots + github + JetBrains + https://github.com/JetBrains/kotlin-spark-api + + From 4e9e05a6d39fdd4bdba6fa448c310976930bba67 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 29 Apr 2022 10:57:57 +0200 Subject: [PATCH 173/213] temp test to see if we can publish to gh packages from a branch --- .github/workflows/publish_dev_version.yml | 2 +- README.md | 20 +++++++------------- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/.github/workflows/publish_dev_version.yml b/.github/workflows/publish_dev_version.yml index b1492fe2..15e25865 100644 --- a/.github/workflows/publish_dev_version.yml +++ b/.github/workflows/publish_dev_version.yml @@ -26,7 +26,7 @@ jobs: - name: Build with Maven run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotest.tags="!Kafka" - name: Deploy to GH Packages - run: mvn --batch-mode deploy + run: ./mvnw --batch-mode deploy env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/README.md b/README.md index adfe2d7f..9aa51579 100644 --- a/README.md +++ b/README.md @@ -88,22 +88,16 @@ To define a certain version of Spark or the API itself, simply add it like this: ``` Inside the notebook a Spark session will be initiated automatically. This can be accessed via the `spark` value. -`sc: JavaSparkContext` can also be accessed directly. - -One limitation of the notebooks is that the `SparkSession` context cannot be applied -implicitly to function calls. This means that instead of writing: -```kotlin -val ds = listOf(...).toDS() -``` -you'll need to write: -```kotlin -val ds = listOf(...).toDS(spark) -``` - -Other than that, the API operates pretty similarly. +`sc: JavaSparkContext` can also be accessed directly. The API operates pretty similarly. There is also support for HTML rendering of Datasets and simple (Java)RDDs. +To use Spark Streaming abilities, instead use +```jupyterpython +%use kotlin-spark-api-streaming +``` +This does not start a Spark session right away, meaning you can call `withSparkStreaming(batchDuration) {}` +in whichever cell you want. ## Kotlin for Apache Spark features From bf119be39e4f83a90be781e1896c2e7343114483 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 29 Apr 2022 11:12:54 +0200 Subject: [PATCH 174/213] temp test to see if we can publish to gh packages from a branch --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 192ddb6f..e6dcc8ba 100644 --- a/pom.xml +++ b/pom.xml @@ -313,7 +313,7 @@ github JetBrains - https://github.com/JetBrains/kotlin-spark-api + https://maven.pkg.github.com/JetBrains/kotlin-spark-api From 8c84471234b92f8f0b47795ba330ec5dba6a1942 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 29 Apr 2022 11:51:57 +0200 Subject: [PATCH 175/213] temp test to see if we can publish to gh packages from a branch --- examples/pom-3.2_2.12.xml | 214 +++++++++++++++++++------------------- pom.xml | 2 +- 2 files changed, 109 insertions(+), 107 deletions(-) diff --git a/examples/pom-3.2_2.12.xml b/examples/pom-3.2_2.12.xml index 7ad28cb7..788750bc 100644 --- a/examples/pom-3.2_2.12.xml +++ b/examples/pom-3.2_2.12.xml @@ -1,112 +1,114 @@ - + - 4.0.0 + 4.0.0 - Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12) - Example of usage - examples-3.2_2.12 - - org.jetbrains.kotlinx.spark - kotlin-spark-api-parent_2.12 - 1.0.4-SNAPSHOT - ../pom_2.12.xml - + Kotlin Spark API: Examples for Spark 3.2+ (Scala 2.12) + Example of usage + examples-3.2_2.12 + + org.jetbrains.kotlinx.spark + kotlin-spark-api-parent_2.12 + 1.0.4-SNAPSHOT + ../pom_2.12.xml + - - - org.jetbrains.kotlin - kotlin-reflect - - - org.jetbrains.kotlinx.spark - kotlin-spark-api-3.2 - ${project.version} - - - org.apache.spark - spark-sql_${scala.compat.version} - ${spark3.version} - - - org.apache.spark - spark-streaming_${scala.compat.version} - ${spark3.version} - - - org.apache.spark - spark-streaming-kafka-0-10_${scala.compat.version} - ${spark3.version} - - + + + org.jetbrains.kotlin + kotlin-reflect + + + org.jetbrains.kotlinx.spark + kotlin-spark-api-3.2 + ${project.version} + + + org.apache.spark + spark-sql_${scala.compat.version} + ${spark3.version} + + + org.apache.spark + spark-streaming_${scala.compat.version} + ${spark3.version} + + + org.apache.spark + spark-streaming-kafka-0-10_${scala.compat.version} + ${spark3.version} + + - - src/main/kotlin - src/test/kotlin - target/3.2/${scala.compat.version} - - - org.jetbrains.kotlin - kotlin-maven-plugin - - - compile - - compile - - - - test-compile - - test-compile - - - - - - org.apache.maven.plugins - maven-assembly-plugin - ${maven-assembly-plugin.version} - - - jar-with-dependencies - - - - org.jetbrains.spark.api.examples.WordCountKt - - - - - - org.apache.maven.plugins - maven-site-plugin - - true - - - - org.apache.maven.plugins - maven-deploy-plugin - - true - - - - org.sonatype.plugins - nexus-staging-maven-plugin - - true - - - - org.apache.maven.plugins - maven-compiler-plugin - - 8 - 8 - - - - + + src/main/kotlin + src/test/kotlin + target/3.2/${scala.compat.version} + + + org.jetbrains.kotlin + kotlin-maven-plugin + + + compile + + compile + + + + test-compile + + test-compile + + + + + + org.apache.maven.plugins + maven-assembly-plugin + ${maven-assembly-plugin.version} + + + jar-with-dependencies + + + + org.jetbrains.spark.api.examples.WordCountKt + + + + + + org.apache.maven.plugins + maven-site-plugin + + true + + + + org.apache.maven.plugins + maven-deploy-plugin + + true + + + + org.sonatype.plugins + nexus-staging-maven-plugin + + true + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + 8 + 8 + + + + diff --git a/pom.xml b/pom.xml index e6dcc8ba..3f503fe8 100644 --- a/pom.xml +++ b/pom.xml @@ -312,7 +312,7 @@ github - JetBrains + GitHub JetBrains Apache Maven Packages https://maven.pkg.github.com/JetBrains/kotlin-spark-api From 2fb01c38f8d6cb45097017cd82bf3255e4b7eeb5 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 29 Apr 2022 16:20:24 +0200 Subject: [PATCH 176/213] enabling snappy setting for lz4 compression codec --- .../spark/api/jupyter/SparkIntegration.kt | 1 + pom.xml | 24 +++++++++---------- scala-tuples-in-kotlin/pom_2.12.xml | 4 ++-- 3 files changed, 15 insertions(+), 14 deletions(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index c77ef111..0994e9f7 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -101,6 +101,7 @@ internal class SparkIntegration : JupyterIntegration() { .master(SparkConf().get("spark.master", "local[*]")) .appName("Jupyter") .config("spark.sql.codegen.wholeStage", false) + .config("spark.io.compression.codec", "snappy") .getOrCreate()""".trimIndent(), """ spark.sparkContext.setLogLevel(org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR)""".trimIndent(), diff --git a/pom.xml b/pom.xml index 3f503fe8..2a6ff4a6 100644 --- a/pom.xml +++ b/pom.xml @@ -256,18 +256,18 @@ scala-2.12,release-sign - - org.sonatype.plugins - nexus-staging-maven-plugin - ${nexus-staging-plugin.version} - true - - ossrh - https://oss.sonatype.org/ - false - 20 - - + + + + + + + + + + + + diff --git a/scala-tuples-in-kotlin/pom_2.12.xml b/scala-tuples-in-kotlin/pom_2.12.xml index cf67af41..7e5f02fc 100644 --- a/scala-tuples-in-kotlin/pom_2.12.xml +++ b/scala-tuples-in-kotlin/pom_2.12.xml @@ -142,7 +142,7 @@ org.apache.maven.plugins maven-deploy-plugin - true + false @@ -150,7 +150,7 @@ org.sonatype.plugins nexus-staging-maven-plugin - true + false From b7c9297681935120dbad520a43c120369af3cc71 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 2 May 2022 12:41:00 +0200 Subject: [PATCH 177/213] jupyter api allows multiple integration files now --- .../kotlinx/spark/api/jupyter/Integration.kt | 96 ++++++++++++++++ .../spark/api/jupyter/SparkIntegration.kt | 108 ++++-------------- .../api/jupyter/SparkStreamingIntegration.kt | 60 ++++++++++ .../kotlin-jupyter-libraries/libraries.json | 3 + pom.xml | 2 +- 5 files changed, 182 insertions(+), 87 deletions(-) create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt create mode 100644 kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt new file mode 100644 index 00000000..2dacf62a --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -0,0 +1,96 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api.jupyter + +import org.apache.spark.api.java.JavaRDDLike +import org.apache.spark.rdd.RDD +import org.apache.spark.sql.Dataset +import org.intellij.lang.annotations.Language +import org.jetbrains.kotlinx.jupyter.api.HTML +import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost +import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration + +abstract class Integration : JupyterIntegration() { + + private val kotlinVersion = "1.6.21" + private val scalaCompatVersion = "2.12" + private val scalaVersion = "2.12.15" + private val spark3Version = "3.2.1" + + abstract fun KotlinKernelHost.onLoaded() + + override fun Builder.onLoaded() { + + dependencies( + "org.apache.spark:spark-repl_$scalaCompatVersion:$spark3Version", + "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion", + "org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion", + "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-mllib_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-graphx_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-launcher_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-catalyst_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-core_$scalaCompatVersion:$spark3Version", + "org.scala-lang:scala-library:$scalaVersion", + "org.scala-lang.modules:scala-xml_$scalaCompatVersion:2.0.1", + "org.scala-lang:scala-reflect:$scalaVersion", + "org.scala-lang:scala-compiler:$scalaVersion", + "commons-io:commons-io:2.11.0", + ) + + println("SparkIntegration loaded") + + import( + "org.jetbrains.kotlinx.spark.api.*", + "org.jetbrains.kotlinx.spark.api.tuples.*", + *(1..22).map { "scala.Tuple$it" }.toTypedArray(), + "org.apache.spark.sql.functions.*", + "org.apache.spark.*", + "org.apache.spark.sql.*", + "org.apache.spark.api.java.*", + "scala.collection.Seq", + "org.apache.spark.rdd.*", + "java.io.Serializable", + "org.apache.spark.streaming.api.java.*", + "org.apache.spark.streaming.api.*", + "org.apache.spark.streaming.*", + ) + + onLoaded { + onLoaded() + } + + // Render Dataset + render> { + HTML(it.toHtml()) + } + + render> { + HTML(it.toJavaRDD().toHtml()) + } + + render> { + HTML(it.toHtml()) + } + } +} diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index 0994e9f7..44a0614e 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -33,6 +33,7 @@ import java.io.InputStreamReader import org.apache.spark.* +import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost import scala.collection.* import org.jetbrains.kotlinx.spark.api.SparkSession import scala.Product @@ -40,62 +41,19 @@ import java.io.Serializable import scala.collection.Iterable as ScalaIterable import scala.collection.Iterator as ScalaIterator +/** + * %use kotlin-spark-api + */ @Suppress("UNUSED_VARIABLE", "LocalVariableName") @OptIn(ExperimentalStdlibApi::class) -internal class SparkIntegration : JupyterIntegration() { - - private val kotlinVersion = "1.6.21" - private val scalaCompatVersion = "2.12" - private val scalaVersion = "2.12.15" - private val spark3Version = "3.2.1" - - override fun Builder.onLoaded() { - - dependencies( - "org.apache.spark:spark-repl_$scalaCompatVersion:$spark3Version", - "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion", - "org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion", - "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-mllib_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-graphx_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-launcher_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-catalyst_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-core_$scalaCompatVersion:$spark3Version", - "org.scala-lang:scala-library:$scalaVersion", - "org.scala-lang.modules:scala-xml_$scalaCompatVersion:2.0.1", - "org.scala-lang:scala-reflect:$scalaVersion", - "org.scala-lang:scala-compiler:$scalaVersion", - "commons-io:commons-io:2.11.0", - ) - - println("SparkIntegration loaded") - - import( - "org.jetbrains.kotlinx.spark.api.*", - "org.jetbrains.kotlinx.spark.api.tuples.*", - *(1..22).map { "scala.Tuple$it" }.toTypedArray(), - "org.apache.spark.sql.functions.*", - "org.apache.spark.*", - "org.apache.spark.sql.*", - "org.apache.spark.api.java.*", - "scala.collection.Seq", - "org.apache.spark.rdd.*", - "java.io.Serializable", - "org.apache.spark.streaming.api.java.*", - "org.apache.spark.streaming.api.*", - "org.apache.spark.streaming.*", - ) +internal class SparkIntegration : Integration() { - // onLoaded is only done for the non-streaming variant of kotlin-spark-api in the json file - onLoaded { - val _0 = execute("""%dumpClassesForSpark""") + override fun KotlinKernelHost.onLoaded() { + val _0 = execute("""%dumpClassesForSpark""") - @Language("kts") - val _1 = listOf( - """ + @Language("kts") + val _1 = listOf( + """ val spark = org.jetbrains.kotlinx.spark.api.SparkSession .builder() .master(SparkConf().get("spark.master", "local[*]")) @@ -103,52 +61,30 @@ internal class SparkIntegration : JupyterIntegration() { .config("spark.sql.codegen.wholeStage", false) .config("spark.io.compression.codec", "snappy") .getOrCreate()""".trimIndent(), - """ + """ spark.sparkContext.setLogLevel(org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR)""".trimIndent(), - """ + """ val sc by lazy { org.apache.spark.api.java.JavaSparkContext(spark.sparkContext) }""".trimIndent(), - """ + """ println("Spark session has been started and is running. No `withSpark { }` necessary, you can access `spark` and `sc` directly. To use Spark streaming, use `%use kotlin-spark-api-streaming` instead.")""".trimIndent(), - """ + """ inline fun List.toDS(): Dataset = toDS(spark)""".trimIndent(), - """ + """ inline fun Array.toDS(): Dataset = spark.dsOf(*this)""".trimIndent(), - """ + """ inline fun dsOf(vararg arg: T): Dataset = spark.dsOf(*arg)""".trimIndent(), - """ + """ inline fun RDD.toDS(): Dataset = toDS(spark)""".trimIndent(), - """ + """ inline fun JavaRDDLike.toDS(): Dataset = toDS(spark)""".trimIndent(), - """ + """ inline fun RDD.toDF(): Dataset = toDF(spark)""".trimIndent(), - """ + """ inline fun JavaRDDLike.toDF(): Dataset = toDF(spark)""".trimIndent(), - """ + """ val udf: UDFRegistration get() = spark.udf()""".trimIndent(), - ).map(::execute) - } - - onShutdown { - @Language("kts") - val _0 = execute(""" - spark.stop()""".trimIndent() - ) - } - - - // Render Dataset - render> { - HTML(it.toHtml()) - } - - render> { - HTML(it.toJavaRDD().toHtml()) - } - - render> { - HTML(it.toHtml()) - } + ).map(::execute) } } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt new file mode 100644 index 00000000..a0834cc7 --- /dev/null +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt @@ -0,0 +1,60 @@ +/*- + * =LICENSE= + * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + * ---------- + * Copyright (C) 2019 - 2022 JetBrains + * ---------- + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * =LICENSEEND= + */ +package org.jetbrains.kotlinx.spark.api.jupyter + +import kotlinx.html.* +import kotlinx.html.stream.appendHTML +import org.apache.spark.api.java.JavaRDDLike +import org.apache.spark.rdd.RDD +import org.apache.spark.sql.Dataset +import org.apache.spark.unsafe.array.ByteArrayMethods +import org.intellij.lang.annotations.Language +import org.jetbrains.kotlinx.jupyter.api.HTML +import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration +import org.jetbrains.kotlinx.spark.api.* +import java.io.InputStreamReader + + +import org.apache.spark.* +import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost +import scala.collection.* +import org.jetbrains.kotlinx.spark.api.SparkSession +import scala.Product +import java.io.Serializable +import scala.collection.Iterable as ScalaIterable +import scala.collection.Iterator as ScalaIterator + +/** + * %use kotlin-spark-api-streaming + */ +@Suppress("UNUSED_VARIABLE", "LocalVariableName") +@OptIn(ExperimentalStdlibApi::class) +internal class SparkStreamingIntegration : Integration() { + + override fun KotlinKernelHost.onLoaded() { + val _0 = execute("""%dumpClassesForSpark""") + + @Language("kts") + val _1 = listOf( + """ + println("To start a spark streaming session, simply use `withSparkStreaming { }` inside a cell. To use Spark normally, use `withSpark { }` in a cell, or use `%use kotlin-spark-api` to start a Spark session for the whole notebook.")""".trimIndent(), + ).map(::execute) + } +} diff --git a/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json b/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json index d751cf79..82c7354e 100644 --- a/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json +++ b/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json @@ -3,6 +3,9 @@ "producers": [ { "fqn": "org.jetbrains.kotlinx.spark.api.jupyter.SparkIntegration" + }, + { + "fqn": "org.jetbrains.kotlinx.spark.api.jupyter.SparkStreamingIntegration" } ] } diff --git a/pom.xml b/pom.xml index 2a6ff4a6..80b1245d 100644 --- a/pom.xml +++ b/pom.xml @@ -17,7 +17,7 @@ 1.1.0 3.1.0 3.2.1 - 0.11.0-77 + 0.11.0-79 0.7.3 3.3.1 From 10c3a90cc2b769d6221a6f0cd976b063f8325a9c Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 2 May 2022 15:18:49 +0200 Subject: [PATCH 178/213] This build will fail. Requires mavenLocal build of kotlin jupyter with this pull request https://github.com/Kotlin/kotlin-jupyter/pull/364 --- .../kotlinx/spark/api/JupyterTests.kt | 32 ++++++++++++++++++- pom.xml | 3 +- 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index d70d1bba..290eacdc 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -25,20 +25,50 @@ import io.kotest.matchers.shouldNotBe import io.kotest.matchers.string.shouldContain import io.kotest.matchers.types.shouldBeInstanceOf import jupyter.kotlin.DependsOn +import kotlinx.serialization.decodeFromString +import kotlinx.serialization.encodeToString +import kotlinx.serialization.json.Json import org.apache.spark.SparkConf import org.apache.spark.api.java.JavaSparkContext import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.EvalRequestData import org.jetbrains.kotlinx.jupyter.ReplForJupyter +import org.jetbrains.kotlinx.jupyter.ReplForJupyterImpl import org.jetbrains.kotlinx.jupyter.api.Code +import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost import org.jetbrains.kotlinx.jupyter.api.MimeTypedResult +import org.jetbrains.kotlinx.jupyter.api.libraries.* +import org.jetbrains.kotlinx.jupyter.dependencies.ResolverConfig +import org.jetbrains.kotlinx.jupyter.libraries.EmptyResolutionInfoProvider +import org.jetbrains.kotlinx.jupyter.libraries.LibrariesScanner +import org.jetbrains.kotlinx.jupyter.libraries.LibraryResolver import org.jetbrains.kotlinx.jupyter.libraries.buildDependenciesInitCode import org.jetbrains.kotlinx.jupyter.repl.EvalResultEx import org.jetbrains.kotlinx.jupyter.testkit.ReplProvider +import org.jetbrains.kotlinx.jupyter.util.NameAcceptanceRule +import org.jetbrains.kotlinx.jupyter.util.PatternNameAcceptanceRule import kotlin.script.experimental.jvm.util.classpathFromClassloader class JupyterTests : ShouldSpec({ - val replProvider: ReplProvider = ReplProvider.withoutLibraryResolution + val replProvider = ReplProvider { classpath -> + ReplForJupyterImpl( + resolutionInfoProvider = EmptyResolutionInfoProvider, + scriptClasspath = classpath, + isEmbedded = true, + ).apply { + eval { + librariesScanner.addLibrariesFromClassLoader( + classLoader = currentClassLoader, + host = this, + integrationTypeNameRules = listOf( + PatternNameAcceptanceRule(false, "org.jetbrains.kotlinx.spark.api.jupyter.**"), + PatternNameAcceptanceRule(true, "org.jetbrains.kotlinx.spark.api.jupyter.SparkIntegration"), + ), + ) + } + } + } + val currentClassLoader = DependsOn::class.java.classLoader val scriptClasspath = classpathFromClassloader(currentClassLoader).orEmpty() diff --git a/pom.xml b/pom.xml index 80b1245d..08146145 100644 --- a/pom.xml +++ b/pom.xml @@ -17,7 +17,8 @@ 1.1.0 3.1.0 3.2.1 - 0.11.0-79 + + 0.11.0-100500-1 0.7.3 3.3.1 From 5009355cee1260f4f2a08006af6d3b6e91640f95 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 5 May 2022 15:19:55 +0200 Subject: [PATCH 179/213] Updated kotlin jupyter version, added streaming test for jupyter as well --- examples/pom-3.2_2.12.xml | 4 +- .../kotlinx/spark/api/jupyter/Integration.kt | 2 - .../kotlinx/spark/api/JupyterTests.kt | 95 ++++++++++++++++--- .../kotlinx/spark/api/StreamingTest.kt | 2 - pom.xml | 3 +- 5 files changed, 87 insertions(+), 19 deletions(-) diff --git a/examples/pom-3.2_2.12.xml b/examples/pom-3.2_2.12.xml index 788750bc..8d2cb858 100644 --- a/examples/pom-3.2_2.12.xml +++ b/examples/pom-3.2_2.12.xml @@ -105,8 +105,8 @@ maven-compiler-plugin ${maven-compiler-plugin.version} - 8 - 8 + 9 + 9 diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 2dacf62a..f26f56a4 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -58,8 +58,6 @@ abstract class Integration : JupyterIntegration() { "commons-io:commons-io:2.11.0", ) - println("SparkIntegration loaded") - import( "org.jetbrains.kotlinx.spark.api.*", "org.jetbrains.kotlinx.spark.api.tuples.*", diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt index 290eacdc..89f1db1a 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt @@ -19,34 +19,31 @@ */ package org.jetbrains.kotlinx.spark.api +import io.kotest.assertions.throwables.shouldThrowAny import io.kotest.core.spec.style.ShouldSpec +import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.nulls.shouldNotBeNull +import io.kotest.matchers.shouldBe import io.kotest.matchers.shouldNotBe import io.kotest.matchers.string.shouldContain import io.kotest.matchers.types.shouldBeInstanceOf import jupyter.kotlin.DependsOn -import kotlinx.serialization.decodeFromString -import kotlinx.serialization.encodeToString -import kotlinx.serialization.json.Json -import org.apache.spark.SparkConf import org.apache.spark.api.java.JavaSparkContext +import org.apache.spark.streaming.Duration import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.EvalRequestData import org.jetbrains.kotlinx.jupyter.ReplForJupyter import org.jetbrains.kotlinx.jupyter.ReplForJupyterImpl import org.jetbrains.kotlinx.jupyter.api.Code -import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost import org.jetbrains.kotlinx.jupyter.api.MimeTypedResult -import org.jetbrains.kotlinx.jupyter.api.libraries.* -import org.jetbrains.kotlinx.jupyter.dependencies.ResolverConfig import org.jetbrains.kotlinx.jupyter.libraries.EmptyResolutionInfoProvider -import org.jetbrains.kotlinx.jupyter.libraries.LibrariesScanner -import org.jetbrains.kotlinx.jupyter.libraries.LibraryResolver -import org.jetbrains.kotlinx.jupyter.libraries.buildDependenciesInitCode import org.jetbrains.kotlinx.jupyter.repl.EvalResultEx import org.jetbrains.kotlinx.jupyter.testkit.ReplProvider -import org.jetbrains.kotlinx.jupyter.util.NameAcceptanceRule import org.jetbrains.kotlinx.jupyter.util.PatternNameAcceptanceRule +import org.jetbrains.kotlinx.spark.api.tuples.X +import org.jetbrains.kotlinx.spark.api.tuples.component1 +import org.jetbrains.kotlinx.spark.api.tuples.component2 +import java.util.* import kotlin.script.experimental.jvm.util.classpathFromClassloader class JupyterTests : ShouldSpec({ @@ -235,6 +232,82 @@ class JupyterTests : ShouldSpec({ } }) +class JupyterStreamingTests : ShouldSpec({ + val replProvider = ReplProvider { classpath -> + ReplForJupyterImpl( + resolutionInfoProvider = EmptyResolutionInfoProvider, + scriptClasspath = classpath, + isEmbedded = true, + ).apply { + eval { + librariesScanner.addLibrariesFromClassLoader( + classLoader = currentClassLoader, + host = this, + integrationTypeNameRules = listOf( + PatternNameAcceptanceRule(false, "org.jetbrains.kotlinx.spark.api.jupyter.**"), + PatternNameAcceptanceRule(true, + "org.jetbrains.kotlinx.spark.api.jupyter.SparkStreamingIntegration"), + ), + ) + } + } + } + + val currentClassLoader = DependsOn::class.java.classLoader + val scriptClasspath = classpathFromClassloader(currentClassLoader).orEmpty() + + fun createRepl(): ReplForJupyter = replProvider(scriptClasspath) + suspend fun withRepl(action: suspend ReplForJupyter.() -> Unit): Unit = createRepl().action() + + context("Jupyter") { + withRepl { + + should("Not have spark instance") { + shouldThrowAny { + @Language("kts") + val spark = exec("""spark""") + Unit + } + } + + should("Not have sc instance") { + shouldThrowAny { + @Language("kts") + val sc = exec("""sc""") + Unit + } + } + + should("stream") { + val input = listOf("aaa", "bbb", "aaa", "ccc") + val counter = Counter(0) + + withSparkStreaming(Duration(10), timeout = 1000) { + + val (counterBroadcast, queue) = withSpark(ssc) { + spark.broadcast(counter) X LinkedList(listOf(sc.parallelize(input))) + } + + val inputStream = ssc.queueStream(queue) + + inputStream.foreachRDD { rdd, _ -> + withSpark(rdd) { + rdd.toDS().forEach { + it shouldBeIn input + counterBroadcast.value.value++ + } + } + } + } + + counter.value shouldBe input.size + } + + } + } +}) + + private fun ReplForJupyter.execEx(code: Code): EvalResultEx = evalEx(EvalRequestData(code)) private fun ReplForJupyter.exec(code: Code): Any? = execEx(code).renderedValue diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt index 8ae7f5c2..9719e8fc 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/StreamingTest.kt @@ -49,7 +49,6 @@ class StreamingTest : ShouldSpec({ context("streaming") { should("stream") { - val input = listOf("aaa", "bbb", "aaa", "ccc") val counter = Counter(0) @@ -72,7 +71,6 @@ class StreamingTest : ShouldSpec({ } counter.value shouldBe input.size - } should("Work with checkpointpath") { diff --git a/pom.xml b/pom.xml index 08146145..ecdf8382 100644 --- a/pom.xml +++ b/pom.xml @@ -17,8 +17,7 @@ 1.1.0 3.1.0 3.2.1 - - 0.11.0-100500-1 + 0.11.0-83 0.7.3 3.3.1 From 0be6b70d166a181fc48d90187e843e2a1be32170 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 5 May 2022 16:18:24 +0200 Subject: [PATCH 180/213] removed snappy again --- .../org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt | 1 - 1 file changed, 1 deletion(-) diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index 44a0614e..5f663b1b 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -59,7 +59,6 @@ internal class SparkIntegration : Integration() { .master(SparkConf().get("spark.master", "local[*]")) .appName("Jupyter") .config("spark.sql.codegen.wholeStage", false) - .config("spark.io.compression.codec", "snappy") .getOrCreate()""".trimIndent(), """ spark.sparkContext.setLogLevel(org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR)""".trimIndent(), From 84bf0d5a5baac895fe1e2c2bb934218fde6e38bc Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 6 May 2022 11:53:23 +0200 Subject: [PATCH 181/213] fixed gh actions --- .github/workflows/publish_dev_version.yml | 4 +--- .../jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt | 1 + 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish_dev_version.yml b/.github/workflows/publish_dev_version.yml index 15e25865..5f7a564e 100644 --- a/.github/workflows/publish_dev_version.yml +++ b/.github/workflows/publish_dev_version.yml @@ -23,10 +23,8 @@ jobs: path: ~/.m2 key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 - - name: Build with Maven - run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotest.tags="!Kafka" - name: Deploy to GH Packages - run: ./mvnw --batch-mode deploy + run: ./mvnw --batch-mode deploy -Dkotest.tags="!Kafka" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index 5f663b1b..fbb381e0 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -38,6 +38,7 @@ import scala.collection.* import org.jetbrains.kotlinx.spark.api.SparkSession import scala.Product import java.io.Serializable +import java.util.Random import scala.collection.Iterable as ScalaIterable import scala.collection.Iterator as ScalaIterator From 2b6aca4f8a68b193ca00e3c7d7daccb1d1cafdf7 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 6 May 2022 13:30:16 +0200 Subject: [PATCH 182/213] some cleaning per request --- kotlin-spark-api-streaming.json | 14 --------- kotlin-spark-api.json | 30 ------------------- .../spark/api/jupyter/HtmlRendering.kt | 29 ++++++++---------- .../kotlinx/spark/api/jupyter/Integration.kt | 1 - .../spark/api/jupyter/SparkIntegration.kt | 21 +------------ pom.xml | 8 ----- 6 files changed, 14 insertions(+), 89 deletions(-) delete mode 100644 kotlin-spark-api-streaming.json delete mode 100644 kotlin-spark-api.json diff --git a/kotlin-spark-api-streaming.json b/kotlin-spark-api-streaming.json deleted file mode 100644 index fb3709d3..00000000 --- a/kotlin-spark-api-streaming.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "description": "Kotlin for Apache® Spark™", - "properties": { - "spark": "3.2", - "v": "2.0.0" - }, - "link": "https://github.com/JetBrains/kotlin-spark-api", - "dependencies": [ - "org.jetbrains.kotlinx.spark:kotlin-spark-api-$spark:$v" - ], - "init": [ - "%dumpClassesForSpark" - ] -} \ No newline at end of file diff --git a/kotlin-spark-api.json b/kotlin-spark-api.json deleted file mode 100644 index 1aef9797..00000000 --- a/kotlin-spark-api.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "description": "Kotlin for Apache® Spark™", - "properties": { - "spark": "3.2", - "v": "2.0.0" - }, - "link": "https://github.com/JetBrains/kotlin-spark-api", - "dependencies": [ - "org.jetbrains.kotlinx.spark:kotlin-spark-api-$spark:$v" - ], - "init": [ - "%dumpClassesForSpark", - "val spark = org.jetbrains.kotlinx.spark.api.SparkSession.builder().master(SparkConf().get(\"spark.master\", \"local[*]\")).appName(\"Jupyter\").getOrCreate()", - "spark.sparkContext.setLogLevel(org.jetbrains.kotlinx.spark.api.SparkLogLevel.ERROR)", - "val sc by lazy { org.apache.spark.api.java.JavaSparkContext(spark.sparkContext) }", - "println(\"Spark session has been started and is running. No `withSpark { }` necessary, you can access `spark` and `sc` directly. To use Spark streaming, use `%use kotlin-spark-api-streaming` instead.\")", - - "inline fun List.toDS(): Dataset = toDS(spark)", - "inline fun Array.toDS(): Dataset = spark.dsOf(*this)", - "inline fun dsOf(vararg arg: T): Dataset = spark.dsOf(*arg)", - "inline fun RDD.toDS(): Dataset = toDS(spark)", - "inline fun JavaRDDLike.toDS(): Dataset = toDS(spark)", - "inline fun RDD.toDF(): Dataset = toDF(spark)", - "inline fun JavaRDDLike.toDF(): Dataset = toDF(spark)", - "val udf: UDFRegistration get() = spark.udf()" - ], - "shutdown": [ - "spark.stop()" - ] -} \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt index 5abf4d3c..ad083962 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt @@ -84,14 +84,12 @@ internal fun JavaRDDLike.toHtml(limit: Int = 20, truncate: Int = 30): else -> row.toString() } - +string.let { - if (truncate > 0 && it.length > truncate) { - // do not show ellipses for strings shorter than 4 characters. - if (truncate < 4) it.substring(0, truncate) - else it.substring(0, truncate - 3) + "..." - } else { - it - } + +if (truncate > 0 && string.length > truncate) { + // do not show ellipses for strings shorter than 4 characters. + if (truncate < 4) string.substring(0, truncate) + else string.substring(0, truncate - 3) + "..." + } else { + string } } } @@ -115,15 +113,14 @@ internal fun Dataset.toHtml(limit: Int = 20, truncate: Int = 30): String tr { for (header in rows.first()) th { - +header.let { - if (truncate > 0 && it.length > truncate) { - // do not show ellipses for strings shorter than 4 characters. - if (truncate < 4) it.substring(0, truncate) - else it.substring(0, truncate - 3) + "..." - } else { - it - } + +if (truncate > 0 && header.length > truncate) { + // do not show ellipses for strings shorter than 4 characters. + if (truncate < 4) header.substring(0, truncate) + else header.substring(0, truncate - 3) + "..." + } else { + header } + } } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index f26f56a4..0b2a8306 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -22,7 +22,6 @@ package org.jetbrains.kotlinx.spark.api.jupyter import org.apache.spark.api.java.JavaRDDLike import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset -import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.api.HTML import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index fbb381e0..fe758700 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -19,28 +19,9 @@ */ package org.jetbrains.kotlinx.spark.api.jupyter -import kotlinx.html.* -import kotlinx.html.stream.appendHTML -import org.apache.spark.api.java.JavaRDDLike -import org.apache.spark.rdd.RDD -import org.apache.spark.sql.Dataset -import org.apache.spark.unsafe.array.ByteArrayMethods -import org.intellij.lang.annotations.Language -import org.jetbrains.kotlinx.jupyter.api.HTML -import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration -import org.jetbrains.kotlinx.spark.api.* -import java.io.InputStreamReader - -import org.apache.spark.* +import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost -import scala.collection.* -import org.jetbrains.kotlinx.spark.api.SparkSession -import scala.Product -import java.io.Serializable -import java.util.Random -import scala.collection.Iterable as ScalaIterable -import scala.collection.Iterator as ScalaIterator /** * %use kotlin-spark-api diff --git a/pom.xml b/pom.xml index ecdf8382..4f697954 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,6 @@ 0.11.0-83 0.7.3 3.3.1 - 2.10.0 @@ -67,13 +66,6 @@ kotlin-jupyter-test-kit ${kotlin-jupyter-api.version} - - - - - - - From 52398d66fedbe80d01c9d58fd46c86cd6c69df85 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 6 May 2022 14:36:55 +0200 Subject: [PATCH 183/213] split off jupyter into a separate module --- jupyter/pom.xml | 151 ++++++++++++++++++ .../spark/api/jupyter/HtmlRendering.kt | 0 .../kotlinx/spark/api/jupyter/Integration.kt | 0 .../spark/api/jupyter/SparkIntegration.kt | 0 .../api/jupyter/SparkStreamingIntegration.kt | 0 .../kotlin-jupyter-libraries/libraries.json | 0 .../src/main/resources/table.css | 0 .../spark/api/jupyter}/JupyterTests.kt | 17 +- kotlin-spark-api/3.2/pom_2.12.xml | 31 ---- pom.xml | 12 +- pom_2.12.xml | 1 + 11 files changed, 163 insertions(+), 49 deletions(-) create mode 100644 jupyter/pom.xml rename {kotlin-spark-api/3.2 => jupyter}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt (100%) rename {kotlin-spark-api/3.2 => jupyter}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt (100%) rename {kotlin-spark-api/3.2 => jupyter}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt (100%) rename {kotlin-spark-api/3.2 => jupyter}/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt (100%) rename {kotlin-spark-api/3.2 => jupyter}/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json (100%) rename {kotlin-spark-api/3.2 => jupyter}/src/main/resources/table.css (100%) rename {kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api => jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter}/JupyterTests.kt (96%) diff --git a/jupyter/pom.xml b/jupyter/pom.xml new file mode 100644 index 00000000..2c816308 --- /dev/null +++ b/jupyter/pom.xml @@ -0,0 +1,151 @@ + + + + 4.0.0 + + Kotlin Spark API: Jupyter integration for Spark 3.2+ (Scala 2.12) + kotlin-spark-api-jupyter-3.2 + Jupyter integration + + org.jetbrains.kotlinx.spark + kotlin-spark-api-parent_2.12 + 1.0.4-SNAPSHOT + ../pom_2.12.xml + + jar + + + + 11 + 11 + + + + + kotlinx-html + kotlinx-html + https://maven.pkg.jetbrains.space/public/p/kotlinx-html/maven + + + kotlin + kotlin + https://maven.pkg.jetbrains.space/kotlin/p/kotlin/dev + + + + + + org.jetbrains.kotlinx.spark + kotlin-spark-api-3.2 + ${project.version} + + + org.jetbrains.kotlinx + kotlinx-html-jvm + ${kotlinx.html.version} + + + org.apache.spark + spark-repl_${scala.compat.version} + ${spark3.version} + + + org.jetbrains.kotlinx + kotlin-jupyter-api + ${kotlin-jupyter-api.version} + + + + + io.kotest + kotest-runner-junit5-jvm + ${kotest.version} + test + + + io.kotest.extensions + kotest-extensions-allure + ${kotest-extensions-allure.version} + test + + + org.jetbrains.kotlinx + kotlin-jupyter-test-kit + ${kotlin-jupyter-api.version} + test + + + + + src/main/kotlin + src/test/kotlin + target/${scala.compat.version} + + + org.jetbrains.kotlin + kotlin-maven-plugin + + + compile + + compile + + + + test-compile + + test-compile + + + + + + org.apache.maven.plugins + maven-assembly-plugin + ${maven-assembly-plugin.version} + + + jar-with-dependencies + + + + org.jetbrains.spark.api.examples.WordCountKt + + + + + + org.apache.maven.plugins + maven-site-plugin + + true + + + + org.apache.maven.plugins + maven-deploy-plugin + + false + + + + org.sonatype.plugins + nexus-staging-maven-plugin + + false + + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + 9 + 9 + + + + + \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt rename to jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/HtmlRendering.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt rename to jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt rename to jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt similarity index 100% rename from kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt rename to jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt diff --git a/kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json b/jupyter/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json similarity index 100% rename from kotlin-spark-api/3.2/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json rename to jupyter/src/main/resources/META-INF/kotlin-jupyter-libraries/libraries.json diff --git a/kotlin-spark-api/3.2/src/main/resources/table.css b/jupyter/src/main/resources/table.css similarity index 100% rename from kotlin-spark-api/3.2/src/main/resources/table.css rename to jupyter/src/main/resources/table.css diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt similarity index 96% rename from kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt rename to jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt index 89f1db1a..2f35bee4 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/JupyterTests.kt +++ b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt @@ -17,7 +17,7 @@ * limitations under the License. * =LICENSEEND= */ -package org.jetbrains.kotlinx.spark.api +package org.jetbrains.kotlinx.spark.api.jupyter import io.kotest.assertions.throwables.shouldThrowAny import io.kotest.core.spec.style.ShouldSpec @@ -40,9 +40,10 @@ import org.jetbrains.kotlinx.jupyter.libraries.EmptyResolutionInfoProvider import org.jetbrains.kotlinx.jupyter.repl.EvalResultEx import org.jetbrains.kotlinx.jupyter.testkit.ReplProvider import org.jetbrains.kotlinx.jupyter.util.PatternNameAcceptanceRule -import org.jetbrains.kotlinx.spark.api.tuples.X -import org.jetbrains.kotlinx.spark.api.tuples.component1 -import org.jetbrains.kotlinx.spark.api.tuples.component2 +import org.jetbrains.kotlinx.spark.api.tuples.* +import org.jetbrains.kotlinx.spark.api.* +import scala.Tuple2 +import java.io.Serializable import java.util.* import kotlin.script.experimental.jvm.util.classpathFromClassloader @@ -151,7 +152,7 @@ class JupyterTests : ShouldSpec({ html shouldContain "4, 5, 6" } - xshould("not render JavaRDDs with custom class") { + should("render JavaRDDs with custom class") { @Language("kts") val klass = exec(""" @@ -173,7 +174,7 @@ class JupyterTests : ShouldSpec({ rdd """.trimIndent() ) - html shouldContain "Cannot render this RDD of this class." + html shouldContain "Test(longFirstName=aaaaaaaa..." } should("render JavaPairRDDs") { @@ -326,4 +327,6 @@ private fun ReplForJupyter.execHtml(code: Code): String { val html = res["text/html"] html.shouldNotBeNull() return html -} \ No newline at end of file +} + +class Counter(@Volatile var value: Int) : Serializable diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 0bc4c74b..00439e27 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -14,18 +14,6 @@ jar - - - kotlinx-html - kotlinx-html - https://maven.pkg.jetbrains.space/public/p/kotlinx-html/maven - - - kotlin - kotlin - https://maven.pkg.jetbrains.space/kotlin/p/kotlin/dev - - @@ -49,25 +37,11 @@ spark-streaming-kafka-0-10_${scala.compat.version} ${spark3.version} - - org.jetbrains.kotlinx - kotlinx-html-jvm - ${kotlinx.html.version} - - - org.jetbrains.kotlinx - kotlin-jupyter-api - org.apache.spark spark-sql_${scala.compat.version} ${spark3.version} - - org.apache.spark - spark-repl_${scala.compat.version} - ${spark3.version} - org.apache.spark spark-streaming_${scala.compat.version} @@ -124,11 +98,6 @@ 3.1.0 test - - org.jetbrains.kotlinx - kotlin-jupyter-test-kit - test - diff --git a/pom.xml b/pom.xml index 4f697954..a7aa7ab8 100644 --- a/pom.xml +++ b/pom.xml @@ -18,7 +18,7 @@ 3.1.0 3.2.1 0.11.0-83 - 0.7.3 + 0.7.5 3.3.1 @@ -56,16 +56,6 @@ kotlin-reflect ${kotlin.version} - - org.jetbrains.kotlinx - kotlin-jupyter-api - ${kotlin-jupyter-api.version} - - - org.jetbrains.kotlinx - kotlin-jupyter-test-kit - ${kotlin-jupyter-api.version} - diff --git a/pom_2.12.xml b/pom_2.12.xml index 6be7e614..bdd644ce 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -24,6 +24,7 @@ scala-tuples-in-kotlin/pom_2.12.xml kotlin-spark-api/3.2/pom_2.12.xml examples/pom-3.2_2.12.xml + jupyter/pom.xml From 81448ee8132865b8478d022543f36625b0db1a07 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 6 May 2022 14:48:58 +0200 Subject: [PATCH 184/213] gh actions deploying is working, so setting correct branch --- .github/workflows/publish_dev_version.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish_dev_version.yml b/.github/workflows/publish_dev_version.yml index 5f7a564e..80266c62 100644 --- a/.github/workflows/publish_dev_version.yml +++ b/.github/workflows/publish_dev_version.yml @@ -3,7 +3,7 @@ name: Generate and publish docs on: push: branches: - - "jupyter-test" # TODO change to spark-3.2 + - "spark-3.2" jobs: build-and-deploy: From 7c2d652d8db8fc5c923c6265c7688113f4f97de6 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 6 May 2022 16:47:53 +0200 Subject: [PATCH 185/213] fixed dependencies as requested --- core/3.2/pom_2.12.xml | 151 ++++++------- examples/pom-3.2_2.12.xml | 4 - jupyter/pom.xml | 17 +- kotlin-spark-api/3.2/pom_2.12.xml | 342 +++++++++++++++--------------- pom_2.12.xml | 2 +- 5 files changed, 267 insertions(+), 249 deletions(-) diff --git a/core/3.2/pom_2.12.xml b/core/3.2/pom_2.12.xml index 0752c43f..5fed5559 100644 --- a/core/3.2/pom_2.12.xml +++ b/core/3.2/pom_2.12.xml @@ -1,80 +1,81 @@ - - 4.0.0 + + 4.0.0 - Kotlin Spark API: Scala core for Spark 3.2+ (Scala 2.12) - Scala-Spark 3.2+ compatibility layer for Kotlin for Apache Spark - core-3.2_2.12 - - org.jetbrains.kotlinx.spark - kotlin-spark-api-parent_2.12 - 1.0.4-SNAPSHOT - ../../pom_2.12.xml - + Kotlin Spark API: Scala core for Spark 3.2+ (Scala 2.12) + Scala-Spark 3.2+ compatibility layer for Kotlin for Apache Spark + core-3.2_2.12 + + org.jetbrains.kotlinx.spark + kotlin-spark-api-parent_2.12 + 1.0.4-SNAPSHOT + ../../pom_2.12.xml + + + + + org.scala-lang + scala-library + ${scala.version} + + + org.jetbrains.kotlin + kotlin-reflect + + + + + + org.apache.spark + spark-sql_${scala.compat.version} + ${spark3.version} + provided + + + + + + src/main/scala + src/test/scala + target/${scala.compat.version} + + + net.alchim31.maven + scala-maven-plugin + ${scala-maven-plugin.version} + + + compile + + compile + testCompile + + + + -dependencyfile + ${project.build.directory}/.scala_dependencies + + + + + docjar + + doc-jar + + pre-integration-test + + + + + org.apache.maven.plugins + maven-site-plugin + + true + + + + - - - org.scala-lang - scala-library - ${scala.version} - - - org.jetbrains.kotlin - kotlin-reflect - - - - org.apache.spark - spark-sql_${scala.compat.version} - ${spark3.version} - - - - - - - - - - src/main/scala - src/test/scala - target/${scala.compat.version} - - - net.alchim31.maven - scala-maven-plugin - ${scala-maven-plugin.version} - - - compile - - compile - testCompile - - - - -dependencyfile - ${project.build.directory}/.scala_dependencies - - - - - docjar - - doc-jar - - pre-integration-test - - - - - org.apache.maven.plugins - maven-site-plugin - - true - - - - diff --git a/examples/pom-3.2_2.12.xml b/examples/pom-3.2_2.12.xml index 8d2cb858..d069c058 100644 --- a/examples/pom-3.2_2.12.xml +++ b/examples/pom-3.2_2.12.xml @@ -15,10 +15,6 @@ - - org.jetbrains.kotlin - kotlin-reflect - org.jetbrains.kotlinx.spark kotlin-spark-api-3.2 diff --git a/jupyter/pom.xml b/jupyter/pom.xml index 2c816308..bfc67e6d 100644 --- a/jupyter/pom.xml +++ b/jupyter/pom.xml @@ -46,11 +46,26 @@ kotlinx-html-jvm ${kotlinx.html.version} - + + org.apache.spark + spark-sql_${scala.compat.version} + ${spark3.version} + + org.apache.spark spark-repl_${scala.compat.version} ${spark3.version} + + org.apache.spark + spark-streaming_${scala.compat.version} + ${spark3.version} + + + org.apache.hadoop + hadoop-client + ${hadoop.version} + org.jetbrains.kotlinx kotlin-jupyter-api diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 00439e27..32c5f0f9 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -1,182 +1,188 @@ - + - 4.0.0 + 4.0.0 - Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) - kotlin-spark-api-3.2 - Kotlin API compatible with spark 3.2.0 Kotlin for Apache Spark - - org.jetbrains.kotlinx.spark - kotlin-spark-api-parent_2.12 - 1.0.4-SNAPSHOT - ../../pom_2.12.xml - - jar + Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) + kotlin-spark-api-3.2 + Kotlin API compatible with spark 3.2.0 Kotlin for Apache Spark + + org.jetbrains.kotlinx.spark + kotlin-spark-api-parent_2.12 + 1.0.4-SNAPSHOT + ../../pom_2.12.xml + + jar - - - org.jetbrains.kotlin - kotlin-stdlib-jdk8 - - - org.jetbrains.kotlin - kotlin-reflect - - - org.jetbrains.kotlinx.spark - core-3.2_${scala.compat.version} - - - org.jetbrains.kotlinx.spark - scala-tuples-in-kotlin - - - org.apache.spark - spark-streaming-kafka-0-10_${scala.compat.version} - ${spark3.version} - - - org.apache.spark - spark-sql_${scala.compat.version} - ${spark3.version} - - - org.apache.spark - spark-streaming_${scala.compat.version} - ${spark3.version} - - - org.apache.hadoop - hadoop-client - ${hadoop.version} - provided - + + + org.jetbrains.kotlin + kotlin-stdlib-jdk8 + + + org.jetbrains.kotlin + kotlin-reflect + + + org.jetbrains.kotlinx.spark + core-3.2_${scala.compat.version} + + + org.jetbrains.kotlinx.spark + scala-tuples-in-kotlin + - - - io.kotest - kotest-runner-junit5-jvm - ${kotest.version} - test - - - io.kotest.extensions - kotest-extensions-allure - ${kotest-extensions-allure.version} - test - - - io.github.embeddedkafka - embedded-kafka_${scala.compat.version} - ${embedded-kafka.version} - test - - - com.beust - klaxon - ${klaxon.version} - test - - - ch.tutteli.atrium - atrium-fluent-en_GB - ${atrium.version} - test - - - org.apache.spark - spark-streaming_${scala.compat.version} - ${spark3.version} - tests - test - - - org.apache.kafka - kafka-streams-test-utils - 3.1.0 - test - - + + + org.apache.spark + spark-sql_${scala.compat.version} + ${spark3.version} + provided + + + org.apache.spark + spark-streaming_${scala.compat.version} + ${spark3.version} + provided + + + org.apache.hadoop + hadoop-client + ${hadoop.version} + provided + - - src/main/kotlin - src/test/kotlin - target/${scala.compat.version} - + + + org.apache.spark + spark-streaming-kafka-0-10_${scala.compat.version} + ${spark3.version} + test + + + io.kotest + kotest-runner-junit5-jvm + ${kotest.version} + test + + + io.kotest.extensions + kotest-extensions-allure + ${kotest-extensions-allure.version} + test + + + io.github.embeddedkafka + embedded-kafka_${scala.compat.version} + ${embedded-kafka.version} + test + + + com.beust + klaxon + ${klaxon.version} + test + + + ch.tutteli.atrium + atrium-fluent-en_GB + ${atrium.version} + test + + + org.apache.spark + spark-streaming_${scala.compat.version} + ${spark3.version} + tests + test + + + org.apache.kafka + kafka-streams-test-utils + 3.1.0 + test + + - - org.jetbrains.kotlin - kotlin-maven-plugin - - - compile - - compile - - - - test-compile - - test-compile - - - - + + src/main/kotlin + src/test/kotlin + target/${scala.compat.version} + - - org.apache.maven.plugins - maven-surefire-plugin - + + org.jetbrains.kotlin + kotlin-maven-plugin + + + compile + + compile + + + + test-compile + + test-compile + + + + - - org.jetbrains.dokka - dokka-maven-plugin - ${dokka.version} - - 8 - - - - dokka - - dokka - - pre-site - - - javadocjar - - javadocJar - - pre-integration-test - - - + + org.apache.maven.plugins + maven-surefire-plugin + - - io.qameta.allure - allure-maven - - ${project.basedir}/allure-results/${scala.compat.version} - - + + org.jetbrains.dokka + dokka-maven-plugin + ${dokka.version} + + 8 + + + + dokka + + dokka + + pre-site + + + javadocjar + + javadocJar + + pre-integration-test + + + - - org.jacoco - jacoco-maven-plugin - - - org.apache.maven.plugins - maven-compiler-plugin - ${maven-compiler-plugin.version} - - 8 - 8 - - + + io.qameta.allure + allure-maven + + ${project.basedir}/allure-results/${scala.compat.version} + + - - + + org.jacoco + jacoco-maven-plugin + + + org.apache.maven.plugins + maven-compiler-plugin + ${maven-compiler-plugin.version} + + 8 + 8 + + + + + diff --git a/pom_2.12.xml b/pom_2.12.xml index bdd644ce..f59970af 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -24,7 +24,7 @@ scala-tuples-in-kotlin/pom_2.12.xml kotlin-spark-api/3.2/pom_2.12.xml examples/pom-3.2_2.12.xml - jupyter/pom.xml + jupyter From 234430d22652371421f3d8ad1539ba5a4d2b53ab Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 6 May 2022 16:54:29 +0200 Subject: [PATCH 186/213] moved nexus plugin to release sign, which was renamed to central-deploy --- pom.xml | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/pom.xml b/pom.xml index a7aa7ab8..c224b020 100644 --- a/pom.xml +++ b/pom.xml @@ -235,21 +235,9 @@ true false forked-path - scala-2.12,release-sign + scala-2.12,central-deploy - - - - - - - - - - - - @@ -312,7 +300,7 @@ - release-sign + central-deploy performRelease @@ -321,6 +309,18 @@ + + org.sonatype.plugins + nexus-staging-maven-plugin + ${nexus-staging-plugin.version} + true + + ossrh + https://oss.sonatype.org/ + false + 20 + + org.apache.maven.plugins maven-gpg-plugin From e8f4ee48b3afc2c78dc03fa17ac5c742bc19d7f2 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Fri, 6 May 2022 17:21:22 +0200 Subject: [PATCH 187/213] disabled qodana... --- .github/workflows/build.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 63d49c25..98b033b4 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -26,11 +26,11 @@ jobs: restore-keys: ${{ runner.os }}-m2 - name: Build with Maven run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotest.tags="!Kafka" - qodana: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: 'Qodana Scan' - uses: JetBrains/qodana-action@v5.0.2 +# qodana: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v3 +# - name: 'Qodana Scan' +# uses: JetBrains/qodana-action@v5.0.2 # vim: ts=2:sts=2:sw=2:expandtab From 05636747b060a715d8240ac6f44365be7d8f9c25 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 9 May 2022 12:46:32 +0200 Subject: [PATCH 188/213] 3.2 -> 3.1 --- jupyter/pom.xml | 6 +++--- .../org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/jupyter/pom.xml b/jupyter/pom.xml index bfc67e6d..68e2f124 100644 --- a/jupyter/pom.xml +++ b/jupyter/pom.xml @@ -5,8 +5,8 @@ 4.0.0 - Kotlin Spark API: Jupyter integration for Spark 3.2+ (Scala 2.12) - kotlin-spark-api-jupyter-3.2 + Kotlin Spark API: Jupyter integration for Spark 3.1+ (Scala 2.12) + kotlin-spark-api-jupyter-3.1 Jupyter integration org.jetbrains.kotlinx.spark @@ -38,7 +38,7 @@ org.jetbrains.kotlinx.spark - kotlin-spark-api-3.2 + kotlin-spark-api-3.1 ${project.version} diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 0b2a8306..acd32ae3 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -31,7 +31,7 @@ abstract class Integration : JupyterIntegration() { private val kotlinVersion = "1.6.21" private val scalaCompatVersion = "2.12" private val scalaVersion = "2.12.15" - private val spark3Version = "3.2.1" + private val spark3Version = "3.1.3" abstract fun KotlinKernelHost.onLoaded() From 018e63eb7bb62942620e8360b89f91281a8eab60 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 9 May 2022 13:14:25 +0200 Subject: [PATCH 189/213] 3.1 -> 3.0 --- jupyter/pom.xml | 6 +++--- .../org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/jupyter/pom.xml b/jupyter/pom.xml index 68e2f124..4492c4ee 100644 --- a/jupyter/pom.xml +++ b/jupyter/pom.xml @@ -5,8 +5,8 @@ 4.0.0 - Kotlin Spark API: Jupyter integration for Spark 3.1+ (Scala 2.12) - kotlin-spark-api-jupyter-3.1 + Kotlin Spark API: Jupyter integration for Spark 3.0+ (Scala 2.12) + kotlin-spark-api-jupyter-3.0 Jupyter integration org.jetbrains.kotlinx.spark @@ -38,7 +38,7 @@ org.jetbrains.kotlinx.spark - kotlin-spark-api-3.1 + kotlin-spark-api-3.0 ${project.version} diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index acd32ae3..2b5018a5 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -31,7 +31,7 @@ abstract class Integration : JupyterIntegration() { private val kotlinVersion = "1.6.21" private val scalaCompatVersion = "2.12" private val scalaVersion = "2.12.15" - private val spark3Version = "3.1.3" + private val spark3Version = "3.0.3" abstract fun KotlinKernelHost.onLoaded() From a664548fcbe70a57f109c0e00a1aab2a361e17c9 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 9 May 2022 14:27:23 +0200 Subject: [PATCH 190/213] 3.1 -> 3.0 --- jupyter/pom.xml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/jupyter/pom.xml b/jupyter/pom.xml index 4492c4ee..d5723e43 100644 --- a/jupyter/pom.xml +++ b/jupyter/pom.xml @@ -61,11 +61,11 @@ spark-streaming_${scala.compat.version} ${spark3.version} - - org.apache.hadoop - hadoop-client - ${hadoop.version} - + + + + + org.jetbrains.kotlinx kotlin-jupyter-api From f2f6f4a040fc279097129015174be56f1f3d1906 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 9 May 2022 14:37:01 +0200 Subject: [PATCH 191/213] updating versions for release 1.1.0 --- README.md | 39 ++++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index 9aa51579..36e04709 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Kotlin for Apache® Spark™ [![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:org.jetbrains.kotlinx.spark%20AND%20v:1.0.2) [![official JetBrains project](http://jb.gg/badges/official.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub) [![Join the chat at https://gitter.im/JetBrains/kotlin-spark-api](https://badges.gitter.im/JetBrains/kotlin-spark-api.svg)](https://gitter.im/JetBrains/kotlin-spark-api?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +# Kotlin for Apache® Spark™ [![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:org.jetbrains.kotlinx.spark%20AND%20v:1.1.0) [![official JetBrains project](http://jb.gg/badges/official.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub) [![Join the chat at https://gitter.im/JetBrains/kotlin-spark-api](https://badges.gitter.im/JetBrains/kotlin-spark-api.svg)](https://gitter.im/JetBrains/kotlin-spark-api?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) Your next API to work with [Apache Spark](https://spark.apache.org/). @@ -31,12 +31,13 @@ We have opened a Spark Project Improvement Proposal: [Kotlin support for Apache ## Supported versions of Apache Spark -| Apache Spark | Scala | Kotlin for Apache Spark | +| Apache Spark | Scala | Kotlin for Apache Spark | |:------------:|:-----:|:-------------------------------:| -| 3.0.0+ | 2.12 | kotlin-spark-api-3.0:1.0.2 | -| 2.4.1+ | 2.12 | kotlin-spark-api-2.4_2.12:1.0.2 | -| 2.4.1+ | 2.11 | kotlin-spark-api-2.4_2.11:1.0.2 | -| 3.2.0+ | 2.12 | kotlin-spark-api-3.2:1.0.3 | +| 3.2.1+ | 2.12 | kotlin-spark-api-3.2:1.1.0 | +| 3.1.3+ | 2.12 | kotlin-spark-api-3.1:1.1.0 | +| 3.0.3+ | 2.12 | kotlin-spark-api-3.0:1.1.0 | +| 2.4.1+ | 2.12 | kotlin-spark-api-2.4_2.12:1.0.2 | +| 2.4.1+ | 2.11 | kotlin-spark-api-2.4_2.11:1.0.2 | ## Releases @@ -44,7 +45,7 @@ The list of Kotlin for Apache Spark releases is available [here](https://github. The Kotlin for Spark artifacts adhere to the following convention: `[Apache Spark version]_[Scala core version]:[Kotlin for Apache Spark API version]` -[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api-3.0") +[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api-3.2") ## How to configure Kotlin for Apache Spark in your project @@ -55,7 +56,7 @@ Here's an example `pom.xml`: ```xml org.jetbrains.kotlinx.spark - kotlin-spark-api-3.0 + kotlin-spark-api-3.2 ${kotlin-spark-api.version} @@ -84,7 +85,7 @@ To it, simply add to the top of your notebook. This will get the latest version of the API, together with the latest version of Spark. To define a certain version of Spark or the API itself, simply add it like this: ```jupyterpython -%use kotlin-spark-api(spark=3.2, version=1.0.4) +%use kotlin-spark-api(spark=3.2, v=1.1.0) ``` Inside the notebook a Spark session will be initiated automatically. This can be accessed via the `spark` value. @@ -134,8 +135,8 @@ Do not use this when running the Kotlin Spark API from a Jupyter notebook. ```kotlin withSpark { dsOf(1, 2) - .map { it X it } // creates Tuple2 - .show() + .map { it X it } // creates Tuple2 + .show() } ``` @@ -152,14 +153,14 @@ To solve these problems we've added `withCached` function ```kotlin withSpark { dsOf(1, 2, 3, 4, 5) - .map { tupleOf(it, it + 2) } - .withCached { - showDS() - - filter { it._1 % 2 == 0 }.showDS() - } - .map { tupleOf(it._1, it._2, (it._1 + it._2) * 2) } - .show() + .map { tupleOf(it, it + 2) } + .withCached { + showDS() + + filter { it._1 % 2 == 0 }.showDS() + } + .map { tupleOf(it._1, it._2, (it._1 + it._2) * 2) } + .show() } ``` From 89f549f8fdeacba2c335b7410a8d821785fa22c2 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 9 May 2022 15:25:19 +0200 Subject: [PATCH 192/213] renaming jupyter integration --- README.md | 6 +++--- .../jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt | 4 ++-- .../kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 36e04709..ebc5bc67 100644 --- a/README.md +++ b/README.md @@ -80,12 +80,12 @@ The Kotlin Spark API also supports Kotlin Jupyter notebooks. To it, simply add ```jupyterpython -%use kotlin-spark-api +%use spark ``` to the top of your notebook. This will get the latest version of the API, together with the latest version of Spark. To define a certain version of Spark or the API itself, simply add it like this: ```jupyterpython -%use kotlin-spark-api(spark=3.2, v=1.1.0) +%use spark(spark=3.2, v=1.1.0) ``` Inside the notebook a Spark session will be initiated automatically. This can be accessed via the `spark` value. @@ -95,7 +95,7 @@ There is also support for HTML rendering of Datasets and simple (Java)RDDs. To use Spark Streaming abilities, instead use ```jupyterpython -%use kotlin-spark-api-streaming +%use spark-streaming ``` This does not start a Spark session right away, meaning you can call `withSparkStreaming(batchDuration) {}` in whichever cell you want. diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index fe758700..635ed654 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -24,7 +24,7 @@ import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost /** - * %use kotlin-spark-api + * %use spark */ @Suppress("UNUSED_VARIABLE", "LocalVariableName") @OptIn(ExperimentalStdlibApi::class) @@ -49,7 +49,7 @@ internal class SparkIntegration : Integration() { org.apache.spark.api.java.JavaSparkContext(spark.sparkContext) }""".trimIndent(), """ - println("Spark session has been started and is running. No `withSpark { }` necessary, you can access `spark` and `sc` directly. To use Spark streaming, use `%use kotlin-spark-api-streaming` instead.")""".trimIndent(), + println("Spark session has been started and is running. No `withSpark { }` necessary, you can access `spark` and `sc` directly. To use Spark streaming, use `%use spark-streaming` instead.")""".trimIndent(), """ inline fun List.toDS(): Dataset = toDS(spark)""".trimIndent(), """ diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt index a0834cc7..1684769b 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt @@ -42,7 +42,7 @@ import scala.collection.Iterable as ScalaIterable import scala.collection.Iterator as ScalaIterator /** - * %use kotlin-spark-api-streaming + * %use spark-streaming */ @Suppress("UNUSED_VARIABLE", "LocalVariableName") @OptIn(ExperimentalStdlibApi::class) @@ -54,7 +54,7 @@ internal class SparkStreamingIntegration : Integration() { @Language("kts") val _1 = listOf( """ - println("To start a spark streaming session, simply use `withSparkStreaming { }` inside a cell. To use Spark normally, use `withSpark { }` in a cell, or use `%use kotlin-spark-api` to start a Spark session for the whole notebook.")""".trimIndent(), + println("To start a spark streaming session, simply use `withSparkStreaming { }` inside a cell. To use Spark normally, use `withSpark { }` in a cell, or use `%use spark` to start a Spark session for the whole notebook.")""".trimIndent(), ).map(::execute) } } From f78a52f69b5bad28d43d7cc803489dcba13acb80 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 9 May 2022 15:51:23 +0200 Subject: [PATCH 193/213] Added Jupyter example --- .../spark/examples/JupyterExample.ipynb | 351 ++++++++++++++++++ 1 file changed, 351 insertions(+) create mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JupyterExample.ipynb diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JupyterExample.ipynb b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JupyterExample.ipynb new file mode 100644 index 00000000..33d9e27b --- /dev/null +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JupyterExample.ipynb @@ -0,0 +1,351 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "source": [ + "By default the latest version of the API and the latest supported Spark version is chosen.\n", + "To specify your own: `%use spark(spark=3.2, v=1.1.0)`" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 2, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Spark session has been started and is running. No `withSpark { }` necessary, you can access `spark` and `sc` directly. To use Spark streaming, use `%use spark-streaming` instead.\n" + ] + } + ], + "source": [ + "%use spark" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "markdown", + "source": [ + "Let's define some enums and data classes to work with." + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "enum class EyeColor {\n", + " BLUE, BROWN, GREEN\n", + "}\n", + "\n", + "enum class Gender {\n", + " MALE, FEMALE, OTHER\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "data class Person(\n", + " val eyeColor: EyeColor,\n", + " val name: String,\n", + " val gender: Gender,\n", + " val length: Double,\n", + " val age: Int,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "source": [ + "And now we can simply create a Dataset. We can see the contents of a Dataset by simply stating it. As seen below:" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [ + { + "data": { + "text/html": "\n \n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
eyeColornamegenderlengthage
BLUEAliceFEMALE1.725
BLUEBobMALE1.6725
BROWNCharlieOTHER1.817
\n" + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "val ds: Dataset = dsOf(\n", + " Person(\n", + " eyeColor = EyeColor.BLUE,\n", + " name = \"Alice\",\n", + " gender = Gender.FEMALE,\n", + " length = 1.70,\n", + " age = 25,\n", + " ),\n", + " Person(\n", + " eyeColor = EyeColor.BLUE,\n", + " name = \"Bob\",\n", + " gender = Gender.MALE,\n", + " length = 1.67,\n", + " age = 25,\n", + " ),\n", + " Person(\n", + " eyeColor = EyeColor.BROWN,\n", + " name = \"Charlie\",\n", + " gender = Gender.OTHER,\n", + " length = 1.80,\n", + " age = 17,\n", + " ),\n", + ")\n", + "\n", + "ds" + ] + }, + { + "cell_type": "markdown", + "source": [ + "The effects of operations like filtering can also be seen immediately, as well as sorting, selecting columns etc..." + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [ + { + "data": { + "text/html": "\n \n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
eyeColornamegenderlengthage
BLUEAliceFEMALE1.725
BLUEBobMALE1.6725
\n" + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ds.filter { it.age > 20 }" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "outputs": [ + { + "data": { + "text/html": "\n \n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
eyeColornamegenderlengthage
BROWNCharlieOTHER1.817
BLUEBobMALE1.6725
BLUEAliceFEMALE1.725
\n" + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ds.sort(col(Person::age), col(Person::length))" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 9, + "outputs": [ + { + "data": { + "text/html": "\n \n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
agelength
251.7
251.67
171.8
\n" + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ds.selectTyped(col(Person::age), col(Person::length))" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 10, + "outputs": [ + { + "data": { + "text/plain": "Average length: 1.7233333333333334" + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "\"Average length: \" +\n", + " ds\n", + " .map { it.length }\n", + " .reduceK { a, b -> a + b } / ds.count()" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "markdown", + "source": [ + "Extension methods that usually only work in the `withSpark {}` context of the Kotlin Spark API work out of the box in Jupyter.\n", + "This means we can also create a Dataset like this:" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 11, + "outputs": [ + { + "data": { + "text/html": "\n \n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
value
1
2
3
4
\n" + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "listOf(1, 2, 3, 4).toDS()" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "markdown", + "source": [ + "We can also create RDDs using `sc: JavaSparkContext` which are rendered similarly to Datasets.\n", + "You can see that all Tuple helper functions are immediately available too." + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 12, + "outputs": [ + { + "data": { + "text/html": "\n \n\n\n \n \n \n \n \n \n \n \n \n \n \n \n
Values
[1, aaa]
[2, bbb]
[3, ccc]
\n" + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "val rdd: JavaRDD> = sc.parallelize(\n", + " listOf(\n", + " 1 X \"aaa\",\n", + " t(2, \"bbb\"),\n", + " tupleOf(3, \"ccc\"),\n", + " )\n", + ")\n", + "\n", + "rdd" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + } + ], + "metadata": { + "kernelspec": { + "display_name": "Kotlin", + "language": "kotlin", + "name": "kotlin" + }, + "language_info": { + "codemirror_mode": "text/x-kotlin", + "file_extension": ".kt", + "mimetype": "text/x-kotlin", + "name": "kotlin", + "nbconvert_exporter": "", + "pygments_lexer": "kotlin", + "version": "1.7.0-dev-3303" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} \ No newline at end of file From 51085088620f3d7622abd6d8b6280e182e5a3fa1 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 9 May 2022 16:13:56 +0200 Subject: [PATCH 194/213] Added Jupyter streaming example --- .../streaming/JupyterStreamingExample.ipynb | 191 ++++++++++++++++++ 1 file changed, 191 insertions(+) create mode 100644 examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/JupyterStreamingExample.ipynb diff --git a/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/JupyterStreamingExample.ipynb b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/JupyterStreamingExample.ipynb new file mode 100644 index 00000000..2159cfa9 --- /dev/null +++ b/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/JupyterStreamingExample.ipynb @@ -0,0 +1,191 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "source": [ + "By default the latest version of the API and the latest supported Spark version is chosen. To specify your own: %use spark-streaming(spark=3.2, v=1.1.0)" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 2, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "To start a spark streaming session, simply use `withSparkStreaming { }` inside a cell. To use Spark normally, use `withSpark { }` in a cell, or use `%use spark` to start a Spark session for the whole notebook.\n" + ] + } + ], + "source": [ + "%use spark-streaming" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "markdown", + "source": [ + "Let's define some data class to work with." + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 4, + "outputs": [], + "source": [ + "data class TestRow(\n", + " val word: String,\n", + ")" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + }, + { + "cell_type": "markdown", + "source": [ + "To run this on your local machine, you need to first run a Netcat server: `$ nc -lk 9999`.\n", + "\n", + "This example will collect the data from this stream for 10 seconds and 1 second intervals, splitting and counting the input per word." + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + } + }, + { + "cell_type": "code", + "execution_count": 5, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "+---+--------+\n", + "|key|count(1)|\n", + "+---+--------+\n", + "+---+--------+\n", + "\n", + "+-----+--------+\n", + "| key|count(1)|\n", + "+-----+--------+\n", + "|hello| 8|\n", + "|Hello| 6|\n", + "|world| 3|\n", + "| | 2|\n", + "| test| 4|\n", + "+-----+--------+\n", + "\n", + "+-----+--------+\n", + "| key|count(1)|\n", + "+-----+--------+\n", + "|hello| 3|\n", + "+-----+--------+\n", + "\n", + "+---+--------+\n", + "|key|count(1)|\n", + "+---+--------+\n", + "+---+--------+\n", + "\n", + "+---+--------+\n", + "|key|count(1)|\n", + "+---+--------+\n", + "+---+--------+\n", + "\n", + "+---+--------+\n", + "|key|count(1)|\n", + "+---+--------+\n", + "+---+--------+\n", + "\n", + "+---+--------+\n", + "|key|count(1)|\n", + "+---+--------+\n", + "+---+--------+\n", + "\n", + "+---+--------+\n", + "|key|count(1)|\n", + "+---+--------+\n", + "+---+--------+\n", + "\n", + "+-----+--------+\n", + "| key|count(1)|\n", + "+-----+--------+\n", + "|hello| 1|\n", + "|world| 2|\n", + "+-----+--------+\n", + "\n", + "+---+--------+\n", + "|key|count(1)|\n", + "+---+--------+\n", + "+---+--------+\n", + "\n" + ] + } + ], + "source": [ + "withSparkStreaming(batchDuration = Durations.seconds(1), timeout = 10_000) { // this: KSparkStreamingSession\n", + "\n", + " val lines: JavaReceiverInputDStream = ssc.socketTextStream(\"localhost\", 9999)\n", + " val words: JavaDStream = lines.flatMap { it.split(\" \").iterator() }\n", + "\n", + " words.foreachRDD { rdd: JavaRDD, _: Time ->\n", + " withSpark(rdd) { // this: KSparkSession\n", + " val dataframe: Dataset = rdd.map { TestRow(it) }.toDS()\n", + " dataframe\n", + " .groupByKey { it.word }\n", + " .count()\n", + " .show()\n", + " }\n", + " }\n", + "}" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%%\n" + } + } + } + ], + "metadata": { + "kernelspec": { + "display_name": "Kotlin", + "language": "kotlin", + "name": "kotlin" + }, + "language_info": { + "name": "kotlin", + "version": "1.7.0-dev-1825", + "mimetype": "text/x-kotlin", + "file_extension": ".kt", + "pygments_lexer": "kotlin", + "codemirror_mode": "text/x-kotlin", + "nbconvert_exporter": "" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file From 1be27dd65396f0578e244bb77e804750fc03b562 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 9 May 2022 16:32:17 +0200 Subject: [PATCH 195/213] Added Jupyter streaming example --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index ebc5bc67..0d1b4db0 100644 --- a/README.md +++ b/README.md @@ -92,6 +92,8 @@ Inside the notebook a Spark session will be initiated automatically. This can be `sc: JavaSparkContext` can also be accessed directly. The API operates pretty similarly. There is also support for HTML rendering of Datasets and simple (Java)RDDs. +Check out the [example](examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/JupyterExample.ipynb) as well. + To use Spark Streaming abilities, instead use ```jupyterpython @@ -99,6 +101,7 @@ To use Spark Streaming abilities, instead use ``` This does not start a Spark session right away, meaning you can call `withSparkStreaming(batchDuration) {}` in whichever cell you want. +Check out the [example](examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/JupyterStreamingExample.ipynb). ## Kotlin for Apache Spark features From 79a596a78da44dd83e42de9248ccaff731b02129 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 10 May 2022 12:37:25 +0200 Subject: [PATCH 196/213] referring to wiki in readme --- README.md | 45 +------------------ .../kotlinx/spark/api/StreamingKeyValues.kt | 2 +- 2 files changed, 3 insertions(+), 44 deletions(-) diff --git a/README.md b/README.md index 0d1b4db0..26863f07 100644 --- a/README.md +++ b/README.md @@ -257,49 +257,7 @@ val a: Tuple2 = tupleOf(1, 2L) val b: Tuple3 = t("test", 1.0, 2) val c: Tuple3 = 5f X "aaa" X 1 ``` -Tuples can be expanded and merged like this: -```kotlin -// expand -tupleOf(1, 2).appendedBy(3) == tupleOf(1, 2, 3) -tupleOf(1, 2) + 3 == tupleOf(1, 2, 3) -tupleOf(2, 3).prependedBy(1) == tupleOf(1, 2, 3) -1 + tupleOf(2, 3) == tupleOf(1, 2, 3) - -// merge -tupleOf(1, 2) concat tupleOf(3, 4) == tupleOf(1, 2, 3, 4) -tupleOf(1, 2) + tupleOf(3, 4) == tupleOf(1, 2, 3, 4) - -// extend tuple instead of merging with it -tupleOf(1, 2).appendedBy(tupleOf(3, 4)) == tupleOf(1, 2, tupleOf(3, 4)) -tupleOf(1, 2) + tupleOf(tupleOf(3, 4)) == tupleOf(1, 2, tupleOf(3, 4)) -``` - -The concept of `EmptyTuple` from Scala 3 is also already present: -```kotlin -tupleOf(1).dropLast() == tupleOf() == emptyTuple() -``` - -Finally, all these tuple helper functions are also baked in: - -- `componentX()` for destructuring: `val (a, b) = tuple` -- `dropLast() / dropFirst()` -- `contains(x)` for `if (x in tuple) { ... }` -- `iterator()` for `for (x in tuple) { ... }` -- `asIterable()` -- `size` -- `get(n) / get(i..j)` for `tuple[1] / tuple[i..j]` -- `getOrNull(n) / getOrNull(i..j)` -- `getAs(n) / getAs(i..j)` -- `getAsOrNull(n) / getAsOrNull(i..j)` -- `copy(_1 = ..., _5 = ...)` -- `first() / last()` -- `_1`, `_6` etc. (instead of `_1()`, `_6()`) -- `zip` -- `dropN() / dropLastN()` -- `takeN() / takeLastN()` -- `splitAtN()` -- `map` -- `cast` +To read more about tuples and all the added functions, refer to the [wiki](https://github.com/JetBrains/kotlin-spark-api/wiki/Tuples). ### Streaming @@ -342,6 +300,7 @@ withSparkStreaming(batchDuration = Durations.seconds(1), timeout = 10_000) { // } ``` +For more information, check the [wiki](https://github.com/JetBrains/kotlin-spark-api/wiki/Streaming). ## Examples diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt index 8664081b..f9044b5b 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/StreamingKeyValues.kt @@ -40,7 +40,7 @@ fun JavaDStream>.toPairDStream(): JavaPairDStream = fun JavaPairDStream.toTupleDStream(): JavaDStream> = toJavaDStream() -fun JavaRDD>.toPairRDD(): JavaPairRDD = +fun JavaRDD>.toPairRDD(): JavaPairRDD = JavaPairRDD.fromJavaRDD(this) fun JavaPairRDD.toTupleRDD(): JavaRDD> = From 50521ffe05e8dbe05f942afe7e9e96990e311d04 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 10 May 2022 13:10:14 +0200 Subject: [PATCH 197/213] referring to wiki in readme --- README.md | 44 +------------------------------------------- 1 file changed, 1 insertion(+), 43 deletions(-) diff --git a/README.md b/README.md index 26863f07..c20a1d61 100644 --- a/README.md +++ b/README.md @@ -189,49 +189,7 @@ dataset.where( col("colA") `===` 6 ) dataset.where( col("colA") eq 6) ``` -In short, all supported operators are: - -- `==`, -- `!=`, -- `eq` / `` `===` ``, -- `neq` / `` `=!=` ``, -- `-col(...)`, -- `!col(...)`, -- `gt`, -- `lt`, -- `geq`, -- `leq`, -- `or`, -- `and` / `` `&&` ``, -- `+`, -- `-`, -- `*`, -- `/`, -- `%` - -Secondly, there are some quality of life additions as well: - -In Kotlin, Ranges are often -used to solve inclusive/exclusive situations for a range. So, you can now do: -```kotlin -dataset.where( col("colA") inRangeOf 0..2 ) -``` - -Also, for columns containing map- or array like types: - -```kotlin -dataset.where( col("colB")[0] geq 5 ) -``` - -Finally, thanks to Kotlin reflection, we can provide a type- and refactor safe way -to create `TypedColumn`s and with those a new Dataset from pieces of another using the `selectTyped()` function, added to the API: -```kotlin -val dataset: Dataset = ... -val newDataset: Dataset> = dataset.selectTyped(col(YourClass::colA), col(YourClass::colB)) - -// Alternatively, for instance when working with a Dataset -val typedDataset: Dataset> = otherDataset.selectTyped(col("a").`as`(), col("b").`as`()) -``` +To read more, check the [wiki](https://github.com/JetBrains/kotlin-spark-api/wiki/Column-functions). ### Overload resolution ambiguity From 92e480b30d18edd75faf9e550376621927913047 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 10 May 2022 13:15:12 +0200 Subject: [PATCH 198/213] referring to wiki in readme --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c20a1d61..54f9fdfa 100644 --- a/README.md +++ b/README.md @@ -119,7 +119,8 @@ This is not needed when running the Kotlin Spark API from a Jupyter notebook. ```kotlin spark.dsOf("a" to 1, "b" to 2) ``` -The example above produces `Dataset>`. While Kotlin Pairs and Triples are supported, Scala Tuples are reccomended for better support. +The example above produces `Dataset>`. While Kotlin Pairs and Triples are supported, Scala Tuples are +recommended for better support. ### Null safety There are several aliases in API, like `leftJoin`, `rightJoin` etc. These are null-safe by design. From 92e9e78b869c3006a13becd5dc0a7d2dd5ddd386 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 10 May 2022 13:29:21 +0200 Subject: [PATCH 199/213] referring to wiki in readme --- README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 54f9fdfa..6d27b645 100644 --- a/README.md +++ b/README.md @@ -126,8 +126,12 @@ recommended for better support. There are several aliases in API, like `leftJoin`, `rightJoin` etc. These are null-safe by design. For example, `leftJoin` is aware of nullability and returns `Dataset>`. Note that we are forcing `RIGHT` to be nullable for you as a developer to be able to handle this situation. -`NullPointerException`s are hard to debug in Spark, and we doing our best to make them as rare as possible. +`NullPointerException`s are hard to debug in Spark, and we're doing our best to make them as rare as possible. +In Spark, you might also come across Scala-native `Option<*>` or Java-compatible `Optional<*>` classes. +We provide `getOrNull()` and `getOrElse()` functions for these to use Kotlin's null safety for good. + +Similarly, you can also create `Option<*>`s and `Optional<*>`s like `T?.toOptional()` if a Spark function requires it. ### withSpark function We provide you with useful function `withSpark`, which accepts everything that may be needed to run Spark — properties, name, master location and so on. It also accepts a block of code to execute inside Spark context. From 54a2eb82ccffbed0749cdfaaaf6df3c3e0c40489 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 11 May 2022 13:16:40 +0200 Subject: [PATCH 200/213] renamed parent to include 3.2 --- README.md | 4 ++-- core/3.2/pom_2.12.xml | 2 +- dummy/pom.xml | 2 +- examples/pom-3.2_2.12.xml | 2 +- jupyter/pom.xml | 2 +- kotlin-spark-api/3.2/pom_2.12.xml | 2 +- pom.xml | 4 ++-- pom_2.12.xml | 4 ++-- scala-tuples-in-kotlin/pom_2.12.xml | 2 +- 9 files changed, 12 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 6d27b645..1481b7e8 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Kotlin for Apache® Spark™ [![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:org.jetbrains.kotlinx.spark%20AND%20v:1.1.0) [![official JetBrains project](http://jb.gg/badges/official.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub) [![Join the chat at https://gitter.im/JetBrains/kotlin-spark-api](https://badges.gitter.im/JetBrains/kotlin-spark-api.svg)](https://gitter.im/JetBrains/kotlin-spark-api?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +# Kotlin for Apache® Spark™ [![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent-3.2.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:org.jetbrains.kotlinx.spark%20AND%20v:1.1.0) [![official JetBrains project](http://jb.gg/badges/official.svg)](https://confluence.jetbrains.com/display/ALL/JetBrains+on+GitHub) [![Join the chat at https://gitter.im/JetBrains/kotlin-spark-api](https://badges.gitter.im/JetBrains/kotlin-spark-api.svg)](https://gitter.im/JetBrains/kotlin-spark-api?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) Your next API to work with [Apache Spark](https://spark.apache.org/). @@ -45,7 +45,7 @@ The list of Kotlin for Apache Spark releases is available [here](https://github. The Kotlin for Spark artifacts adhere to the following convention: `[Apache Spark version]_[Scala core version]:[Kotlin for Apache Spark API version]` -[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api-3.2") +[![Maven Central](https://img.shields.io/maven-central/v/org.jetbrains.kotlinx.spark/kotlin-spark-api-parent-3.2.svg?label=Maven%20Central)](https://search.maven.org/search?q=g:"org.jetbrains.kotlinx.spark"%20AND%20a:"kotlin-spark-api-3.2") ## How to configure Kotlin for Apache Spark in your project diff --git a/core/3.2/pom_2.12.xml b/core/3.2/pom_2.12.xml index 5fed5559..fac961e3 100644 --- a/core/3.2/pom_2.12.xml +++ b/core/3.2/pom_2.12.xml @@ -8,7 +8,7 @@ core-3.2_2.12 org.jetbrains.kotlinx.spark - kotlin-spark-api-parent_2.12 + kotlin-spark-api-parent-3.2_2.12 1.0.4-SNAPSHOT ../../pom_2.12.xml diff --git a/dummy/pom.xml b/dummy/pom.xml index 1fd6ab08..79e476c4 100644 --- a/dummy/pom.xml +++ b/dummy/pom.xml @@ -1,7 +1,7 @@ - kotlin-spark-api-parent + kotlin-spark-api-parent-3.2 org.jetbrains.kotlinx.spark 1.0.4-SNAPSHOT diff --git a/examples/pom-3.2_2.12.xml b/examples/pom-3.2_2.12.xml index d069c058..6379a482 100644 --- a/examples/pom-3.2_2.12.xml +++ b/examples/pom-3.2_2.12.xml @@ -9,7 +9,7 @@ examples-3.2_2.12 org.jetbrains.kotlinx.spark - kotlin-spark-api-parent_2.12 + kotlin-spark-api-parent-3.2_2.12 1.0.4-SNAPSHOT ../pom_2.12.xml diff --git a/jupyter/pom.xml b/jupyter/pom.xml index bfc67e6d..69473488 100644 --- a/jupyter/pom.xml +++ b/jupyter/pom.xml @@ -10,7 +10,7 @@ Jupyter integration org.jetbrains.kotlinx.spark - kotlin-spark-api-parent_2.12 + kotlin-spark-api-parent-3.2_2.12 1.0.4-SNAPSHOT ../pom_2.12.xml diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 32c5f0f9..3cbe8644 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -9,7 +9,7 @@ Kotlin API compatible with spark 3.2.0 Kotlin for Apache Spark org.jetbrains.kotlinx.spark - kotlin-spark-api-parent_2.12 + kotlin-spark-api-parent-3.2_2.12 1.0.4-SNAPSHOT ../../pom_2.12.xml diff --git a/pom.xml b/pom.xml index c224b020..65c544e1 100644 --- a/pom.xml +++ b/pom.xml @@ -2,10 +2,10 @@ 4.0.0 - Kotlin Spark API: Parent + Kotlin Spark API: Parent for Spark 3.2+ Parent project for Kotlin for Apache Spark org.jetbrains.kotlinx.spark - kotlin-spark-api-parent + kotlin-spark-api-parent-3.2 1.0.4-SNAPSHOT pom diff --git a/pom_2.12.xml b/pom_2.12.xml index f59970af..b66a4774 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -4,10 +4,10 @@ Kotlin Spark API: Parent (Scala 2.12) Parent project for Kotlin for Apache Spark - kotlin-spark-api-parent_2.12 + kotlin-spark-api-parent-3.2_2.12 org.jetbrains.kotlinx.spark - kotlin-spark-api-parent + kotlin-spark-api-parent-3.2 1.0.4-SNAPSHOT pom.xml diff --git a/scala-tuples-in-kotlin/pom_2.12.xml b/scala-tuples-in-kotlin/pom_2.12.xml index 7e5f02fc..8f4c4881 100644 --- a/scala-tuples-in-kotlin/pom_2.12.xml +++ b/scala-tuples-in-kotlin/pom_2.12.xml @@ -7,7 +7,7 @@ scala-tuples-in-kotlin org.jetbrains.kotlinx.spark - kotlin-spark-api-parent_2.12 + kotlin-spark-api-parent-3.2_2.12 1.0.4-SNAPSHOT ../pom_2.12.xml From 8bd7b6c90e2056429c17aa11f7f1d9442a32f06c Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 11 May 2022 13:42:38 +0200 Subject: [PATCH 201/213] 3.2 -> 3.1 --- README.md | 2 +- core/3.1/pom_2.12.xml | 2 +- dummy/pom.xml | 2 +- examples/pom-3.1_2.12.xml | 2 +- jupyter/pom.xml | 2 +- kotlin-spark-api/3.1/pom_2.12.xml | 2 +- pom.xml | 4 ++-- pom_2.12.xml | 4 ++-- scala-tuples-in-kotlin/pom_2.12.xml | 2 +- 9 files changed, 11 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 1481b7e8..c1d1d050 100644 --- a/README.md +++ b/README.md @@ -56,7 +56,7 @@ Here's an example `pom.xml`: ```xml org.jetbrains.kotlinx.spark - kotlin-spark-api-3.2 + kotlin-spark-api-3.1 ${kotlin-spark-api.version} diff --git a/core/3.1/pom_2.12.xml b/core/3.1/pom_2.12.xml index a0acebb4..61e14b2e 100644 --- a/core/3.1/pom_2.12.xml +++ b/core/3.1/pom_2.12.xml @@ -8,7 +8,7 @@ core-3.1_2.12 org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.2_2.12 + kotlin-spark-api-parent-3.1_2.12 1.0.4-SNAPSHOT ../../pom_2.12.xml diff --git a/dummy/pom.xml b/dummy/pom.xml index dbdcb193..e25eee3a 100644 --- a/dummy/pom.xml +++ b/dummy/pom.xml @@ -1,7 +1,7 @@ - kotlin-spark-api-parent-3.2 + kotlin-spark-api-parent-3.1 org.jetbrains.kotlinx.spark 1.0.4-SNAPSHOT diff --git a/examples/pom-3.1_2.12.xml b/examples/pom-3.1_2.12.xml index 150a8e0a..53b4ac18 100644 --- a/examples/pom-3.1_2.12.xml +++ b/examples/pom-3.1_2.12.xml @@ -9,7 +9,7 @@ examples-3.1_2.12 org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.2_2.12 + kotlin-spark-api-parent-3.1_2.12 1.0.4-SNAPSHOT ../pom_2.12.xml diff --git a/jupyter/pom.xml b/jupyter/pom.xml index 5fe56793..450c4923 100644 --- a/jupyter/pom.xml +++ b/jupyter/pom.xml @@ -10,7 +10,7 @@ Jupyter integration org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.2_2.12 + kotlin-spark-api-parent-3.1_2.12 1.0.4-SNAPSHOT ../pom_2.12.xml diff --git a/kotlin-spark-api/3.1/pom_2.12.xml b/kotlin-spark-api/3.1/pom_2.12.xml index 9ea163e4..b60378c0 100644 --- a/kotlin-spark-api/3.1/pom_2.12.xml +++ b/kotlin-spark-api/3.1/pom_2.12.xml @@ -9,7 +9,7 @@ Kotlin API compatible with spark 3.1.3 Kotlin for Apache Spark org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.2_2.12 + kotlin-spark-api-parent-3.1_2.12 1.0.4-SNAPSHOT ../../pom_2.12.xml diff --git a/pom.xml b/pom.xml index b9cfbbec..7419c191 100644 --- a/pom.xml +++ b/pom.xml @@ -2,10 +2,10 @@ 4.0.0 - Kotlin Spark API: Parent for Spark 3.2+ + Kotlin Spark API: Parent for Spark 3.1+ Parent project for Kotlin for Apache Spark org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.2 + kotlin-spark-api-parent-3.1 1.0.4-SNAPSHOT pom diff --git a/pom_2.12.xml b/pom_2.12.xml index 9f4deda0..cd13ce13 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -4,10 +4,10 @@ Kotlin Spark API: Parent (Scala 2.12) Parent project for Kotlin for Apache Spark - kotlin-spark-api-parent-3.2_2.12 + kotlin-spark-api-parent-3.1_2.12 org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.2 + kotlin-spark-api-parent-3.1 1.0.4-SNAPSHOT pom.xml diff --git a/scala-tuples-in-kotlin/pom_2.12.xml b/scala-tuples-in-kotlin/pom_2.12.xml index 8f4c4881..30162bb1 100644 --- a/scala-tuples-in-kotlin/pom_2.12.xml +++ b/scala-tuples-in-kotlin/pom_2.12.xml @@ -7,7 +7,7 @@ scala-tuples-in-kotlin org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.2_2.12 + kotlin-spark-api-parent-3.1_2.12 1.0.4-SNAPSHOT ../pom_2.12.xml From 187bddeb87c527f4dcafaf5c18349a67a2aa0b4a Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 11 May 2022 13:53:58 +0200 Subject: [PATCH 202/213] 3.1 -> 3.0 --- README.md | 2 +- core/3.0/pom_2.12.xml | 2 +- dummy/pom.xml | 2 +- examples/pom-3.0_2.12.xml | 2 +- jupyter/pom.xml | 2 +- kotlin-spark-api/3.0/pom_2.12.xml | 2 +- pom.xml | 4 ++-- pom_2.12.xml | 4 ++-- scala-tuples-in-kotlin/pom_2.12.xml | 2 +- 9 files changed, 11 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index c1d1d050..e60a7ce0 100644 --- a/README.md +++ b/README.md @@ -56,7 +56,7 @@ Here's an example `pom.xml`: ```xml org.jetbrains.kotlinx.spark - kotlin-spark-api-3.1 + kotlin-spark-api-3.0 ${kotlin-spark-api.version} diff --git a/core/3.0/pom_2.12.xml b/core/3.0/pom_2.12.xml index b15661e7..bbb7e94f 100644 --- a/core/3.0/pom_2.12.xml +++ b/core/3.0/pom_2.12.xml @@ -8,7 +8,7 @@ core-3.0_2.12 org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.1_2.12 + kotlin-spark-api-parent-3.0_2.12 1.0.4-SNAPSHOT ../../pom_2.12.xml diff --git a/dummy/pom.xml b/dummy/pom.xml index 8b009d65..49b50e49 100644 --- a/dummy/pom.xml +++ b/dummy/pom.xml @@ -1,7 +1,7 @@ - kotlin-spark-api-parent-3.1 + kotlin-spark-api-parent-3.0 org.jetbrains.kotlinx.spark 1.0.4-SNAPSHOT diff --git a/examples/pom-3.0_2.12.xml b/examples/pom-3.0_2.12.xml index 67b52b91..34b47e36 100644 --- a/examples/pom-3.0_2.12.xml +++ b/examples/pom-3.0_2.12.xml @@ -9,7 +9,7 @@ examples-3.0_2.12 org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.1_2.12 + kotlin-spark-api-parent-3.0_2.12 1.0.4-SNAPSHOT ../pom_2.12.xml diff --git a/jupyter/pom.xml b/jupyter/pom.xml index 350ba8be..98d8a646 100644 --- a/jupyter/pom.xml +++ b/jupyter/pom.xml @@ -10,7 +10,7 @@ Jupyter integration org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.1_2.12 + kotlin-spark-api-parent-3.0_2.12 1.0.4-SNAPSHOT ../pom_2.12.xml diff --git a/kotlin-spark-api/3.0/pom_2.12.xml b/kotlin-spark-api/3.0/pom_2.12.xml index 1f3e17f4..3c72dce1 100644 --- a/kotlin-spark-api/3.0/pom_2.12.xml +++ b/kotlin-spark-api/3.0/pom_2.12.xml @@ -9,7 +9,7 @@ Kotlin API compatible with spark 3.0.3 Kotlin for Apache Spark org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.1_2.12 + kotlin-spark-api-parent-3.0_2.12 1.0.4-SNAPSHOT ../../pom_2.12.xml diff --git a/pom.xml b/pom.xml index 21f60741..9f38fa12 100644 --- a/pom.xml +++ b/pom.xml @@ -2,10 +2,10 @@ 4.0.0 - Kotlin Spark API: Parent for Spark 3.1+ + Kotlin Spark API: Parent for Spark 3.0+ Parent project for Kotlin for Apache Spark org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.1 + kotlin-spark-api-parent-3.0 1.0.4-SNAPSHOT pom diff --git a/pom_2.12.xml b/pom_2.12.xml index 0091418a..8a745a68 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -4,10 +4,10 @@ Kotlin Spark API: Parent (Scala 2.12) Parent project for Kotlin for Apache Spark - kotlin-spark-api-parent-3.1_2.12 + kotlin-spark-api-parent-3.0_2.12 org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.1 + kotlin-spark-api-parent-3.0 1.0.4-SNAPSHOT pom.xml diff --git a/scala-tuples-in-kotlin/pom_2.12.xml b/scala-tuples-in-kotlin/pom_2.12.xml index 30162bb1..95d0fde7 100644 --- a/scala-tuples-in-kotlin/pom_2.12.xml +++ b/scala-tuples-in-kotlin/pom_2.12.xml @@ -7,7 +7,7 @@ scala-tuples-in-kotlin org.jetbrains.kotlinx.spark - kotlin-spark-api-parent-3.1_2.12 + kotlin-spark-api-parent-3.0_2.12 1.0.4-SNAPSHOT ../pom_2.12.xml From 7ac08a5fd9f6585013c8551c8a9bda36c0b4c66b Mon Sep 17 00:00:00 2001 From: Pasha Finkelshteyn Date: Wed, 11 May 2022 18:49:04 +0300 Subject: [PATCH 203/213] ref: Migrating Kafka tests to TestContainers --- .github/workflows/build.yml | 2 +- .github/workflows/publish_dev_version.yml | 2 +- kotlin-spark-api/3.2/pom_2.12.xml | 6 + .../kotlinx/spark/api/KafkaHelper.kt | 148 ------------------ .../kotlinx/spark/api/KafkaStreamingTest.kt | 34 ++-- .../kotlinx/spark/api/ProjectConfig.kt | 7 +- pom.xml | 19 +-- 7 files changed, 43 insertions(+), 175 deletions(-) delete mode 100644 kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 98b033b4..98a7ac04 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -25,7 +25,7 @@ jobs: key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 - name: Build with Maven - run: ./mvnw -B package --file pom.xml -Pscala-2.12 -Dkotest.tags="!Kafka" + run: ./mvnw -B package --file pom.xml -Pscala-2.12 # qodana: # runs-on: ubuntu-latest # steps: diff --git a/.github/workflows/publish_dev_version.yml b/.github/workflows/publish_dev_version.yml index 80266c62..a0f2a374 100644 --- a/.github/workflows/publish_dev_version.yml +++ b/.github/workflows/publish_dev_version.yml @@ -24,7 +24,7 @@ jobs: key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} restore-keys: ${{ runner.os }}-m2 - name: Deploy to GH Packages - run: ./mvnw --batch-mode deploy -Dkotest.tags="!Kafka" + run: ./mvnw --batch-mode deploy env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/kotlin-spark-api/3.2/pom_2.12.xml b/kotlin-spark-api/3.2/pom_2.12.xml index 3cbe8644..ee48d1cc 100644 --- a/kotlin-spark-api/3.2/pom_2.12.xml +++ b/kotlin-spark-api/3.2/pom_2.12.xml @@ -73,6 +73,12 @@ ${kotest-extensions-allure.version} test + + io.kotest.extensions + kotest-extensions-testcontainers + ${kotest-extensions-testcontainers.version} + test + io.github.embeddedkafka embedded-kafka_${scala.compat.version} diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt deleted file mode 100644 index 6ec5924c..00000000 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaHelper.kt +++ /dev/null @@ -1,148 +0,0 @@ -/*- - * =LICENSE= - * Kotlin Spark API: API for Spark 3.2+ (Scala 2.12) - * ---------- - * Copyright (C) 2019 - 2022 JetBrains - * ---------- - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * =LICENSEEND= - */ -@file:Suppress("MemberVisibilityCanBePrivate", "BlockingMethodInNonBlockingContext") - -package org.jetbrains.kotlinx.spark.api - -/** - * Source: https://github.com/kotest/kotest-extensions-embedded-kafka - */ - -import io.github.embeddedkafka.EmbeddedKafka -import io.github.embeddedkafka.EmbeddedKafkaConfig -import io.kotest.core.listeners.TestListener -import io.kotest.core.spec.Spec -import org.apache.kafka.clients.CommonClientConfigs -import org.apache.kafka.clients.consumer.ConsumerConfig -import org.apache.kafka.clients.consumer.KafkaConsumer -import org.apache.kafka.clients.producer.KafkaProducer -import org.apache.kafka.common.serialization.BytesDeserializer -import org.apache.kafka.common.serialization.BytesSerializer -import org.apache.kafka.common.serialization.StringDeserializer -import org.apache.kafka.common.serialization.StringSerializer -import org.apache.kafka.common.utils.Bytes -import scala.Predef -import java.util.* - -val embeddedKafkaListener: EmbeddedKafkaListener = EmbeddedKafkaListener(EmbeddedKafkaConfig.defaultConfig()) - -class EmbeddedKafkaListener( - private val config: EmbeddedKafkaConfig, -) : TestListener { - - constructor(port: Int) : this( - EmbeddedKafkaConfig.apply( - port, - EmbeddedKafkaConfig.defaultZookeeperPort(), - Predef.Map().empty(), - Predef.Map().empty(), - Predef.Map().empty(), - ) - ) - - constructor(kafkaPort: Int, zookeeperPort: Int) : this( - EmbeddedKafkaConfig.apply( - kafkaPort, - zookeeperPort, - Predef.Map().empty(), - Predef.Map().empty(), - Predef.Map().empty(), - ) - ) - - val port: Int = config.kafkaPort() - -// val host: String = "127.0.0.1" -// val host: String = "0.0.0.0" - val host: String = "localhost" - - val bootstrapServer = "$host:$port" - - override suspend fun beforeSpec(spec: Spec) { - EmbeddedKafka.start(config) - while (!EmbeddedKafka.isRunning()) { - Thread.sleep(100) - } - } - - override suspend fun afterSpec(spec: Spec) { - EmbeddedKafka.stop() - while (EmbeddedKafka.isRunning()) { - Thread.sleep(100) - } - } - - /** - * Returns a kafka consumer configured with the details of the embedded broker. - */ - fun stringStringConsumer(configure: Properties.() -> Unit = {}): KafkaConsumer { - val props = Properties() - props[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = "$host:$port" - props[ConsumerConfig.GROUP_ID_CONFIG] = "test_consumer_group_" + System.currentTimeMillis() - props[ConsumerConfig.AUTO_OFFSET_RESET_CONFIG] = "earliest" - props.configure() - return KafkaConsumer(props, StringDeserializer(), StringDeserializer()) - } - - /** - * Returns a kafka consumer subscribed to the given topic on the embedded broker. - */ - fun stringStringConsumer(topic: String, configure: Properties.() -> Unit = {}): KafkaConsumer { - val consumer = stringStringConsumer(configure) - consumer.subscribe(listOf(topic)) - return consumer - } - - /** - * Returns a kafka consumer configured with the details of the embedded broker. - */ - fun bytesBytesConsumer(configure: Properties.() -> Unit = {}): KafkaConsumer { - val props = Properties() - props[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = "$host:$port" - props[ConsumerConfig.GROUP_ID_CONFIG] = "test_consumer_group_" + System.currentTimeMillis() - props[ConsumerConfig.AUTO_OFFSET_RESET_CONFIG] = "earliest" - props.configure() - return KafkaConsumer(props, BytesDeserializer(), BytesDeserializer()) - } - - /** - * Returns a kafka consumer subscribed to the given topic on the embedded broker. - */ - fun bytesBytesConsumer(topic: String, configure: Properties.() -> Unit = {}): KafkaConsumer { - val consumer = bytesBytesConsumer(configure) - consumer.subscribe(listOf(topic)) - return consumer - } - - fun bytesBytesProducer(configure: Properties.() -> Unit = {}): KafkaProducer { - val props = Properties() - props[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = "$host:$port" - props.configure() - return KafkaProducer(props, BytesSerializer(), BytesSerializer()) - } - - fun stringStringProducer(configure: Properties.() -> Unit = {}): KafkaProducer { - val props = Properties() - props[CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG] = "$host:$port" - props.configure() - return KafkaProducer(props, StringSerializer(), StringSerializer()) - } -} - diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt index b755b1e6..eb413706 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,7 +20,11 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.core.Tag +import io.kotest.core.extensions.install import io.kotest.core.spec.style.ShouldSpec +import io.kotest.extensions.testcontainers.TestContainerExtension +import io.kotest.extensions.testcontainers.kafka.createProducer +import io.kotest.extensions.testcontainers.kafka.createStringStringProducer import io.kotest.matchers.collections.shouldBeIn import org.apache.kafka.clients.consumer.ConsumerConfig import org.apache.kafka.clients.consumer.ConsumerRecord @@ -32,27 +36,33 @@ import org.apache.spark.streaming.kafka010.ConsumerStrategies import org.apache.spark.streaming.kafka010.KafkaUtils import org.apache.spark.streaming.kafka010.LocationStrategies import org.jetbrains.kotlinx.spark.api.tuples.* +import org.testcontainers.containers.KafkaContainer +import org.testcontainers.utility.DockerImageName import java.io.Serializable +import java.util.concurrent.TimeUnit object Kafka : Tag() class KafkaStreamingTest : ShouldSpec({ - // making sure it can be skipped on Github actions since it times out tags(Kafka) + val kafka = install(TestContainerExtension(KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.4")))) { + withEmbeddedZookeeper() + withEnv("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "true") + } + context("kafka") { - val port = 9092 - val broker = "localhost:$port" val topic1 = "test1" val topic2 = "test2" - val kafkaListener = EmbeddedKafkaListener(port) - listener(kafkaListener) should("support kafka streams") { - val producer = kafkaListener.stringStringProducer() - producer.send(ProducerRecord(topic1, "Hello this is a test test test")) - producer.send(ProducerRecord(topic2, "This is also also a test test something")) + val producer = kafka.createStringStringProducer() + listOf( + producer.send(ProducerRecord(topic1, "Hello this is a test test test")), + producer.send(ProducerRecord(topic2, "This is also also a test test something")), + ) + .map { it.get(10, TimeUnit.SECONDS) } producer.close() withSparkStreaming( @@ -62,7 +72,7 @@ class KafkaStreamingTest : ShouldSpec({ ) { val kafkaParams: Map = mapOf( - ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to broker, + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to kafka.bootstrapServers, ConsumerConfig.GROUP_ID_CONFIG to "consumer-group", ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, @@ -113,4 +123,4 @@ class KafkaStreamingTest : ShouldSpec({ } } -}) +}) \ No newline at end of file diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt index 4238cd78..f450f65d 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,7 +24,6 @@ import io.kotest.extensions.allure.AllureTestReporter @Suppress("unused") object ProjectConfig : AbstractProjectConfig() { - override fun listeners() = super.listeners() + AllureTestReporter(true) - override fun extensions() = super.extensions() + AllureTestReporter(true) + } diff --git a/pom.xml b/pom.xml index 65c544e1..c102d72c 100644 --- a/pom.xml +++ b/pom.xml @@ -10,34 +10,35 @@ pom - 1.6.21 - 1.6.10 0.17.0 - 5.2.3 - 1.1.0 + 1.6.10 3.1.0 - 3.2.1 + 3.3.1 + 1.1.0 + 1.3.1 + 5.2.3 0.11.0-83 + 1.6.21 0.7.5 - 3.3.1 + 3.2.1 2.10.0 0.8.7 5.5 + official 2.0.0 3.3.0 - 3.0.1 + 3.10.1 3.0.0-M1 3.0.0-M3 + 3.0.1 3.2.0 3.9.1 3.2.1 - 3.10.1 3.0.0-M6 1.6.8 4.5.6 - official From 48e9af421027962edd8e0f3dfe1605ae98c0259f Mon Sep 17 00:00:00 2001 From: Pasha Finkelshteyn Date: Thu, 12 May 2022 13:40:16 +0300 Subject: [PATCH 204/213] Some more attempts to fix tests --- .../kotlinx/spark/api/KafkaStreamingTest.kt | 72 +++++++++---------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt index eb413706..bec597a5 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt @@ -21,11 +21,11 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.core.Tag import io.kotest.core.extensions.install -import io.kotest.core.spec.style.ShouldSpec +import io.kotest.core.spec.style.FunSpec import io.kotest.extensions.testcontainers.TestContainerExtension -import io.kotest.extensions.testcontainers.kafka.createProducer +import io.kotest.extensions.testcontainers.kafka.createStringStringConsumer import io.kotest.extensions.testcontainers.kafka.createStringStringProducer -import io.kotest.matchers.collections.shouldBeIn +import io.kotest.matchers.collections.shouldContain import org.apache.kafka.clients.consumer.ConsumerConfig import org.apache.kafka.clients.consumer.ConsumerRecord import org.apache.kafka.clients.producer.ProducerRecord @@ -38,37 +38,36 @@ import org.apache.spark.streaming.kafka010.LocationStrategies import org.jetbrains.kotlinx.spark.api.tuples.* import org.testcontainers.containers.KafkaContainer import org.testcontainers.utility.DockerImageName +import scala.Tuple3 import java.io.Serializable -import java.util.concurrent.TimeUnit +import java.time.Duration object Kafka : Tag() -class KafkaStreamingTest : ShouldSpec({ - - tags(Kafka) - val kafka = install(TestContainerExtension(KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:6.2.4")))) { - withEmbeddedZookeeper() - withEnv("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "true") - } +class KafkaStreamingTest : FunSpec() { + init { + tags(Kafka) - context("kafka") { - val topic1 = "test1" - val topic2 = "test2" + val kafka = + install(TestContainerExtension(KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.0.1")))) { + withEmbeddedZookeeper() +// withEnv("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "true") + } + println(kafka.bootstrapServers) + test("Streaming should support kafka") { + val topic1 = "test1" + val topic2 = "test2" - should("support kafka streams") { - val producer = kafka.createStringStringProducer() - listOf( - producer.send(ProducerRecord(topic1, "Hello this is a test test test")), - producer.send(ProducerRecord(topic2, "This is also also a test test something")), - ) - .map { it.get(10, TimeUnit.SECONDS) } - producer.close() + val producer = autoClose(kafka.createStringStringProducer()) + producer.send(ProducerRecord(topic1, "Hello this is a test test test")) + producer.send(ProducerRecord(topic2, "This is also also a test test something")) withSparkStreaming( - batchDuration = Durations.seconds(2), + batchDuration = Durations.milliseconds(1000), appName = "KotlinDirectKafkaWordCount", - timeout = 1000L, + timeout = 10000L, + master = "local" ) { val kafkaParams: Map = mapOf( @@ -77,19 +76,18 @@ class KafkaStreamingTest : ShouldSpec({ ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, ) - // Create direct kafka stream with brokers and topics val messages: JavaInputDStream> = KafkaUtils.createDirectStream( ssc, - LocationStrategies.PreferConsistent(), + LocationStrategies.PreferBrokers(), ConsumerStrategies.Subscribe(setOf(topic1, topic2), kafkaParams), ) // Get the lines, split them into words, count the words and print - val lines = messages.map { it.topic() X it.value() } - val words = lines.flatMapValues { it.split(" ").iterator() } - val wordCounts = words + val wordCounts = messages + .map { it.topic() X it.value() } + .flatMapValues { it.split(" ").iterator() } .map { t(it, 1) } .reduceByKey { a: Int, b: Int -> a + b } .map { (tup, counter) -> tup + counter } @@ -111,16 +109,18 @@ class KafkaStreamingTest : ShouldSpec({ "something" X 1, ) ) - + val data = arrayListOf>>() wordCounts.foreachRDD { rdd, _ -> - rdd.foreach { (topic, word, count) -> - t(word, count).shouldBeIn(collection = resultLists[topic]!!) + data.add(rdd.collect()) + } + ssc.awaitTerminationOrTimeout(10000) + resultLists.forEach { (topic, tuples) -> + tuples.forEach { (word, count) -> + data shouldContain t(topic, word, count) } } - - wordCounts.print() } - } + } } -}) \ No newline at end of file +} \ No newline at end of file From 07b8b6135c3cbd1990f22b1229d250c2d57e66a2 Mon Sep 17 00:00:00 2001 From: Pasha Finkelshteyn Date: Thu, 12 May 2022 14:42:57 +0300 Subject: [PATCH 205/213] Fixes test Signed-off-by: Pasha Finkelshteyn --- .../kotlinx/spark/api/KafkaStreamingTest.kt | 72 ++++++++++--------- 1 file changed, 38 insertions(+), 34 deletions(-) diff --git a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt index bec597a5..fd0b12f7 100644 --- a/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt +++ b/kotlin-spark-api/3.2/src/test/kotlin/org/jetbrains/kotlinx/spark/api/KafkaStreamingTest.kt @@ -26,6 +26,7 @@ import io.kotest.extensions.testcontainers.TestContainerExtension import io.kotest.extensions.testcontainers.kafka.createStringStringConsumer import io.kotest.extensions.testcontainers.kafka.createStringStringProducer import io.kotest.matchers.collections.shouldContain +import io.kotest.matchers.collections.shouldContainAll import org.apache.kafka.clients.consumer.ConsumerConfig import org.apache.kafka.clients.consumer.ConsumerRecord import org.apache.kafka.clients.producer.ProducerRecord @@ -49,29 +50,51 @@ class KafkaStreamingTest : FunSpec() { tags(Kafka) - val kafka = - install(TestContainerExtension(KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.0.1")))) { - withEmbeddedZookeeper() -// withEnv("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "true") - } + val kafka = install( + TestContainerExtension(KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:7.0.1"))) + ) { + withEmbeddedZookeeper() + withEnv("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "true") + } println(kafka.bootstrapServers) test("Streaming should support kafka") { val topic1 = "test1" val topic2 = "test2" - val producer = autoClose(kafka.createStringStringProducer()) - producer.send(ProducerRecord(topic1, "Hello this is a test test test")) - producer.send(ProducerRecord(topic2, "This is also also a test test something")) + val resultLists = mapOf( + topic1 to listOf( + "Hello" X 1, + "this" X 1, + "is" X 1, + "a" X 1, + "test" X 3, + ), + topic2 to listOf( + "This" X 1, + "is" X 1, + "also" X 2, + "a" X 1, + "test" X 2, + "something" X 1, + ) + ) + val data = arrayListOf>>() withSparkStreaming( batchDuration = Durations.milliseconds(1000), appName = "KotlinDirectKafkaWordCount", - timeout = 10000L, + timeout = 10_000L, master = "local" ) { + setRunAfterStart { + val producer = autoClose(kafka.createStringStringProducer()) + producer.send(ProducerRecord(topic1, "Hello this is a test test test")) + producer.send(ProducerRecord(topic2, "This is also also a test test something")) + } + val kafkaParams: Map = mapOf( - ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to kafka.bootstrapServers, + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG to "${kafka.host}:${kafka.getMappedPort(KafkaContainer.KAFKA_PORT)}", ConsumerConfig.GROUP_ID_CONFIG to "consumer-group", ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG to StringDeserializer::class.java, @@ -92,35 +115,16 @@ class KafkaStreamingTest : FunSpec() { .reduceByKey { a: Int, b: Int -> a + b } .map { (tup, counter) -> tup + counter } - val resultLists = mapOf( - topic1 to listOf( - "Hello" X 1, - "this" X 1, - "is" X 1, - "a" X 1, - "test" X 3, - ), - topic2 to listOf( - "This" X 1, - "is" X 1, - "also" X 2, - "a" X 1, - "test" X 2, - "something" X 1, - ) - ) - val data = arrayListOf>>() + wordCounts.foreachRDD { rdd, _ -> data.add(rdd.collect()) } - ssc.awaitTerminationOrTimeout(10000) - resultLists.forEach { (topic, tuples) -> - tuples.forEach { (word, count) -> - data shouldContain t(topic, word, count) - } - } } + val resultList = resultLists.flatMap { (topic, tuples) -> + tuples.map { it.prependedBy(topic) } + } + data.flatten() shouldContainAll resultList } } } \ No newline at end of file From bd36681627c369aa2fdbbe00a033647e2da68b78 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Thu, 12 May 2022 14:41:26 +0200 Subject: [PATCH 206/213] added dokka to jupyter module, updated readme --- README.md | 4 ++++ jupyter/pom.xml | 24 ++++++++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/README.md b/README.md index 1481b7e8..e2a24e29 100644 --- a/README.md +++ b/README.md @@ -103,6 +103,10 @@ This does not start a Spark session right away, meaning you can call `withSparkS in whichever cell you want. Check out the [example](examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples/streaming/JupyterStreamingExample.ipynb). +NOTE: You need `kotlin-jupyter-kernel` to be at least version 0.11.0.83 for the Kotlin Spark API to work. Also, if the +`%use spark` magic does not output "Spark session has been started...", and `%use spark-streaming` doesn't work at all, +add `%useLatestDescriptors` above it. + ## Kotlin for Apache Spark features ### Creating a SparkSession in Kotlin diff --git a/jupyter/pom.xml b/jupyter/pom.xml index 69473488..92c2b038 100644 --- a/jupyter/pom.xml +++ b/jupyter/pom.xml @@ -98,6 +98,30 @@ src/test/kotlin target/${scala.compat.version} + + org.jetbrains.dokka + dokka-maven-plugin + ${dokka.version} + + 8 + + + + dokka + + dokka + + pre-site + + + javadocjar + + javadocJar + + pre-integration-test + + + org.jetbrains.kotlin kotlin-maven-plugin From a841611b3a9f2101b4ee5878dea7b544f68d31d2 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 16 May 2022 22:34:26 +0200 Subject: [PATCH 207/213] fixed readme examples, fixed scala printlns not showing up the second time in jupyter cells, working on streams being allowed to be interrupted in jupyter --- README.md | 2 +- .../kotlinx/spark/api/jupyter/Integration.kt | 86 ++++++++++-------- .../spark/api/jupyter/SparkIntegration.kt | 3 + .../api/jupyter/SparkStreamingIntegration.kt | 89 +++++++++++++++++++ .../kotlinx/spark/api/jupyter/JupyterTests.kt | 23 +++-- .../kotlinx/spark/api/SparkSession.kt | 62 +++++++------ 6 files changed, 197 insertions(+), 68 deletions(-) diff --git a/README.md b/README.md index e2a24e29..843cfbb9 100644 --- a/README.md +++ b/README.md @@ -271,7 +271,7 @@ For more information, check the [wiki](https://github.com/JetBrains/kotlin-spark ## Examples -For more, check out [examples](https://github.com/JetBrains/kotlin-spark-api/tree/master/examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples) module. +For more, check out [examples](examples/src/main/kotlin/org/jetbrains/kotlinx/spark/examples) module. To get up and running quickly, check out this [tutorial](https://github.com/JetBrains/kotlin-spark-api/wiki/Quick-Start-Guide). ## Reporting issues/Support diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 0b2a8306..9c7509b1 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -22,6 +22,7 @@ package org.jetbrains.kotlinx.spark.api.jupyter import org.apache.spark.api.java.JavaRDDLike import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset +import org.jetbrains.kotlinx.jupyter.api.FieldValue import org.jetbrains.kotlinx.jupyter.api.HTML import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration @@ -33,50 +34,65 @@ abstract class Integration : JupyterIntegration() { private val scalaVersion = "2.12.15" private val spark3Version = "3.2.1" + /** + * Will be run after importing all dependencies + */ abstract fun KotlinKernelHost.onLoaded() - override fun Builder.onLoaded() { + abstract fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) + + open val dependencies: Array = arrayOf( + "org.apache.spark:spark-repl_$scalaCompatVersion:$spark3Version", + "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion", + "org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion", + "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-mllib_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-graphx_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-launcher_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-catalyst_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", + "org.apache.spark:spark-core_$scalaCompatVersion:$spark3Version", + "org.scala-lang:scala-library:$scalaVersion", + "org.scala-lang.modules:scala-xml_$scalaCompatVersion:2.0.1", + "org.scala-lang:scala-reflect:$scalaVersion", + "org.scala-lang:scala-compiler:$scalaVersion", + "commons-io:commons-io:2.11.0", + ) - dependencies( - "org.apache.spark:spark-repl_$scalaCompatVersion:$spark3Version", - "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlinVersion", - "org.jetbrains.kotlin:kotlin-reflect:$kotlinVersion", - "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-mllib_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-sql_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-graphx_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-launcher_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-catalyst_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-streaming_$scalaCompatVersion:$spark3Version", - "org.apache.spark:spark-core_$scalaCompatVersion:$spark3Version", - "org.scala-lang:scala-library:$scalaVersion", - "org.scala-lang.modules:scala-xml_$scalaCompatVersion:2.0.1", - "org.scala-lang:scala-reflect:$scalaVersion", - "org.scala-lang:scala-compiler:$scalaVersion", - "commons-io:commons-io:2.11.0", - ) + open val imports: Array = arrayOf( + "org.jetbrains.kotlinx.spark.api.*", + "org.jetbrains.kotlinx.spark.api.tuples.*", + *(1..22).map { "scala.Tuple$it" }.toTypedArray(), + "org.apache.spark.sql.functions.*", + "org.apache.spark.*", + "org.apache.spark.sql.*", + "org.apache.spark.api.java.*", + "scala.collection.Seq", + "org.apache.spark.rdd.*", + "java.io.Serializable", + "org.apache.spark.streaming.api.java.*", + "org.apache.spark.streaming.api.*", + "org.apache.spark.streaming.*", + ) - import( - "org.jetbrains.kotlinx.spark.api.*", - "org.jetbrains.kotlinx.spark.api.tuples.*", - *(1..22).map { "scala.Tuple$it" }.toTypedArray(), - "org.apache.spark.sql.functions.*", - "org.apache.spark.*", - "org.apache.spark.sql.*", - "org.apache.spark.api.java.*", - "scala.collection.Seq", - "org.apache.spark.rdd.*", - "java.io.Serializable", - "org.apache.spark.streaming.api.java.*", - "org.apache.spark.streaming.api.*", - "org.apache.spark.streaming.*", - ) + override fun Builder.onLoaded() { + dependencies(*dependencies) + import(*imports) onLoaded { onLoaded() } + beforeCellExecution { + execute("""scala.Console.setOut(System.out)""") + } + + afterCellExecution { snippetInstance, result -> + afterCellExecution(snippetInstance, result) + } + // Render Dataset render> { HTML(it.toHtml()) diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index 635ed654..2e4c987c 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -21,6 +21,7 @@ package org.jetbrains.kotlinx.spark.api.jupyter import org.intellij.lang.annotations.Language +import org.jetbrains.kotlinx.jupyter.api.FieldValue import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost /** @@ -68,4 +69,6 @@ internal class SparkIntegration : Integration() { val udf: UDFRegistration get() = spark.udf()""".trimIndent(), ).map(::execute) } + + override fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) = Unit } diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt index 1684769b..ba4099b5 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt @@ -33,6 +33,8 @@ import java.io.InputStreamReader import org.apache.spark.* +import org.apache.spark.streaming.api.java.JavaStreamingContext +import org.jetbrains.kotlinx.jupyter.api.FieldValue import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost import scala.collection.* import org.jetbrains.kotlinx.spark.api.SparkSession @@ -48,13 +50,100 @@ import scala.collection.Iterator as ScalaIterator @OptIn(ExperimentalStdlibApi::class) internal class SparkStreamingIntegration : Integration() { + override val imports: Array = super.imports + arrayOf( + "org.apache.spark.deploy.SparkHadoopUtil", + "org.apache.hadoop.conf.Configuration", + ) + override fun KotlinKernelHost.onLoaded() { val _0 = execute("""%dumpClassesForSpark""") @Language("kts") val _1 = listOf( + """ + val sscCollection = mutableSetOf() + """.trimIndent(), + """ + @JvmOverloads + fun withSparkStreaming( + batchDuration: Duration = Durations.seconds(1L), + checkpointPath: String? = null, + hadoopConf: Configuration = SparkHadoopUtil.get().conf(), + createOnError: Boolean = false, + props: Map = emptyMap(), + master: String = SparkConf().get("spark.master", "local[*]"), + appName: String = "Kotlin Spark Sample", + timeout: Long = -1L, + startStreamingContext: Boolean = true, + func: KSparkStreamingSession.() -> Unit, + ) { + var ssc: JavaStreamingContext? = null + try { + + // will only be set when a new context is created + var kSparkStreamingSession: KSparkStreamingSession? = null + + val creatingFunc = { + val sc = SparkConf() + .setAppName(appName) + .setMaster(master) + .setAll( + props + .map { (key, value) -> key X value.toString() } + .asScalaIterable() + ) + + val ssc1 = JavaStreamingContext(sc, batchDuration) + ssc1.checkpoint(checkpointPath) + + kSparkStreamingSession = KSparkStreamingSession(ssc1) + func(kSparkStreamingSession!!) + + ssc1 + } + + ssc = when { + checkpointPath != null -> + JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) + + else -> creatingFunc() + } + + sscCollection += ssc!! + + if (startStreamingContext) { + ssc!!.start() + kSparkStreamingSession?.invokeRunAfterStart() + } + ssc!!.awaitTerminationOrTimeout(timeout) + } finally { + ssc?.stop() + println("stopping ssc") + ssc?.awaitTermination() + println("ssc stopped") + ssc?.let(sscCollection::remove) + } + } + """.trimIndent(), """ println("To start a spark streaming session, simply use `withSparkStreaming { }` inside a cell. To use Spark normally, use `withSpark { }` in a cell, or use `%use spark` to start a Spark session for the whole notebook.")""".trimIndent(), ).map(::execute) } + + override fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) { + + @Language("kts") + val _1 = listOf( + """ + while (sscCollection.isNotEmpty()) + sscCollection.first().let { + it.stop() + sscCollection.remove(it) + } + """.trimIndent(), + """ + println("afterCellExecution cleanup!") + """.trimIndent() + ).map(::execute) + } } diff --git a/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt index 2f35bee4..34b85483 100644 --- a/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt +++ b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt @@ -30,6 +30,7 @@ import io.kotest.matchers.types.shouldBeInstanceOf import jupyter.kotlin.DependsOn import org.apache.spark.api.java.JavaSparkContext import org.apache.spark.streaming.Duration +import org.apache.spark.streaming.api.java.JavaStreamingContext import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.EvalRequestData import org.jetbrains.kotlinx.jupyter.ReplForJupyter @@ -155,16 +156,19 @@ class JupyterTests : ShouldSpec({ should("render JavaRDDs with custom class") { @Language("kts") - val klass = exec(""" + val klass = exec( + """ data class Test( val longFirstName: String, val second: LongArray, val somethingSpecial: Map, ): Serializable - """.trimIndent()) + """.trimIndent() + ) @Language("kts") - val html = execHtml(""" + val html = execHtml( + """ val rdd = sc.parallelize( listOf( Test("aaaaaaaaa", longArrayOf(1L, 100000L, 24L), mapOf(1 to "one", 2 to "two")), @@ -246,8 +250,10 @@ class JupyterStreamingTests : ShouldSpec({ host = this, integrationTypeNameRules = listOf( PatternNameAcceptanceRule(false, "org.jetbrains.kotlinx.spark.api.jupyter.**"), - PatternNameAcceptanceRule(true, - "org.jetbrains.kotlinx.spark.api.jupyter.SparkStreamingIntegration"), + PatternNameAcceptanceRule( + true, + "org.jetbrains.kotlinx.spark.api.jupyter.SparkStreamingIntegration" + ), ), ) } @@ -263,6 +269,13 @@ class JupyterStreamingTests : ShouldSpec({ context("Jupyter") { withRepl { + should("Have sscCollection instance") { + + @Language("kts") + val sscCollection = exec("""sscCollection""") + sscCollection as? MutableSet shouldNotBe null + } + should("Not have spark instance") { shouldThrowAny { @Language("kts") diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 27513ffc..2460e073 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -308,42 +308,50 @@ fun withSparkStreaming( startStreamingContext: Boolean = true, func: KSparkStreamingSession.() -> Unit, ) { + var ssc: JavaStreamingContext? = null + try { - // will only be set when a new context is created - var kSparkStreamingSession: KSparkStreamingSession? = null + // will only be set when a new context is created + var kSparkStreamingSession: KSparkStreamingSession? = null - val creatingFunc = { - val sc = SparkConf() - .setAppName(appName) - .setMaster(master) - .setAll( - props - .map { (key, value) -> key X value.toString() } - .asScalaIterable() - ) + val creatingFunc = { + val sc = SparkConf() + .setAppName(appName) + .setMaster(master) + .setAll( + props + .map { (key, value) -> key X value.toString() } + .asScalaIterable() + ) - val ssc = JavaStreamingContext(sc, batchDuration) - ssc.checkpoint(checkpointPath) + val ssc = JavaStreamingContext(sc, batchDuration) + ssc.checkpoint(checkpointPath) - kSparkStreamingSession = KSparkStreamingSession(ssc) - func(kSparkStreamingSession!!) + kSparkStreamingSession = KSparkStreamingSession(ssc) + func(kSparkStreamingSession!!) - ssc - } + ssc + } - val ssc = when { - checkpointPath != null -> - JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) + ssc = when { + checkpointPath != null -> + JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) - else -> creatingFunc() - } + else -> creatingFunc() + } - if (startStreamingContext) { - ssc.start() - kSparkStreamingSession?.invokeRunAfterStart() + if (startStreamingContext) { + ssc!!.start() + kSparkStreamingSession?.invokeRunAfterStart() + } + ssc!!.awaitTerminationOrTimeout(timeout) + } finally { + // TODO remove printlns + ssc?.stop() + println("stopping ssc") + ssc?.awaitTermination() + println("ssc stopped") } - ssc.awaitTerminationOrTimeout(timeout) - ssc.stop() } From 1c05f55e44e468b81cc3bfbeefd43fbe9fd5e60d Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Tue, 17 May 2022 14:51:19 +0200 Subject: [PATCH 208/213] figured we cannot fix this without onInterrupt {}, reverted trials. Found broken jupyter streaming test --- .../kotlinx/spark/api/jupyter/Integration.kt | 6 + .../spark/api/jupyter/SparkIntegration.kt | 4 + .../api/jupyter/SparkStreamingIntegration.kt | 180 ++++++++---------- .../kotlinx/spark/api/jupyter/JupyterTests.kt | 63 +++--- .../kotlinx/spark/api/SparkSession.kt | 64 +++---- 5 files changed, 155 insertions(+), 162 deletions(-) diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 9c7509b1..24ae04ce 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -39,6 +39,8 @@ abstract class Integration : JupyterIntegration() { */ abstract fun KotlinKernelHost.onLoaded() + abstract fun KotlinKernelHost.onShutdown() + abstract fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) open val dependencies: Array = arrayOf( @@ -93,6 +95,10 @@ abstract class Integration : JupyterIntegration() { afterCellExecution(snippetInstance, result) } + onShutdown { + onShutdown() + } + // Render Dataset render> { HTML(it.toHtml()) diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index 2e4c987c..a3ec6dc5 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -70,5 +70,9 @@ internal class SparkIntegration : Integration() { ).map(::execute) } + override fun KotlinKernelHost.onShutdown() { + execute("""spark.stop()""") + } + override fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) = Unit } diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt index ba4099b5..3ca3322c 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt @@ -19,29 +19,10 @@ */ package org.jetbrains.kotlinx.spark.api.jupyter -import kotlinx.html.* -import kotlinx.html.stream.appendHTML -import org.apache.spark.api.java.JavaRDDLike -import org.apache.spark.rdd.RDD -import org.apache.spark.sql.Dataset -import org.apache.spark.unsafe.array.ByteArrayMethods -import org.intellij.lang.annotations.Language -import org.jetbrains.kotlinx.jupyter.api.HTML -import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration -import org.jetbrains.kotlinx.spark.api.* -import java.io.InputStreamReader - -import org.apache.spark.* -import org.apache.spark.streaming.api.java.JavaStreamingContext +import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.api.FieldValue import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost -import scala.collection.* -import org.jetbrains.kotlinx.spark.api.SparkSession -import scala.Product -import java.io.Serializable -import scala.collection.Iterable as ScalaIterable -import scala.collection.Iterator as ScalaIterator /** * %use spark-streaming @@ -60,90 +41,87 @@ internal class SparkStreamingIntegration : Integration() { @Language("kts") val _1 = listOf( - """ - val sscCollection = mutableSetOf() - """.trimIndent(), - """ - @JvmOverloads - fun withSparkStreaming( - batchDuration: Duration = Durations.seconds(1L), - checkpointPath: String? = null, - hadoopConf: Configuration = SparkHadoopUtil.get().conf(), - createOnError: Boolean = false, - props: Map = emptyMap(), - master: String = SparkConf().get("spark.master", "local[*]"), - appName: String = "Kotlin Spark Sample", - timeout: Long = -1L, - startStreamingContext: Boolean = true, - func: KSparkStreamingSession.() -> Unit, - ) { - var ssc: JavaStreamingContext? = null - try { - - // will only be set when a new context is created - var kSparkStreamingSession: KSparkStreamingSession? = null - - val creatingFunc = { - val sc = SparkConf() - .setAppName(appName) - .setMaster(master) - .setAll( - props - .map { (key, value) -> key X value.toString() } - .asScalaIterable() - ) - - val ssc1 = JavaStreamingContext(sc, batchDuration) - ssc1.checkpoint(checkpointPath) - - kSparkStreamingSession = KSparkStreamingSession(ssc1) - func(kSparkStreamingSession!!) - - ssc1 - } - - ssc = when { - checkpointPath != null -> - JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) - - else -> creatingFunc() - } - - sscCollection += ssc!! - - if (startStreamingContext) { - ssc!!.start() - kSparkStreamingSession?.invokeRunAfterStart() - } - ssc!!.awaitTerminationOrTimeout(timeout) - } finally { - ssc?.stop() - println("stopping ssc") - ssc?.awaitTermination() - println("ssc stopped") - ssc?.let(sscCollection::remove) - } - } - """.trimIndent(), +// For when onInterrupt is implemented in the Jupyter kernel +// """ +// val sscCollection = mutableSetOf() +// """.trimIndent(), +// """ +// @JvmOverloads +// fun withSparkStreaming( +// batchDuration: Duration = Durations.seconds(1L), +// checkpointPath: String? = null, +// hadoopConf: Configuration = SparkHadoopUtil.get().conf(), +// createOnError: Boolean = false, +// props: Map = emptyMap(), +// master: String = SparkConf().get("spark.master", "local[*]"), +// appName: String = "Kotlin Spark Sample", +// timeout: Long = -1L, +// startStreamingContext: Boolean = true, +// func: KSparkStreamingSession.() -> Unit, +// ) { +// +// // will only be set when a new context is created +// var kSparkStreamingSession: KSparkStreamingSession? = null +// +// val creatingFunc = { +// val sc = SparkConf() +// .setAppName(appName) +// .setMaster(master) +// .setAll( +// props +// .map { (key, value) -> key X value.toString() } +// .asScalaIterable() +// ) +// +// val ssc = JavaStreamingContext(sc, batchDuration) +// ssc.checkpoint(checkpointPath) +// +// kSparkStreamingSession = KSparkStreamingSession(ssc) +// func(kSparkStreamingSession!!) +// +// ssc +// } +// +// val ssc = when { +// checkpointPath != null -> +// JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) +// +// else -> creatingFunc() +// } +// sscCollection += ssc +// +// if (startStreamingContext) { +// ssc.start() +// kSparkStreamingSession?.invokeRunAfterStart() +// } +// ssc.awaitTerminationOrTimeout(timeout) +// ssc.stop() +// } +// """.trimIndent(), """ println("To start a spark streaming session, simply use `withSparkStreaming { }` inside a cell. To use Spark normally, use `withSpark { }` in a cell, or use `%use spark` to start a Spark session for the whole notebook.")""".trimIndent(), ).map(::execute) } - override fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) { - - @Language("kts") - val _1 = listOf( - """ - while (sscCollection.isNotEmpty()) - sscCollection.first().let { - it.stop() - sscCollection.remove(it) - } - """.trimIndent(), - """ - println("afterCellExecution cleanup!") - """.trimIndent() - ).map(::execute) - } + override fun KotlinKernelHost.onShutdown() = Unit + + override fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) = Unit + +// For when this feature is implemented in the Jupyter kernel +// override fun KotlinKernelHost.onInterrupt() { +// +// @Language("kts") +// val _1 = listOf( +// """ +// while (sscCollection.isNotEmpty()) +// sscCollection.first().let { +// it.stop() +// sscCollection.remove(it) +// } +// """.trimIndent(), +// """ +// println("onInterrupt cleanup!") +// """.trimIndent() +// ).map(::execute) +// } } diff --git a/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt index 34b85483..d23d336f 100644 --- a/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt +++ b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt @@ -21,7 +21,6 @@ package org.jetbrains.kotlinx.spark.api.jupyter import io.kotest.assertions.throwables.shouldThrowAny import io.kotest.core.spec.style.ShouldSpec -import io.kotest.matchers.collections.shouldBeIn import io.kotest.matchers.nulls.shouldNotBeNull import io.kotest.matchers.shouldBe import io.kotest.matchers.shouldNotBe @@ -29,7 +28,6 @@ import io.kotest.matchers.string.shouldContain import io.kotest.matchers.types.shouldBeInstanceOf import jupyter.kotlin.DependsOn import org.apache.spark.api.java.JavaSparkContext -import org.apache.spark.streaming.Duration import org.apache.spark.streaming.api.java.JavaStreamingContext import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.EvalRequestData @@ -41,11 +39,8 @@ import org.jetbrains.kotlinx.jupyter.libraries.EmptyResolutionInfoProvider import org.jetbrains.kotlinx.jupyter.repl.EvalResultEx import org.jetbrains.kotlinx.jupyter.testkit.ReplProvider import org.jetbrains.kotlinx.jupyter.util.PatternNameAcceptanceRule -import org.jetbrains.kotlinx.spark.api.tuples.* -import org.jetbrains.kotlinx.spark.api.* -import scala.Tuple2 +import org.jetbrains.kotlinx.spark.api.SparkSession import java.io.Serializable -import java.util.* import kotlin.script.experimental.jvm.util.classpathFromClassloader class JupyterTests : ShouldSpec({ @@ -269,7 +264,8 @@ class JupyterStreamingTests : ShouldSpec({ context("Jupyter") { withRepl { - should("Have sscCollection instance") { + // For when onInterrupt is implemented in the Jupyter kernel + xshould("Have sscCollection instance") { @Language("kts") val sscCollection = exec("""sscCollection""") @@ -292,29 +288,46 @@ class JupyterStreamingTests : ShouldSpec({ } } - should("stream") { - val input = listOf("aaa", "bbb", "aaa", "ccc") - val counter = Counter(0) + xshould("stream") { - withSparkStreaming(Duration(10), timeout = 1000) { - - val (counterBroadcast, queue) = withSpark(ssc) { - spark.broadcast(counter) X LinkedList(listOf(sc.parallelize(input))) - } - - val inputStream = ssc.queueStream(queue) - - inputStream.foreachRDD { rdd, _ -> - withSpark(rdd) { - rdd.toDS().forEach { - it shouldBeIn input - counterBroadcast.value.value++ + @Language("kts") + val value = exec( + """ + import java.util.LinkedList + import org.apache.spark.api.java.function.ForeachFunction + import org.apache.spark.util.LongAccumulator + + + val input = arrayListOf("aaa", "bbb", "aaa", "ccc") + + @Volatile + var counter: LongAccumulator? = null + + withSparkStreaming(Duration(10), timeout = 1_000) { + + val queue = withSpark(ssc) { + LinkedList(listOf(sc.parallelize(input))) + } + + val inputStream = ssc.queueStream(queue) + + inputStream.foreachRDD { rdd, _ -> + withSpark(rdd) { + if (counter == null) + counter = sc.sc().longAccumulator() + + rdd.toDS().showDS().forEach { + if (it !in input) error(it + " should be in input") + counter!!.add(1L) + } } } } - } + counter!!.sum() + """.trimIndent() + ) as Long - counter.value shouldBe input.size + value shouldBe 4L } } diff --git a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt index 2460e073..652e52b7 100644 --- a/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt +++ b/kotlin-spark-api/3.2/src/main/kotlin/org/jetbrains/kotlinx/spark/api/SparkSession.kt @@ -113,7 +113,7 @@ class KSparkStreamingSession(@Transient val ssc: JavaStreamingContext) : Seriali runAfterStart = block } - internal fun invokeRunAfterStart(): Unit = runAfterStart() + fun invokeRunAfterStart(): Unit = runAfterStart() /** Creates new spark session from given [sc]. */ @@ -308,50 +308,42 @@ fun withSparkStreaming( startStreamingContext: Boolean = true, func: KSparkStreamingSession.() -> Unit, ) { - var ssc: JavaStreamingContext? = null - try { - // will only be set when a new context is created - var kSparkStreamingSession: KSparkStreamingSession? = null + // will only be set when a new context is created + var kSparkStreamingSession: KSparkStreamingSession? = null - val creatingFunc = { - val sc = SparkConf() - .setAppName(appName) - .setMaster(master) - .setAll( - props - .map { (key, value) -> key X value.toString() } - .asScalaIterable() - ) + val creatingFunc = { + val sc = SparkConf() + .setAppName(appName) + .setMaster(master) + .setAll( + props + .map { (key, value) -> key X value.toString() } + .asScalaIterable() + ) - val ssc = JavaStreamingContext(sc, batchDuration) - ssc.checkpoint(checkpointPath) + val ssc = JavaStreamingContext(sc, batchDuration) + ssc.checkpoint(checkpointPath) - kSparkStreamingSession = KSparkStreamingSession(ssc) - func(kSparkStreamingSession!!) + kSparkStreamingSession = KSparkStreamingSession(ssc) + func(kSparkStreamingSession!!) - ssc - } + ssc + } - ssc = when { - checkpointPath != null -> - JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) + val ssc = when { + checkpointPath != null -> + JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) - else -> creatingFunc() - } + else -> creatingFunc() + } - if (startStreamingContext) { - ssc!!.start() - kSparkStreamingSession?.invokeRunAfterStart() - } - ssc!!.awaitTerminationOrTimeout(timeout) - } finally { - // TODO remove printlns - ssc?.stop() - println("stopping ssc") - ssc?.awaitTermination() - println("ssc stopped") + if (startStreamingContext) { + ssc.start() + kSparkStreamingSession?.invokeRunAfterStart() } + ssc.awaitTerminationOrTimeout(timeout) + ssc.stop() } From e379ea75d2dece3927c5478c5909a5e65c5cdae7 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Wed, 18 May 2022 11:45:55 +0200 Subject: [PATCH 209/213] removed commented out code to jupyter-interrupted-fix branch --- .../api/jupyter/SparkStreamingIntegration.kt | 75 ------------------- .../kotlinx/spark/api/jupyter/JupyterTests.kt | 9 --- 2 files changed, 84 deletions(-) diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt index 3ca3322c..4982830c 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt @@ -41,63 +41,6 @@ internal class SparkStreamingIntegration : Integration() { @Language("kts") val _1 = listOf( -// For when onInterrupt is implemented in the Jupyter kernel -// """ -// val sscCollection = mutableSetOf() -// """.trimIndent(), -// """ -// @JvmOverloads -// fun withSparkStreaming( -// batchDuration: Duration = Durations.seconds(1L), -// checkpointPath: String? = null, -// hadoopConf: Configuration = SparkHadoopUtil.get().conf(), -// createOnError: Boolean = false, -// props: Map = emptyMap(), -// master: String = SparkConf().get("spark.master", "local[*]"), -// appName: String = "Kotlin Spark Sample", -// timeout: Long = -1L, -// startStreamingContext: Boolean = true, -// func: KSparkStreamingSession.() -> Unit, -// ) { -// -// // will only be set when a new context is created -// var kSparkStreamingSession: KSparkStreamingSession? = null -// -// val creatingFunc = { -// val sc = SparkConf() -// .setAppName(appName) -// .setMaster(master) -// .setAll( -// props -// .map { (key, value) -> key X value.toString() } -// .asScalaIterable() -// ) -// -// val ssc = JavaStreamingContext(sc, batchDuration) -// ssc.checkpoint(checkpointPath) -// -// kSparkStreamingSession = KSparkStreamingSession(ssc) -// func(kSparkStreamingSession!!) -// -// ssc -// } -// -// val ssc = when { -// checkpointPath != null -> -// JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) -// -// else -> creatingFunc() -// } -// sscCollection += ssc -// -// if (startStreamingContext) { -// ssc.start() -// kSparkStreamingSession?.invokeRunAfterStart() -// } -// ssc.awaitTerminationOrTimeout(timeout) -// ssc.stop() -// } -// """.trimIndent(), """ println("To start a spark streaming session, simply use `withSparkStreaming { }` inside a cell. To use Spark normally, use `withSpark { }` in a cell, or use `%use spark` to start a Spark session for the whole notebook.")""".trimIndent(), ).map(::execute) @@ -106,22 +49,4 @@ internal class SparkStreamingIntegration : Integration() { override fun KotlinKernelHost.onShutdown() = Unit override fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) = Unit - -// For when this feature is implemented in the Jupyter kernel -// override fun KotlinKernelHost.onInterrupt() { -// -// @Language("kts") -// val _1 = listOf( -// """ -// while (sscCollection.isNotEmpty()) -// sscCollection.first().let { -// it.stop() -// sscCollection.remove(it) -// } -// """.trimIndent(), -// """ -// println("onInterrupt cleanup!") -// """.trimIndent() -// ).map(::execute) -// } } diff --git a/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt index d23d336f..96d5d1fa 100644 --- a/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt +++ b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt @@ -28,7 +28,6 @@ import io.kotest.matchers.string.shouldContain import io.kotest.matchers.types.shouldBeInstanceOf import jupyter.kotlin.DependsOn import org.apache.spark.api.java.JavaSparkContext -import org.apache.spark.streaming.api.java.JavaStreamingContext import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.EvalRequestData import org.jetbrains.kotlinx.jupyter.ReplForJupyter @@ -264,14 +263,6 @@ class JupyterStreamingTests : ShouldSpec({ context("Jupyter") { withRepl { - // For when onInterrupt is implemented in the Jupyter kernel - xshould("Have sscCollection instance") { - - @Language("kts") - val sscCollection = exec("""sscCollection""") - sscCollection as? MutableSet shouldNotBe null - } - should("Not have spark instance") { shouldThrowAny { @Language("kts") From ab849909634b3a45eee5dd951e5dfbb162a14664 Mon Sep 17 00:00:00 2001 From: Jolan Rensen Date: Mon, 23 May 2022 11:55:25 +0200 Subject: [PATCH 210/213] Truncate and limit variables and onInterrupt stream cleanup for jupyter notebooks (1.1.0) (#158) * added truncate and limit variables for jupyter notebooks * added support for onInterrupt in spark streaming --- .../kotlinx/spark/api/jupyter/Integration.kt | 71 +++++++++++-- .../spark/api/jupyter/SparkIntegration.kt | 2 - .../api/jupyter/SparkStreamingIntegration.kt | 99 ++++++++++++++++++- .../kotlinx/spark/api/jupyter/JupyterTests.kt | 9 ++ pom.xml | 2 +- 5 files changed, 167 insertions(+), 16 deletions(-) diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt index 24ae04ce..19ddda50 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/Integration.kt @@ -22,10 +22,9 @@ package org.jetbrains.kotlinx.spark.api.jupyter import org.apache.spark.api.java.JavaRDDLike import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset -import org.jetbrains.kotlinx.jupyter.api.FieldValue -import org.jetbrains.kotlinx.jupyter.api.HTML -import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost +import org.jetbrains.kotlinx.jupyter.api.* import org.jetbrains.kotlinx.jupyter.api.libraries.JupyterIntegration +import kotlin.reflect.typeOf abstract class Integration : JupyterIntegration() { @@ -34,14 +33,25 @@ abstract class Integration : JupyterIntegration() { private val scalaVersion = "2.12.15" private val spark3Version = "3.2.1" + private val displayLimit = "DISPLAY_LIMIT" + private val displayLimitDefault = 20 + private val displayTruncate = "DISPLAY_TRUNCATE" + private val displayTruncateDefault = 30 + /** * Will be run after importing all dependencies */ - abstract fun KotlinKernelHost.onLoaded() + open fun KotlinKernelHost.onLoaded() = Unit + + open fun KotlinKernelHost.onShutdown() = Unit + + open fun KotlinKernelHost.onInterrupt() = Unit + + open fun KotlinKernelHost.beforeCellExecution() = Unit - abstract fun KotlinKernelHost.onShutdown() + open fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) = Unit - abstract fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) + open fun Builder.onLoadedAlsoDo() = Unit open val dependencies: Array = arrayOf( "org.apache.spark:spark-repl_$scalaCompatVersion:$spark3Version", @@ -84,32 +94,75 @@ abstract class Integration : JupyterIntegration() { import(*imports) onLoaded { + declare( + VariableDeclaration( + name = displayLimit, + value = displayLimitDefault, + type = typeOf(), + isMutable = true, + ), + VariableDeclaration( + name = displayTruncate, + value = displayTruncateDefault, + type = typeOf(), + isMutable = true, + ), + ) + onLoaded() } beforeCellExecution { execute("""scala.Console.setOut(System.out)""") + + beforeCellExecution() } afterCellExecution { snippetInstance, result -> afterCellExecution(snippetInstance, result) } + onInterrupt { + onInterrupt() + } + onShutdown { onShutdown() } + fun getLimitAndTruncate() = Pair( + notebook + .variablesState[displayLimit] + ?.value + ?.getOrNull() as? Int + ?: displayLimitDefault, + notebook + .variablesState[displayTruncate] + ?.value + ?.getOrNull() as? Int + ?: displayTruncateDefault + ) + + // Render Dataset render> { - HTML(it.toHtml()) + val (limit, truncate) = getLimitAndTruncate() + + HTML(it.toHtml(limit = limit, truncate = truncate)) } render> { - HTML(it.toJavaRDD().toHtml()) + val (limit, truncate) = getLimitAndTruncate() + + HTML(it.toJavaRDD().toHtml(limit = limit, truncate = truncate)) } render> { - HTML(it.toHtml()) + val (limit, truncate) = getLimitAndTruncate() + + HTML(it.toHtml(limit = limit, truncate = truncate)) } + + onLoadedAlsoDo() } } diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt index a3ec6dc5..e817b282 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkIntegration.kt @@ -73,6 +73,4 @@ internal class SparkIntegration : Integration() { override fun KotlinKernelHost.onShutdown() { execute("""spark.stop()""") } - - override fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) = Unit } diff --git a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt index 4982830c..441672d3 100644 --- a/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt +++ b/jupyter/src/main/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/SparkStreamingIntegration.kt @@ -20,15 +20,18 @@ package org.jetbrains.kotlinx.spark.api.jupyter +import org.apache.spark.streaming.StreamingContextState +import org.apache.spark.streaming.api.java.JavaStreamingContext import org.intellij.lang.annotations.Language -import org.jetbrains.kotlinx.jupyter.api.FieldValue import org.jetbrains.kotlinx.jupyter.api.KotlinKernelHost +import org.jetbrains.kotlinx.jupyter.api.VariableDeclaration +import org.jetbrains.kotlinx.jupyter.api.declare +import kotlin.reflect.typeOf /** * %use spark-streaming */ @Suppress("UNUSED_VARIABLE", "LocalVariableName") -@OptIn(ExperimentalStdlibApi::class) internal class SparkStreamingIntegration : Integration() { override val imports: Array = super.imports + arrayOf( @@ -36,17 +39,105 @@ internal class SparkStreamingIntegration : Integration() { "org.apache.hadoop.conf.Configuration", ) + private val sscCollection = mutableSetOf() + override fun KotlinKernelHost.onLoaded() { + + declare( + VariableDeclaration( + name = ::sscCollection.name, + value = sscCollection, + isMutable = false, + type = typeOf>(), + ) + ) + val _0 = execute("""%dumpClassesForSpark""") @Language("kts") val _1 = listOf( + """ + @JvmOverloads + fun withSparkStreaming( + batchDuration: Duration = Durations.seconds(1L), + checkpointPath: String? = null, + hadoopConf: Configuration = SparkHadoopUtil.get().conf(), + createOnError: Boolean = false, + props: Map = emptyMap(), + master: String = SparkConf().get("spark.master", "local[*]"), + appName: String = "Kotlin Spark Sample", + timeout: Long = -1L, + startStreamingContext: Boolean = true, + func: KSparkStreamingSession.() -> Unit, + ) { + + // will only be set when a new context is created + var kSparkStreamingSession: KSparkStreamingSession? = null + + val creatingFunc = { + val sc = SparkConf() + .setAppName(appName) + .setMaster(master) + .setAll( + props + .map { (key, value) -> key X value.toString() } + .asScalaIterable() + ) + + val ssc = JavaStreamingContext(sc, batchDuration) + ssc.checkpoint(checkpointPath) + + kSparkStreamingSession = KSparkStreamingSession(ssc) + func(kSparkStreamingSession!!) + + ssc + } + + val ssc = when { + checkpointPath != null -> + JavaStreamingContext.getOrCreate(checkpointPath, creatingFunc, hadoopConf, createOnError) + + else -> creatingFunc() + } + sscCollection += ssc + + if (startStreamingContext) { + ssc.start() + kSparkStreamingSession?.invokeRunAfterStart() + } + ssc.awaitTerminationOrTimeout(timeout) + ssc.stop() + } + """.trimIndent(), """ println("To start a spark streaming session, simply use `withSparkStreaming { }` inside a cell. To use Spark normally, use `withSpark { }` in a cell, or use `%use spark` to start a Spark session for the whole notebook.")""".trimIndent(), ).map(::execute) } - override fun KotlinKernelHost.onShutdown() = Unit + private fun cleanUp(e: Throwable): String { + while (sscCollection.isNotEmpty()) + sscCollection.first().let { + while (it.state != StreamingContextState.STOPPED) { + try { + it.stop(true, true) + } catch (_: Exception) { + } + } + sscCollection.remove(it) + } + + return "Spark streams cleaned up. Cause: $e" + } + + override fun Builder.onLoadedAlsoDo() { + renderThrowable { + cleanUp(it) + } + } - override fun KotlinKernelHost.afterCellExecution(snippetInstance: Any, result: FieldValue) = Unit + override fun KotlinKernelHost.onInterrupt() { + println( + cleanUp(InterruptedException("Kernel was interrupted.")) + ) + } } diff --git a/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt index 96d5d1fa..b4b750ee 100644 --- a/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt +++ b/jupyter/src/test/kotlin/org/jetbrains/kotlinx/spark/api/jupyter/JupyterTests.kt @@ -28,6 +28,7 @@ import io.kotest.matchers.string.shouldContain import io.kotest.matchers.types.shouldBeInstanceOf import jupyter.kotlin.DependsOn import org.apache.spark.api.java.JavaSparkContext +import org.apache.spark.streaming.api.java.JavaStreamingContext import org.intellij.lang.annotations.Language import org.jetbrains.kotlinx.jupyter.EvalRequestData import org.jetbrains.kotlinx.jupyter.ReplForJupyter @@ -263,6 +264,14 @@ class JupyterStreamingTests : ShouldSpec({ context("Jupyter") { withRepl { + // For when onInterrupt is implemented in the Jupyter kernel + should("Have sscCollection instance") { + + @Language("kts") + val sscCollection = exec("""sscCollection""") + sscCollection as? MutableSet shouldNotBe null + } + should("Not have spark instance") { shouldThrowAny { @Language("kts") diff --git a/pom.xml b/pom.xml index c102d72c..f4a488db 100644 --- a/pom.xml +++ b/pom.xml @@ -17,7 +17,7 @@ 1.1.0 1.3.1 5.2.3 - 0.11.0-83 + 0.11.0-95 1.6.21 0.7.5 3.2.1 From 0616b4a0b7410e152851f53c59a9bd7a70e91030 Mon Sep 17 00:00:00 2001 From: Jolanrensen Date: Mon, 23 May 2022 12:07:34 +0200 Subject: [PATCH 211/213] updated link to jupyter wiki --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 843cfbb9..a3a7b3b7 100644 --- a/README.md +++ b/README.md @@ -107,6 +107,8 @@ NOTE: You need `kotlin-jupyter-kernel` to be at least version 0.11.0.83 for the `%use spark` magic does not output "Spark session has been started...", and `%use spark-streaming` doesn't work at all, add `%useLatestDescriptors` above it. +For more information, check the [wiki](https://github.com/JetBrains/kotlin-spark-api/wiki/Jupyter). + ## Kotlin for Apache Spark features ### Creating a SparkSession in Kotlin From 041dff401a4cf1920812e6ca50b3c641ac5c7b75 Mon Sep 17 00:00:00 2001 From: Pasha Finkelshteyn Date: Tue, 24 May 2022 14:08:41 +0300 Subject: [PATCH 212/213] Removes allure --- jupyter/pom.xml | 6 ------ kotlin-spark-api/3.0/pom_2.12.xml | 14 -------------- .../jetbrains/kotlinx/spark/api/ProjectConfig.kt | 2 -- pom.xml | 7 ------- scala-tuples-in-kotlin/pom_2.12.xml | 6 ------ 5 files changed, 35 deletions(-) diff --git a/jupyter/pom.xml b/jupyter/pom.xml index d3bc3141..22971a65 100644 --- a/jupyter/pom.xml +++ b/jupyter/pom.xml @@ -79,12 +79,6 @@ ${kotest.version} test - - io.kotest.extensions - kotest-extensions-allure - ${kotest-extensions-allure.version} - test - org.jetbrains.kotlinx kotlin-jupyter-test-kit diff --git a/kotlin-spark-api/3.0/pom_2.12.xml b/kotlin-spark-api/3.0/pom_2.12.xml index c1d0d69e..79d00cb6 100644 --- a/kotlin-spark-api/3.0/pom_2.12.xml +++ b/kotlin-spark-api/3.0/pom_2.12.xml @@ -67,12 +67,6 @@ ${kotest.version} test - - io.kotest.extensions - kotest-extensions-allure - ${kotest-extensions-allure.version} - test - io.kotest.extensions kotest-extensions-testcontainers @@ -167,14 +161,6 @@
- - io.qameta.allure - allure-maven - - ${project.basedir}/allure-results/${scala.compat.version} - - - org.jacoco jacoco-maven-plugin diff --git a/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt index f450f65d..63b1251c 100644 --- a/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt +++ b/kotlin-spark-api/3.0/src/test/kotlin/org/jetbrains/kotlinx/spark/api/ProjectConfig.kt @@ -20,10 +20,8 @@ package org.jetbrains.kotlinx.spark.api import io.kotest.core.config.AbstractProjectConfig -import io.kotest.extensions.allure.AllureTestReporter @Suppress("unused") object ProjectConfig : AbstractProjectConfig() { - override fun extensions() = super.extensions() + AllureTestReporter(true) } diff --git a/pom.xml b/pom.xml index 44ffc09d..ea50ea76 100644 --- a/pom.xml +++ b/pom.xml @@ -14,7 +14,6 @@ 1.6.10 3.1.0 3.3.1 - 1.1.0 1.3.1 5.2.3 0.11.0-95 @@ -23,7 +22,6 @@ 3.0.3 - 2.10.0 0.8.7 5.5 official @@ -73,11 +71,6 @@ nexus-staging-maven-plugin ${nexus-staging-plugin.version} - - io.qameta.allure - allure-maven - ${allure-maven.version} - org.jacoco jacoco-maven-plugin diff --git a/scala-tuples-in-kotlin/pom_2.12.xml b/scala-tuples-in-kotlin/pom_2.12.xml index 95d0fde7..f45f09d1 100644 --- a/scala-tuples-in-kotlin/pom_2.12.xml +++ b/scala-tuples-in-kotlin/pom_2.12.xml @@ -31,12 +31,6 @@ ${kotest.version} test
- - io.kotest.extensions - kotest-extensions-allure - ${kotest-extensions-allure.version} - test - com.beust klaxon From 3c5bcc54d1d1471a5933dd1ab5362e11136bf976 Mon Sep 17 00:00:00 2001 From: Pasha Finkelshteyn Date: Tue, 24 May 2022 14:40:17 +0300 Subject: [PATCH 213/213] [maven-release-plugin] prepare release 3.0-1.1.0 --- core/3.0/pom_2.12.xml | 5 ++--- dummy/pom.xml | 2 +- examples/pom-3.0_2.12.xml | 5 ++--- jupyter/pom.xml | 6 ++---- kotlin-spark-api/3.0/pom_2.12.xml | 5 ++--- pom.xml | 4 ++-- pom_2.12.xml | 2 +- scala-tuples-in-kotlin/pom_2.12.xml | 2 +- 8 files changed, 13 insertions(+), 18 deletions(-) diff --git a/core/3.0/pom_2.12.xml b/core/3.0/pom_2.12.xml index bbb7e94f..5038f4e1 100644 --- a/core/3.0/pom_2.12.xml +++ b/core/3.0/pom_2.12.xml @@ -1,6 +1,5 @@ - + 4.0.0 Kotlin Spark API: Scala core for Spark 3.0+ (Scala 2.12) @@ -9,7 +8,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent-3.0_2.12 - 1.0.4-SNAPSHOT + 1.1.0 ../../pom_2.12.xml diff --git a/dummy/pom.xml b/dummy/pom.xml index 49b50e49..0b92e47d 100644 --- a/dummy/pom.xml +++ b/dummy/pom.xml @@ -3,7 +3,7 @@ kotlin-spark-api-parent-3.0 org.jetbrains.kotlinx.spark - 1.0.4-SNAPSHOT + 1.1.0 pom 4.0.0 diff --git a/examples/pom-3.0_2.12.xml b/examples/pom-3.0_2.12.xml index 34b47e36..92ae4dcd 100644 --- a/examples/pom-3.0_2.12.xml +++ b/examples/pom-3.0_2.12.xml @@ -1,6 +1,5 @@ - + 4.0.0 @@ -10,7 +9,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent-3.0_2.12 - 1.0.4-SNAPSHOT + 1.1.0 ../pom_2.12.xml diff --git a/jupyter/pom.xml b/jupyter/pom.xml index 22971a65..a8e6a107 100644 --- a/jupyter/pom.xml +++ b/jupyter/pom.xml @@ -1,7 +1,5 @@ - + 4.0.0 @@ -11,7 +9,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent-3.0_2.12 - 1.0.4-SNAPSHOT + 1.1.0 ../pom_2.12.xml jar diff --git a/kotlin-spark-api/3.0/pom_2.12.xml b/kotlin-spark-api/3.0/pom_2.12.xml index 79d00cb6..2b24557f 100644 --- a/kotlin-spark-api/3.0/pom_2.12.xml +++ b/kotlin-spark-api/3.0/pom_2.12.xml @@ -1,6 +1,5 @@ - + 4.0.0 @@ -10,7 +9,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent-3.0_2.12 - 1.0.4-SNAPSHOT + 1.1.0 ../../pom_2.12.xml jar diff --git a/pom.xml b/pom.xml index ea50ea76..19b3632e 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ Parent project for Kotlin for Apache Spark org.jetbrains.kotlinx.spark kotlin-spark-api-parent-3.0 - 1.0.4-SNAPSHOT + 1.1.0 pom @@ -267,7 +267,7 @@ scm:git:https://github.com/JetBrains/kotlin-spark-api.git https://github.com/JetBrains/kotlin-spark-api - HEAD + 3.0-1.1.0 diff --git a/pom_2.12.xml b/pom_2.12.xml index 8a745a68..55044bf0 100644 --- a/pom_2.12.xml +++ b/pom_2.12.xml @@ -8,7 +8,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent-3.0 - 1.0.4-SNAPSHOT + 1.1.0 pom.xml pom diff --git a/scala-tuples-in-kotlin/pom_2.12.xml b/scala-tuples-in-kotlin/pom_2.12.xml index f45f09d1..687de2ac 100644 --- a/scala-tuples-in-kotlin/pom_2.12.xml +++ b/scala-tuples-in-kotlin/pom_2.12.xml @@ -8,7 +8,7 @@ org.jetbrains.kotlinx.spark kotlin-spark-api-parent-3.0_2.12 - 1.0.4-SNAPSHOT + 1.1.0 ../pom_2.12.xml