Skip to content

Commit

Permalink
fixing tests
Browse files Browse the repository at this point in the history
  • Loading branch information
Jolanrensen committed Mar 22, 2024
1 parent b7c1711 commit 7069a9a
Show file tree
Hide file tree
Showing 9 changed files with 493 additions and 20 deletions.
2 changes: 1 addition & 1 deletion buildSrc/src/main/kotlin/Versions.kt
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ object Versions : Dsl<Versions> {
const val project = "2.0.0-SNAPSHOT"
const val kotlinSparkApiGradlePlugin = "2.0.0-SNAPSHOT"
const val groupID = "org.jetbrains.kotlinx.spark"
// const val kotlin = "2.0.0-Beta5" todo issues with NonSerializable lambdas
// const val kotlin = "2.0.0-Beta5" // todo issues with NonSerializable lambdas
const val kotlin = "1.9.23"
const val jvmTarget = "8"
const val jupyterJvmTarget = "8"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import org.jetbrains.kotlin.ir.declarations.IrDeclaration
import org.jetbrains.kotlin.ir.declarations.IrFile
import org.jetbrains.kotlin.ir.declarations.IrModuleFragment
import org.jetbrains.kotlin.ir.declarations.IrProperty
import org.jetbrains.kotlin.ir.expressions.IrBlockBody
import org.jetbrains.kotlin.ir.expressions.IrConst
import org.jetbrains.kotlin.ir.expressions.impl.IrConstImpl
import org.jetbrains.kotlin.ir.expressions.impl.IrConstructorCallImpl
Expand Down Expand Up @@ -40,9 +41,13 @@ class DataClassPropertyAnnotationGenerator(

override fun visitElement(element: IrElement) {
when (element) {
is IrDeclaration,
is IrFile,
is IrModuleFragment -> element.acceptChildrenVoid(this)
// is IrDeclaration,
// is IrFile,
// is IrBlockBody,
// is IrModuleFragment -> element.acceptChildrenVoid(this)

// test for now
else -> element.acceptChildrenVoid(this)
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,12 @@ public void testAllFilesPresentInBox() {
KtTestUtil.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("/mnt/data/Projects/kotlin-spark-api/compiler-plugin/src/test/resources/testData/box"), Pattern.compile("^(.+)\\.kt$"), null, TargetBackend.JVM_IR, true);
}

@Test
@TestMetadata("dataClassInFunctionTest.kt")
public void testDataClassInFunctionTest() {
    // Generated box test: compiles and runs the @Sparkify-in-function testData file.
    String testDataFile =
        "/mnt/data/Projects/kotlin-spark-api/compiler-plugin/src/test/resources/testData/box/dataClassInFunctionTest.kt";
    runTest(testDataFile);
}

@Test
@TestMetadata("dataClassTest.kt")
public void testDataClassTest() {
Expand Down

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
FILE: dataClassInFunctionTest.kt
package foo.bar

public final annotation class Sparkify : R|kotlin/Annotation| {
public constructor(): R|foo/bar/Sparkify| {
super<R|kotlin/Any|>()
}

}
public final annotation class ColumnName : R|kotlin/Annotation| {
public constructor(name: R|kotlin/String|): R|foo/bar/ColumnName| {
super<R|kotlin/Any|>()
}

public final val name: R|kotlin/String| = R|<local>/name|
public get(): R|kotlin/String|

}
public final fun box(): R|kotlin/String| {
@R|foo/bar/Sparkify|() local final data class User : R|kotlin/Any| {
public constructor(name: R|kotlin/String| = String(John Doe), age: R|kotlin/Int| = Int(25), @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = Double(1.0), test2: R|kotlin/Double| = Double(2.0)): R|<local>/User| {
super<R|kotlin/Any|>()
}

public final val name: R|kotlin/String| = R|<local>/name|
public get(): R|kotlin/String|

public final val age: R|kotlin/Int| = R|<local>/age|
public get(): R|kotlin/Int|

public final val test: R|kotlin/Double| = R|<local>/test|
public get(): R|kotlin/Double|

public final val test2: R|kotlin/Double| = R|<local>/test2|
@PROPERTY_GETTER:R|foo/bar/ColumnName|(name = String(b)) public get(): R|kotlin/Double|

public final operator fun component1(): R|kotlin/String|

public final operator fun component2(): R|kotlin/Int|

public final operator fun component3(): R|kotlin/Double|

public final operator fun component4(): R|kotlin/Double|

public final fun copy(name: R|kotlin/String| = this@R|<local>/User|.R|<local>/name|, age: R|kotlin/Int| = this@R|<local>/User|.R|<local>/age|, @R|foo/bar/ColumnName|(name = String(a)) test: R|kotlin/Double| = this@R|<local>/User|.R|<local>/test|, test2: R|kotlin/Double| = this@R|<local>/User|.R|<local>/test2|): R|<local>/User|

}

lval user: R|<local>/User| = R|<local>/User.User|()
lval name: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(name)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
lval age: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(age)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
lval a: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(a)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
lval b: R|kotlin/Any!| = <getClass>(Q|<local>/User|).R|kotlin/jvm/java|<R|<local>/User|>.R|SubstitutionOverride<java/lang/Class.getMethod: R|java/lang/reflect/Method!|>|(String(b)).R|java/lang/reflect/Method.invoke|(R|<local>/user|)
when () {
!=(R|<local>/name|, String(John Doe)) || !=(R|<local>/age|, Int(25)) || !=(R|<local>/a|, Double(1.0)) || !=(R|<local>/b|, Double(2.0)) -> {
^box String(Could not invoke functions name(), age(), a(), or b() from Java)
}
}

^box String(OK)
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
package foo.bar

// Test-local stand-in for the Sparkify marker annotation; the compiler plugin
// processes classes annotated with it (here: the local data class below).
annotation class Sparkify
// Test-local stand-in for ColumnName; `name` overrides the accessor/column name
// the plugin generates for the annotated property (see getMethod("a")/"b" below).
annotation class ColumnName(val name: String)

// Box-test entry point: returns "OK" on success, a diagnostic message on failure.
// NOTE(review): this testData only passes when compiled with the Sparkify plugin,
// which is expected to generate Java-style accessors named after each property
// (or its @ColumnName override) — plain Kotlin would expose getName()/getAge() etc.
fun box(): String {

    // Data class declared *inside* a function body — exercises the plugin on
    // local (non-top-level) classes, the case this commit's visitor change targets.
    @Sparkify
    data class User(
        val name: String = "John Doe",
        val age: Int = 25,
        @ColumnName("a") val test: Double = 1.0,
        @get:ColumnName("b") val test2: Double = 2.0,
    )

    val user = User()
    // Look up the plugin-generated accessors reflectively: property name for
    // unannotated properties, the @ColumnName value ("a", "b") for annotated ones.
    val name = User::class.java.getMethod("name").invoke(user)
    val age = User::class.java.getMethod("age").invoke(user)
    val a = User::class.java.getMethod("a").invoke(user)
    val b = User::class.java.getMethod("b").invoke(user)

    if (name != "John Doe" || age != 25 || a != 1.0 || b != 2.0) {
        return "Could not invoke functions name(), age(), a(), or b() from Java"
    }
    return "OK"
}
Original file line number Diff line number Diff line change
Expand Up @@ -358,11 +358,11 @@ fun <T1, T2> Dataset<Tuple2<T1, T2>>.sortByValue(): Dataset<Tuple2<T1, T2>> = so

/** Returns a dataset sorted by the first (`first`) value of each [Pair] inside. */
@JvmName("sortByPairKey")
fun <T1, T2> Dataset<Pair<T1, T2>>.sortByKey(): Dataset<Pair<T1, T2>> = sort("first")
fun <T1, T2> Dataset<Pair<T1, T2>>.sortByKey(): Dataset<Pair<T1, T2>> = sort("getFirst")

/** Returns a dataset sorted by the second (`second`) value of each [Pair] inside. */
@JvmName("sortByPairValue")
fun <T1, T2> Dataset<Pair<T1, T2>>.sortByValue(): Dataset<Pair<T1, T2>> = sort("second")
fun <T1, T2> Dataset<Pair<T1, T2>>.sortByValue(): Dataset<Pair<T1, T2>> = sort("getSecond")

/**
* This function creates block, where one can call any further computations on already cached dataset
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,8 @@ class DatasetFunctionTest : ShouldSpec({

val first = dsOf(Left(1, "a"), Left(2, "b"))
val second = dsOf(Right(1, 100), Right(3, 300))
first.show()
second.show()
val result = first
.leftJoin(second, first.col("id") eq second.col("id"))
.map { it._1.id X it._1.name X it._2?.value }
Expand Down Expand Up @@ -211,8 +213,7 @@ class DatasetFunctionTest : ShouldSpec({
s = key
s shouldBe key

if (collected.size > 1) collected.iterator()
else emptyList<Tuple2<Int, String>>().iterator()
if (collected.size > 1) collected else emptyList()
}

flatMappedWithState.count() shouldBe 2
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ class TypeInferenceTest : ShouldSpec({
@Sparkify data class Test2<T>(val vala2: T, val para2: Pair<T, String>)
@Sparkify data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)

val struct = Struct.fromJson(kotlinEncoderFor<Pair<String, Test<Int>>>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<Pair<String, Test<Int>>>().prettyJson())!!
should("contain correct typings") {
expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
hasField("first", "string"),
Expand Down Expand Up @@ -70,7 +70,7 @@ class TypeInferenceTest : ShouldSpec({
data class Test2<T>(val vala2: T, val para2: Pair<T, Single<Double>>)
@Sparkify data class Test<T>(val vala: T, val tripl1: Triple<T, Test2<Long>, T>)

val struct = Struct.fromJson(kotlinEncoderFor<Pair<String, Test<Int>>>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<Pair<String, Test<Int>>>().prettyJson())!!
should("contain correct typings") {
expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
hasField("first", "string"),
Expand Down Expand Up @@ -101,7 +101,7 @@ class TypeInferenceTest : ShouldSpec({
context("org.jetbrains.spark.api.org.jetbrains.spark.api.schema without generics") {
data class Test(val a: String, val b: Int, val c: Double)

val struct = Struct.fromJson(kotlinEncoderFor<Test>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<Test>().prettyJson())!!
should("return correct types too") {
expect(struct.fields).notToEqualNull().toContain.inAnyOrder.only.entries(
hasField("a", "string"),
Expand All @@ -111,7 +111,7 @@ class TypeInferenceTest : ShouldSpec({
}
}
context("type with list of ints") {
val struct = Struct.fromJson(kotlinEncoderFor<List<Int>>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<List<Int>>().prettyJson())!!
should("return correct types too") {
expect(struct) {
isOfType("array")
Expand All @@ -120,7 +120,7 @@ class TypeInferenceTest : ShouldSpec({
}
}
context("type with list of Pairs int to long") {
val struct = Struct.fromJson(kotlinEncoderFor<List<Pair<Int, Long>>>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<List<Pair<Int, Long>>>().prettyJson())!!
should("return correct types too") {
expect(struct) {
isOfType("array")
Expand All @@ -136,7 +136,7 @@ class TypeInferenceTest : ShouldSpec({
context("type with list of generic data class with E generic name") {
data class Test<E>(val e: E)

val struct = Struct.fromJson(kotlinEncoderFor<List<Test<String>>>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<List<Test<String>>>().prettyJson())!!
should("return correct types too") {
expect(struct) {
isOfType("array")
Expand All @@ -149,7 +149,7 @@ class TypeInferenceTest : ShouldSpec({
}
}
context("type with list of list of int") {
val struct = Struct.fromJson(kotlinEncoderFor<List<List<Int>>>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<List<List<Int>>>().prettyJson())!!
should("return correct types too") {
expect(struct) {
isOfType("array")
Expand All @@ -160,7 +160,7 @@ class TypeInferenceTest : ShouldSpec({
}
}
context("Subtypes of list") {
val struct = Struct.fromJson(kotlinEncoderFor<ArrayList<Int>>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<ArrayList<Int>>().prettyJson())!!
should("return correct types too") {
expect(struct) {
isOfType("array")
Expand All @@ -170,7 +170,7 @@ class TypeInferenceTest : ShouldSpec({
}
}
context("Subtypes of list with nullable values") {
val struct = Struct.fromJson(kotlinEncoderFor<ArrayList<Int?>>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<ArrayList<Int?>>().prettyJson())!!
should("return correct types too") {
expect(struct) {
isOfType("array")
Expand All @@ -182,7 +182,7 @@ class TypeInferenceTest : ShouldSpec({
context("data class with props in order lon → lat") {
data class Test(val lon: Double, val lat: Double)

val struct = Struct.fromJson(kotlinEncoderFor<Test>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<Test>().prettyJson())!!
should("Not change order of fields") {
expect(struct.fields).notToEqualNull().containsExactly(
hasField("lon", "double"),
Expand All @@ -193,7 +193,7 @@ class TypeInferenceTest : ShouldSpec({
context("data class with nullable list inside") {
data class Sample(val optionList: List<Int>?)

val struct = Struct.fromJson(kotlinEncoderFor<Sample>().schema().prettyJson())!!
val struct = Struct.fromJson(schemaFor<Sample>().prettyJson())!!

should("show that list is nullable and element is not") {
expect(struct)
Expand All @@ -215,7 +215,7 @@ class TypeInferenceTest : ShouldSpec({
}

should("generate valid serializer schema") {
expect(kotlinEncoderFor<Sample>().schema()) {
expect(schemaFor<Sample>() as org.apache.spark.sql.types.StructType) {
this
.feature("data type", { this.fields()?.toList() }) {
this.notToEqualNull().toContain.inOrder.only.entry {
Expand Down

0 comments on commit 7069a9a

Please sign in to comment.