diff --git a/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java b/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java
index d50d86a03f4..f0e220e5764 100644
--- a/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java
+++ b/testkit/src/main/java/org/apache/calcite/test/SqlOperatorTest.java
@@ -146,7 +146,6 @@ import static org.junit.jupiter.api.Assertions.fail;
 import static org.junit.jupiter.api.Assumptions.assumeTrue;

-import static java.lang.Double.parseDouble;
 import static java.nio.charset.StandardCharsets.UTF_8;

 /**
@@ -328,11 +327,11 @@ static void forEach(Consumer<Numeric> consumer) {
     }

     double maxNumericAsDouble() {
-      return parseDouble(maxNumericString);
+      return Double.parseDouble(maxNumericString);
     }

     double minNumericAsDouble() {
-      return parseDouble(minNumericString);
+      return Double.parseDouble(minNumericString);
     }
   }

@@ -432,15 +431,15 @@ protected SqlOperatorFixture fixture() {
     // now are legal as decimal literals.
     SqlOperatorFixture f = fixture();
     f.checkCastFails("9223372036854775808", "INTEGER",
-        OUT_OF_RANGE_MESSAGE, true, SqlOperatorFixture.CastType.CAST);
+        OUT_OF_RANGE_MESSAGE, true, CastType.CAST);
     f.checkCastFails("9223372036854775808.1", "INTEGER",
-        "Numeric literal.*out of range", false, SqlOperatorFixture.CastType.CAST);
+        "Numeric literal.*out of range", false, CastType.CAST);
     f.checkCastFails("223372036854775808", "INTEGER",
-        OUT_OF_RANGE_MESSAGE, true, SqlOperatorFixture.CastType.CAST);
+        OUT_OF_RANGE_MESSAGE, true, CastType.CAST);
     f.checkCastFails("9223372036854775808", "BIGINT",
-        "Overflow", true, SqlOperatorFixture.CastType.CAST);
+        "Overflow", true, CastType.CAST);
     f.checkCastFails("'" + Numeric.TINYINT.maxOverflowNumericString + "'",
-        "TINYINT", OUT_OF_RANGE_MESSAGE, true, SqlOperatorFixture.CastType.CAST);
+        "TINYINT", OUT_OF_RANGE_MESSAGE, true, CastType.CAST);
     String largePrecision = "1234567891011.0";
     String largeScale = "1.01234567891011";
     f.checkScalarExact(largePrecision, "DECIMAL(14, 1) NOT NULL", largePrecision);
@@ -7373,31 +7372,6 @@ void checkRegexpExtract(SqlOperatorFixture f0, FunctionAlias functionAlias) {
         "Cannot take logarithm of zero or negative number", true);
   }

-  /** Test case for
-   * [CALCITE-6549]
-   * Add LOG1P function (enabled in Spark library). */
-  @Test void testLog1PFunc() {
-    final SqlOperatorFixture f0 = fixture()
-        .setFor(SqlLibraryOperators.LOG1P, VmName.EXPAND);
-    f0.checkFails("^log1p(4)^",
-        "No match found for function signature LOG1P\\(\\)", false);
-    final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK);
-    f.checkScalarApprox("log1p(0)", "DOUBLE",
-        isWithin(0.0, 0.000001));
-    f.checkScalarApprox("log1p(1)", "DOUBLE",
-        isWithin(0.6931471805599453, 0.000001));
-    f.checkScalarApprox("log1p(1e+22)", "DOUBLE",
-        isWithin(50.65687204586901, 0.000001));
-    f.checkScalarApprox("log1p(1.2)", "DOUBLE",
-        isWithin(0.7884573603642702, 0.000001));
-    f.checkScalarApprox("log1p(2.0/3)", "DOUBLE",
-        isWithin(0.5108256237659907, 0.000001));
-    f.checkNull("log1p(cast(null as real))");
-    f.checkNull("log1p(-1)");
-    f.checkNull("log1p(null)");
-    f.checkFails("^log1p()^", INVALID_ARGUMENTS_NUMBER, false);
-  }
-
   @Test void testRandFunc() {
     final SqlOperatorFixture f = fixture();
     f.setFor(SqlStdOperatorTable.RAND, VmName.EXPAND);
@@ -7588,33 +7562,35 @@ void checkRegexpExtract(SqlOperatorFixture f0, FunctionAlias functionAlias) {
         "No match found for function signature "
             + "ARRAY_CONTAINS\\(, \\)", false);
-    final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK);
-    f.checkScalar("array_contains(array[1, 2], 1)", true,
-        "BOOLEAN NOT NULL");
-    f.checkScalar("array_contains(array[1], 1)", true,
-        "BOOLEAN NOT NULL");
-    f.checkScalar("array_contains(array(), 1)", false,
-        "BOOLEAN NOT NULL");
-    f.checkScalar("array_contains(array[array[1, 2], array[3, 4]], array[1, 2])", true,
-        "BOOLEAN NOT NULL");
-    f.checkScalar("array_contains(array[map[1, 'a'], map[2, 'b']], map[1, 'a'])", true,
-        "BOOLEAN NOT NULL");
-    f.checkNull("array_contains(cast(null as integer array), 1)");
-    f.checkType("array_contains(cast(null as integer array), 1)", "BOOLEAN");
+    final Consumer<SqlOperatorFixture> consumer = f -> {
+      f.checkScalar("array_contains(array[1, 2], 1)", true,
+          "BOOLEAN NOT NULL");
+      f.checkScalar("array_contains(array[1], 1)", true,
+          "BOOLEAN NOT NULL");
+      f.checkScalar("array_contains(array(), 1)", false,
+          "BOOLEAN NOT NULL");
+      f.checkScalar("array_contains(array[array[1, 2], array[3, 4]], array[1, 2])", true,
+          "BOOLEAN NOT NULL");
+      f.checkScalar("array_contains(array[map[1, 'a'], map[2, 'b']], map[1, 'a'])", true,
+          "BOOLEAN NOT NULL");
+      f.checkNull("array_contains(cast(null as integer array), 1)");
+      f.checkType("array_contains(cast(null as integer array), 1)", "BOOLEAN");
     // Flink and Spark differ on the following. The expression
     //   array_contains(array[1, null], cast(null as integer))
     // returns TRUE in Flink, and returns UNKNOWN in Spark. The current
     // function has Spark behavior, but if we supported a Flink function
     // library (i.e. "fun=flink") we could add a function with Flink behavior.
- f.checkNull("array_contains(array[1, null], cast(null as integer))"); - f.checkType("array_contains(array[1, null], cast(null as integer))", "BOOLEAN"); - f.checkFails("^array_contains(array[1, 2], true)^", - "INTEGER is not comparable to BOOLEAN", false); + f.checkNull("array_contains(array[1, null], cast(null as integer))"); + f.checkType("array_contains(array[1, null], cast(null as integer))", "BOOLEAN"); + f.checkFails("^array_contains(array[1, 2], true)^", + "INTEGER is not comparable to BOOLEAN", false); // check null without cast - f.checkNull("array_contains(array[1, 2], null)"); - f.checkFails("array_contains(^null^, array[1, 2])", "Illegal use of 'NULL'", false); - f.checkFails("array_contains(^null^, null)", "Illegal use of 'NULL'", false); + f.checkNull("array_contains(array[1, 2], null)"); + f.checkFails("array_contains(^null^, array[1, 2])", "Illegal use of 'NULL'", false); + f.checkFails("array_contains(^null^, null)", "Illegal use of 'NULL'", false); + }; + f0.forEachLibrary(list(SqlLibrary.SPARK, SqlLibrary.HIVE), consumer); } /** Tests {@code ARRAY_DISTINCT} function from Spark. */ @@ -8295,38 +8271,40 @@ void checkArrayReverseFunc(SqlOperatorFixture f0, SqlFunction function, f0.checkFails("^sort_array(array[null, 1, null, 2], true)^", "No match found for function signature SORT_ARRAY\\(, \\)", false); - final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK); - f.checkScalar("sort_array(array[2, null, 1])", "[null, 1, 2]", - "INTEGER ARRAY NOT NULL"); - f.checkScalar("sort_array(array(2, null, 1), false)", "[2, 1, null]", - "INTEGER ARRAY NOT NULL"); - f.checkScalar("sort_array(array[true, false, null])", "[null, false, true]", - "BOOLEAN ARRAY NOT NULL"); - f.checkScalar("sort_array(array[true, false, null], false)", "[true, false, null]", - "BOOLEAN ARRAY NOT NULL"); - f.checkScalar("sort_array(array[null])", "[null]", - "NULL ARRAY NOT NULL"); - f.checkScalar("sort_array(array())", "[]", - "UNKNOWN NOT NULL ARRAY NOT NULL"); - f.checkNull("sort_array(null)"); + final Consumer consumer = f -> { + f.checkScalar("sort_array(array[2, null, 1])", "[null, 1, 2]", + "INTEGER ARRAY NOT NULL"); + f.checkScalar("sort_array(array(2, null, 1), false)", "[2, 1, null]", + "INTEGER ARRAY NOT NULL"); + f.checkScalar("sort_array(array[true, false, null])", "[null, false, true]", + "BOOLEAN ARRAY NOT NULL"); + f.checkScalar("sort_array(array[true, false, null], false)", "[true, false, null]", + "BOOLEAN ARRAY NOT NULL"); + f.checkScalar("sort_array(array[null])", "[null]", + "NULL ARRAY NOT NULL"); + f.checkScalar("sort_array(array())", "[]", + "UNKNOWN NOT NULL ARRAY NOT NULL"); + f.checkNull("sort_array(null)"); // elements cast - f.checkScalar("sort_array(array[cast(1 as tinyint), 2])", "[1, 2]", - "INTEGER NOT NULL ARRAY NOT NULL"); - f.checkScalar("sort_array(array[null, 1, cast(2 as tinyint)])", "[null, 1, 2]", - "INTEGER ARRAY NOT NULL"); - f.checkScalar("sort_array(array[cast(1 as bigint), 2])", "[1, 2]", - "BIGINT NOT NULL ARRAY NOT NULL"); - f.checkScalar("sort_array(array[cast(1 as decimal), 2])", "[1, 2]", - "DECIMAL(19, 0) NOT NULL ARRAY NOT NULL"); + f.checkScalar("sort_array(array[cast(1 as tinyint), 2])", "[1, 2]", + "INTEGER NOT NULL ARRAY NOT NULL"); + f.checkScalar("sort_array(array[null, 1, cast(2 as tinyint)])", "[null, 1, 2]", + "INTEGER ARRAY NOT NULL"); + f.checkScalar("sort_array(array[cast(1 as bigint), 2])", "[1, 2]", + "BIGINT NOT NULL ARRAY NOT NULL"); + f.checkScalar("sort_array(array[cast(1 as decimal), 2])", "[1, 2]", + "DECIMAL(19, 0) NOT NULL 
ARRAY NOT NULL"); - f.checkFails("^sort_array(array[2, null, 1], cast(1 as boolean))^", - "Argument to function 'SORT_ARRAY' must be a literal", false); - f.checkFails("^sort_array(array[2, null, 1], 1)^", - "Cannot apply 'SORT_ARRAY' to arguments of type " + f.checkFails("^sort_array(array[2, null, 1], cast(1 as boolean))^", + "Argument to function 'SORT_ARRAY' must be a literal", false); + f.checkFails("^sort_array(array[2, null, 1], 1)^", + "Cannot apply 'SORT_ARRAY' to arguments of type " + "'SORT_ARRAY\\(, \\)'\\." + " Supported form\\(s\\): 'SORT_ARRAY\\(\\)'\n" + "'SORT_ARRAY\\(, \\)'", false); + }; + f0.forEachLibrary(list(SqlLibrary.SPARK, SqlLibrary.HIVE), consumer); } /** Test case for @@ -8753,49 +8731,6 @@ void checkArrayReverseFunc(SqlOperatorFixture f0, SqlFunction function, f0.forEachLibrary(list(SqlLibrary.SPARK, SqlLibrary.HIVE), consumer); } - /** Test case for - * [CALCITE-5807] - * Add SUBSTRING_INDEX function (enabled in Spark library).. - */ - @Test void testSubstringIndexFunc() { - final SqlOperatorFixture f0 = fixture(); - f0.setFor(SqlLibraryOperators.SUBSTRING_INDEX); - f0.checkFails("^substring_index('a', ',')^", - "No match found for function signature SUBSTRING_INDEX\\(" - + ", \\)", false); - - final SqlOperatorFixture f = f0.withLibrary(SqlLibrary.SPARK); - f.checkString("substring_index('www.apache.org', '.', 2)", - "www.apache", "VARCHAR(14) NOT NULL"); - f.checkString("substring_index('www.apache.org', '.', 1)", - "www", "VARCHAR(14) NOT NULL"); - f.checkString("substring_index('www.apache.org', '.', 3)", - "www.apache.org", "VARCHAR(14) NOT NULL"); - f.checkString("substring_index('www.apache.org', '.', -1)", - "org", "VARCHAR(14) NOT NULL"); - - f.checkString("substring_index('aBc', 'B', -1)", - "c", "VARCHAR(3) NOT NULL"); - f.checkString("substring_index('aBc', 'b', -1)", - "aBc", "VARCHAR(3) NOT NULL"); - - f.checkString("substring_index('aBc', 'B', 0)", - "", "VARCHAR(3) NOT NULL"); - f.checkString("substring_index('aBc', 'b', 0)", - "", "VARCHAR(3) NOT NULL"); - - f.checkNull("substring_index(cast(null as varchar(1))," - + " cast(null as varchar(1)), cast(null as integer))"); - f.checkNull("substring_index(cast(null as varchar(1))," - + " cast(null as varchar(1)), 2)"); - f.checkNull("substring_index('abc', cast(null as varchar(1))," - + " cast(null as integer))"); - f.checkNull("substring_index(cast(null as varchar(1)), '.'," - + " cast(null as integer))"); - f.checkNull("substring_index('abc', '.', cast(null as integer))"); - f.checkNull("substring_index('abc', cast(null as varchar(1)), 2)"); - } - /** Tests {@code UNIX_SECONDS} and other datetime functions from BigQuery. 
   @Test void testUnixSecondsFunc() {
     SqlOperatorFixture f = fixture()
@@ -11379,7 +11314,8 @@ void assertSubFunReturns(boolean binary, String s, int start,
       f.checkScalar("greatest(CAST(NULL AS INTEGER), CAST(NULL AS INTEGER))",
           isNullValue(), "INTEGER");
     };
-    final List<SqlLibrary> libraries = list(SqlLibrary.POSTGRESQL, SqlLibrary.REDSHIFT);
+    final List<SqlLibrary> libraries =
+        list(SqlLibrary.POSTGRESQL, SqlLibrary.REDSHIFT, SqlLibrary.HIVE);
     f0.forEachLibrary(libraries, consumer);
   }

@@ -11425,7 +11361,8 @@ void assertSubFunReturns(boolean binary, String s, int start,
       f.checkScalar("least(CAST(NULL AS INTEGER), CAST(NULL AS INTEGER))",
           isNullValue(), "INTEGER");
     };
-    final List<SqlLibrary> libraries = list(SqlLibrary.POSTGRESQL, SqlLibrary.REDSHIFT);
+    final List<SqlLibrary> libraries =
+        list(SqlLibrary.POSTGRESQL, SqlLibrary.REDSHIFT, SqlLibrary.HIVE);
     f0.forEachLibrary(libraries, consumer);
   }

@@ -12525,7 +12462,7 @@ private static void checkArrayConcatAggFuncFails(SqlOperatorFixture t) {
   @Test void testArrayQueryConstructor() {
     final SqlOperatorFixture f = fixture();
-    f.setFor(SqlStdOperatorTable.ARRAY_QUERY, SqlOperatorFixture.VmName.EXPAND);
+    f.setFor(SqlStdOperatorTable.ARRAY_QUERY, VmName.EXPAND);

     // Test case for [CALCITE-4999] ARRAY, MULTISET functions should
     // return a collection of scalars if a sub-query returns 1 column
@@ -12565,7 +12502,7 @@ private static void checkArrayConcatAggFuncFails(SqlOperatorFixture t) {

     // Test case for [CALCITE-4999] ARRAY, MULTISET functions should
     // return an collection of scalars if a sub-query returns 1 column
-    f.setFor(SqlStdOperatorTable.MULTISET_QUERY, SqlOperatorFixture.VmName.EXPAND);
+    f.setFor(SqlStdOperatorTable.MULTISET_QUERY, VmName.EXPAND);
     f.checkScalar("multiset(select 1)", "[1]", "INTEGER NOT NULL MULTISET NOT NULL");
     f.check("select multiset(select ROW(1,2))",
         "RecordType(INTEGER NOT NULL EXPR$0, INTEGER NOT NULL EXPR$1) NOT NULL MULTISET NOT NULL",
@@ -14625,7 +14562,7 @@ void testTimestampDiff(boolean coercionEnabled) {
   @Test void testPercentileContBigQueryFunc() {
     final SqlOperatorFixture f = fixture()
-        .setFor(SqlLibraryOperators.PERCENTILE_CONT2, SqlOperatorFixture.VmName.EXPAND)
+        .setFor(SqlLibraryOperators.PERCENTILE_CONT2, VmName.EXPAND)
         .withLibrary(SqlLibrary.BIG_QUERY);
     f.checkType("percentile_cont(1, .5)", "DOUBLE NOT NULL");
@@ -14639,7 +14576,7 @@ void testTimestampDiff(boolean coercionEnabled) {
   @Test void testPercentileDiscBigQueryFunc() {
     final SqlOperatorFixture f = fixture()
-        .setFor(SqlLibraryOperators.PERCENTILE_DISC2, SqlOperatorFixture.VmName.EXPAND)
+        .setFor(SqlLibraryOperators.PERCENTILE_DISC2, VmName.EXPAND)
         .withLibrary(SqlLibrary.BIG_QUERY);
     f.checkType("percentile_disc(1, .5)", "INTEGER NOT NULL");
@@ -15182,7 +15119,7 @@ void testTimestampDiff(boolean coercionEnabled) {
    * ANY/SOME, ALL operators should support collection expressions. */
   @Test void testQuantifyCollectionOperators() {
     final SqlOperatorFixture f = fixture();
-    QUANTIFY_OPERATORS.forEach(operator -> f.setFor(operator, SqlOperatorFixture.VmName.EXPAND));
+    QUANTIFY_OPERATORS.forEach(operator -> f.setFor(operator, VmName.EXPAND));
     Function2 checkBoolean = (sql, result) -> {
       f.checkBoolean(sql.replace("COLLECTION", "ARRAY"), result);
@@ -15511,132 +15448,6 @@ private static void checkLogicalOrFunc(SqlOperatorFixture f) {
     f.checkAgg("logical_or(x)", values4, isNullValue());
   }

-  @Test void testBitAndScalarFunc() {
-    final SqlOperatorFixture f = fixture();
-    f.setFor(SqlStdOperatorTable.BITAND, VmName.EXPAND);
-    f.checkFails("bitand(^*^)", "Unknown identifier '\\*'", false);
-    f.checkScalar("bitand(2, 3)", "2", "INTEGER NOT NULL");
-    f.checkScalar("bitand(CAST(2 AS INTEGER), CAST(3 AS BIGINT))", "2", "BIGINT NOT NULL");
-    f.checkScalar("bitand(-5, 7)", "3", "INTEGER NOT NULL");
-    f.checkScalar("bitand(-5, -31)", "-31", "INTEGER NOT NULL");
-    f.checkScalar("bitand(CAST(-5 AS TINYINT), CAST(7 AS TINYINT))", "3", "TINYINT NOT NULL");
-    f.checkScalar("bitand(CAST(-5 AS TINYINT), CAST(-31 AS TINYINT))", "-31", "TINYINT NOT NULL");
-    f.checkType("bitand(CAST(2 AS TINYINT), CAST(6 AS TINYINT))", "TINYINT NOT NULL");
-    f.checkType("bitand(CAST(2 AS SMALLINT), CAST(6 AS SMALLINT))", "SMALLINT NOT NULL");
-    f.checkType("bitand(CAST(2 AS BIGINT), CAST(6 AS BIGINT))", "BIGINT NOT NULL");
-    f.checkScalar("bitand(CAST(x'0201' AS BINARY(2)), CAST(x'07f9' AS BINARY(2)))", "0201",
-        "BINARY(2) NOT NULL");
-    f.checkScalar("bitand(CAST(x'0201' AS VARBINARY(2)), CAST(x'07f9' AS VARBINARY(2)))", "0201",
-        "VARBINARY(2) NOT NULL");
-    f.checkFails("^bitand(1.2, 1.3)^",
-        "Cannot apply 'BITAND' to arguments of type '"
-            + "BITAND\\(, \\)'\\. Supported form\\(s\\): '"
-            + "BITAND\\(, \\)'\n"
-            + "'BITAND\\(, \\)'",
-        false);
-    f.checkFails("^bitand()^",
-        "Invalid number of arguments to function 'BITAND'. Was expecting 2 arguments",
-        false);
-    f.checkFails("^bitand(1)^",
-        "Invalid number of arguments to function 'BITAND'. Was expecting 2 arguments",
-        false);
-    f.checkFails("^bitand(1, 2, 3)^",
-        "Invalid number of arguments to function 'BITAND'. Was expecting 2 arguments",
Was expecting 2 arguments", - false); - f.checkNull("bitand(NULL, 1)"); - f.checkNull("bitand(1, NULL)"); - f.checkFails("^bitand(NULL, NULL)^", - "At least one argument to function 'BITAND' must not be NULL", - false); - f.checkFails("bitand(CAST(x'0201' AS VARBINARY), CAST(x'02' AS VARBINARY))", - "Different length for bitwise operands: the first: 2, the second: 1", - true); - } - - @Test void testBitOrScalarFunc() { - final SqlOperatorFixture f = fixture(); - f.setFor(SqlStdOperatorTable.BITOR, VmName.EXPAND); - f.checkFails("bitor(^*^)", "Unknown identifier '\\*'", false); - f.checkScalar("bitor(2, 4)", "6", "INTEGER NOT NULL"); - f.checkScalar("bitor(CAST(2 AS INTEGER), CAST(4 AS BIGINT))", "6", "BIGINT NOT NULL"); - f.checkScalar("bitor(-5, 7)", "-1", "INTEGER NOT NULL"); - f.checkScalar("bitor(-5, -31)", "-5", "INTEGER NOT NULL"); - f.checkScalar("bitor(CAST(-5 AS TINYINT), CAST(7 AS TINYINT))", "-1", "TINYINT NOT NULL"); - f.checkScalar("bitor(CAST(-5 AS TINYINT), CAST(-31 AS TINYINT))", "-5", "TINYINT NOT NULL"); - f.checkType("bitor(CAST(2 AS TINYINT), CAST(6 AS TINYINT))", "TINYINT NOT NULL"); - f.checkType("bitor(CAST(2 AS SMALLINT), CAST(6 AS SMALLINT))", "SMALLINT NOT NULL"); - f.checkType("bitor(CAST(2 AS BIGINT), CAST(6 AS BIGINT))", "BIGINT NOT NULL"); - f.checkScalar("bitor(CAST(x'0201' AS BINARY(2)), CAST(x'07f9' AS BINARY(2)))", "07f9", - "BINARY(2) NOT NULL"); - f.checkScalar("bitor(CAST(x'0201' AS VARBINARY(2)), CAST(x'07f9' AS VARBINARY(2)))", "07f9", - "VARBINARY(2) NOT NULL"); - f.checkFails("^bitor(1.2, 1.3)^", - "Cannot apply 'BITOR' to arguments of type '" - + "BITOR\\(, \\)'\\. Supported form\\(s\\): '" - + "BITOR\\(, \\)'\n" - + "'BITOR\\(, \\)'", - false); - f.checkFails("^bitor()^", - "Invalid number of arguments to function 'BITOR'. Was expecting 2 arguments", - false); - f.checkFails("^bitor(1)^", - "Invalid number of arguments to function 'BITOR'. Was expecting 2 arguments", - false); - f.checkFails("^bitor(1, 2, 3)^", - "Invalid number of arguments to function 'BITOR'. 
Was expecting 2 arguments", - false); - f.checkNull("bitor(NULL, 1)"); - f.checkNull("bitor(1, NULL)"); - f.checkFails("^bitor(NULL, NULL)^", - "At least one argument to function 'BITOR' must not be NULL", - false); - f.checkFails("bitor(CAST(x'0201' AS VARBINARY), CAST(x'02' AS VARBINARY))", - "Different length for bitwise operands: the first: 2, the second: 1", - true); - } - - @Test void testBitXorScalarFunc() { - final SqlOperatorFixture f = fixture(); - f.setFor(SqlStdOperatorTable.BITXOR, VmName.EXPAND); - f.checkFails("bitxor(^*^)", "Unknown identifier '\\*'", false); - f.checkScalar("bitxor(2, 3)", "1", "INTEGER NOT NULL"); - f.checkScalar("bitxor(CAST(2 AS INTEGER), CAST(3 AS BIGINT))", "1", "BIGINT NOT NULL"); - f.checkScalar("bitxor(-5, 7)", "-4", "INTEGER NOT NULL"); - f.checkScalar("bitxor(-5, -31)", "26", "INTEGER NOT NULL"); - f.checkScalar("bitxor(CAST(-5 AS TINYINT), CAST(7 AS TINYINT))", "-4", "TINYINT NOT NULL"); - f.checkScalar("bitxor(CAST(-5 AS TINYINT), CAST(-31 AS TINYINT))", "26", "TINYINT NOT NULL"); - f.checkType("bitxor(CAST(2 AS TINYINT), CAST(6 AS TINYINT))", "TINYINT NOT NULL"); - f.checkType("bitxor(CAST(2 AS SMALLINT), CAST(6 AS SMALLINT))", "SMALLINT NOT NULL"); - f.checkType("bitxor(CAST(2 AS BIGINT), CAST(6 AS BIGINT))", "BIGINT NOT NULL"); - f.checkScalar("bitxor(CAST(x'0201' AS BINARY(2)), CAST(x'07f9' AS BINARY(2)))", "05f8", - "BINARY(2) NOT NULL"); - f.checkScalar("bitxor(CAST(x'0201' AS VARBINARY(2)), CAST(x'07f9' AS VARBINARY(2)))", "05f8", - "VARBINARY(2) NOT NULL"); - f.checkFails("^bitxor(1.2, 1.3)^", - "Cannot apply 'BITXOR' to arguments of type '" - + "BITXOR\\(, \\)'\\. Supported form\\(s\\): '" - + "BITXOR\\(, \\)'\n" - + "'BITXOR\\(, \\)'", - false); - f.checkFails("^bitxor()^", - "Invalid number of arguments to function 'BITXOR'. Was expecting 2 arguments", - false); - f.checkFails("^bitxor(1)^", - "Invalid number of arguments to function 'BITXOR'. Was expecting 2 arguments", - false); - f.checkFails("^bitxor(1, 2, 3)^", - "Invalid number of arguments to function 'BITXOR'. 
Was expecting 2 arguments", - false); - f.checkNull("bitxor(NULL, 1)"); - f.checkNull("bitxor(1, NULL)"); - f.checkFails("^bitxor(NULL, NULL)^", - "At least one argument to function 'BITXOR' must not be NULL", - false); - f.checkFails("bitxor(CAST(x'0201' AS VARBINARY), CAST(x'02' AS VARBINARY))", - "Different length for bitwise operands: the first: 2, the second: 1", - true); - } - @Test void testBitAndAggFunc() { final SqlOperatorFixture f = fixture(); f.setFor(SqlLibraryOperators.BITAND_AGG, VmName.EXPAND); @@ -15690,71 +15501,6 @@ void checkBitAnd(SqlOperatorFixture f0, FunctionAlias functionAlias) { f0.forEachLibrary(list(functionAlias.libraries), consumer); } - @Test void testBitCountFunc() { - checkBitCount(SqlStdOperatorTable.BITCOUNT, null, false); - } - - @Test void testBitCountBigQueryFunc() { - checkBitCount(SqlLibraryOperators.BIT_COUNT_BIG_QUERY, - list(SqlLibrary.BIG_QUERY, SqlLibrary.SPARK), false); - } - - @Test void testBitCountMySQLFunc() { - checkBitCount(SqlLibraryOperators.BIT_COUNT_MYSQL, list(SqlLibrary.MYSQL), true); - } - - void checkBitCount(SqlFunction function, @Nullable List libraries, - boolean testDecimal) { - final SqlOperatorFixture f0 = fixture(); - f0.setFor(function, VmName.EXPAND); - final String functionName = function.getName(); - final Consumer consumer = f -> { - f.checkFails(functionName + "(^*^)", "Unknown identifier '\\*'", false); - f.checkType(functionName + "(1)", "BIGINT NOT NULL"); - f.checkType(functionName + "(CAST(2 AS TINYINT))", "BIGINT NOT NULL"); - f.checkType(functionName + "(CAST(2 AS SMALLINT))", "BIGINT NOT NULL"); - f.checkFails( - "^" + functionName + "()^", - "Invalid number of arguments to function '" + functionName - + "'. Was expecting 1 arguments", - false); - f.checkFails( - "^" + functionName + "(1, 2)^", - "Invalid number of arguments to function '" + functionName - + "'. Was expecting 1 arguments", - false); - f.checkScalar(functionName + "(8)", "1", "BIGINT NOT NULL"); - f.checkScalar(functionName + "(CAST(x'ad' AS BINARY(1)))", "5", "BIGINT NOT NULL"); - f.checkScalar(functionName + "(CAST(x'ad' AS VARBINARY(1)))", "5", "BIGINT NOT NULL"); - f.checkScalar(functionName + "(-1)", "64", "BIGINT NOT NULL"); - f.checkNull(functionName + "(cast(NULL as TINYINT))"); - f.checkNull(functionName + "(cast(NULL as BINARY))"); - f.checkNull(functionName + "(NULL)"); - if (testDecimal) { - f.checkType(functionName + "(CAST(2 AS DOUBLE))", "BIGINT NOT NULL"); - // Verify that only bits in the integer portion of a decimal value are counted - f.checkScalar(functionName + "(5.23)", "2", "BIGINT NOT NULL"); - f.checkScalar(functionName + "(CAST('-9223372036854775808' AS DECIMAL(19, 0)))", "1", - "BIGINT NOT NULL"); - f.checkScalar(functionName + "(CAST('-9223372036854775809' AS DECIMAL(19, 0)))", "1", - "BIGINT NOT NULL"); - } else { - f.checkType(functionName + "(CAST(x'ad' AS BINARY(1)))", "BIGINT NOT NULL"); - f.checkFails("^" + functionName + "(1.2)^", - "Cannot apply '" + functionName + "' to arguments of type '" + functionName - + "\\(\\)'\\. 
Supported form\\(s\\): '" + functionName - + "\\(\\)'\n" - + "'" + functionName + "\\(\\)'", - false); - } - }; - if (libraries == null) { - consumer.accept(f0); - } else { - f0.forEachLibrary(libraries, consumer); - } - } - @Test void testBitOrAggFunc() { final SqlOperatorFixture f = fixture(); f.setFor(SqlLibraryOperators.BITOR_AGG, VmName.EXPAND); @@ -16181,7 +15927,7 @@ private static class ValueOrExceptionResultChecker this.patterns = patterns; } - @Override public void checkResult(String sql, ResultSet result) { + @Override public void checkResult(String sql, ResultSet result) throws Exception { Throwable thrown = null; try { if (!result.next()) { @@ -16206,7 +15952,7 @@ private static class ValueOrExceptionResultChecker } /** - * Implementation of {@link org.apache.calcite.sql.test.SqlTester} based on a + * Implementation of {@link SqlTester} based on a * JDBC connection. */ protected static class TesterImpl extends SqlRuntimeTester { @@ -16228,9 +15974,9 @@ public TesterImpl() { } @Override public void check(SqlTestFactory factory, String query, - SqlTester.TypeChecker typeChecker, - SqlTester.ParameterChecker parameterChecker, - SqlTester.ResultChecker resultChecker) { + TypeChecker typeChecker, + ParameterChecker parameterChecker, + ResultChecker resultChecker) { super.check(factory, query, typeChecker, parameterChecker, resultChecker); final RelDataTypeSystem typeSystem = factory.typeSystemTransform.apply(RelDataTypeSystem.DEFAULT);