@@ -0,0 +1,13 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !columns_out_of_order01 --
3 6 1 4 2 5

-- !columns_out_of_order02 --
1 2 3 4 5 6

-- !columns_out_of_order01 --
3 6 1 4 2 5

-- !columns_out_of_order02 --
1 2 3 4 5 6
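
For orientation: this generated .out file holds the expected results that the regression framework checks against each order_qt_* statement in the new suite further down in this diff. The tag after order_qt_ matches the "-- !... --" header, and each tag block appears twice because the suite body runs once per entry in format_compressions. A minimal sketch of the correspondence, assuming the standard Doris suite DSL (the suite name and table names here are placeholders, not part of the change):

suite("columns_out_of_order_sketch") {
    // Sketch only: the framework sorts the result of an order_qt_<tag> query and
    // diffs it against the matching "-- !<tag> --" block in the generated .out file.
    order_qt_columns_out_of_order01 """ SELECT * FROM source_tbl """
    // A second tag, checked against the "-- !columns_out_of_order02 --" block.
    order_qt_columns_out_of_order02 """ SELECT * FROM target_tbl """
}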

@@ -1,9 +1,9 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !c1 --
1 25-34 M true
2 18-24 F false
1 25 M true
2 18 F false

-- !c1 --
1 25-34 M true
2 18-24 F false
1 25 M true
2 18 F false

@@ -15,14 +15,17 @@
// specific language governing permissions and limitations
// under the License.

suite("test_glue_rest_s3tables", "p2,external,iceberg,external_remote,external_remote_iceberg,new_catalog_property") {
def format_compressions = ["parquet_zstd"]
import java.util.concurrent.ThreadLocalRandom

suite("test_s3tables_glue_insert", "p2,external,iceberg,external_remote,external_remote_iceberg") {
def format_compressions = ["parquet_zstd", "orc_zlib"]

def q01 = { String format_compression, String catalog_name ->
def parts = format_compression.split("_")
def format = parts[0]
def compression = parts[1]
def all_types_table = "iceberg_glue_rest_${format_compression}_master"
def all_types_table = "iceberg_all_types_${format_compression}_master"
def all_types_partition_table = "iceberg_all_types_par_${format_compression}_master"
sql """ DROP TABLE IF EXISTS `${all_types_table}`; """
sql """
CREATE TABLE `${all_types_table}`(
@@ -321,7 +324,8 @@ suite("test_glue_rest_s3tables", "p2,external,iceberg,external_remote,external_r
def parts = format_compression.split("_")
def format = parts[0]
def compression = parts[1]
def all_types_partition_table = "iceberg_all_types_par_glue_rest_${format_compression}_master"
def all_types_table = "iceberg_all_types_${format_compression}_master"
def all_types_partition_table = "iceberg_all_types_par_${format_compression}_master"
sql """ DROP TABLE IF EXISTS `${all_types_partition_table}`; """
sql """
CREATE TABLE `${all_types_partition_table}`(
@@ -641,7 +645,7 @@ suite("test_glue_rest_s3tables", "p2,external,iceberg,external_remote,external_r
return
}

String catalog_name = "test_s3tables_glue_rest"
String catalog_name = "test_s3tables_glue_rest_insert"
String props = context.config.otherConfigs.get("icebergS3TablesCatalogGlueRest")
sql """drop catalog if exists ${catalog_name};"""
sql """
@@ -650,10 +654,11 @@ suite("test_glue_rest_s3tables", "p2,external,iceberg,external_remote,external_r
);
"""

def tmpdb = "s3table_glue_db_insert_" + ThreadLocalRandom.current().nextInt(1000);
sql """ switch ${catalog_name};"""
sql """ drop database if exists iceberg_s3tables_glue_rest_master force"""
sql """ create database iceberg_s3tables_glue_rest_master"""
sql """ use iceberg_s3tables_glue_rest_master;"""
sql """ drop database if exists ${tmpdb} force"""
sql """ create database ${tmpdb}"""
sql """ use ${tmpdb};"""
sql """ set enable_fallback_to_original_planner=false """

try {
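
The hunk above (truncated in this view) also replaces the fixed iceberg_s3tables_glue_rest_master database with a randomly suffixed one, presumably so that repeated or concurrent runs against the shared Glue REST catalog do not collide. A minimal sketch of that setup/teardown pattern, assuming the standard suite DSL; the suite name, example_catalog, and the database prefix are placeholders:

import java.util.concurrent.ThreadLocalRandom

suite("tmpdb_pattern_sketch") {
    // Random suffix keeps parallel or re-run jobs from clashing on one database name.
    def tmpdb = "s3table_glue_db_example_" + ThreadLocalRandom.current().nextInt(1000)
    sql """ switch example_catalog; """
    sql """ drop database if exists ${tmpdb} force """
    sql """ create database ${tmpdb} """
    sql """ use ${tmpdb}; """
    try {
        logger.info("run the per-format test closures here")
    } finally {
        // Clean up even if a check above throws.
        sql """ drop database if exists ${tmpdb} force """
    }
}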

@@ -0,0 +1,114 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

import java.util.concurrent.ThreadLocalRandom

suite("test_s3tables_glue_insert_partitions", "p0,external,iceberg,external_docker,external_docker_iceberg") {
def format_compressions = ["parquet_snappy", "orc_zlib"]

def test_s3_columns_out_of_order = { String format_compression, String catalog_name ->
def parts = format_compression.split("_")
def format = parts[0]
def compression = parts[1]
def source_tbl = "s3_columns_out_of_order_source_tbl_${format_compression}_master"
def target_tbl = "s3_columns_out_of_order_target_tbl_${format_compression}_master"
sql """ drop table if exists ${source_tbl} """
sql """
CREATE TABLE ${source_tbl} (
`col3` bigint,
`col6` int,
`col1` bigint,
`col4` int,
`col2` bigint,
`col5` int
) ENGINE = iceberg
properties (
"compression-codec" = ${compression},
"write-format"=${format}
)
""";
sql """ drop table if exists ${target_tbl}"""
sql """
CREATE TABLE ${target_tbl} (
`col1` bigint,
`col2` bigint,
`col3` bigint,
`col4` int,
`col5` int,
`col6` int
) ENGINE = iceberg
PARTITION BY LIST (
col4, col5, col6
)()
properties (
"compression-codec" = ${compression},
"write-format"=${format}
)
""";

sql """
INSERT INTO ${source_tbl} (
col1, col2, col3, col4, col5, col6
) VALUES (1, 2, 3, 4, 5, 6);
"""
order_qt_columns_out_of_order01 """ SELECT * FROM ${source_tbl} """

sql """
INSERT INTO ${target_tbl} (
col1, col2, col3, col4, col5, col6
) VALUES (1, 2, 3, 4, 5, 6);
"""

order_qt_columns_out_of_order02 """ SELECT * FROM ${target_tbl} """

sql """ drop table ${source_tbl} """
sql """ drop table ${target_tbl} """
sql """ drop database if exists `test_s3_columns_out_of_order` """;
}

String enabled = context.config.otherConfigs.get("enableExternalIcebergTest")
if (enabled == null || !enabled.equalsIgnoreCase("true")) {
logger.info("disable iceberg test")
return
}

String catalog_name = "test_s3tables_glue_rest_insert_partitions"
String props = context.config.otherConfigs.get("icebergS3TablesCatalogGlueRest")
sql """drop catalog if exists ${catalog_name};"""
sql """
create catalog ${catalog_name} properties (
${props}
);
"""

def tmpdb = "s3table_glue_db_insert_partitions_" + ThreadLocalRandom.current().nextInt(1000);
sql """ switch ${catalog_name};"""
sql """ drop database if exists ${tmpdb} force"""
sql """ create database ${tmpdb}"""
sql """ use ${tmpdb};"""
sql """ set enable_fallback_to_original_planner=false """

try {
for (String format_compression in format_compressions) {
logger.info("Process format_compression " + format_compression)
test_s3_columns_out_of_order(format_compression, catalog_name)
}
} finally {
sql """drop database if exists ${tmpdb} force"""
}

}
@@ -16,6 +16,11 @@
// under the License.
import java.util.concurrent.ThreadLocalRandom
suite("test_s3tables_insert_overwrite", "p0,external,iceberg,external_docker,external_docker_iceberg") {
// disable this test by default, glue + s3table is recommended
def run_test = false;
if (!run_test) {
return;
}
def format_compressions = ["parquet_zstd", "orc_zlib"]

def q01 = { String format_compression, String catalog_name ->
@@ -18,6 +18,11 @@ import java.util.concurrent.ThreadLocalRandom
// under the License.

suite("test_s3tables_write_insert", "p2,external,iceberg,external_remote,external_remote_iceberg") {
// disable this test by default, glue + s3table is recommended
def run_test = false;
if (!run_test) {
return;
}
def format_compressions = ["parquet_zstd", "orc_zlib"]

def q01 = { String format_compression, String catalog_name ->
@@ -18,6 +18,11 @@ import java.util.concurrent.ThreadLocalRandom
// under the License.

suite("test_s3tables_write_partitions", "p0,external,iceberg,external_docker,external_docker_iceberg") {
// disable this test by default, glue + s3table is recommended
def run_test = false;
if (!run_test) {
return;
}
def format_compressions = ["parquet_snappy", "orc_zlib"]

def test_s3_columns_out_of_order = { String format_compression, String catalog_name ->
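
The three older S3Tables suites above (insert overwrite, write insert, write partitions) are all disabled the same way: because a suite body is just a Groovy closure, an early return skips everything after the guard while the file stays in place for anyone who wants to re-enable it locally. A minimal sketch of the guard, with a hypothetical suite name:

suite("disabled_suite_sketch", "p0") {
    // Flip to true to re-enable; per the comment in the diff, glue + s3table is the recommended path.
    def run_test = false
    if (!run_test) {
        return  // returning from the closure skips the rest of the suite body
    }
    logger.info("test body would run here")
}
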
@@ -35,12 +35,12 @@ suite("test_paimon_dlf_rest_catalog", "p2,external,paimon,external_remote,extern
sql """ use ${catalog}.new_dlf_paimon_db"""

sql """set force_jni_scanner=false"""
qt_c1 """ select * from users_samples order by user_id """
sql """select * from users_samples\$files;"""
qt_c1 """ select * from user_samples order by user_id """
sql """select * from user_samples\$files;"""

sql """set force_jni_scanner=true"""
qt_c1 """ select * from users_samples order by user_id """
sql """select * from users_samples\$files;"""
qt_c1 """ select * from user_samples order by user_id """
sql """select * from user_samples\$files;"""

} finally {
sql """set force_jni_scanner=false"""