Skip to content

Commit

Permalink
[fix](regression-test) Add tvf regression tests (apache#26455)
Browse files Browse the repository at this point in the history
  • Loading branch information
BePPPower authored and seawinde committed Nov 12, 2023
1 parent 665f887 commit 2bd0db9
Show file tree
Hide file tree
Showing 7 changed files with 278 additions and 15 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !select --
1 doris 10

29 changes: 29 additions & 0 deletions regression-test/data/external_table_p0/tvf/test_s3_tvf.out
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !select_base --
1 doris1 18
2 doris2 19
3 doris3 99
4 doris4 \N
5 doris5 15

-- !select_1 --
1 doris1 18
2 doris2 19
3 doris3 99
4 doris4 \N
5 doris5 15

-- !select_2 --
1 doris1 18
2 doris2 19
3 doris3 99
4 doris4 \N
5 doris5 15

-- !select_3 --
1 doris1 18
2 doris2 19
3 doris3 99
4 doris4 \N
5 doris5 15

22 changes: 22 additions & 0 deletions regression-test/data/external_table_p2/tvf/test_iceberg_meta.out
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !q01 --
2879562

-- !q02 --
1
11
3
5
6
7
8

-- !tvf_1 --
2023-10-16T21:01:06 4012471924714711043 5784892960796156942 append
2023-10-16T21:01:06 5784892960796156942 -1 append
2023-10-16T21:01:06 7235593032487457798 4012471924714711043 append
2023-10-16T21:01:07 1953697979105284524 7235593032487457798 append

-- !tvf_2 --
2023-10-16T21:01:06 7235593032487457798 4012471924714711043 append

Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

// Regression test for the queries() table-valued function: create a small
// table, run one INSERT and one SELECT against it, then verify that exactly
// those two statements appear in the queries() TVF output for this table.
suite("test_queries_tvf","p0,external,tvf,external_docker") {
    def table_name = "test_queries_tvf"
    // Drop first so reruns start from a clean slate.
    sql """ DROP TABLE IF EXISTS ${table_name} """
    // NOTE(review): the trailing comma after the last column definition looks
    // like a syntax slip — confirm Doris' parser tolerates it in CREATE TABLE.
    sql """
        CREATE TABLE IF NOT EXISTS ${table_name} (
        `user_id` LARGEINT NOT NULL COMMENT "用户id",
        `name` STRING COMMENT "用户名称",
        `age` INT COMMENT "用户年龄",
        )
        DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
    """

    // These two statements are the ones the queries() TVF is expected to record.
    sql """insert into ${table_name} values (1, 'doris', 10);"""

    sql """select * from ${table_name};"""

    // Filter query history by table name; the DROP/CREATE above do not mention
    // ${table_name} via a pattern match on `Sql`? They do — NOTE(review): the
    // LIKE pattern matches every statement containing the table name, so the
    // expected count of 2 presumably relies on DDL being excluded from
    // queries(); verify against the TVF's semantics.
    def res = sql """ select QueryId from queries() where `Sql` like "%${table_name}%"; """
    logger.info("res = " + res)
    assertEquals(2, res.size())
}
30 changes: 15 additions & 15 deletions regression-test/suites/external_table_p0/tvf/test_numbers.groovy
Original file line number Diff line number Diff line change
@@ -1,19 +1,19 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.


suite("test_numbers","p0,external,external_docker") {
Expand Down
120 changes: 120 additions & 0 deletions regression-test/suites/external_table_p0/tvf/test_s3_tvf.groovy
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

// Regression test for the S3() table-valued function. It exports a small
// table to S3 via SELECT ... INTO OUTFILE (ORC), then reads the file back
// through S3() using three property spellings: plain ACCESS_KEY/SECRET_KEY,
// s3.*-prefixed keys with an explicit endpoint, and use_path_style=true.
suite("test_s3_tvf", "p0") {
    // Run on the Nereids planner with no fallback, so a planner regression
    // fails the test instead of being silently masked.
    sql """ set enable_nereids_planner=true """
    sql """ set enable_fallback_to_original_planner=false """

    String ak = getS3AK()
    String sk = getS3SK()
    String s3_endpoint = getS3Endpoint()
    String region = getS3Region()
    String bucket = context.config.otherConfigs.get("s3BucketName");

    def export_table_name = "test_s3_tvf_export_test"
    // NOTE(review): "est_s3_tvf" looks like a typo for "test_s3_tvf". It is
    // harmless (the path only needs to be unique), so it is left unchanged to
    // avoid orphaning files written by earlier runs — confirm intent.
    def outFilePath = "${bucket}/est_s3_tvf/export_test/exp_"

    // Create (or recreate) the source table for the export.
    def create_table = {table_name ->
        sql """ DROP TABLE IF EXISTS ${table_name} """
        // NOTE(review): trailing comma after the last column — confirm Doris
        // tolerates it in CREATE TABLE column lists.
        sql """
            CREATE TABLE IF NOT EXISTS ${table_name} (
            `user_id` LARGEINT NOT NULL COMMENT "用户id",
            `name` STRING COMMENT "用户名称",
            `age` INT COMMENT "用户年龄",
            )
            DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
        """
    }

    // Export the table to S3 as ORC and return the outfile URL reported in
    // column 3 of the OUTFILE result row.
    def outfile_to_S3 = {
        def res = sql """
            SELECT * FROM ${export_table_name} t ORDER BY user_id
            INTO OUTFILE "s3://${outFilePath}"
            FORMAT AS ORC
            PROPERTIES (
                "s3.endpoint" = "${s3_endpoint}",
                "s3.region" = "${region}",
                "s3.secret_key"="${sk}",
                "s3.access_key" = "${ak}"
            );
        """

        return res[0][3]
    }

    // Source data: five rows, including a NULL age to exercise \N handling.
    create_table(export_table_name)

    sql """ insert into ${export_table_name} values (1, 'doris1', 18); """
    sql """ insert into ${export_table_name} values (2, 'doris2', 19); """
    sql """ insert into ${export_table_name} values (3, 'doris3', 99); """
    sql """ insert into ${export_table_name} values (4, 'doris4', null); """
    sql """ insert into ${export_table_name} values (5, 'doris5', 15); """

    // Baseline: what the table itself returns; the three S3() reads below
    // must produce the same rows.
    qt_select_base """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """

    def outfile_url = outfile_to_S3()
    // substring(4) strips the "s3://" scheme minus one char ("://..."),
    // substring(5) strips "s3://" entirely; "0.orc" is the first (only)
    // outfile partition suffix.

    // 1. normal: uppercase ACCESS_KEY/SECRET_KEY property names.
    // (The original wrapped each query in try { ... } finally {} with an
    // empty finally — a no-op construct, removed here.)
    order_qt_select_1 """ SELECT * FROM S3 (
                            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.orc",
                            "ACCESS_KEY"= "${ak}",
                            "SECRET_KEY" = "${sk}",
                            "format" = "orc",
                            "region" = "${region}"
                        );
                        """

    // 2. s3.*-prefixed credentials plus an explicit s3.endpoint property.
    order_qt_select_2 """ SELECT * FROM S3 (
                            "uri" = "http://${outfile_url.substring(5)}0.orc",
                            "s3.access_key"= "${ak}",
                            "s3.secret_key" = "${sk}",
                            "s3.endpoint" = "${s3_endpoint}",
                            "format" = "orc",
                            "region" = "${region}"
                        );
                        """

    // 3. path-style addressing (bucket in the path, not the host name).
    order_qt_select_3 """ SELECT * FROM S3 (
                            "uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.orc",
                            "s3.access_key"= "${ak}",
                            "s3.secret_key" = "${sk}",
                            "format" = "orc",
                            "use_path_style" = "true",
                            "region" = "${region}"
                        );
                        """
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

// Regression test for the iceberg_meta() table-valued function against a
// remote Iceberg hadoop catalog: checks plain reads of an Iceberg table and
// snapshot-metadata queries, both unfiltered and filtered by snapshot_id.
suite("test_iceberg_meta", "p2,external,iceberg,external_remote,external_remote_iceberg") {
    String enabled = context.config.otherConfigs.get("enableExternalHiveTest")
    // Skip entirely unless the external Hive/Iceberg environment is enabled.
    if (!"true".equalsIgnoreCase(enabled)) {
        return
    }

    String catalogName = "test_iceberg_meta_tvf"
    String hmsHost = context.config.otherConfigs.get("extHiveHmsHost")
    String hdfsPort = context.config.otherConfigs.get("extHdfsPort")
    String dbName = "multi_catalog"

    // Recreate the Iceberg hadoop catalog from scratch for a clean run.
    sql """drop catalog if exists ${catalogName};"""
    sql """
        create catalog if not exists ${catalogName} properties (
            'type'='iceberg',
            'iceberg.catalog.type'='hadoop',
            'warehouse' = 'hdfs://${hmsHost}:${hdfsPort}/usr/hive/warehouse/hadoop_catalog'
        );
    """
    sql """switch ${catalogName};"""
    sql """ use `${dbName}`; """

    // Sanity reads of the Iceberg table through the catalog.
    order_qt_q01 """ select count(*) from iceberg_hadoop_catalog """
    order_qt_q02 """ select c_custkey from iceberg_hadoop_catalog group by c_custkey order by c_custkey limit 7 """

    // All snapshots of the partitioned table via iceberg_meta().
    order_qt_tvf_1 """ select committed_at, snapshot_id, parent_id, operation from iceberg_meta(
                        "table" = "${catalogName}.${dbName}.multi_partition",
                        "query_type" = "snapshots");
                    """

    // Same query narrowed to a single snapshot_id.
    order_qt_tvf_2 """ select committed_at, snapshot_id, parent_id, operation from iceberg_meta(
                        "table" = "${catalogName}.${dbName}.multi_partition",
                        "query_type" = "snapshots")
                        where snapshot_id = 7235593032487457798;
                    """
}

0 comments on commit 2bd0db9

Please sign in to comment.