[Branch-2.0](regression-test) Add tvf regression tests apache#26322 a…
BePPPower authored and gnehil committed Dec 4, 2023
1 parent a745f7f commit 8f0de87
Showing 10 changed files with 622 additions and 15 deletions.
259 changes: 259 additions & 0 deletions regression-test/data/external_table_p0/tvf/test_hdfs_tvf.out

Large diffs are not rendered by default.

29 changes: 29 additions & 0 deletions regression-test/data/external_table_p0/tvf/test_s3_tvf.out
@@ -0,0 +1,29 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !select_base --
1 doris1 18
2 doris2 19
3 doris3 99
4 doris4 \N
5 doris5 15

-- !select_1 --
1 doris1 18
2 doris2 19
3 doris3 99
4 doris4 \N
5 doris5 15

-- !select_2 --
1 doris1 18
2 doris2 19
3 doris3 99
4 doris4 \N
5 doris5 15

-- !select_3 --
1 doris1 18
2 doris2 19
3 doris3 99
4 doris4 \N
5 doris5 15

22 changes: 22 additions & 0 deletions regression-test/data/external_table_p2/tvf/test_iceberg_meta.out
@@ -0,0 +1,22 @@
-- This file is automatically generated. You should know what you did if you want to edit this
-- !q01 --
2879562

-- !q02 --
1
11
3
5
6
7
8

-- !tvf_1 --
2023-10-16T21:01:06 4012471924714711043 5784892960796156942 append
2023-10-16T21:01:06 5784892960796156942 -1 append
2023-10-16T21:01:06 7235593032487457798 4012471924714711043 append
2023-10-16T21:01:07 1953697979105284524 7235593032487457798 append

-- !tvf_2 --
2023-10-16T21:01:06 7235593032487457798 4012471924714711043 append

regression-test/suites/external_table_p0/tvf/test_backends_tvf.groovy
@@ -58,4 +58,13 @@ suite("test_backends_tvf","p0,external,tvf,external_docker") {
MaxDiskUsedPct, RemoteUsedCapacity, Tag, ErrMsg, Version, Status,
HeartbeatFailureCounter, NodeRole from backends();
"""


// test exception
test {
sql """ select * from backends("backendId" = "10003"); """

// check exception
exception "backends table-valued-function does not support any params"
}
}
regression-test/suites/external_table_p0/tvf/test_catalogs_tvf.groovy
@@ -68,4 +68,12 @@ suite("test_catalogs_tvf","p0,external,tvf,external_docker") {
qt_create """ select CatalogName,CatalogType,Property,Value from catalogs() where CatalogName in ("catalog_test_es00","catalog_test_hive00") and Property="type" order by Value"""

sql """ drop catalog catalog_test_es00 """

// test exception
test {
sql """ select * from catalogs("Host" = "127.0.0.1"); """

// check exception
exception "catalogs table-valued-function does not support any params"
}
}
regression-test/suites/external_table_p0/tvf/test_frontends_tvf.groovy
@@ -47,4 +47,12 @@ suite("test_frontends_tvf","p0,external,tvf,external_docker") {
`Join`, Alive, ReplayedJournalId, LastHeartbeat,
IsHelper, ErrMsg, Version, CurrentConnected from frontends();
"""

// test exception
test {
sql """ select * from frontends("Host" = "127.0.0.1"); """

// check exception
exception "frontends table-valued-function does not support any params"
}
}
86 changes: 86 additions & 0 deletions regression-test/suites/external_table_p0/tvf/test_hdfs_tvf.groovy
@@ -246,7 +246,93 @@ suite("test_hdfs_tvf","external,hive,tvf,external_docker") {
"uri" = "${uri}",
"hadoop.username" = "${hdfsUserName}",
"format" = "${format}"); """


// test hdfs function compatibility
// because the property `fs.defaultFS` was deleted by PR https://github.com/apache/doris/pull/24706,
// we should test backward compatibility of `fs.defaultFS`
uri = "${defaultFS}" + "/user/doris/preinstalled_data/csv_format_test/all_types.csv"
format = "csv"
order_qt_hdfs_compatible """ select * from HDFS(
"uri" = "${uri}",
"fs.defaultFS"= "${defaultFS}",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}") order by c1; """

// test csv_schema property
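// csv_schema declares column names and types explicitly as semicolon-separated
// "name:type" pairs, overriding the types the TVF would otherwise infer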
uri = "${defaultFS}" + "/user/doris/preinstalled_data/csv_format_test/all_types.csv"
format = "csv"
order_qt_hdfs_csv_schema """ select * from HDFS(
"uri" = "${uri}",
"csv_schema" = "id:int;tinyint_col:tinyint;smallint_col:smallint;bigint_col:bigint;largeint_col:largeint;float_col:float;double_col:double;decimal_col:decimal(10,5);string_col:string;string_col:string;string_col:string;date_col:date;datetime_col:datetime(3)",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}") order by id; """

order_qt_hdfs_desc_csv_schema """ desc function HDFS(
"uri" = "${uri}",
"csv_schema" = "id:int;tinyint_col:tinyint;smallint_col:smallint;bigint_col:bigint;largeint_col:largeint;float_col:float;double_col:double;decimal_col:decimal(10,5);string_col:string;string_col:string;string_col:string;date_col:date;datetime_col:datetime(3)",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "${format}"); """

} finally {
}
}

// test exception
test {
sql """ select * from HDFS(
"uri" = "",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "csv") order by c1;
"""

// check exception
exception """Properties 'uri' is required"""
}

// test exception
test {
sql """ select * from HDFS(
"uri" = "xx",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = ",",
"format" = "csv") order by c1;
"""

// check exception
exception """Invalid export path, there is no schema of URI found. please check your path"""
}

// test exception
test {
sql """ select * from HDFS(
"uri" = "xx",
"hadoop.username" = "${hdfsUserName}",
"column_separator" = "",
"format" = "csv") order by c1;
"""

// check exception
exception """column_separator can not be empty"""
}


// test exception
test {
sql """ select * from HDFS(
"uri" = "xx",
"hadoop.username" = "${hdfsUserName}",
"line_delimiter" = "",
"format" = "csv") order by c1;
"""

// check exception
exception """line_delimiter can not be empty"""
}


}
45 changes: 30 additions & 15 deletions regression-test/suites/external_table_p0/tvf/test_numbers.groovy
@@ -1,19 +1,19 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.


suite("test_numbers","external,hive,tvf,external_docker") {
@@ -123,4 +123,19 @@
// test subquery
order_qt_subquery_1 """ with a as (select number from numbers("number"="3")) select * from a; """
order_qt_subquery_2 """ select * from (select number from numbers("number"="3")) a join (select * from (select number from numbers("number"="1")) a join (select 1) b) b; """

// test exception
test {
sql """ select * from numbers('number' = 'abc'); """

// check exception
exception "can not parse `number` param to natural number"
}

test {
sql """ select * from numbers(); """

// check exception
exception """can not find `number` param, please specify `number`, like: numbers("number" = "10")"""
}
}
120 changes: 120 additions & 0 deletions regression-test/suites/external_table_p0/tvf/test_s3_tvf.groovy
@@ -0,0 +1,120 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

suite("test_s3_tvf", "p0") {
// enable the nereids planner
sql """ set enable_nereids_planner=true """
sql """ set enable_fallback_to_original_planner=false """

String ak = getS3AK()
String sk = getS3SK()
String s3_endpoint = getS3Endpoint()
String region = getS3Region()
String bucket = context.config.otherConfigs.get("s3BucketName");


def export_table_name = "test_s3_tvf_export_test"
def outFilePath = "${bucket}/test_s3_tvf/export_test/exp_"


def create_table = {table_name ->
sql """ DROP TABLE IF EXISTS ${table_name} """
sql """
CREATE TABLE IF NOT EXISTS ${table_name} (
`user_id` LARGEINT NOT NULL COMMENT "user id",
`name` STRING COMMENT "user name",
`age` INT COMMENT "user age"
)
DISTRIBUTED BY HASH(user_id) PROPERTIES("replication_num" = "1");
"""
}

def outfile_to_S3 = {
// select ... into outfile ...
def res = sql """
SELECT * FROM ${export_table_name} t ORDER BY user_id
INTO OUTFILE "s3://${outFilePath}"
FORMAT AS ORC
PROPERTIES (
"s3.endpoint" = "${s3_endpoint}",
"s3.region" = "${region}",
"s3.secret_key"="${sk}",
"s3.access_key" = "${ak}"
);
"""

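// the OUTFILE result is expected to contain one row per written file, with
// columns (FileNumber, TotalRows, FileSize, URL); index 3 is the file URL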
return res[0][3]
}

// create table to export data
create_table(export_table_name)

// insert data
sql """ insert into ${export_table_name} values (1, 'doris1', 18); """
sql """ insert into ${export_table_name} values (2, 'doris2', 19); """
sql """ insert into ${export_table_name} values (3, 'doris3', 99); """
sql """ insert into ${export_table_name} values (4, 'doris4', null); """
sql """ insert into ${export_table_name} values (5, 'doris5', 15); """

// test base data
qt_select_base """ SELECT * FROM ${export_table_name} t ORDER BY user_id; """

// test outfile to s3
def outfile_url = outfile_to_S3()

// 1. normal
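// outfile_url has the form "s3://bucket/path/exp_"; substring(4) drops the
// leading "s3:/" so the remainder ("/bucket/path/exp_") can be appended to
// the endpoint as a path-style URL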
try {
order_qt_select_1 """ SELECT * FROM S3 (
"uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.orc",
"ACCESS_KEY"= "${ak}",
"SECRET_KEY" = "${sk}",
"format" = "orc",
"region" = "${region}"
);
"""
} finally {
}


// 2. test endpoint property
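// substring(5) strips the full "s3://" scheme, so the uri begins with the
// bucket name and the endpoint is passed separately via s3.endpoint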
try {
order_qt_select_2 """ SELECT * FROM S3 (
"uri" = "http://${outfile_url.substring(5)}0.orc",
"s3.access_key"= "${ak}",
"s3.secret_key" = "${sk}",
"s3.endpoint" = "${s3_endpoint}",
"format" = "orc",
"region" = "${region}"
);
"""
} finally {
}

// 3. test use_path_style
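// use_path_style = "true" forces http://endpoint/bucket/key addressing rather
// than virtual-hosted style (http://bucket.endpoint/key), which some
// S3-compatible stores require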
try {
order_qt_select_3 """ SELECT * FROM S3 (
"uri" = "http://${s3_endpoint}${outfile_url.substring(4)}0.orc",
"s3.access_key"= "${ak}",
"s3.secret_key" = "${sk}",
"format" = "orc",
"use_path_style" = "true",
"region" = "${region}"
);
"""
} finally {
}
}