Skip to content

Commit 28de63e

Browse files
HaydenOrzwewoor
authored and committed
test: hiveSQL createStatement unit tests
1 parent e97e0b2 commit 28de63e

10 files changed

Lines changed: 297 additions & 0 deletions
Lines changed: 90 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,90 @@
1+
import HiveSQL from '../../../../src/parser/hive';
2+
import { readSQL } from '../../../helper';
3+
4+
const parser = new HiveSQL();
5+
6+
const features = {
7+
databases: readSQL(__dirname, 'createDatabase.sql'),
8+
tables: readSQL(__dirname, 'createTable.sql'),
9+
views: readSQL(__dirname, 'createView.sql'),
10+
functions: readSQL(__dirname, 'createFunction.sql'),
11+
roles: readSQL(__dirname, 'createRole.sql'),
12+
indexes: readSQL(__dirname, 'createIndex.sql'),
13+
macros: readSQL(__dirname, 'createMacro.sql'),
14+
connectors: readSQL(__dirname, 'createConnector.sql'),
15+
scheduledQueries: readSQL(__dirname, 'createScheduledQuery.sql')
16+
};
17+
18+
describe('Hive Create Syntax Tests', () => {
19+
describe('CREATE DATABASE', () => {
20+
features.databases.forEach((database) => {
21+
it(database, () => {
22+
expect(parser.validate(database).length).toBe(0);
23+
});
24+
});
25+
});
26+
27+
describe('CREATE TABLE', () => {
28+
features.tables.forEach((table) => {
29+
it(table, () => {
30+
expect(parser.validate(table).length).toBe(0);
31+
});
32+
});
33+
});
34+
35+
describe('CREATE VIEW', () => {
36+
features.views.forEach((view) => {
37+
it(view, () => {
38+
expect(parser.validate(view).length).toBe(0);
39+
});
40+
});
41+
});
42+
43+
describe('CREATE FUNCTION', () => {
44+
features.functions.forEach((func) => {
45+
it(func, () => {
46+
expect(parser.validate(func).length).toBe(0);
47+
});
48+
});
49+
});
50+
51+
describe('CREATE ROLE', () => {
52+
features.roles.forEach((role) => {
53+
it(role, () => {
54+
expect(parser.validate(role).length).toBe(0);
55+
});
56+
});
57+
});
58+
59+
// describe('CREATE INDEX', () => {
60+
// features.indexes.forEach((index) => {
61+
// it(index, () => {
62+
// expect(parser.validate(index).length).toBe(0);
63+
// });
64+
// });
65+
// });
66+
67+
describe('CREATE MACRO', () => {
68+
features.macros.forEach((macro) => {
69+
it(macro, () => {
70+
expect(parser.validate(macro).length).toBe(0);
71+
});
72+
});
73+
});
74+
75+
describe('CREATE CONNECTOR', () => {
76+
features.connectors.forEach((cnctor) => {
77+
it(cnctor, () => {
78+
expect(parser.validate(cnctor).length).toBe(0);
79+
});
80+
});
81+
});
82+
83+
describe('CREATE SCHEDULE QUERY', () => {
84+
features.scheduledQueries.forEach((sq) => {
85+
it(sq, () => {
86+
expect(parser.validate(sq).length).toBe(0);
87+
});
88+
});
89+
});
90+
});
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
-- Fixtures for CREATE CONNECTOR; each statement is validated independently
-- by the Hive parser test suite.

-- Minimal form: TYPE and URL only.
CREATE CONNECTOR IF NOT EXISTS mysql_local
TYPE 'mysql'
URL 'jdbc:mysql://localhost:5432';

-- Full form: COMMENT plus WITH DCPROPERTIES (connector credentials).
CREATE CONNECTOR pg_local
TYPE 'postgres'
URL 'jdbc:postgresql://localhost:5432'
COMMENT '这是一个 postgres 连接器'
WITH DCPROPERTIES ("hive.sql.dbcp.username"="postgres", "hive.sql.dbcp.password"="postgres");
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
-- Fixtures for CREATE DATABASE / CREATE SCHEMA; both keyword spellings are
-- exercised in parallel with the same clause set.

CREATE DATABASE mydb;

CREATE SCHEMA myschema;

-- Full form: comment, external and managed locations, and DBPROPERTIES.
CREATE DATABASE IF NOT EXISTS mydb
COMMENT 'my test db'
LOCATION '/myhive/myoutdb'
MANAGEDLOCATION '/myhive/myindb'
WITH DBPROPERTIES ('creator'='ypc','date'='2021-03-09');

CREATE SCHEMA IF NOT EXISTS myschema
COMMENT 'my test myschema'
LOCATION '/myhive/myoutschema'
MANAGEDLOCATION '/myhive/myinschema'
WITH DBPROPERTIES ('creator'='ypc','date'='2021-03-09');
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
-- Fixtures for CREATE [TEMPORARY] FUNCTION.

CREATE FUNCTION base_analizer AS 'com.udf.BaseFieldUDF';
CREATE TEMPORARY FUNCTION flat_analizer AS 'com.udtf.EventJsonUDTF';

-- Function backed by a single JAR resource.
CREATE FUNCTION base_analizer
AS "com.BaseFieldUDF"
USING JAR 'hdfs://hadoop12:9000/user/hive/jars/hivefunction-1.0-SNAPSHOT.jar';

-- Multiple resources: JAR, FILE and ARCHIVE in one USING list.
-- FIX: terminating semicolon added — the original left this last statement
-- unterminated, unlike every other fixture file, which is fragile for the
-- statement-splitting helper.
CREATE FUNCTION test_udf
AS "com.BaseFieldUDF"
USING JAR 'hdfs://hadoop12:9000/user/hive/jars/hivetestfunc-1.0-SNAPSHOT.jar'
, FILE 'hdfs://hadoop12:9000/user/hive/files/hivetestfunc.java'
, ARCHIVE 'hdfs://hadoop12:9000/user/hive/files/hivetestfunc.txt';
Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
-- Fixtures for CREATE INDEX. The matching test suite is currently disabled
-- (CREATE INDEX is not accepted by the grammar yet), but the fixtures should
-- still be well-formed for when it is re-enabled.

CREATE INDEX table01_index
ON TABLE table01 (column2)
AS 'org.apache.hadoop.hive.ql.index.compact.CompactIndexHandler'
COMMENT '这是一个索引';

CREATE INDEX table02_index
ON TABLE table02 (column3)
AS 'org.apache.hadoop.hive.ql.index.compact.CompactIndexHandler'
WITH DEFERRED REBUILD;

CREATE INDEX table03_index
ON TABLE table03 (column4)
AS 'COMPACT'
TBLPROPERTIES ("prop1"="value1", "prop2"="value2");

-- FIX: the original terminated this statement with ';' directly after the
-- STORED BY clause, leaving "IDXPROPERTIES (...)" as an orphan fragment that
-- can never parse. IDXPROPERTIES is folded back into the statement, placed
-- before IN TABLE per the documented Hive CREATE INDEX clause order.
CREATE INDEX table04_index
ON TABLE table04 (column5)
AS 'COMPACT'
IDXPROPERTIES ("prop3"="value3", "prop4"="value4")
IN TABLE indextable1
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler';

CREATE INDEX table05_index
ON TABLE table05 (column6)
AS 'COMPACT'
STORED AS RCFILE;

CREATE INDEX table06_index
ON TABLE table06 (column7)
AS 'COMPACT'
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS TEXTFILE;
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
-- Fixtures for CREATE TEMPORARY MACRO.

-- Zero-argument macro.
CREATE TEMPORARY MACRO fixed_number() 42;

-- Single argument used in an expression body.
CREATE TEMPORARY MACRO string_len_plus_two(x STRING) length(x) + 2;

-- Multiple arguments.
CREATE TEMPORARY MACRO simple_add (x INT, y INT) x + y;

-- Macro whose body is a multi-line expression (IF wrapping a CASE).
CREATE TEMPORARY MACRO get_degree(degree_type STRING)
IF (degree_type IS NOT NULL,
CASE degree_type
WHEN 1 THEN '小学'
WHEN 2 THEN '初中'
WHEN 3 THEN '职业高中'
WHEN 4 THEN '中专'
WHEN 5 THEN '高中'
WHEN 6 THEN '大专'
WHEN 7 THEN '本科'
WHEN 8 THEN '硕士'
WHEN 9 THEN '博士'
ELSE NULL
END,
NULL);
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
-- Fixture for CREATE ROLE (single minimal statement).
CREATE ROLE std_user;
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
-- Fixtures for CREATE SCHEDULED QUERY, covering both CRON and EVERY schedules.

-- CRON schedule with an inline AS statement.
CREATE SCHEDULED QUERY sc1
CRON '0 */10 * * * ? *'
AS INSERT INTO t VALUES (1);

CREATE SCHEDULED QUERY t_analyze
CRON '0 */1 * * * ? *'
AS ANALYZE TABLE t
COMPUTE STATISTICS FOR COLUMNS;

-- EVERY-interval schedule with OFFSET BY, explicitly enabled.
-- NOTE(review): in Hive docs EXECUTED AS names the executing user; here it is
-- given a query string — presumably exercised purely as grammar. Confirm.
CREATE SCHEDULED QUERY s_day
EVERY 2 DAY OFFSET BY 'offsetTs'
EXECUTED AS 'SELECT * FROM aa'
ENABLE
DEFINED AS INSERT INTO t VALUES (1);

-- EVERY HOUR AT schedule, explicitly disabled.
CREATE SCHEDULED QUERY s_hour
EVERY HOUR AT '0:07:30'
EXECUTED AS 'SELECT * FROM aa'
DISABLE
DEFINED AS INSERT INTO t VALUES (1);
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
-- Fixtures for CREATE TABLE variants.

-- Temporary table, inline column list.
CREATE TEMPORARY TABLE list_bucket_multiple (col1 STRING, col2 INT, col3 STRING);

-- Transactional (ACID) table, partitioned, ORC storage.
CREATE TRANSACTIONAL TABLE transactional_table_test(
key STRING,
value STRING
)
PARTITIONED BY(ds STRING) STORED AS ORC;

-- LIKE and CTAS forms.
CREATE TABLE IF NOT EXISTS copy_table LIKE origin_table;

CREATE TABLE IF NOT EXISTS derived_table AS SELECT * FROM origin_table;

-- Back-quoted identifiers (including reserved words like `date` and `add`),
-- complex types, and row-format delimiters.
CREATE TABLE `mydb.t1`(
`id` INT,
`dept_no` INT,
`addr` STRING,
`tel` STRING,
`hobby` ARRAY<STRING>,
`add` MAP<STRING,STRING>
)
PARTITIONED BY(`date` STRING)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
COLLECTION ITEMS TERMINATED BY '-'
MAP KEYS TERMINATED BY ':';

-- External table with LOCATION and TBLPROPERTIES.
-- NOTE(review): column `add` is unquoted here (backquoted above) — presumably
-- intentional to exercise the keyword-as-identifier path; verify it parses.
CREATE EXTERNAL TABLE mydb.ext_table(
id INT,
name STRING,
hobby ARRAY<STRING>,
add MAP<STRING,STRING>
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
COLLECTION ITEMS TERMINATED BY '-'
MAP KEYS TERMINATED BY ':'
LOCATION '/user/mydb/ext_table'
TBLPROPERTIES('author'='hayden','desc'='一个外部测试表')
;

-- Managed table with bucketing and a storage handler.
CREATE MANAGED TABLE managed_table (
id INT COMMENT 'ID',
name STRING COMMENT '名称'
)
COMMENT '测试分桶'
CLUSTERED BY(id) SORTED BY (id) INTO 4 BUCKETS
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler';

-- Skewed table stored as directories (list bucketing).
CREATE TABLE list_bucket_multiple (
col1 STRING,
col2 INT,
col3 STRING
)
SKEWED BY (col1, col2) ON (('s1',1), ('s3',3), ('s13',13), ('s78',78))
STORED AS DIRECTORIES;
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
-- Fixtures for CREATE VIEW and CREATE MATERIALIZED VIEW.

-- Common View
CREATE VIEW IF NOT EXISTS mydb.bro_view
COMMENT '一个测试视图'
AS SELECT * FROM mydb.sale_tbl;

-- View with a column list (column comments only) and TBLPROPERTIES.
CREATE VIEW mydb.task_view (
taskId COMMENT '任务id',
taskName COMMENT '任务名称',
taskRunTime COMMENT '任务运行时长'
)
COMMENT '一个任务信息视图'
TBLPROPERTIES(
'author'='hayden'
)
AS SELECT DISTINCT id, `name`, runtime
FROM task_tbl
WHERE type='day';


-- Materialized View
CREATE MATERIALIZED VIEW druid_wiki_mv
COMMENT '这是一个物化视图'
STORED AS PARQUET
AS
SELECT page, `user`, c_added, c_removed
FROM src;

-- Materialized view with rewrite disabled, partitioning, row format,
-- location and properties.
CREATE MATERIALIZED VIEW IF NOT EXISTS mv2
DISABLE REWRITE
PARTITIONED ON (lo_revenue)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
LOCATION '/user/hive/materialized_view'
TBLPROPERTIES('author'='hayden','desc'='一个物化视图')
AS
SELECT lo_revenue,
lo_extendedprice * lo_discount AS d_price,
lo_revenue - lo_supplycost AS d_balance
FROM customer, dates, lineorder, part, supplier
WHERE lo_orderdate = d_datekey;

0 commit comments

Comments
 (0)