-- Portions Copyright (c) 1996-2019, PostgreSQL Global Development Group
--
-- Window Functions Testing
-- https://github.com/postgres/postgres/blob/REL_12_STABLE/src/test/regress/sql/window.sql#L564-L911

-- Test window operator with codegen on and off.
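-- Each CONFIG_DIM1 line below is one alternative configuration; the SQL test
-- harness re-runs this file once per listed configuration.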
--CONFIG_DIM1 spark.sql.codegen.wholeStage=true
--CONFIG_DIM1 spark.sql.codegen.wholeStage=false,spark.sql.codegen.factoryMode=CODEGEN_ONLY
--CONFIG_DIM1 spark.sql.codegen.wholeStage=false,spark.sql.codegen.factoryMode=NO_CODEGEN

CREATE TEMPORARY VIEW tenk2 AS SELECT * FROM tenk1;

CREATE TABLE empsalary (
    depname string,
    empno integer,
    salary int,
    enroll_date date
) USING parquet;

INSERT INTO empsalary VALUES
  ('develop', 10, 5200, date '2007-08-01'),
  ('sales', 1, 5000, date '2006-10-01'),
  ('personnel', 5, 3500, date '2007-12-10'),
  ('sales', 4, 4800, date '2007-08-08'),
  ('personnel', 2, 3900, date '2006-12-23'),
  ('develop', 7, 4200, date '2008-01-01'),
  ('develop', 9, 4500, date '2008-01-01'),
  ('sales', 3, 4800, date '2007-08-01'),
  ('develop', 8, 6000, date '2006-10-01'),
  ('develop', 11, 5200, date '2007-08-15');

-- Test in_range for other datetime datatypes

-- Spark only supports timestamp, so the time, timetz, interval, and timestamptz columns below are all declared as timestamp
-- [SPARK-29636] Spark can't parse '11:00 BST' or '2000-10-19 10:23:54+01' signatures to timestamp
create table datetimes (
    id int,
    f_time timestamp,
    f_timetz timestamp,
    f_interval timestamp,
    f_timestamptz timestamp,
    f_timestamp timestamp
) using parquet;

-- Spark cannot safely cast string to timestamp
-- [SPARK-29636] Spark can't parse '11:00 BST' or '2000-10-19 10:23:54+01' signatures to timestamp
insert into datetimes values
(1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as timestamp), cast ('2000-10-19 10:23:54+01' as timestamp), timestamp '2000-10-19 10:23:54'),
(2, timestamp '12:00', cast ('12:00 BST' as timestamp), cast ('2 years' as timestamp), cast ('2001-10-19 10:23:54+01' as timestamp), timestamp '2001-10-19 10:23:54'),
(3, timestamp '13:00', cast ('13:00 BST' as timestamp), cast ('3 years' as timestamp), cast ('2001-10-19 10:23:54+01' as timestamp), timestamp '2001-10-19 10:23:54'),
(4, timestamp '14:00', cast ('14:00 BST' as timestamp), cast ('4 years' as timestamp), cast ('2002-10-19 10:23:54+01' as timestamp), timestamp '2002-10-19 10:23:54'),
(5, timestamp '15:00', cast ('15:00 BST' as timestamp), cast ('5 years' as timestamp), cast ('2003-10-19 10:23:54+01' as timestamp), timestamp '2003-10-19 10:23:54'),
(6, timestamp '15:00', cast ('15:00 BST' as timestamp), cast ('5 years' as timestamp), cast ('2004-10-19 10:23:54+01' as timestamp), timestamp '2004-10-19 10:23:54'),
(7, timestamp '17:00', cast ('17:00 BST' as timestamp), cast ('7 years' as timestamp), cast ('2005-10-19 10:23:54+01' as timestamp), timestamp '2005-10-19 10:23:54'),
(8, timestamp '18:00', cast ('18:00 BST' as timestamp), cast ('8 years' as timestamp), cast ('2006-10-19 10:23:54+01' as timestamp), timestamp '2006-10-19 10:23:54'),
(9, timestamp '19:00', cast ('19:00 BST' as timestamp), cast ('9 years' as timestamp), cast ('2007-10-19 10:23:54+01' as timestamp), timestamp '2007-10-19 10:23:54'),
(10, timestamp '20:00', cast ('20:00 BST' as timestamp), cast ('10 years' as timestamp), cast ('2008-10-19 10:23:54+01' as timestamp), timestamp '2008-10-19 10:23:54');
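
-- A hedged sketch (not part of the upstream test): with an explicit datetime
-- pattern, to_timestamp can parse the offset form that the plain casts above
-- cannot. The 'yyyy-MM-dd HH:mm:ssx' pattern is an assumption about Spark 3.x
-- datetime patterns; the query is kept commented out so the golden output of
-- this file is unchanged.
-- select to_timestamp('2000-10-19 10:23:54+01', 'yyyy-MM-dd HH:mm:ssx');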

-- [SPARK-28429] SQL Datetime util function being casted to double instead of timestamp
-- select id, f_time, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_time range between
--              '70 min' preceding and '2 hours' following);

-- [SPARK-28429] SQL Datetime util function being casted to double instead of timestamp
-- select id, f_time, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_time desc range between
--              '70 min' preceding and '2 hours' following);

-- [SPARK-28429] SQL Datetime util function being casted to double instead of timestamp
-- select id, f_timetz, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_timetz range between
--              '70 min' preceding and '2 hours' following);

-- [SPARK-28429] SQL Datetime util function being casted to double instead of timestamp
-- select id, f_timetz, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_timetz desc range between
--              '70 min' preceding and '2 hours' following);

-- [SPARK-28429] SQL Datetime util function being casted to double instead of timestamp
-- select id, f_interval, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_interval range between
--              '1 year' preceding and '1 year' following);

-- [SPARK-28429] SQL Datetime util function being casted to double instead of timestamp
-- select id, f_interval, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_interval desc range between
--              '1 year' preceding and '1 year' following);

-- [SPARK-28429] SQL Datetime util function being casted to double instead of timestamp
-- select id, f_timestamptz, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_timestamptz range between
--              '1 year' preceding and '1 year' following);

-- [SPARK-28429] SQL Datetime util function being casted to double instead of timestamp
-- select id, f_timestamptz, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_timestamptz desc range between
--              '1 year' preceding and '1 year' following);

-- [SPARK-28429] SQL Datetime util function being casted to double instead of timestamp
-- select id, f_timestamp, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_timestamp range between
--              '1 year' preceding and '1 year' following);

-- [SPARK-28429] SQL Datetime util function being casted to double instead of timestamp
-- select id, f_timestamp, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_timestamp desc range between
--              '1 year' preceding and '1 year' following);
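
-- A hedged sketch (not in the upstream suite): with an INTERVAL literal instead
-- of a quoted string, Spark can evaluate a value-based RANGE frame over a
-- timestamp ORDER BY key. Kept commented out so the golden output is unchanged.
-- select id, f_timestamp, first(id) over w, last(id) over w
-- from datetimes
-- window w as (order by f_timestamp range between interval 1 year preceding
--              and interval 1 year following);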

-- RANGE offset PRECEDING/FOLLOWING error cases
-- [SPARK-28428] Spark `exclude` always expecting `()`
-- select sum(salary) over (order by enroll_date, salary range between '1 year' preceding and '2 years' following
--      exclude ties), salary, enroll_date from empsalary;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- select sum(salary) over (range between '1 year' preceding and '2 years' following
--      exclude ties), salary, enroll_date from empsalary;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- select sum(salary) over (order by depname range between '1 year' preceding and '2 years' following
--      exclude ties), salary, enroll_date from empsalary;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- select max(enroll_date) over (order by enroll_date range between 1 preceding and 2 following
--      exclude ties), salary, enroll_date from empsalary;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- select max(enroll_date) over (order by salary range between -1 preceding and 2 following
--      exclude ties), salary, enroll_date from empsalary;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- select max(enroll_date) over (order by salary range between 1 preceding and -2 following
--      exclude ties), salary, enroll_date from empsalary;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- select max(enroll_date) over (order by salary range between '1 year' preceding and '2 years' following
--      exclude ties), salary, enroll_date from empsalary;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- select max(enroll_date) over (order by enroll_date range between '1 year' preceding and '-2 years' following
--      exclude ties), salary, enroll_date from empsalary;
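
-- A hedged sketch (not in the upstream file): the RANGE boundary errors can be
-- probed without the unsupported EXCLUDE clause; for example, a value-based
-- RANGE frame over more than one ORDER BY expression should be rejected.
-- select sum(salary) over (order by enroll_date, salary
--        range between 1 preceding and 2 following), salary, enroll_date
-- from empsalary;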

-- GROUPS tests

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (order by four groups between unbounded preceding and current row),
-- unique1, four
-- FROM tenk1 WHERE unique1 < 10;

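-- A hedged sketch (not part of the upstream suite): for the unbounded
-- preceding / current row case, GROUPS and RANGE frames agree (both end with
-- the last peer of the current row), so the query above can be approximated
-- in Spark as:
-- SELECT sum(unique1) over (order by four
--        range between unbounded preceding and current row), unique1, four
-- FROM tenk1 WHERE unique1 < 10;
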
-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (order by four groups between unbounded preceding and unbounded following),
-- unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (order by four groups between current row and unbounded following),
-- unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (order by four groups between 1 preceding and unbounded following),
-- unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (order by four groups between 1 following and unbounded following),
-- unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (order by four groups between unbounded preceding and 2 following),
-- unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (order by four groups between 2 preceding and 1 preceding),
-- unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (order by four groups between 2 preceding and 1 following),
-- unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (order by four groups between 0 preceding and 0 following),
-- unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- SELECT sum(unique1) over (order by four groups between 2 preceding and 1 following
--   exclude current row), unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- SELECT sum(unique1) over (order by four range between 2 preceding and 1 following
--   exclude group), unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- SELECT sum(unique1) over (order by four range between 2 preceding and 1 following
--   exclude ties), unique1, four
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (partition by ten
--   order by four groups between 0 preceding and 0 following),unique1, four, ten
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (partition by ten
--   order by four groups between 0 preceding and 0 following exclude current row), unique1, four, ten
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (partition by ten
--   order by four groups between 0 preceding and 0 following exclude group), unique1, four, ten
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- SELECT sum(unique1) over (partition by ten
--   order by four groups between 0 preceding and 0 following exclude ties), unique1, four, ten
-- FROM tenk1 WHERE unique1 < 10;

-- [SPARK-27951] ANSI SQL: NTH_VALUE function
-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- select first_value(salary) over(order by enroll_date groups between 1 preceding and 1 following),
-- lead(salary) over(order by enroll_date groups between 1 preceding and 1 following),
-- nth_value(salary, 1) over(order by enroll_date groups between 1 preceding and 1 following),
-- salary, enroll_date from empsalary;

-- [SPARK-28508] Support for range frame+row frame in the same query
-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- select last(salary) over(order by enroll_date groups between 1 preceding and 1 following),
-- lag(salary)         over(order by enroll_date groups between 1 preceding and 1 following),
-- salary, enroll_date from empsalary;

-- [SPARK-27951] ANSI SQL: NTH_VALUE function
-- select first_value(salary) over(order by enroll_date groups between 1 following and 3 following
--   exclude current row),
-- lead(salary) over(order by enroll_date groups between 1 following and 3 following exclude ties),
-- nth_value(salary, 1) over(order by enroll_date groups between 1 following and 3 following
--   exclude ties),
-- salary, enroll_date from empsalary;

-- [SPARK-28428] Spark `exclude` always expecting `()`
-- select last(salary) over(order by enroll_date groups between 1 following and 3 following
--   exclude group),
-- lag(salary) over(order by enroll_date groups between 1 following and 3 following exclude group),
-- salary, enroll_date from empsalary;

-- Show differences in offset interpretation between ROWS, RANGE, and GROUPS
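-- (ROWS offsets count physical rows, RANGE offsets are value distances on the
-- ORDER BY key, and GROUPS offsets count peer groups of equal ORDER BY values.)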
WITH cte (x) AS (
        SELECT * FROM range(1, 36, 2)
)
SELECT x, (sum(x) over w)
FROM cte
WINDOW w AS (ORDER BY x rows between 1 preceding and 1 following);

WITH cte (x) AS (
        SELECT * FROM range(1, 36, 2)
)
SELECT x, (sum(x) over w)
FROM cte
WINDOW w AS (ORDER BY x range between 1 preceding and 1 following);

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- WITH cte (x) AS (
--         SELECT * FROM range(1, 36, 2)
-- )
-- SELECT x, (sum(x) over w)
-- FROM cte
-- WINDOW w AS (ORDER BY x groups between 1 preceding and 1 following);

WITH cte (x) AS (
        select 1 union all select 1 union all select 1 union all
        SELECT * FROM range(5, 50, 2)
)
SELECT x, (sum(x) over w)
FROM cte
WINDOW w AS (ORDER BY x rows between 1 preceding and 1 following);

WITH cte (x) AS (
        select 1 union all select 1 union all select 1 union all
        SELECT * FROM range(5, 50, 2)
)
SELECT x, (sum(x) over w)
FROM cte
WINDOW w AS (ORDER BY x range between 1 preceding and 1 following);

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- WITH cte (x) AS (
--         select 1 union all select 1 union all select 1 union all
--         SELECT * FROM range(5, 50, 2)
-- )
-- SELECT x, (sum(x) over w)
-- FROM cte
-- WINDOW w AS (ORDER BY x groups between 1 preceding and 1 following);

-- with UNION
SELECT count(*) OVER (PARTITION BY four) FROM (SELECT * FROM tenk1 UNION ALL SELECT * FROM tenk2)s LIMIT 0;

-- check some degenerate cases
create table t1 (f1 int, f2 int) using parquet;
insert into t1 values (1,1),(1,2),(2,2);

select f1, sum(f1) over (partition by f1
                         range between 1 preceding and 1 following)
from t1 where f1 = f2; -- error, must have order by

-- Since the EXPLAIN output depends on host-specific physical plan details, it is commented out
-- explain
-- select f1, sum(f1) over (partition by f1 order by f2
-- range between 1 preceding and 1 following)
-- from t1 where f1 = f2;

select f1, sum(f1) over (partition by f1 order by f2
range between 1 preceding and 1 following)
from t1 where f1 = f2;

select f1, sum(f1) over (partition by f1, f1 order by f2
range between 2 preceding and 1 preceding)
from t1 where f1 = f2;

select f1, sum(f1) over (partition by f1, f2 order by f2
range between 1 following and 2 following)
from t1 where f1 = f2;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- select f1, sum(f1) over (partition by f1,
-- groups between 1 preceding and 1 following)
-- from t1 where f1 = f2;

-- Since the EXPLAIN output depends on host-specific physical plan details, it is commented out
-- explain
-- select f1, sum(f1) over (partition by f1 order by f2
-- range between 1 preceding and 1 following)
-- from t1 where f1 = f2;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- select f1, sum(f1) over (partition by f1 order by f2
-- groups between 1 preceding and 1 following)
-- from t1 where f1 = f2;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- select f1, sum(f1) over (partition by f1, f1 order by f2
-- groups between 2 preceding and 1 preceding)
-- from t1 where f1 = f2;

-- [SPARK-28648] Adds support to `groups` unit type in window clauses
-- select f1, sum(f1) over (partition by f1, f2 order by f2
-- groups between 1 following and 2 following)
-- from t1 where f1 = f2;

-- ordering by a non-integer constant is allowed
SELECT rank() OVER (ORDER BY length('abc'));

-- can't order by another window function
-- [SPARK-28566] window functions should not be allowed in window definitions
-- SELECT rank() OVER (ORDER BY rank() OVER (ORDER BY random()));

-- some other errors
SELECT * FROM empsalary WHERE row_number() OVER (ORDER BY salary) < 10;

SELECT * FROM empsalary INNER JOIN tenk1 ON row_number() OVER (ORDER BY salary) < 10;

SELECT rank() OVER (ORDER BY 1), count(*) FROM empsalary GROUP BY 1;

SELECT * FROM rank() OVER (ORDER BY random());

-- Original query: DELETE FROM empsalary WHERE (rank() OVER (ORDER BY random())) > 10;
SELECT * FROM empsalary WHERE (rank() OVER (ORDER BY random())) > 10;

-- Original query: DELETE FROM empsalary RETURNING rank() OVER (ORDER BY random());
SELECT * FROM empsalary WHERE rank() OVER (ORDER BY random());

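-- A hedged sketch (not in the upstream file): filtering on a window function
-- requires wrapping it in a subquery, since window functions are not allowed
-- in WHERE or JOIN conditions. Kept commented out to leave the golden output
-- unchanged.
-- SELECT * FROM (
--   SELECT *, row_number() OVER (ORDER BY salary) AS rn FROM empsalary
-- ) ranked WHERE rn < 10;
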
-- [SPARK-28645] Throw an error on window redefinition
-- select count(*) OVER w FROM tenk1 WINDOW w AS (ORDER BY unique1), w AS (ORDER BY unique1);

-- error case from the upstream suite: note the stray comma between the PARTITION BY list and ORDER BY
select rank() OVER (PARTITION BY four, ORDER BY ten) FROM tenk1;

-- [SPARK-28646] Allow usage of `count` only for parameterless aggregate function
-- select count() OVER () FROM tenk1;

-- The output is the expected one: `range` is not a window or aggregate function.
SELECT range(1, 100) OVER () FROM empsalary;

SELECT ntile(0) OVER (ORDER BY ten), ten, four FROM tenk1;

-- [SPARK-27951] ANSI SQL: NTH_VALUE function
-- SELECT nth_value(four, 0) OVER (ORDER BY ten), ten, four FROM tenk1;

-- filter

-- [SPARK-30182] Support nested aggregates
-- SELECT sum(salary), row_number() OVER (ORDER BY depname), sum(
--     sum(salary) FILTER (WHERE enroll_date > '2007-01-01')
-- )
-- FROM empsalary GROUP BY depname;

-- Test pushdown of quals into a subquery containing window functions

-- pushdown is safe because all PARTITION BY clauses include depname:
-- Since the EXPLAIN output depends on host-specific physical plan details, it is commented out
-- EXPLAIN
-- SELECT * FROM
-- (SELECT depname,
-- sum(salary) OVER (PARTITION BY depname) depsalary,
-- min(salary) OVER (PARTITION BY depname || 'A', depname) depminsalary
-- FROM empsalary) emp
-- WHERE depname = 'sales';

-- pushdown is unsafe because there's a PARTITION BY clause without depname:
-- Since the EXPLAIN output depends on host-specific physical plan details, it is commented out
-- EXPLAIN
-- SELECT * FROM
-- (SELECT depname,
-- sum(salary) OVER (PARTITION BY enroll_date) enroll_salary,
-- min(salary) OVER (PARTITION BY depname) depminsalary
-- FROM empsalary) emp
-- WHERE depname = 'sales';

-- Test Sort node collapsing
-- Since the EXPLAIN output depends on host-specific physical plan details, it is commented out
-- EXPLAIN
-- SELECT * FROM
-- (SELECT depname,
-- sum(salary) OVER (PARTITION BY depname order by empno) depsalary,
-- min(salary) OVER (PARTITION BY depname, empno order by enroll_date) depminsalary
-- FROM empsalary) emp
-- WHERE depname = 'sales';

-- Test Sort node reordering
-- Since the EXPLAIN output depends on host-specific physical plan details, it is commented out
-- EXPLAIN
-- SELECT
-- lead(1) OVER (PARTITION BY depname ORDER BY salary, enroll_date),
-- lag(1) OVER (PARTITION BY depname ORDER BY salary,enroll_date,empno)
-- FROM empsalary;

-- cleanup
DROP TABLE empsalary;
DROP TABLE datetimes;
DROP TABLE t1;