-- Fixture for the GROUP BY ordinal tests below: six (a, b) rows,
-- a in {1, 2, 3}, b in {1, 2}, so sum(b) per a is 3.
create temporary view data as select * from values
(1, 1),
(1, 2),
(2, 1),
(2, 2),
(3, 1),
(3, 2)
as data(a, b);
0011
-- Basic case: ordinal 1 resolves to the first select item, column a.
select a, sum(b) from data group by 1;
0014
-- Ordinals referencing constant select items (1 and 2 are literals here).
select 1, 2, sum(b) from data group by 1, 2;
0017
-- Mixing column names and ordinals; both forms should resolve to the same grouping.
select a, 1, sum(b) from data group by a, 1;
select a, 1, sum(b) from data group by 1, 2;
0021
-- Ordinal 2 resolves to a full expression in the select list (b + 2), not just a column.
select a, b + 2, count(2) from data group by a, 2;
0024
-- Ordinals resolve against the select items even when the items are aliased.
select a as aa, b + 2 as bb, count(2) from data group by 1, 2;
0027
-- 1 + 0 is an arithmetic expression, not a bare integer literal, so it is not
-- treated as an ordinal; it groups by a constant (a single group over all rows).
select sum(b) from data group by 1 + 0;
0030
-- Negative tests: ordinals outside 1..<select-list size> (the select list has
-- two items here) are expected to fail analysis while groupByOrdinal is enabled.
select a, b from data group by -1;
select a, b from data group by 0;
select a, b from data group by 3;
0035
-- Negative tests: ordinal 3 points at an aggregate expression, which cannot be
-- a grouping expression — expected to fail analysis.
select a, b, sum(b) from data group by 3;
select a, b, sum(b) + 2 from data group by 3;
0039
-- Ordinal 2 resolves to rand(0), a nondeterministic expression; presumably this
-- is expected to be rejected as a grouping expression — verify against the
-- recorded test output.
select a, rand(0), sum(b)
from
(select a, b from data) group by a, 2;
0044
-- Star expansion in the select list combined with an ordinal in GROUP BY;
-- NOTE(review): likely an analysis error when ordinals are enabled — verify
-- against the recorded test output.
select * from data group by a, b, 1;
0047
-- GROUP BY ordinal combined with ORDER BY ordinal on the same position.
select a, count(a) from (select 1 as a) tmp group by 1 order by 1;
0050
-- GROUP BY ordinal combined with a HAVING clause referencing the grouped column.
select count(a), a from (select 1 as a) tmp group by 2 having a > 0;
0053
-- Grouping by an alias (k) and an ordinal (1) that both resolve to column a.
select a, a AS k, count(b) from data group by k, 1;
0056
-- Turn off ordinal resolution in GROUP BY for the remaining statements.
set spark.sql.groupByOrdinal=false;
0059
-- With spark.sql.groupByOrdinal=false, -1 is just a constant literal: it groups
-- all rows into a single group instead of failing as an out-of-range ordinal.
select sum(b) from data group by -1;