pgwire: fix array and tuple encoding
Match postgres encoding much more closely for tuples and arrays.

Implement binary tuple encoding. It's pretty simple: the number of elements,
then each element's OID and binary encoding.
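
For illustration only, here is a minimal Go sketch of that wire layout. It assumes
the usual pgwire convention of a 4-byte big-endian length prefix before each
element's bytes, with -1 signalling NULL; the name writeTupleBinary is made up
for this sketch and is not the function added in this commit.

```go
package main

import (
	"encoding/binary"
	"fmt"
)

// writeTupleBinary appends the binary ("record") encoding of a tuple:
// a 4-byte element count, then for each element its type OID, a 4-byte
// length (-1 for NULL), and the element's own binary encoding.
func writeTupleBinary(buf []byte, oids []uint32, values [][]byte) []byte {
	var tmp [4]byte
	binary.BigEndian.PutUint32(tmp[:], uint32(len(values)))
	buf = append(buf, tmp[:]...)
	for i, v := range values {
		binary.BigEndian.PutUint32(tmp[:], oids[i])
		buf = append(buf, tmp[:]...)
		if v == nil {
			binary.BigEndian.PutUint32(tmp[:], 0xFFFFFFFF) // length -1 means NULL
			buf = append(buf, tmp[:]...)
			continue
		}
		binary.BigEndian.PutUint32(tmp[:], uint32(len(v)))
		buf = append(buf, tmp[:]...)
		buf = append(buf, v...)
	}
	return buf
}

func main() {
	// A two-element tuple (int8 OID 20, text OID 25) with a NULL second field.
	intVal := make([]byte, 8)
	binary.BigEndian.PutUint64(intVal, 42)
	out := writeTupleBinary(nil, []uint32{20, 25}, [][]byte{intVal, nil})
	fmt.Printf("% x\n", out)
}
```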

Text encoding for arrays and tuples has some fairly simple quoting rules:
an element only needs quoting when it contains whitespace, quotes, or
commas. We were also escaping unicode runes more often than needed.
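
As a rough illustration of that rule (not the code added in this commit), the
quoting decision for a single text-format element might look like the sketch
below. The empty-string case is visible in the bit logic test further down,
and the full rule also has to consider braces, backslashes, and NULL.

```go
package main

import (
	"fmt"
	"unicode"
)

// needsQuotes reports whether an array/tuple element must be wrapped in
// double quotes in the text encoding: empty strings and anything containing
// whitespace, a quote, a comma, a backslash, or braces gets quoted.
func needsQuotes(elem string) bool {
	if elem == "" {
		return true // {01,""} rather than {01,}
	}
	for _, r := range elem {
		if unicode.IsSpace(r) || r == '"' || r == ',' || r == '\\' || r == '{' || r == '}' {
			return true
		}
	}
	return false
}

func main() {
	for _, s := range []string{"primary", "test with spaces", "", `say "hi"`} {
		fmt.Printf("%-20q needs quotes: %v\n", s, needsQuotes(s))
	}
}
```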

Unwrap datums to remove any OID oddities. This is already done in
non-array/tuple encoding.

Change the encoding tests to specify exactly the bytes the Text format
should produce. We still keep the text form in the encodings.json file just
for human consumption. Since text editors have trouble displaying weirdo
things like the byte order mark, it's safer to print the expected bytes as
numbers instead of hoping there's a visible glyph our editors can show. In
any case, switch to %q for the Text display to improve debuggability.
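
A quick illustration of why %q helps here (a standalone sketch, not code from
this commit): the byte order mark vanishes in plain output but shows up as an
escape under %q.

```go
package main

import "fmt"

func main() {
	// U+FEFF (byte order mark) followed by a snowman, as used in the
	// generate-binary inputs below.
	text := []byte("\uFEFF\u2603")
	fmt.Println(string(text)) // the BOM is invisible in most editors
	fmt.Printf("%q\n", text)  // prints "\ufeff☃", making the BOM obvious
}
```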

Fixes #31847

Release note (bug fix): Correct pgwire encoding for arrays and tuples.
maddyblue committed Nov 8, 2018
1 parent 77ce7de commit 100e2ae
Showing 30 changed files with 665 additions and 331 deletions.
14 changes: 7 additions & 7 deletions pkg/cli/sql_util_test.go
@@ -135,8 +135,8 @@ SET
}

expectedRows := [][]string{
{`parentID`, `INT`, `false`, `NULL`, ``, `{"primary"}`, `false`},
{`name`, `STRING`, `false`, `NULL`, ``, `{"primary"}`, `false`},
{`parentID`, `INT`, `false`, `NULL`, ``, `{primary}`, `false`},
{`name`, `STRING`, `false`, `NULL`, ``, `{primary}`, `false`},
{`id`, `INT`, `true`, `NULL`, ``, `{}`, `false`},
}
if !reflect.DeepEqual(expectedRows, rows) {
@@ -149,11 +149,11 @@ SET
}

expected = `
column_name | data_type | is_nullable | column_default | generation_expression | indices | is_hidden
+-------------+-----------+-------------+----------------+-----------------------+-------------+-----------+
parentID | INT | false | NULL | | {"primary"} | false
name | STRING | false | NULL | | {"primary"} | false
id | INT | true | NULL | | {} | false
column_name | data_type | is_nullable | column_default | generation_expression | indices | is_hidden
+-------------+-----------+-------------+----------------+-----------------------+-----------+-----------+
parentID | INT | false | NULL | | {primary} | false
name | STRING | false | NULL | | {primary} | false
id | INT | true | NULL | | {} | false
(3 rows)
`

35 changes: 32 additions & 3 deletions pkg/cmd/generate-binary/main.go
@@ -92,7 +92,7 @@ func main() {
}
data = append(data, entry{
SQL: expr,
Text: string(text),
Text: text,
Binary: binary,
})
}
@@ -108,7 +108,7 @@

type entry struct {
SQL string
Text string
Text []byte
Binary []byte
}

@@ -130,7 +130,8 @@ const outputJSON = `[
{{- if gt $idx 0 }},{{end}}
{
"SQL": {{.SQL | json}},
"Text": {{.Text | json}},
"Text": {{printf "%q" .Text}},
"TextAsBinary": {{.Text | binary}},
"Binary": {{.Binary | binary}}
}
{{- end}}
@@ -360,4 +361,32 @@ var inputs = map[string][]string{
"000000001",
"0010101000011010101111100100011001110101100001010101",
},

"array[%s]::text[]": {
`NULL`,
`''`,
`'test'`,
`'test with spaces'`,
`e'\f'`,
// byte order mark
`e'\uFEFF'`,
// snowman
`e'\u2603'`,
},

"array[%s]": {
`''`,
`'\x0d53e338548082'::BYTEA`,
`'test with spaces'::BYTEA`,
`'name'::NAME`,
},

"(%s,null)": {
`1::int8,2::int8,3::int8,4::int8`,
`'test with spaces'::BYTEA`,
`'test with spaces'::TEXT`,
`'name'::NAME`,
`'false'::JSONB`,
`'{"a": []}'::JSONB`,
},
}
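
For context, the inputs table above is keyed by a SQL format string, and each
value is substituted into its key to build the expressions whose encodings get
recorded. A rough sketch of that expansion, assumed here for illustration (the
real generator in main.go also runs each expression against a server to capture
its text and binary encodings):

```go
package main

import "fmt"

func main() {
	// A trimmed copy of the inputs table above, expanded the way the
	// generator presumably does: each value is substituted into its
	// format-string key to produce a SQL expression.
	inputs := map[string][]string{
		"array[%s]::text[]": {`NULL`, `'test with spaces'`, `e'\uFEFF'`},
		"(%s,null)":         {`1::int8,2::int8,3::int8,4::int8`},
	}
	for format, values := range inputs {
		for _, v := range values {
			expr := fmt.Sprintf(format, v)
			fmt.Println("SELECT", expr) // e.g. SELECT array['test with spaces']::text[]
		}
	}
}
```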
4 changes: 2 additions & 2 deletions pkg/sql/descriptor_mutation_test.go
@@ -987,8 +987,8 @@ CREATE TABLE t.test (a STRING PRIMARY KEY, b STRING, c STRING, INDEX foo (c));
mt.CheckQueryResults(t,
"SHOW COLUMNS FROM t.test",
[][]string{
{"a", "STRING", "false", "NULL", "", "{\"primary\",\"ufo\"}", "false"},
{"d", "STRING", "true", "NULL", "", "{\"ufo\"}", "false"},
{"a", "STRING", "false", "NULL", "", "{primary,ufo}", "false"},
{"d", "STRING", "true", "NULL", "", "{ufo}", "false"},
{"e", "STRING", "true", "NULL", "", "{}", "false"},
},
)
8 changes: 4 additions & 4 deletions pkg/sql/logictest/testdata/logic_test/aggregate
@@ -561,7 +561,7 @@ NULL NULL NULL NULL
query TT
SELECT array_agg(k), array_agg(s) FROM (SELECT k, s FROM kv ORDER BY k)
----
{1,3,5,6,7,8} {"a","a",NULL,"b","b","A"}
{1,3,5,6,7,8} {a,a,NULL,b,b,A}

query T
SELECT array_agg(k) || 1 FROM (SELECT k FROM kv ORDER BY k)
@@ -1160,9 +1160,9 @@ SELECT 1 FROM kv GROUP BY v, w::DECIMAL HAVING w::DECIMAL > 1
query IT rowsort
SELECT v, array_agg('a') FROM kv GROUP BY v
----
2 {"a","a","a"}
4 {"a","a"}
NULL {"a"}
2 {a,a,a}
4 {a,a}
NULL {a}

# Regression test for #26419
query I
24 changes: 12 additions & 12 deletions pkg/sql/logictest/testdata/logic_test/alter_column_type
@@ -24,12 +24,12 @@ ALTER TABLE t ALTER s TYPE BYTES, ALTER sl TYPE STRING(6), ALTER ts TYPE TIMESTA
query TTBTTTB colnames
SHOW COLUMNS FROM t
----
column_name data_type is_nullable column_default generation_expression indices is_hidden
s BYTES true NULL · {} false
sl STRING(6) true NULL · {} false
t TIME true NULL · {} false
ts TIMESTAMPTZ true NULL · {} false
rowid INT false unique_rowid() · {"primary"} true
column_name data_type is_nullable column_default generation_expression indices is_hidden
s BYTES true NULL · {} false
sl STRING(6) true NULL · {} false
t TIME true NULL · {} false
ts TIMESTAMPTZ true NULL · {} false
rowid INT false unique_rowid() · {primary} true

query TTTT
SELECT * FROM t
@@ -108,19 +108,19 @@ CREATE TABLE t (a INT)
query TTBTTTB colnames
SHOW COLUMNS FROM t
----
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT true NULL · {} false
rowid INT false unique_rowid() · {"primary"} true
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT true NULL · {} false
rowid INT false unique_rowid() · {primary} true

statement ok
ALTER TABLE t ALTER a TYPE INTEGER

query TTBTTTB colnames
SHOW COLUMNS FROM t
----
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT true NULL · {} false
rowid INT false unique_rowid() · {"primary"} true
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT true NULL · {} false
rowid INT false unique_rowid() · {primary} true

statement ok
DROP TABLE t
34 changes: 17 additions & 17 deletions pkg/sql/logictest/testdata/logic_test/alter_table
@@ -21,10 +21,10 @@ ALTER TABLE t ADD b INT
query TTBTTTB colnames
SHOW COLUMNS FROM t
----
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false NULL · {"primary","t_f_idx"} false
f INT true NULL · {"t_f_idx"} false
b INT true NULL · {} false
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false NULL · {primary,t_f_idx} false
f INT true NULL · {t_f_idx} false
b INT true NULL · {} false

statement ok
ALTER TABLE t ADD CONSTRAINT foo UNIQUE (b)
@@ -404,12 +404,12 @@ ALTER TABLE tt ADD t DECIMAL UNIQUE DEFAULT 4.0
query TTBTTTB colnames
SHOW COLUMNS FROM tt
----
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false NULL · {"primary","tt_s_key","tt_t_key"} false
q DECIMAL false NULL · {} false
r DECIMAL true NULL · {} false
s DECIMAL false NULL · {"tt_s_key"} false
t DECIMAL true 4.0:::DECIMAL · {"tt_t_key"} false
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false NULL · {primary,tt_s_key,tt_t_key} false
q DECIMAL false NULL · {} false
r DECIMAL true NULL · {} false
s DECIMAL false NULL · {tt_s_key} false
t DECIMAL true 4.0:::DECIMAL · {tt_t_key} false

# Default values can be added and changed after table creation.
statement ok
@@ -598,9 +598,9 @@ user root
query TTBTTTB colnames
SHOW COLUMNS FROM privs
----
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false NULL · {"primary"} false
b INT true NULL · {} false
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false NULL · {primary} false
b INT true NULL · {} false

statement ok
GRANT CREATE ON privs TO testuser
@@ -616,10 +616,10 @@ ALTER TABLE privs ADD CONSTRAINT foo UNIQUE (b)
query TTBTTTB colnames
SHOW COLUMNS FROM privs
----
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false NULL · {"primary","foo"} false
b INT true NULL · {"foo"} false
c INT true NULL · {} false
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false NULL · {primary,foo} false
b INT true NULL · {foo} false
c INT true NULL · {} false

statement error pgcode 42P01 relation "nonexistent" does not exist
ALTER TABLE nonexistent SPLIT AT VALUES (42)
Binary file modified pkg/sql/logictest/testdata/logic_test/array
Binary file not shown.
4 changes: 2 additions & 2 deletions pkg/sql/logictest/testdata/logic_test/bit
@@ -273,15 +273,15 @@ CREATE TABLE obitsa (
query T rowsort
SELECT * FROM obitsa
----
{01,}
{01,""}
{01,0}
{01,1}
{01,0000}
{01,0001}
{01,010}
{01,10}
{01,11}
{01,}
{01,""}
{01,00100}
{01,00110}
{01,00001}
12 changes: 6 additions & 6 deletions pkg/sql/logictest/testdata/logic_test/builtin_function
@@ -1423,33 +1423,33 @@ SELECT foo.* IS NOT TRUE FROM (VALUES (1)) AS foo(x)
query T
SELECT current_schemas(true)
----
{"pg_catalog","public"}
{pg_catalog,public}

query T
SELECT current_schemas(false)
----
{"public"}
{public}

# Force the function to be evaluated at execution time and verify it doesn't
# break when distsql is on.
query T
SELECT current_schemas(x) FROM (VALUES (true), (false)) AS t(x);
----
{"pg_catalog","public"}
{"public"}
{pg_catalog,public}
{public}

statement ok
SET search_path=test,pg_catalog

query T
SELECT current_schemas(true)
----
{"pg_catalog"}
{pg_catalog}

query T
SELECT current_schemas(false)
----
{"pg_catalog"}
{pg_catalog}

statement ok
RESET search_path
12 changes: 6 additions & 6 deletions pkg/sql/logictest/testdata/logic_test/computed
@@ -90,12 +90,12 @@ x CREATE TABLE x (
query TTBTTTB colnames
SHOW COLUMNS FROM x
----
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT true 3:::INT · {} false
b INT true 7:::INT · {} false
c INT true NULL a {} false
d INT true NULL a + b {} false
rowid INT false unique_rowid() · {"primary"} true
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT true 3:::INT · {} false
b INT true 7:::INT · {} false
c INT true NULL a {} false
d INT true NULL a + b {} false
rowid INT false unique_rowid() · {primary} true

statement error cannot write directly to computed column "c"
INSERT INTO x (c) VALUES (1)
8 changes: 4 additions & 4 deletions pkg/sql/logictest/testdata/logic_test/datetime
@@ -921,10 +921,10 @@ CREATE TABLE tz (
query TTBTTTB
SHOW COLUMNS FROM tz
----
a INT false NULL · {"primary"} false
b TIMESTAMP true NULL · {} false
c TIMESTAMPTZ true NULL · {} false
d TIMESTAMPTZ true NULL · {} false
a INT false NULL · {primary} false
b TIMESTAMP true NULL · {} false
c TIMESTAMPTZ true NULL · {} false
d TIMESTAMPTZ true NULL · {} false

statement ok
INSERT INTO tz VALUES
16 changes: 8 additions & 8 deletions pkg/sql/logictest/testdata/logic_test/default
@@ -34,10 +34,10 @@ CREATE TABLE t (
query TTBTTTB colnames
SHOW COLUMNS FROM t
----
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false 42:::INT · {"primary"} false
b TIMESTAMP true now():::TIMESTAMP · {} false
c FLOAT8 true random() · {} false
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false 42:::INT · {primary} false
b TIMESTAMP true now():::TIMESTAMP · {} false
c FLOAT8 true random() · {} false

statement ok
INSERT INTO t VALUES (DEFAULT, DEFAULT, DEFAULT)
@@ -116,7 +116,7 @@ UPDATE v SET (a, c) = (DEFAULT, DEFAULT)
query TTBTTTB colnames
SHOW COLUMNS FROM v
----
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false NULL · {"primary"} false
b TIMESTAMP true NULL · {} false
c INT true NULL · {} false
column_name data_type is_nullable column_default generation_expression indices is_hidden
a INT false NULL · {primary} false
b TIMESTAMP true NULL · {} false
c INT true NULL · {} false
20 changes: 10 additions & 10 deletions pkg/sql/logictest/testdata/logic_test/distsql_stats
@@ -47,7 +47,7 @@
SELECT statistics_name, column_names, row_count, distinct_count, null_count FROM [SHOW STATISTICS FOR TABLE data]
----
statistics_name column_names row_count distinct_count null_count
s1 {"a"} 10000 10 0
s1 {a} 10000 10 0

let $hist_id_1
SELECT histogram_id FROM [SHOW STATISTICS FOR TABLE data] WHERE statistics_name = 's1'
@@ -74,8 +74,8 @@
SELECT statistics_name, column_names, row_count, distinct_count, null_count FROM [SHOW STATISTICS FOR TABLE data]
----
statistics_name column_names row_count distinct_count null_count
s1 {"a"} 10000 10 0
NULL {"b"} 10000 10 0
s1 {a} 10000 10 0
NULL {b} 10000 10 0

# Verify that we can package statistics into a json object and later restore them.
let $json_stats
@@ -91,8 +91,8 @@
SELECT statistics_name, column_names, row_count, distinct_count, null_count FROM [SHOW STATISTICS FOR TABLE data]
----
statistics_name column_names row_count distinct_count null_count
s1 {"a"} 10000 10 0
NULL {"b"} 10000 10 0
s1 {a} 10000 10 0
NULL {b} 10000 10 0

# Verify that any other statistics are blown away when we INJECT.
statement ok
@@ -102,9 +102,9 @@
SELECT statistics_name, column_names, row_count, distinct_count, null_count FROM [SHOW STATISTICS FOR TABLE data]
----
statistics_name column_names row_count distinct_count null_count
s1 {"a"} 10000 10 0
NULL {"b"} 10000 10 0
s3 {"c"} 10000 10 0
s1 {a} 10000 10 0
NULL {b} 10000 10 0
s3 {c} 10000 10 0

statement ok
ALTER TABLE data INJECT STATISTICS '$json_stats'
@@ -113,5 +113,5 @@
SELECT statistics_name, column_names, row_count, distinct_count, null_count FROM [SHOW STATISTICS FOR TABLE data]
----
statistics_name column_names row_count distinct_count null_count
s1 {"a"} 10000 10 0
NULL {"b"} 10000 10 0
s1 {a} 10000 10 0
NULL {b} 10000 10 0
