
Commit 45e15e0

craig[bot], lucy-zhang, and dt committed
Merge #37366 #37450
37366: sql: don't exclude unvalidated check constraints r=lucy-zhang a=lucy-zhang

Previously, as of 19.1, AllActiveAndInactiveChecks(), which gathers check constraints for ALTER TABLE, SHOW CONSTRAINT, etc., excluded unvalidated checks on the table descriptor, under the assumption that all checks would be validated once they'd been added to the table descriptor. This was to avoid double-counting constraints that were both on the table descriptor and in the list of mutations. This caused unvalidated checks added in earlier versions to disappear (though they are still on the table descriptor itself, and are still enforced for writes). This PR fixes the issue by only excluding `Validating` checks on the table descriptor to avoid double-counting.

Fixes #37285.

Release note (bug fix): Fixes a bug where unvalidated check constraints disappear from the output of SHOW CONSTRAINT and cannot be referenced in ALTER TABLE after upgrading to 19.1.

37450: parser: Add IMPORT INTO prototype syntax r=dt a=dt

Breaking this out of the implementation PR.

Release note: None

Co-authored-by: Lucy Zhang <[email protected]>
Co-authored-by: David Taylor <[email protected]>
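As a concrete illustration of the first fix, here is a hypothetical repro sketch in the parse_test.go style (the table and constraint names are invented; it assumes a check constraint left unvalidated by a pre-19.1 version):

```go
package example

// Hypothetical repro for #37285 (names invented). On a 19.1 cluster whose
// table carries a check constraint left unvalidated by an earlier version,
// these statements failed to surface the constraint before this fix, even
// though the check was still enforced for writes.
var repro = []string{
	`SHOW CONSTRAINTS FROM orders`,                       // the check was missing from the output
	`ALTER TABLE orders VALIDATE CONSTRAINT check_total`, // the constraint could not be referenced
}
```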
3 parents: d99eeaf + 4a4e189 + fec8650 · commit 45e15e0

File tree

8 files changed, +71 -36 lines


docs/generated/sql/bnf/import_csv.bnf

Lines changed: 2 additions & 0 deletions
@@ -3,3 +3,5 @@ import_stmt ::=
   | 'IMPORT' 'TABLE' table_name 'CREATE' 'USING' file_location 'CSV' 'DATA' '(' file_location ( ( ',' file_location ) )* ')'
   | 'IMPORT' 'TABLE' table_name '(' table_elem_list ')' 'CSV' 'DATA' '(' file_location ( ( ',' file_location ) )* ')' 'WITH' kv_option_list
   | 'IMPORT' 'TABLE' table_name '(' table_elem_list ')' 'CSV' 'DATA' '(' file_location ( ( ',' file_location ) )* ')'
+  | 'IMPORT' 'INTO' table_name '(' insert_column_list ')' 'CSV' 'DATA' '(' file_location ( ( ',' file_location ) )* ')' 'WITH' kv_option_list
+  | 'IMPORT' 'INTO' table_name '(' insert_column_list ')' 'CSV' 'DATA' '(' file_location ( ( ',' file_location ) )* ')'

docs/generated/sql/bnf/import_dump.bnf

Lines changed: 2 additions & 0 deletions
@@ -3,3 +3,5 @@ import_stmt ::=
   | 'IMPORT' import_format file_location
   | 'IMPORT' 'TABLE' table_name 'FROM' import_format file_location 'WITH' kv_option_list
   | 'IMPORT' 'TABLE' table_name 'FROM' import_format file_location
+  | 'IMPORT' 'INTO' table_name '(' insert_column_list ')' import_format 'DATA' '(' file_location ( ( ',' file_location ) )* ')' 'WITH' kv_option_list
+  | 'IMPORT' 'INTO' table_name '(' insert_column_list ')' import_format 'DATA' '(' file_location ( ( ',' file_location ) )* ')'
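For concreteness, here are statements the two new productions would accept, written as Go raw strings in the parse_test.go style (table, column, and file names are invented for illustration):

```go
package example

// Hypothetical statements matched by the new IMPORT INTO productions.
var importIntoExamples = []string{
	// import_csv.bnf form: IMPORT INTO ... CSV DATA ( ... ) WITH ...
	`IMPORT INTO users(id, email) CSV DATA ('nodelocal:///users.csv') WITH temp = 'nodelocal:///tmp'`,
	// import_dump.bnf form: IMPORT INTO ... <import_format> DATA ( ... )
	`IMPORT INTO users(id, email) MYSQLOUTFILE DATA ('nodelocal:///users.txt')`,
}
```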

docs/generated/sql/bnf/stmt_block.bnf

Lines changed: 8 additions & 7 deletions
@@ -129,6 +129,7 @@ import_stmt ::=
   | 'IMPORT' 'TABLE' table_name 'FROM' import_format string_or_placeholder opt_with_options
   | 'IMPORT' 'TABLE' table_name 'CREATE' 'USING' string_or_placeholder import_format 'DATA' '(' string_or_placeholder_list ')' opt_with_options
   | 'IMPORT' 'TABLE' table_name '(' table_elem_list ')' import_format 'DATA' '(' string_or_placeholder_list ')' opt_with_options
+  | 'IMPORT' 'INTO' table_name '(' insert_column_list ')' import_format 'DATA' '(' string_or_placeholder_list ')' opt_with_options

 insert_stmt ::=
   opt_with_clause 'INSERT' 'INTO' insert_target insert_rest returning_clause

@@ -387,6 +388,9 @@ string_or_placeholder_list ::=
 table_elem_list ::=
   ( table_elem ) ( ( ',' table_elem ) )*

+insert_column_list ::=
+  ( insert_column_item ) ( ( ',' insert_column_item ) )*
+
 insert_target ::=
   table_name
   | table_name 'AS' table_alias_name

@@ -1029,8 +1033,8 @@ table_elem ::=
   | family_def
   | table_constraint

-insert_column_list ::=
-  ( insert_column_item ) ( ( ',' insert_column_item ) )*
+insert_column_item ::=
+  column_name

 opt_conf_expr ::=
   '(' name_list ')'

@@ -1396,8 +1400,8 @@ table_constraint ::=
   'CONSTRAINT' constraint_name constraint_elem
   | constraint_elem

-insert_column_item ::=
-  column_name
+column_name ::=
+  name

 d_expr ::=
   'ICONST'

@@ -1689,9 +1693,6 @@ signed_iconst ::=
 target_name ::=
   unrestricted_name

-column_name ::=
-  name
-
 col_qual_list ::=
   ( ) ( ( col_qualification ) )*

pkg/sql/parser/parse_test.go

Lines changed: 1 addition & 0 deletions
@@ -1265,6 +1265,7 @@ func TestParse(t *testing.T) {
 		{`IMPORT TABLE foo (id INT8 PRIMARY KEY, email STRING, age INT8) CSV DATA ('path/to/some/file', $1) WITH temp = 'path/to/temp'`},
 		{`IMPORT TABLE foo (id INT8, email STRING, age INT8) CSV DATA ('path/to/some/file', $1) WITH comma = ',', "nullif" = 'n/a', temp = $2`},
 		{`IMPORT TABLE foo FROM PGDUMPCREATE 'nodelocal:///foo/bar' WITH temp = 'path/to/temp'`},
+		{`IMPORT INTO foo(id, email) CSV DATA ('path/to/some/file', $1) WITH temp = 'path/to/temp'`},

 		{`IMPORT PGDUMP 'nodelocal:///foo/bar' WITH temp = 'path/to/temp'`},
 		{`EXPLAIN IMPORT PGDUMP 'nodelocal:///foo/bar' WITH temp = 'path/to/temp'`},
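TestParse checks, for each entry, that the statement parses and that formatting the resulting AST reproduces the input string exactly. A minimal standalone sketch of that round-trip for the new case, assuming the 19.1-era API in which parser.Parse returns a tree.StatementList:

```go
package main

import (
	"fmt"

	"github.com/cockroachdb/cockroach/pkg/sql/parser"
	"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
)

func main() {
	const sql = `IMPORT INTO foo(id, email) CSV DATA ('path/to/some/file', $1) WITH temp = 'path/to/temp'`

	stmts, err := parser.Parse(sql) // assumed signature: (tree.StatementList, error)
	if err != nil {
		panic(err)
	}
	// Pretty-print the AST and compare with the original text.
	fmt.Println(tree.AsString(stmts[0]) == sql) // TestParse requires an exact round-trip
}
```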

pkg/sql/parser/sql.y

Lines changed: 12 additions & 7 deletions
@@ -1741,7 +1741,7 @@ import_format:
 // Formats:
 //    CSV
 //    MYSQLOUTFILE
-//    MYSQLDUMP (mysqldump's SQL output)
+//    MYSQLDUMP
 //    PGCOPY
 //    PGDUMP
 //

@@ -1785,6 +1785,11 @@ import_stmt:
     name := $3.unresolvedObjectName().ToTableName()
     $$.val = &tree.Import{Table: &name, CreateDefs: $5.tblDefs(), FileFormat: $7, Files: $10.exprs(), Options: $12.kvOptions()}
   }
+| IMPORT INTO table_name '(' insert_column_list ')' import_format DATA '(' string_or_placeholder_list ')' opt_with_options
+  {
+    name := $3.unresolvedObjectName().ToTableName()
+    $$.val = &tree.Import{Table: &name, Into: true, IntoCols: $5.nameList(), FileFormat: $7, Files: $10.exprs(), Options: $12.kvOptions()}
+  }
 | IMPORT error // SHOW HELP: IMPORT

 // %Help: EXPORT - export data to file in a distributed manner

@@ -3697,7 +3702,7 @@ opt_on_targets_roles:
 //
 // 2. Now we must disambiguate the first rule "table_pattern_list"
 //    between one that recognizes ROLE and one that recognizes
-//    <some table pattern list>". So first, inline the definition of
+//    "<some table pattern list>". So first, inline the definition of
 //    table_pattern_list.
 //
 // targets ::=

@@ -3721,7 +3726,7 @@ opt_on_targets_roles:
 //    would match). We just need to focus on the first one "table_pattern".
 //    This needs to tweak "table_pattern".
 //
-//    Here we could inline table_pattern but now we don't have to any
+//    Here we could inline table_pattern but now we do not have to any
 //    more, we just need to create a variant of it which is
 //    unambiguous with a single ROLE keyword. That is, we need a
 //    table_pattern which cannot contain a single name. We do

@@ -3744,7 +3749,7 @@ opt_on_targets_roles:
 //    that starts with ROLE cannot be matched by any of these remaining
 //    rules. This means that the prefix is now free to use, without
 //    ambiguity. We do this as follows, to gain a syntax rule for "ROLE
-//    <namelist>". (We'll handle a ROLE with no name list below.)
+//    <namelist>". (We will handle a ROLE with no name list below.)
 //
 // targets ::=
 //     ROLE name_list # <- here

@@ -3753,7 +3758,7 @@ opt_on_targets_roles:
 //     TABLE table_pattern_list
 //     DATABASE name_list
 //
-// 6. Now on to the finishing touches. First we'd like to regain the
+// 6. Now on to the finishing touches. First we would like to regain the
 //    ability to use "<tablename>" when the table name is a simple
 //    identifier. This is done as follows:
 //

@@ -3766,7 +3771,7 @@ opt_on_targets_roles:
 //     DATABASE name_list
 //
 // 7. Then, we want to recognize "ROLE" without any subsequent name
-//    list. This requires some care: we can't add "ROLE" to the set of
+//    list. This requires some care: we can not add "ROLE" to the set of
 //    rules above, because "name" would then overlap. To disambiguate,
 //    we must first inline "name" as follows:
 //

@@ -4796,7 +4801,7 @@ index_params:

 // Index attributes can be either simple column references, or arbitrary
 // expressions in parens. For backwards-compatibility reasons, we allow an
-// expression that's just a function call to be written without parens.
+// expression that is just a function call to be written without parens.
 index_elem:
   a_expr opt_asc_desc
   {
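For readers less familiar with goyacc actions: the `$N` references in the new rule are positional, counting symbols in the production left to right. In `IMPORT INTO table_name '(' insert_column_list ')' import_format DATA '(' string_or_placeholder_list ')' opt_with_options`, `$3` is the table_name, `$5` the insert_column_list, `$7` the import_format, `$10` the file list, and `$12` the options, which is exactly how the action populates tree.Import above.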

pkg/sql/sem/tree/import.go

Lines changed: 22 additions & 10 deletions
@@ -17,6 +17,8 @@ package tree
 // Import represents a IMPORT statement.
 type Import struct {
 	Table *TableName
+	Into bool
+	IntoCols NameList
 	CreateFile Expr
 	CreateDefs TableDefs
 	FileFormat string

@@ -41,17 +43,27 @@ func (node *Import) Format(ctx *FmtCtx) {
 		ctx.WriteByte(' ')
 		ctx.FormatNode(&node.Files)
 	} else {
-		ctx.WriteString("TABLE ")
-		ctx.FormatNode(node.Table)
-
-		if node.CreateFile != nil {
-			ctx.WriteString(" CREATE USING ")
-			ctx.FormatNode(node.CreateFile)
-			ctx.WriteString(" ")
+		if node.Into {
+			ctx.WriteString("INTO ")
+			ctx.FormatNode(node.Table)
+			if node.IntoCols != nil {
+				ctx.WriteByte('(')
+				ctx.FormatNode(&node.IntoCols)
+				ctx.WriteString(") ")
+			}
 		} else {
-			ctx.WriteString(" (")
-			ctx.FormatNode(&node.CreateDefs)
-			ctx.WriteString(") ")
+			ctx.WriteString("TABLE ")
+			ctx.FormatNode(node.Table)
+
+			if node.CreateFile != nil {
+				ctx.WriteString(" CREATE USING ")
+				ctx.FormatNode(node.CreateFile)
+				ctx.WriteString(" ")
+			} else {
+				ctx.WriteString(" (")
+				ctx.FormatNode(&node.CreateDefs)
+				ctx.WriteString(") ")
+			}
 		}
 		ctx.WriteString(node.FileFormat)
 		ctx.WriteString(" DATA (")

pkg/sql/sem/tree/pretty.go

Lines changed: 17 additions & 9 deletions
@@ -1820,16 +1820,24 @@ func (node *Import) doc(p *PrettyCfg) pretty.Doc {
 		}
 		items = append(items, p.row(node.FileFormat, p.Doc(&node.Files)))
 	} else {
-		if node.CreateFile != nil {
-			items = append(items, p.row("TABLE", p.Doc(node.Table)))
-			items = append(items, p.row("CREATE USING", p.Doc(node.CreateFile)))
+		if node.Into {
+			into := p.Doc(node.Table)
+			if node.IntoCols != nil {
+				into = p.nestUnder(into, p.bracket("(", p.Doc(&node.IntoCols), ")"))
+			}
+			items = append(items, p.row("INTO", into))
 		} else {
-			table := p.bracketDoc(
-				pretty.ConcatSpace(p.Doc(node.Table), pretty.Text("(")),
-				p.Doc(&node.CreateDefs),
-				pretty.Text(")"),
-			)
-			items = append(items, p.row("TABLE", table))
+			if node.CreateFile != nil {
+				items = append(items, p.row("TABLE", p.Doc(node.Table)))
+				items = append(items, p.row("CREATE USING", p.Doc(node.CreateFile)))
+			} else {
+				table := p.bracketDoc(
+					pretty.ConcatSpace(p.Doc(node.Table), pretty.Text("(")),
+					p.Doc(&node.CreateDefs),
+					pretty.Text(")"),
+				)
+				items = append(items, p.row("TABLE", table))
+			}
 		}

 		data := p.bracketKeyword(

pkg/sql/sqlbase/structured.go

Lines changed: 7 additions & 3 deletions
@@ -588,11 +588,15 @@ func (desc *TableDescriptor) AllNonDropIndexes() []*IndexDescriptor {
 // "active" ones on the table descriptor which are being enforced for all
 // writes, and "inactive" ones queued in the mutations list.
 func (desc *TableDescriptor) AllActiveAndInactiveChecks() []*TableDescriptor_CheckConstraint {
-	// For now, a check constraint is either in the mutations list or Validated.
-	// If it shows up twice after combining those two slices, it's a duplicate.
+	// A check constraint could be both on the table descriptor and in the
+	// list of mutations while the constraint is validated for existing rows. In
+	// that case, the constraint is in the Validating state, and we avoid
+	// including it twice. (Note that even though unvalidated check constraints
+	// cannot be added as of 19.1, they can still exist if they were created under
+	// previous versions.)
 	checks := make([]*TableDescriptor_CheckConstraint, 0, len(desc.Checks)+len(desc.Mutations))
 	for _, c := range desc.Checks {
-		if c.Validity == ConstraintValidity_Validated {
+		if c.Validity != ConstraintValidity_Validating {
 			checks = append(checks, c)
 		}
 	}
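To see the behavior change across the three validity states, here is a minimal self-contained sketch of the old and new inclusion predicates (simplified stand-in types; the real code uses sqlbase.ConstraintValidity and TableDescriptor_CheckConstraint):

```go
package main

import "fmt"

// Simplified stand-in for sqlbase.ConstraintValidity.
type validity string

const (
	validated   validity = "Validated"   // fully validated; lives only on the descriptor
	unvalidated validity = "Unvalidated" // pre-19.1 check: enforced for writes, never validated
	validating  validity = "Validating"  // mid-validation: on the descriptor AND in the mutations list
)

func main() {
	for _, v := range []validity{validated, unvalidated, validating} {
		before := v == validated // old predicate: silently dropped unvalidated checks
		after := v != validating // new predicate: only skips the double-counted state
		fmt.Printf("%-11s included before: %-5v after: %v\n", v, before, after)
	}
}
```

Only the Validating state is excluded now, because those checks reappear via the mutations list; Unvalidated checks remain visible.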
