parser: Add IMPORT INTO prototype syntax #37450

Merged 2 commits on May 10, 2019
2 changes: 2 additions & 0 deletions docs/generated/sql/bnf/import_csv.bnf
@@ -3,3 +3,5 @@ import_stmt ::=
| 'IMPORT' 'TABLE' table_name 'CREATE' 'USING' file_location 'CSV' 'DATA' '(' file_location ( ( ',' file_location ) )* ')'
| 'IMPORT' 'TABLE' table_name '(' table_elem_list ')' 'CSV' 'DATA' '(' file_location ( ( ',' file_location ) )* ')' 'WITH' kv_option_list
| 'IMPORT' 'TABLE' table_name '(' table_elem_list ')' 'CSV' 'DATA' '(' file_location ( ( ',' file_location ) )* ')'
| 'IMPORT' 'INTO' table_name '(' insert_column_list ')' 'CSV' 'DATA' '(' file_location ( ( ',' file_location ) )* ')' 'WITH' kv_option_list
| 'IMPORT' 'INTO' table_name '(' insert_column_list ')' 'CSV' 'DATA' '(' file_location ( ( ',' file_location ) )* ')'
2 changes: 2 additions & 0 deletions docs/generated/sql/bnf/import_dump.bnf
@@ -3,3 +3,5 @@ import_stmt ::=
| 'IMPORT' import_format file_location
| 'IMPORT' 'TABLE' table_name 'FROM' import_format file_location 'WITH' kv_option_list
| 'IMPORT' 'TABLE' table_name 'FROM' import_format file_location
| 'IMPORT' 'INTO' table_name '(' insert_column_list ')' import_format 'DATA' '(' file_location ( ( ',' file_location ) )* ')' 'WITH' kv_option_list
| 'IMPORT' 'INTO' table_name '(' insert_column_list ')' import_format 'DATA' '(' file_location ( ( ',' file_location ) )* ')'
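
For reference, the two new productions accept statements that target an existing table with an explicit column list instead of an inline schema. The sketch below is editor-added and not part of this PR; the table name, column names, and file URIs are placeholders, and the only API assumed is parser.Parse from pkg/sql/parser, used here only to check that the new syntax is accepted.

    package main

    import (
        "log"

        "github.com/cockroachdb/cockroach/pkg/sql/parser"
    )

    func main() {
        // One statement per new production: the CSV form (import_csv.bnf)
        // and the generic import_format form (import_dump.bnf).
        stmts := []string{
            `IMPORT INTO foo(id, email) CSV DATA ('nodelocal:///data/users.csv')`,
            `IMPORT INTO foo(id, email) MYSQLOUTFILE DATA ('nodelocal:///data/users.txt') WITH temp = 'path/to/temp'`,
        }
        for _, sql := range stmts {
            if _, err := parser.Parse(sql); err != nil {
                log.Fatalf("%s: %v", sql, err)
            }
        }
    }
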
15 changes: 8 additions & 7 deletions docs/generated/sql/bnf/stmt_block.bnf
@@ -129,6 +129,7 @@ import_stmt ::=
| 'IMPORT' 'TABLE' table_name 'FROM' import_format string_or_placeholder opt_with_options
| 'IMPORT' 'TABLE' table_name 'CREATE' 'USING' string_or_placeholder import_format 'DATA' '(' string_or_placeholder_list ')' opt_with_options
| 'IMPORT' 'TABLE' table_name '(' table_elem_list ')' import_format 'DATA' '(' string_or_placeholder_list ')' opt_with_options
| 'IMPORT' 'INTO' table_name '(' insert_column_list ')' import_format 'DATA' '(' string_or_placeholder_list ')' opt_with_options

insert_stmt ::=
opt_with_clause 'INSERT' 'INTO' insert_target insert_rest returning_clause
@@ -387,6 +388,9 @@ string_or_placeholder_list ::=
table_elem_list ::=
( table_elem ) ( ( ',' table_elem ) )*

insert_column_list ::=
( insert_column_item ) ( ( ',' insert_column_item ) )*

insert_target ::=
table_name
| table_name 'AS' table_alias_name
@@ -1029,8 +1033,8 @@ table_elem ::=
| family_def
| table_constraint

insert_column_list ::=
( insert_column_item ) ( ( ',' insert_column_item ) )*
insert_column_item ::=
column_name

opt_conf_expr ::=
'(' name_list ')'
@@ -1396,8 +1400,8 @@ table_constraint ::=
'CONSTRAINT' constraint_name constraint_elem
| constraint_elem

insert_column_item ::=
column_name
column_name ::=
name

d_expr ::=
'ICONST'
@@ -1689,9 +1693,6 @@ signed_iconst ::=
target_name ::=
unrestricted_name

column_name ::=
name

col_qual_list ::=
( ) ( ( col_qualification ) )*

1 change: 1 addition & 0 deletions pkg/sql/parser/parse_test.go
@@ -1265,6 +1265,7 @@ func TestParse(t *testing.T) {
{`IMPORT TABLE foo (id INT8 PRIMARY KEY, email STRING, age INT8) CSV DATA ('path/to/some/file', $1) WITH temp = 'path/to/temp'`},
{`IMPORT TABLE foo (id INT8, email STRING, age INT8) CSV DATA ('path/to/some/file', $1) WITH comma = ',', "nullif" = 'n/a', temp = $2`},
{`IMPORT TABLE foo FROM PGDUMPCREATE 'nodelocal:///foo/bar' WITH temp = 'path/to/temp'`},
{`IMPORT INTO foo(id, email) CSV DATA ('path/to/some/file', $1) WITH temp = 'path/to/temp'`},

{`IMPORT PGDUMP 'nodelocal:///foo/bar' WITH temp = 'path/to/temp'`},
{`EXPLAIN IMPORT PGDUMP 'nodelocal:///foo/bar' WITH temp = 'path/to/temp'`},
19 changes: 12 additions & 7 deletions pkg/sql/parser/sql.y
@@ -1741,7 +1741,7 @@ import_format:
// Formats:
// CSV
// MYSQLOUTFILE
// MYSQLDUMP (mysqldump's SQL output)
// MYSQLDUMP
// PGCOPY
// PGDUMP
//
@@ -1785,6 +1785,11 @@ import_stmt:
name := $3.unresolvedObjectName().ToTableName()
$$.val = &tree.Import{Table: &name, CreateDefs: $5.tblDefs(), FileFormat: $7, Files: $10.exprs(), Options: $12.kvOptions()}
}
| IMPORT INTO table_name '(' insert_column_list ')' import_format DATA '(' string_or_placeholder_list ')' opt_with_options
{
name := $3.unresolvedObjectName().ToTableName()
$$.val = &tree.Import{Table: &name, Into: true, IntoCols: $5.nameList(), FileFormat: $7, Files: $10.exprs(), Options: $12.kvOptions()}
}
| IMPORT error // SHOW HELP: IMPORT

// %Help: EXPORT - export data to file in a distributed manner
@@ -3697,7 +3702,7 @@ opt_on_targets_roles:
//
// 2. Now we must disambiguate the first rule "table_pattern_list"
// between one that recognizes ROLE and one that recognizes
// <some table pattern list>". So first, inline the definition of
// "<some table pattern list>". So first, inline the definition of
// table_pattern_list.
//
// targets ::=
@@ -3721,7 +3726,7 @@ opt_on_targets_roles:
// would match). We just need to focus on the first one "table_pattern".
// This needs to tweak "table_pattern".
//
// Here we could inline table_pattern but now we don't have to any
// Here we could inline table_pattern but now we do not have to any
// more, we just need to create a variant of it which is
// unambiguous with a single ROLE keyword. That is, we need a
// table_pattern which cannot contain a single name. We do
@@ -3744,7 +3749,7 @@ opt_on_targets_roles:
// that starts with ROLE cannot be matched by any of these remaining
// rules. This means that the prefix is now free to use, without
// ambiguity. We do this as follows, to gain a syntax rule for "ROLE
// <namelist>". (We'll handle a ROLE with no name list below.)
// <namelist>". (We will handle a ROLE with no name list below.)
//
// targets ::=
// ROLE name_list # <- here
@@ -3753,7 +3758,7 @@ opt_on_targets_roles:
// TABLE table_pattern_list
// DATABASE name_list
//
// 6. Now on to the finishing touches. First we'd like to regain the
// 6. Now on to the finishing touches. First we would like to regain the
// ability to use "<tablename>" when the table name is a simple
// identifier. This is done as follows:
//
@@ -3766,7 +3771,7 @@ opt_on_targets_roles:
// DATABASE name_list
//
// 7. Then, we want to recognize "ROLE" without any subsequent name
// list. This requires some care: we can't add "ROLE" to the set of
// list. This requires some care: we can not add "ROLE" to the set of
// rules above, because "name" would then overlap. To disambiguate,
// we must first inline "name" as follows:
//
@@ -4796,7 +4801,7 @@ index_params:

// Index attributes can be either simple column references, or arbitrary
// expressions in parens. For backwards-compatibility reasons, we allow an
// expression that's just a function call to be written without parens.
// expression that is just a function call to be written without parens.
index_elem:
a_expr opt_asc_desc
{
32 changes: 22 additions & 10 deletions pkg/sql/sem/tree/import.go
@@ -17,6 +17,8 @@ package tree
// Import represents an IMPORT statement.
type Import struct {
Table *TableName
Into bool
IntoCols NameList
CreateFile Expr
CreateDefs TableDefs
FileFormat string
@@ -41,17 +43,27 @@ func (node *Import) Format(ctx *FmtCtx) {
ctx.WriteByte(' ')
ctx.FormatNode(&node.Files)
} else {
ctx.WriteString("TABLE ")
ctx.FormatNode(node.Table)

if node.CreateFile != nil {
ctx.WriteString(" CREATE USING ")
ctx.FormatNode(node.CreateFile)
ctx.WriteString(" ")
if node.Into {
ctx.WriteString("INTO ")
ctx.FormatNode(node.Table)
if node.IntoCols != nil {
ctx.WriteByte('(')
ctx.FormatNode(&node.IntoCols)
ctx.WriteString(") ")
}
} else {
ctx.WriteString(" (")
ctx.FormatNode(&node.CreateDefs)
ctx.WriteString(") ")
ctx.WriteString("TABLE ")
ctx.FormatNode(node.Table)

if node.CreateFile != nil {
ctx.WriteString(" CREATE USING ")
ctx.FormatNode(node.CreateFile)
ctx.WriteString(" ")
} else {
ctx.WriteString(" (")
ctx.FormatNode(&node.CreateDefs)
ctx.WriteString(") ")
}
}
ctx.WriteString(node.FileFormat)
ctx.WriteString(" DATA (")
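
To illustrate the Format change above: when Into is set, the node now prints "INTO <table>" followed by the optional parenthesized column list, instead of the TABLE / CREATE USING forms. The following is a rough, editor-added sketch rather than code from the PR; it assumes the existing sem/tree helpers MakeUnqualifiedTableName, NewStrVal, and AsString.

    package main

    import (
        "fmt"

        "github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
    )

    func main() {
        // Build the AST node that the new IMPORT INTO grammar action produces,
        // then let the updated Format method render it back to SQL.
        tbl := tree.MakeUnqualifiedTableName("foo")
        imp := &tree.Import{
            Table:      &tbl,
            Into:       true,
            IntoCols:   tree.NameList{"id", "email"},
            FileFormat: "CSV",
            Files:      tree.Exprs{tree.NewStrVal("nodelocal:///data/users.csv")},
        }
        // Expected rendering, per the Into branch above:
        //   IMPORT INTO foo(id, email) CSV DATA ('nodelocal:///data/users.csv')
        fmt.Println(tree.AsString(imp))
    }
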
26 changes: 17 additions & 9 deletions pkg/sql/sem/tree/pretty.go
@@ -1820,16 +1820,24 @@ func (node *Import) doc(p *PrettyCfg) pretty.Doc {
}
items = append(items, p.row(node.FileFormat, p.Doc(&node.Files)))
} else {
if node.CreateFile != nil {
items = append(items, p.row("TABLE", p.Doc(node.Table)))
items = append(items, p.row("CREATE USING", p.Doc(node.CreateFile)))
if node.Into {
into := p.Doc(node.Table)
if node.IntoCols != nil {
into = p.nestUnder(into, p.bracket("(", p.Doc(&node.IntoCols), ")"))
}
items = append(items, p.row("INTO", into))
} else {
table := p.bracketDoc(
pretty.ConcatSpace(p.Doc(node.Table), pretty.Text("(")),
p.Doc(&node.CreateDefs),
pretty.Text(")"),
)
items = append(items, p.row("TABLE", table))
if node.CreateFile != nil {
items = append(items, p.row("TABLE", p.Doc(node.Table)))
items = append(items, p.row("CREATE USING", p.Doc(node.CreateFile)))
} else {
table := p.bracketDoc(
pretty.ConcatSpace(p.Doc(node.Table), pretty.Text("(")),
p.Doc(&node.CreateDefs),
pretty.Text(")"),
)
items = append(items, p.row("TABLE", table))
}
}

data := p.bracketKeyword(
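
Continuing the sketch above, the doc method gives the same node a pretty-printed form in which the INTO row carries the bracketed column list. This is an assumption-laden illustration, not PR code; it presumes tree.Pretty is the sqlfmt entry point and reuses the imp node built in the previous sketch.

    // Pretty-print the node built above; the exact line breaks depend on the
    // configured width, but the output begins with the IMPORT keyword row
    // followed by the INTO row added in this change.
    fmt.Println(tree.Pretty(imp))
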