1 change: 1 addition & 0 deletions internal/syntax/lexing/lexer.go

@@ -69,6 +69,7 @@ var keywordSet = map[string]tokens.TokenType{
 	"using": tokens.TokenTypeUsing,
 	"values": tokens.TokenTypeValues,
 	"where": tokens.TokenTypeWhere,
+	"with": tokens.TokenTypeWith,
 }
 
 var punctuationMap = map[rune]tokens.TokenType{
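For context, here is a minimal sketch (not taken from this PR) of how a keyword table like keywordSet is typically consulted during lexing: after a word is scanned, a map lookup decides whether it becomes a keyword token or stays a plain identifier. All names below (classifyWord, tokenTypeIdent, and so on) are illustrative stand-ins, not gostgres identifiers.

package main

import (
	"fmt"
	"strings"
)

// tokenType and the constants below stand in for the real tokens.TokenType values.
type tokenType int

const (
	tokenTypeIdent tokenType = iota
	tokenTypeWith
)

// keywordSet mirrors the table in lexer.go: lowercase keyword text to token type.
var keywordSet = map[string]tokenType{
	"with": tokenTypeWith,
}

// classifyWord re-tags a scanned word as a keyword when it appears in the
// table (case-insensitively) and otherwise leaves it as a plain identifier.
func classifyWord(text string) tokenType {
	if keywordType, ok := keywordSet[strings.ToLower(text)]; ok {
		return keywordType
	}
	return tokenTypeIdent
}

func main() {
	fmt.Println(classifyWord("WITH") == tokenTypeWith)   // true
	fmt.Println(classifyWord("users") == tokenTypeIdent) // true
}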
61 changes: 49 additions & 12 deletions internal/syntax/parsing/statements.go

@@ -26,7 +26,7 @@ func (p *parser) initStatementParsers() {
 
 // statement := ddlStatement | ( [ `EXPLAIN` ] explainableStatement )
 // ddlStatement := ( `CREATE` createTail ) | ( `ALTER` alterTail )
-// explainableStatement := ( `SELECT` selectTail ) | ( `INSERT` insertTail ) | ( `UPDATE` updateTail ) | ( `DELETE` deleteTail )
+// explainableStatement := [ `WITH` name `AS` `(` selectInsertUpdateOrDelete `)` [, ...] ] selectInsertUpdateOrDelete
 func (p *parser) parseStatement() (Query, error) {
 	for tokenType, parser := range p.ddlParsers {
 		token := p.current()
@@ -35,26 +35,63 @@ func (p *parser) parseStatement() (Query, error) {
 		}
 	}
 
-	isExplain := false
-	if p.advanceIf(isType(tokens.TokenTypeExplain)) {
-		isExplain = true
+	isExplain := p.advanceIf(isType(tokens.TokenTypeExplain))
+
+	type namedNode struct {
+		name string
+		node queries.Node
 	}
+	var namedNodes []namedNode
+	if p.advanceIf(isType(tokens.TokenTypeWith)) {
+		for {
+			name, err := p.parseIdent()
+			if err != nil {
+				return nil, err
+			}
 
-	for tokenType, parser := range p.explainableParsers {
-		token := p.current()
-		if p.advanceIf(isType(tokenType)) {
-			node, err := parser(token)
+			if _, err := p.mustAdvance(isType(tokens.TokenTypeAs)); err != nil {
+				return nil, err
+			}
+
+			node, err := parseParenthesized(p, func() (queries.Node, error) {
+				return p.parseSelectInsertUpdateOrDelete()
+			})
 			if err != nil {
 				return nil, err
 			}
 
-			if isExplain {
-				node = explain.NewExplain(node)
+			namedNodes = append(namedNodes, namedNode{name, node})
+
+			if !p.advanceIf(isType(tokens.TokenTypeComma)) {
+				break
 			}
+		}
+	}
 
-			return queries.NewQuery(node), nil
+	node, err := p.parseSelectInsertUpdateOrDelete()
+	if err != nil {
+		return nil, err
+	}
+
+	if len(namedNodes) > 0 {
+		fmt.Printf("> %#v\n", namedNodes)
+	}
+
+	if isExplain {
+		node = explain.NewExplain(node)
+	}
+
+	return queries.NewQuery(node), nil
+}
+
+// selectInsertUpdateOrDelete := ( `SELECT` selectTail ) | ( `INSERT` insertTail ) | ( `UPDATE` updateTail ) | ( `DELETE` deleteTail )
+func (p *parser) parseSelectInsertUpdateOrDelete() (queries.Node, error) {
+	for tokenType, parser := range p.explainableParsers {
+		token := p.current()
+		if p.advanceIf(isType(tokenType)) {
+			return parser(token)
 		}
 	}
 
-	return nil, fmt.Errorf("expected start of statement (near %s)", p.current().Text)
+	return nil, fmt.Errorf("expected start of select, insert, update, or delete statement (near %s)", p.current().Text)
 }
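To make the new grammar rule concrete, here is a hedged sketch of a table-driven test covering the statement shapes parseStatement should now accept: an optional EXPLAIN, then an optional comma-separated list of `name AS ( ... )` bindings, then a select, insert, update, or delete. The parsing.Parse entry point and the overall test-harness shape are assumptions made for illustration, not necessarily the repository's actual API.

package parsing_test

import (
	"testing"

	"github.com/efritz/gostgres/internal/syntax/parsing"
)

func TestParseStatementWithClause(t *testing.T) {
	statements := []string{
		// a single named subquery feeding the trailing select
		`WITH recent AS (SELECT id FROM orders) SELECT * FROM recent`,
		// multiple comma-separated bindings, per the [, ...] in the grammar comment
		`WITH a AS (SELECT 1), b AS (SELECT 2) SELECT * FROM a`,
		// EXPLAIN is consumed before the optional WITH clause
		`EXPLAIN WITH a AS (SELECT 1) SELECT * FROM a`,
	}

	for _, statement := range statements {
		// parsing.Parse is a stand-in for whatever entry point the test suite uses.
		if _, err := parsing.Parse(statement); err != nil {
			t.Errorf("failed to parse %q: %v", statement, err)
		}
	}
}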
3 changes: 2 additions & 1 deletion internal/syntax/parsing/types.go

@@ -5,6 +5,7 @@ import (
 	"strings"
 
 	"github.com/efritz/gostgres/internal/shared/types"
+	"github.com/efritz/gostgres/internal/syntax/tokens"
 )
 
 // basicType := ident
@@ -38,7 +39,7 @@ func (p *parser) parseBasicType() (types.Type, error) {
 		typ = types.TypeBool
 	// TODO - use multi-phrase keyword(s)
 	case "timestamp":
-		if !p.advanceIf(isIdent("with"), isIdent("time"), isIdent("zone")) {
+		if !p.advanceIf(isType(tokens.TokenTypeWith), isIdent("time"), isIdent("zone")) {
 			return types.TypeUnknown, fmt.Errorf("unknown type %q", "timestamp")
 		}
 		typ = types.TypeTimestampTz
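The reason for this change: once "with" is registered in keywordSet, the lexer emits it as TokenTypeWith rather than as an identifier, so the isIdent("with") filter used for TIMESTAMP WITH TIME ZONE would no longer match. Below is a minimal, self-contained sketch of an all-or-nothing multi-token advanceIf under that assumption; the types and helpers are simplified stand-ins, and the real gostgres parser may differ in detail.

package main

import "fmt"

// token, tokenType, and the helpers below are simplified stand-ins for the
// real lexing/parsing types; only the shape of advanceIf matters here.
type tokenType int

const (
	tokenTypeIdent tokenType = iota
	tokenTypeWith
)

type token struct {
	typ  tokenType
	text string
}

type tokenFilter func(token) bool

func isType(t tokenType) tokenFilter {
	return func(tok token) bool { return tok.typ == t }
}

func isIdent(text string) tokenFilter {
	return func(tok token) bool { return tok.typ == tokenTypeIdent && tok.text == text }
}

type parser struct {
	tokens []token
	cursor int
}

// advanceIf consumes len(filters) tokens only if every filter matches the
// corresponding upcoming token; on any mismatch it consumes nothing.
func (p *parser) advanceIf(filters ...tokenFilter) bool {
	for i, filter := range filters {
		if p.cursor+i >= len(p.tokens) || !filter(p.tokens[p.cursor+i]) {
			return false
		}
	}
	p.cursor += len(filters)
	return true
}

func main() {
	// With "with" registered as a keyword, the lexer emits a keyword token,
	// so only the isType form of the lookahead matches.
	p := &parser{tokens: []token{{tokenTypeWith, "with"}, {tokenTypeIdent, "time"}, {tokenTypeIdent, "zone"}}}
	fmt.Println(p.advanceIf(isIdent("with"), isIdent("time"), isIdent("zone")))       // false, nothing consumed
	fmt.Println(p.advanceIf(isType(tokenTypeWith), isIdent("time"), isIdent("zone"))) // true, all three consumed
}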
1 change: 1 addition & 0 deletions internal/syntax/tokens/token_type.go

@@ -67,6 +67,7 @@ const (
 	TokenTypeUsing
 	TokenTypeValues
 	TokenTypeWhere
+	TokenTypeWith
 
 	//
 	// Single-character operators