Add more syntax to lexer and parser, and make improvements to tview
sql/lexer.go | 57

--- a/sql/lexer.go
+++ b/sql/lexer.go
@@ -21,6 +21,9 @@ const (
     COMMA
     ASTERIKS
     ASSIGN
+    BACKQUOTE
+    QUOTE
+    SINGLE_QUOTE

     // Keywords
     CREATE
@@ -41,36 +44,42 @@ const (
     NOT
     IF
     EXISTS
+    AUTOINCREMENT
+    CONSTRAINT

     TEXT
     INTEGER
     NULL
     REAL
     BLOB
+    NUMERIC
 )

 var keywords map[string]Token = map[string]Token{
     "CREATE": CREATE,
     "TABLE": TABLE,
     "PRIMARY": PRIMARY,
     "FOREIGN": FOREIGN,
     "REFERENCES": REFERENCES,
     "KEY": KEY,
     "NOT": NOT,
     "TEXT": TEXT,
     "INTEGER": INTEGER,
     "NULL": NULL,
     "IF": IF,
     "EXISTS": EXISTS,
     "SELECT": SELECT,
     "FROM": FROM,
     "WHERE": WHERE,
     "AND": AND,
     "OR": OR,
     "ORDER": ORDER,
     "TOP": TOP,
     "REAL": REAL,
     "BLOB": BLOB,
"CREATE": CREATE,
|
||||
"TABLE": TABLE,
|
||||
"PRIMARY": PRIMARY,
|
||||
"FOREIGN": FOREIGN,
|
||||
"REFERENCES": REFERENCES,
|
||||
"KEY": KEY,
|
||||
"NOT": NOT,
|
||||
"TEXT": TEXT,
|
||||
"INTEGER": INTEGER,
|
||||
"NULL": NULL,
|
||||
"IF": IF,
|
||||
"EXISTS": EXISTS,
|
||||
"SELECT": SELECT,
|
||||
"FROM": FROM,
|
||||
"WHERE": WHERE,
|
||||
"AND": AND,
|
||||
"OR": OR,
|
||||
"ORDER": ORDER,
|
||||
"TOP": TOP,
|
||||
"REAL": REAL,
|
||||
"BLOB": BLOB,
|
||||
"AUTOINCREMENT": AUTOINCREMENT,
|
||||
"CONSTRAINT": CONSTRAINT,
|
||||
"NUMERIC": NUMERIC,
|
||||
}
|
||||
|
||||
type Position struct {
|
||||
@@ -116,6 +125,12 @@ func (l *Lexer) Lex() (Position, Token, string) {
             return l.pos, ASTERIKS, "*"
         case '=':
             return l.pos, ASSIGN, "="
+        case '`':
+            return l.pos, BACKQUOTE, "`"
+        case '"':
+            return l.pos, QUOTE, "\""
+        case '\'':
+            return l.pos, SINGLE_QUOTE, "'"
         default:
             if unicode.IsSpace(r) {
                 continue
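
The three new cases in Lex emit one single-character token per quoting rune, leaving the quoted name itself to be lexed separately as an identifier. A minimal standalone sketch of that dispatch (the token names mirror the diff, but the surrounding types and helpers are illustrative, not the repository's lexer):

package main

import (
	"bufio"
	"fmt"
	"strings"
	"unicode"
)

// Illustrative token type for this sketch only.
type Token int

const (
	EOF Token = iota
	BACKQUOTE
	QUOTE
	SINGLE_QUOTE
)

var names = map[Token]string{BACKQUOTE: "BACKQUOTE", QUOTE: "QUOTE", SINGLE_QUOTE: "SINGLE_QUOTE"}

// lexQuote classifies a single rune the same way the new switch cases do:
// each quoting character becomes its own one-character token.
func lexQuote(r *bufio.Reader) (Token, string) {
	for {
		ch, _, err := r.ReadRune()
		if err != nil {
			return EOF, ""
		}
		switch ch {
		case '`':
			return BACKQUOTE, "`"
		case '"':
			return QUOTE, "\""
		case '\'':
			return SINGLE_QUOTE, "'"
		default:
			if unicode.IsSpace(ch) {
				continue // whitespace is skipped, as in the real lexer
			}
			return EOF, "" // anything else is out of scope for this sketch
		}
	}
}

func main() {
	r := bufio.NewReader(strings.NewReader("` \" '"))
	for {
		tok, lit := lexQuote(r)
		if tok == EOF {
			break
		}
		fmt.Printf("%s %q\n", names[tok], lit)
	}
}
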
@@ -91,11 +91,22 @@ func (p *Parser) parseCreateTable() (*CreateTableStatement, error) {
         return nil, p.unexpectedToken()
     }

-    tok, ok := p.expectOne(IDENT, IF)
+    tok, ok := p.expectOne(QUOTE, SINGLE_QUOTE, BACKQUOTE, IDENT, IF)
     if !ok {
         return nil, p.unexpectedToken(IDENT, IF)
-    } else if tok == IF && !p.expectSequence(NOT, EXISTS, IDENT) {
-        return nil, p.unexpectedToken()
     }
+
+    switch tok {
+    case IF:
+        if !p.expectSequence(NOT, EXISTS) {
+            return nil, p.unexpectedToken()
+        }
+        p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
+        fallthrough
+    case QUOTE, SINGLE_QUOTE, BACKQUOTE:
+        if !p.expectNext(IDENT) {
+            return nil, p.unexpectedToken()
+        }
+    }
     _, _, lit := p.rescan()
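
With the wider expectOne set and the switch on the opening token, the table name may now be bare or wrapped in backquotes, double quotes, or single quotes, and the same quoting is accepted after IF NOT EXISTS. A rough standalone illustration of the statement shapes this aims to cover (the column lists are arbitrary examples, not taken from the diff):

package main

import "fmt"

func main() {
	// CREATE TABLE spellings the reworked name handling is written to accept.
	for _, stmt := range []string{
		"CREATE TABLE users (id INTEGER)",
		"CREATE TABLE `users` (id INTEGER)",
		`CREATE TABLE IF NOT EXISTS 'users' (id INTEGER)`,
		`CREATE TABLE IF NOT EXISTS "users" (id INTEGER)`,
	} {
		fmt.Println(stmt)
	}
}
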
@@ -104,11 +115,13 @@ func (p *Parser) parseCreateTable() (*CreateTableStatement, error) {
         Columns: make([]Column, 0),
     }

+    p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
     if !p.expectNext(LPAREN) {
         return nil, p.unexpectedToken(LPAREN)
     }

     for {
+        p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
         _, tok, _ := p.scan()

         switch tok {
@@ -125,12 +138,29 @@ func (p *Parser) parseCreateTable() (*CreateTableStatement, error) {
             }
             stmt.Columns = append(stmt.Columns, column)

+        // TODO: HANDLE AND SAVE CONSTRAINTS
+        case CONSTRAINT:
+            p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
+            if !p.expectNext(IDENT) {
+                return nil, p.unexpectedToken()
+            }
+            // _, _, constraintName := p.rescan()
+            p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
+
         case FOREIGN:
-            if !p.expectSequence(KEY, LPAREN, IDENT) {
+            if !p.expectSequence(KEY, LPAREN) {
                 return nil, p.unexpectedToken()
             }
+
+            p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
+
+            if !p.expectNext(IDENT) {
+                return nil, p.unexpectedToken()
+            }
             _, _, columnName := p.rescan()

+            p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
+
             if !p.expectSequence(RPAREN, REFERENCES) {
                 return nil, p.unexpectedToken()
             }
@@ -147,12 +177,19 @@ func (p *Parser) parseCreateTable() (*CreateTableStatement, error) {
             stmt.Columns[column].Extra = append(stmt.Columns[column].Extra, ref)

         case PRIMARY:
-            if !p.expectSequence(KEY, LPAREN, IDENT) {
+            if !p.expectSequence(KEY, LPAREN) {
                 return nil, p.unexpectedToken()
             }
+            p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
+
+            if !p.expectNext(IDENT) {
+                return nil, p.unexpectedToken()
+            }
+
             primaryKeyNames := make([]string, 0)
             _, _, columnName := p.rescan()
             primaryKeyNames = append(primaryKeyNames, columnName)
+            p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)

             for {
                 tok, ok := p.expectOne(RPAREN, COMMA)
@@ -163,11 +200,14 @@ func (p *Parser) parseCreateTable() (*CreateTableStatement, error) {
                     break
                 }

+                p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
                 if !p.expectNext(IDENT) {
                     return nil, p.unexpectedToken()
                 }

                 _, _, columnName := p.rescan()

+                p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
                 primaryKeyNames = append(primaryKeyNames, columnName)
             }

@@ -190,9 +230,10 @@ func (p *Parser) parseCreateTable() (*CreateTableStatement, error) {
 func (p *Parser) parseColumn() (Column, error) {
     _, _, lit := p.rescan()
     column := Column{Name: lit, Extra: make([]string, 0)}
+    p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)

-    if _, ok := p.expectOne(TEXT, INTEGER, REAL, BLOB); !ok {
-        return Column{}, p.unexpectedToken(TEXT, INTEGER, REAL, BLOB)
+    if _, ok := p.expectOne(TEXT, INTEGER, REAL, BLOB, NUMERIC); !ok {
+        return Column{}, p.unexpectedToken(TEXT, INTEGER, REAL, BLOB, NUMERIC)
     }
     _, _, column.Type = p.rescan()

@@ -222,6 +263,9 @@ func (p *Parser) parseColumn() (Column, error) {
             column.Extra = append(column.Extra, ref)
             fmt.Println(ref)

+        case AUTOINCREMENT:
+            column.Extra = append(column.Extra, "AUTOINCREMENT")
+
         default:
             return Column{}, p.unexpectedToken(COMMA, RPAREN, PRIMARY, NOT, REFERENCES)
         }
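
parseColumn now accepts NUMERIC as a column type and records AUTOINCREMENT in the column's Extra list. A standalone sketch of how such a column ends up represented, assuming a Column shape with the Name, Type, and Extra fields referenced in the diff (the struct here is an illustrative stand-in, not the repository's definition):

package main

import "fmt"

// Column mirrors the fields referenced in the diff (Name, Type, Extra).
type Column struct {
	Name  string
	Type  string
	Extra []string
}

func main() {
	// For a column declared with the AUTOINCREMENT keyword, the type token
	// fills Type and the new case appends the keyword to Extra.
	col := Column{Name: "id", Type: "INTEGER", Extra: make([]string, 0)}
	col.Extra = append(col.Extra, "AUTOINCREMENT")
	fmt.Printf("%+v\n", col)
}
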
@@ -253,17 +297,26 @@ func (p *Parser) unexpectedToken(expected ...Token) error {
 }

 func (p *Parser) references() (string, error) {
+    p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
     if !p.expectNext(IDENT) {
         return "", p.unexpectedToken(IDENT)
     }
     _, _, referenceTableName := p.rescan()
+    p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)

-    if !p.expectSequence(LPAREN, IDENT) {
+    if !p.expectNext(LPAREN) {
         return "", p.unexpectedToken()
     }
+
+    p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
+
+    if !p.expectNext(IDENT) {
+        return "", p.unexpectedToken()
+    }
     _, _, referenceColumnName := p.rescan()

+    p.consumeIfOne(QUOTE, SINGLE_QUOTE, BACKQUOTE)
+
     if !p.expectNext(RPAREN) {
         return "", p.unexpectedToken(RPAREN)
     }
@@ -316,6 +369,26 @@ func (p *Parser) consumeUntil(token Token, max int) bool {
     return false
 }

+func (p *Parser) consumeIfOne(token ...Token) {
+    _, tok, _ := p.scan()
+    if slices.ContainsFunc(token, func(t Token) bool {
+        return tok == t
+    }) {
+        return
+    }
+
+    p.unscan()
+}
+
+func (p *Parser) consumeIf(token Token) {
+    _, tok, _ := p.scan()
+    if tok == token {
+        return
+    }
+
+    p.unscan()
+}
+
 func (p *Parser) scan() (Position, Token, string) {
     if p.buf.avail {
         p.buf.avail = false
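
The new consumeIfOne and consumeIf helpers implement an optional-token pattern: scan one token, keep it when it matches, otherwise unscan it so the next read sees it again. This is what makes the quote tokens optional at every position in the hunks above. A standalone sketch of the same pattern over a toy token stream (the string-based tokens and type names are illustrative, not the repository's parser):

package main

import "fmt"

// tokenStream is a toy scanner with a one-token pushback buffer, mirroring
// the scan/unscan shape used by the parser in this sketch only.
type tokenStream struct {
	tokens []string
	pos    int
	buf    struct {
		tok   string
		avail bool
	}
}

func (s *tokenStream) scan() string {
	if s.buf.avail {
		s.buf.avail = false
		return s.buf.tok
	}
	if s.pos >= len(s.tokens) {
		s.buf.tok = "EOF"
		return "EOF"
	}
	tok := s.tokens[s.pos]
	s.pos++
	s.buf.tok = tok
	return tok
}

func (s *tokenStream) unscan() { s.buf.avail = true }

// consumeIfOne drops the next token only when it is one of the given kinds;
// otherwise it pushes the token back for the next caller.
func (s *tokenStream) consumeIfOne(kinds ...string) {
	tok := s.scan()
	for _, k := range kinds {
		if tok == k {
			return
		}
	}
	s.unscan()
}

func main() {
	s := &tokenStream{tokens: []string{"QUOTE", "IDENT", "QUOTE", "LPAREN"}}
	s.consumeIfOne("QUOTE", "SINGLE_QUOTE", "BACKQUOTE") // eats the optional opening quote
	fmt.Println(s.scan())                                // IDENT
	s.consumeIfOne("QUOTE", "SINGLE_QUOTE", "BACKQUOTE") // eats the optional closing quote
	fmt.Println(s.scan())                                // LPAREN
}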