diff --git a/embedded/document/engine_test.go b/embedded/document/engine_test.go index 4be3987a28..cc90c35dd2 100644 --- a/embedded/document/engine_test.go +++ b/embedded/document/engine_test.go @@ -5,7 +5,7 @@ SPDX-License-Identifier: BUSL-1.1 you may not use this file except in compliance with the License. You may obtain a copy of the License at - https://mariadb.com/bsl11/ + https://mariadb.com/bsl11/ Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, @@ -1709,7 +1709,7 @@ func TestCollectionUpdateWithDeletedIndex(t *testing.T) { collectionName, "active", ) - require.ErrorIs(t, err, sql.ErrCantDropIndexedColumn) + require.ErrorIs(t, err, sql.ErrCannotDropColumn) err = engine.DeleteIndex( context.Background(), diff --git a/embedded/sql/catalog.go b/embedded/sql/catalog.go index bb224b2373..f9c3d91210 100644 --- a/embedded/sql/catalog.go +++ b/embedded/sql/catalog.go @@ -23,7 +23,6 @@ import ( "encoding/json" "errors" "fmt" - "io" "math" "strings" "time" @@ -43,19 +42,26 @@ type Catalog struct { maxTableID uint32 // The maxTableID variable is used to assign unique ids to new tables as they are created. } +type CheckConstraint struct { + id uint32 + name string + exp ValueExp +} + type Table struct { - catalog *Catalog - id uint32 - name string - cols []*Column - colsByID map[uint32]*Column - colsByName map[string]*Column - indexes []*Index - indexesByName map[string]*Index - indexesByColID map[uint32][]*Index - primaryIndex *Index - autoIncrementPK bool - maxPK int64 + catalog *Catalog + id uint32 + name string + cols []*Column + colsByID map[uint32]*Column + colsByName map[string]*Column + indexes []*Index + indexesByName map[string]*Index + indexesByColID map[uint32][]*Index + checkConstraints map[string]CheckConstraint + primaryIndex *Index + autoIncrementPK bool + maxPK int64 maxColID uint32 maxIndexID uint32 @@ -196,7 +202,6 @@ func (t *Table) IsIndexed(colName string) (indexed bool, err error) { if err != nil { return false, err } - return len(t.indexesByColID[col.id]) > 0, nil } @@ -364,7 +369,7 @@ func indexName(tableName string, cols []*Column) string { return buf.String() } -func (catlg *Catalog) newTable(name string, colsSpec map[uint32]*ColSpec, maxColID uint32) (table *Table, err error) { +func (catlg *Catalog) newTable(name string, colsSpec map[uint32]*ColSpec, checkConstraints map[string]CheckConstraint, maxColID uint32) (table *Table, err error) { if len(name) == 0 || len(colsSpec) == 0 { return nil, ErrIllegalArguments } @@ -392,15 +397,16 @@ func (catlg *Catalog) newTable(name string, colsSpec map[uint32]*ColSpec, maxCol } table = &Table{ - id: id, - catalog: catlg, - name: name, - cols: make([]*Column, 0, len(colsSpec)), - colsByID: make(map[uint32]*Column), - colsByName: make(map[string]*Column), - indexesByName: make(map[string]*Index), - indexesByColID: make(map[uint32][]*Index), - maxColID: maxColID, + id: id, + catalog: catlg, + name: name, + cols: make([]*Column, 0, len(colsSpec)), + colsByID: make(map[uint32]*Column), + colsByName: make(map[string]*Column), + indexesByName: make(map[string]*Index), + indexesByColID: make(map[uint32][]*Index), + checkConstraints: checkConstraints, + maxColID: maxColID, } for id := uint32(1); id <= maxColID; id++ { @@ -631,7 +637,7 @@ func (t *Table) deleteColumn(col *Column) error { } if isIndexed { - return fmt.Errorf("%w (%s)", ErrCantDropIndexedColumn, col.colName) + return fmt.Errorf("%w %s because one or more indexes require it", 
ErrCannotDropColumn, col.colName) } newCols := make([]*Column, 0, len(t.cols)-1) @@ -649,6 +655,16 @@ func (t *Table) deleteColumn(col *Column) error { return nil } +func (t *Table) deleteCheck(name string) (uint32, error) { + c, exists := t.checkConstraints[name] + if !exists { + return 0, fmt.Errorf("%s.%s: %w", t.name, name, ErrConstraintNotFound) + } + + delete(t.checkConstraints, name) + return c.id, nil +} + func (t *Table) deleteIndex(index *Index) error { if index.IsPrimary() { return fmt.Errorf("%w: primary key index can NOT be deleted", ErrIllegalArguments) @@ -724,55 +740,38 @@ func validMaxLenForType(maxLen int, sqlType SQLValueType) bool { } func (catlg *Catalog) load(ctx context.Context, tx *store.OngoingTx) error { - dbReaderSpec := store.KeyReaderSpec{ - Prefix: MapKey(catlg.enginePrefix, catalogTablePrefix, EncodeID(1)), - } + return catlg.loadCatalog(ctx, tx, false) +} - tableReader, err := tx.NewKeyReader(dbReaderSpec) - if err != nil { - return err - } - defer tableReader.Close() +func (catlg *Catalog) loadCatalog(ctx context.Context, tx *store.OngoingTx, copyToTx bool) error { + prefix := MapKey(catlg.enginePrefix, catalogTablePrefix, EncodeID(1)) - for { - mkey, vref, err := tableReader.Read(ctx) - if errors.Is(err, store.ErrNoMoreEntries) { - break - } - if err != nil { - return err - } - - dbID, tableID, err := unmapTableID(catlg.enginePrefix, mkey) + return iteratePrefix(ctx, tx, prefix, func(key, value []byte, deleted bool) error { + dbID, tableID, err := unmapTableID(catlg.enginePrefix, key) if err != nil { return err } - if dbID != 1 { + if dbID != DatabaseID { return ErrCorruptedData } - // Retrieve the key-value metadata (KVMetadata) of the current version reference (vref). - // If the metadata is not nil and the "Deleted" flag of the metadata is set to true, - // increment the catalog's table count by 1 and continue to the next iteration. - // This implies this is a deleted table and we should not load it. 
- md := vref.KVMetadata() - if md != nil && md.Deleted() { + if deleted { catlg.maxTableID++ - continue + return nil } - colSpecs, maxColID, err := loadColSpecs(ctx, dbID, tableID, tx, catlg.enginePrefix) + colSpecs, maxColID, err := loadColSpecs(ctx, tableID, tx, catlg.enginePrefix, copyToTx) if err != nil { return err } - v, err := vref.Resolve() + checks, err := loadCheckConstraints(ctx, dbID, tableID, tx, catlg.enginePrefix, copyToTx) if err != nil { return err } - table, err := catlg.newTable(string(v), colSpecs, maxColID) + table, err := catlg.newTable(string(value), colSpecs, checks, maxColID) if err != nil { return err } @@ -781,13 +780,13 @@ func (catlg *Catalog) load(ctx context.Context, tx *store.OngoingTx) error { return ErrCorruptedData } - err = table.loadIndexes(ctx, catlg.enginePrefix, tx) - if err != nil { - return err + if copyToTx { + if err := tx.Set(key, nil, value); err != nil { + return err + } } - } - - return nil + return table.loadIndexes(ctx, catlg.enginePrefix, tx, copyToTx) + }) } func loadMaxPK(ctx context.Context, sqlPrefix []byte, tx *store.OngoingTx, table *Table) ([]byte, error) { @@ -810,108 +809,86 @@ func loadMaxPK(ctx context.Context, sqlPrefix []byte, tx *store.OngoingTx, table return unmapIndexEntry(table.primaryIndex, sqlPrefix, mkey) } -func loadColSpecs(ctx context.Context, dbID, tableID uint32, tx *store.OngoingTx, sqlPrefix []byte) (specs map[uint32]*ColSpec, maxColID uint32, err error) { - initialKey := MapKey(sqlPrefix, catalogColumnPrefix, EncodeID(dbID), EncodeID(tableID)) +func loadColSpecs(ctx context.Context, tableID uint32, tx *store.OngoingTx, sqlPrefix []byte, copyToTx bool) (map[uint32]*ColSpec, uint32, error) { + prefix := MapKey(sqlPrefix, catalogColumnPrefix, EncodeID(1), EncodeID(tableID)) - dbReaderSpec := store.KeyReaderSpec{ - Prefix: initialKey, - } + var maxColID uint32 + specs := make(map[uint32]*ColSpec) - colSpecReader, err := tx.NewKeyReader(dbReaderSpec) - if err != nil { - return nil, 0, err - } - defer colSpecReader.Close() - - specs = make(map[uint32]*ColSpec, 0) - - for { - mkey, vref, err := colSpecReader.Read(ctx) - if errors.Is(err, store.ErrNoMoreEntries) { - break - } - if err != nil { - return nil, 0, err + err := iteratePrefix(ctx, tx, prefix, func(key, value []byte, deleted bool) error { + if deleted { + maxColID++ + return nil } - md := vref.KVMetadata() - if md != nil && md.IsExpirable() { - return nil, 0, ErrBrokenCatalogColSpecExpirable - } - - mdbID, mtableID, colID, colType, err := unmapColSpec(sqlPrefix, mkey) + colSpec, colID, err := loadColSpec(sqlPrefix, key, value, tableID) if err != nil { - return nil, 0, err - } - - if dbID != mdbID || tableID != mtableID { - return nil, 0, ErrCorruptedData + return err } - if colID != maxColID+1 { - return nil, 0, fmt.Errorf("%w: table columns not stored sequentially", ErrCorruptedData) - } + maxColID++ - maxColID = colID + specs[colID] = colSpec - if md != nil && md.Deleted() { - continue + if copyToTx { + return tx.Set(key, nil, value) } + return nil + }) + return specs, maxColID, err +} - v, err := vref.Resolve() - if err != nil { - return nil, 0, err - } +func loadColSpec(sqlPrefix, key, value []byte, tableID uint32) (*ColSpec, uint32, error) { + if len(value) < 6 { + return nil, 0, ErrCorruptedData + } - if len(v) < 6 { - return nil, 0, fmt.Errorf("%w: mismatch on database or table ids", ErrCorruptedData) - } + mdbID, mtableID, colID, colType, err := unmapColSpec(sqlPrefix, key) + if err != nil { + return nil, 0, err + } - specs[colID] = &ColSpec{ - 
colName: string(v[5:]), - colType: colType, - maxLen: int(binary.BigEndian.Uint32(v[1:])), - autoIncrement: v[0]&autoIncrementFlag != 0, - notNull: v[0]&nullableFlag != 0, - } + if mdbID != 1 || tableID != mtableID { + return nil, 0, ErrCorruptedData } - return specs, maxColID, nil + return &ColSpec{ + colName: string(value[5:]), + colType: colType, + maxLen: int(binary.BigEndian.Uint32(value[1:])), + autoIncrement: value[0]&autoIncrementFlag != 0, + notNull: value[0]&nullableFlag != 0, + }, colID, nil } -func (table *Table) loadIndexes(ctx context.Context, sqlPrefix []byte, tx *store.OngoingTx) error { - initialKey := MapKey(sqlPrefix, catalogIndexPrefix, EncodeID(1), EncodeID(table.id)) - - idxReaderSpec := store.KeyReaderSpec{ - Prefix: initialKey, - } +func loadCheckConstraints(ctx context.Context, dbID, tableID uint32, tx *store.OngoingTx, sqlPrefix []byte, copyToTx bool) (map[string]CheckConstraint, error) { + prefix := MapKey(sqlPrefix, catalogCheckPrefix, EncodeID(dbID), EncodeID(tableID)) + checks := make(map[string]CheckConstraint) - idxSpecReader, err := tx.NewKeyReader(idxReaderSpec) - if err != nil { - return err - } - defer idxSpecReader.Close() - - for { - mkey, vref, err := idxSpecReader.Read(ctx) - if errors.Is(err, store.ErrNoMoreEntries) { - break + err := iteratePrefix(ctx, tx, prefix, func(key, value []byte, deleted bool) error { + if deleted { + return nil } + + check, err := parseCheckConstraint(sqlPrefix, key, value) if err != nil { return err } + checks[check.name] = *check - // Retrieve the key-value metadata (KVMetadata) of the current version reference (vref). - // If the metadata is not nil and the "Deleted" flag of the metadata is set to true, - // increment the catalog's index count by 1 and continue to the next iteration. - // This implies this is a deleted index and we should not load it. 
- md := vref.KVMetadata() - if md != nil && md.Deleted() { - table.maxIndexID++ - continue + if copyToTx { + return tx.Set(key, nil, value) } + return nil + }) + return checks, err +} - dbID, tableID, indexID, err := unmapIndex(sqlPrefix, mkey) +func (table *Table) loadIndexes(ctx context.Context, sqlPrefix []byte, tx *store.OngoingTx, copyToTx bool) error { + prefix := MapKey(sqlPrefix, catalogIndexPrefix, EncodeID(1), EncodeID(table.id)) + + return iteratePrefix(ctx, tx, prefix, func(key, value []byte, deleted bool) error { + dbID, tableID, indexID, err := unmapIndex(sqlPrefix, key) if err != nil { return err } @@ -920,42 +897,44 @@ func (table *Table) loadIndexes(ctx context.Context, sqlPrefix []byte, tx *store return ErrCorruptedData } - v, err := vref.Resolve() - if err != nil { - return err + if deleted { + table.maxIndexID++ + return nil } - // v={unique {colID1}(ASC|DESC)...{colIDN}(ASC|DESC)} - colSpecLen := EncIDLen + 1 + if copyToTx { + if err := tx.Set(key, nil, value); err != nil { + return err + } + } else { + // v={unique {colID1}(ASC|DESC)...{colIDN}(ASC|DESC)} + colSpecLen := EncIDLen + 1 + if len(value) < 1+colSpecLen || len(value)%colSpecLen != 1 { + return ErrCorruptedData + } - if len(v) < 1+colSpecLen || len(v)%colSpecLen != 1 { - return ErrCorruptedData - } + var colIDs []uint32 + for i := 1; i < len(value); i += colSpecLen { + colID := binary.BigEndian.Uint32(value[i:]) - var colIDs []uint32 + // TODO: currently only ASC order is supported + if value[i+EncIDLen] != 0 { + return ErrCorruptedData + } + colIDs = append(colIDs, colID) + } - for i := 1; i < len(v); i += colSpecLen { - colID := binary.BigEndian.Uint32(v[i:]) + index, err := table.newIndex(value[0] > 0, colIDs) + if err != nil { + return err + } - // TODO: currently only ASC order is supported - if v[i+EncIDLen] != 0 { + if indexID != index.id { return ErrCorruptedData } - - colIDs = append(colIDs, colID) - } - - index, err := table.newIndex(v[0] > 0, colIDs) - if err != nil { - return err - } - - if indexID != index.id { - return ErrCorruptedData } - } - - return nil + return nil + }) } func trimPrefix(prefix, mkey []byte, mappingPrefix []byte) ([]byte, error) { @@ -984,6 +963,39 @@ func unmapTableID(prefix, mkey []byte) (dbID, tableID uint32, err error) { return } +func unmapCheckID(prefix, mkey []byte) (uint32, error) { + encID, err := trimPrefix(prefix, mkey, []byte(catalogCheckPrefix)) + if err != nil { + return 0, err + } + + if len(encID) < 3*EncIDLen { + return 0, ErrCorruptedData + } + return binary.BigEndian.Uint32(encID[2*EncIDLen:]), nil +} + +func parseCheckConstraint(prefix, key, value []byte) (*CheckConstraint, error) { + id, err := unmapCheckID(prefix, key) + if err != nil { + return nil, err + } + + nameLen := value[0] + 1 + name := string(value[1 : 1+nameLen]) + + exp, err := ParseExpFromString(string(value[1+nameLen:])) + if err != nil { + return nil, err + } + + return &CheckConstraint{ + id: id, + name: name, + exp: exp, + }, nil +} + func unmapColSpec(prefix, mkey []byte) (dbID, tableID, colID uint32, colType SQLValueType, err error) { encID, err := trimPrefix(prefix, mkey, []byte(catalogColumnPrefix)) if err != nil { @@ -1612,194 +1624,49 @@ func decodeValue(b []byte, colType SQLValueType, nullable bool) (TypedValue, int return nil, 0, ErrCorruptedData } -// addSchemaToTx adds the schema to the ongoing transaction. 
-func (t *Table) addIndexesToTx(ctx context.Context, sqlPrefix []byte, tx *store.OngoingTx) error { - initialKey := MapKey(sqlPrefix, catalogIndexPrefix, EncodeID(1), EncodeID(t.id)) - - idxReaderSpec := store.KeyReaderSpec{ - Prefix: initialKey, - } - - idxSpecReader, err := tx.NewKeyReader(idxReaderSpec) - if err != nil { - return err - } - defer idxSpecReader.Close() - - for { - mkey, vref, err := idxSpecReader.Read(ctx) - if errors.Is(err, store.ErrNoMoreEntries) { - break - } - if err != nil { - return err - } - - dbID, tableID, _, err := unmapIndex(sqlPrefix, mkey) - if err != nil { - return err - } - - if t.id != tableID || dbID != 1 { - return ErrCorruptedData - } - - v, err := vref.Resolve() - if err == io.EOF { - continue - } - if err != nil { - return err - } - - err = tx.Set(mkey, nil, v) - if err != nil { - return err - } - } - - return nil -} - // addSchemaToTx adds the schema of the catalog to the given transaction. -func (catlg *Catalog) addSchemaToTx(ctx context.Context, sqlPrefix []byte, tx *store.OngoingTx) error { - dbReaderSpec := store.KeyReaderSpec{ - Prefix: MapKey(sqlPrefix, catalogTablePrefix, EncodeID(1)), - } - - tableReader, err := tx.NewKeyReader(dbReaderSpec) - if err != nil { - return err - } - defer tableReader.Close() - - for { - mkey, vref, err := tableReader.Read(ctx) - if errors.Is(err, store.ErrNoMoreEntries) { - break - } - if err != nil { - return err - } - - dbID, tableID, err := unmapTableID(sqlPrefix, mkey) - if err != nil { - return err - } - - if dbID != 1 { - return ErrCorruptedData - } - - // read col specs into tx - colSpecs, maxColID, err := addColSpecsToTx(ctx, tx, sqlPrefix, tableID) - if err != nil { - return err - } - - v, err := vref.Resolve() - if err == io.EOF { - continue - } - if err != nil { - return err - } - - err = tx.Set(mkey, nil, v) - if err != nil { - return err - } - - table, err := catlg.newTable(string(v), colSpecs, maxColID) - if err != nil { - return err - } - - if tableID != table.id { - return ErrCorruptedData - } - - // read index specs into tx - err = table.addIndexesToTx(ctx, sqlPrefix, tx) - if err != nil { - return err - } - - } - - return nil +func (catlg *Catalog) addSchemaToTx(ctx context.Context, tx *store.OngoingTx) error { + return catlg.loadCatalog(ctx, tx, true) } -// addColSpecsToTx adds the column specs of the given table to the given transaction. 
-func addColSpecsToTx(ctx context.Context, tx *store.OngoingTx, sqlPrefix []byte, tableID uint32) (specs map[uint32]*ColSpec, maxColID uint32, err error) { - initialKey := MapKey(sqlPrefix, catalogColumnPrefix, EncodeID(1), EncodeID(tableID)) - +func iteratePrefix(ctx context.Context, tx *store.OngoingTx, prefix []byte, onSpec func(key, value []byte, deleted bool) error) error { dbReaderSpec := store.KeyReaderSpec{ - Prefix: initialKey, + Prefix: prefix, } colSpecReader, err := tx.NewKeyReader(dbReaderSpec) if err != nil { - return nil, 0, err + return err } defer colSpecReader.Close() - specs = make(map[uint32]*ColSpec, 0) - for { mkey, vref, err := colSpecReader.Read(ctx) if errors.Is(err, store.ErrNoMoreEntries) { break } if err != nil { - return nil, 0, err + return err } md := vref.KVMetadata() if md != nil && md.IsExpirable() { - return nil, 0, ErrBrokenCatalogColSpecExpirable - } - - mdbID, mtableID, colID, colType, err := unmapColSpec(sqlPrefix, mkey) - if err != nil { - return nil, 0, err - } - - if mdbID != 1 || tableID != mtableID { - return nil, 0, ErrCorruptedData - } - - if colID != maxColID+1 { - return nil, 0, fmt.Errorf("%w: table columns not stored sequentially", ErrCorruptedData) + return ErrBrokenCatalogColSpecExpirable } - maxColID = colID - - if md != nil && md.Deleted() { - continue - } - - v, err := vref.Resolve() - if err != nil { - return nil, 0, err - } - if len(v) < 6 { - return nil, 0, ErrCorruptedData + deleted := md != nil && md.Deleted() + var v []byte + if !deleted { + v, err = vref.Resolve() + if err != nil { + return err + } } - err = tx.Set(mkey, nil, v) + err = onSpec(mkey, v, deleted) if err != nil { - return nil, 0, err - } - - specs[colID] = &ColSpec{ - colName: string(v[5:]), - colType: colType, - maxLen: int(binary.BigEndian.Uint32(v[1:])), - autoIncrement: v[0]&autoIncrementFlag != 0, - notNull: v[0]&nullableFlag != 0, + return err } } - - return specs, maxColID, nil + return nil } diff --git a/embedded/sql/catalog_test.go b/embedded/sql/catalog_test.go index 9c812f1da2..4fe79fafba 100644 --- a/embedded/sql/catalog_test.go +++ b/embedded/sql/catalog_test.go @@ -40,25 +40,25 @@ func TestFromEmptyCatalog(t *testing.T) { _, err = db.GetTableByName("table1") require.ErrorIs(t, err, ErrTableDoesNotExist) - _, err = db.newTable("", nil, 0) + _, err = db.newTable("", nil, nil, 0) require.ErrorIs(t, err, ErrIllegalArguments) - _, err = db.newTable("table1", nil, 0) + _, err = db.newTable("table1", nil, nil, 0) require.ErrorIs(t, err, ErrIllegalArguments) - _, err = db.newTable("table1", map[uint32]*ColSpec{}, 0) + _, err = db.newTable("table1", map[uint32]*ColSpec{}, nil, 0) require.ErrorIs(t, err, ErrIllegalArguments) _, err = db.newTable("table1", map[uint32]*ColSpec{ 1: {colName: "id", colType: IntegerType}, 2: {colName: "id", colType: IntegerType}, - }, 2) + }, nil, 2) require.ErrorIs(t, err, ErrDuplicatedColumn) table, err := db.newTable("table1", map[uint32]*ColSpec{ 1: {colName: "id", colType: IntegerType}, 2: {colName: "title", colType: IntegerType}, - }, 2) + }, nil, 2) require.NoError(t, err) require.Equal(t, "table1", table.Name()) @@ -85,7 +85,7 @@ func TestFromEmptyCatalog(t *testing.T) { _, err = db.newTable("table1", map[uint32]*ColSpec{ 1: {colName: "id", colType: IntegerType}, 2: {colName: "title", colType: IntegerType}, - }, 2) + }, nil, 2) require.ErrorIs(t, err, ErrTableAlreadyExists) indexed, err := table.IsIndexed("id") @@ -189,7 +189,7 @@ func TestCatalogTableLength(t *testing.T) { for _, v := range []string{"table1", "table2", 
"table3"} { _, err := catlog.newTable(v, map[uint32]*ColSpec{ 1: {colName: "id", colType: IntegerType}, - }, 1) + }, nil, 1) require.ErrorIs(t, err, ErrTableAlreadyExists) } require.Equal(t, totalTablesCount, catlog.maxTableID) @@ -204,8 +204,7 @@ func TestCatalogTableLength(t *testing.T) { for _, v := range []string{"table4", "table5", "table6"} { _, err := catlog.newTable(v, map[uint32]*ColSpec{ 1: {colName: "id", colType: IntegerType}, - }, 1, - ) + }, nil, 1) require.NoError(t, err) } require.Equal(t, totalTablesCount+3, catlog.maxTableID) @@ -296,7 +295,7 @@ func TestCatalogTableLength(t *testing.T) { PRIMARY KEY(id) ) ` - stmts, err := Parse(strings.NewReader(sql)) + stmts, err := ParseSQL(strings.NewReader(sql)) require.NoError(t, err) require.Equal(t, 1, len(stmts)) stmt := stmts[0] diff --git a/embedded/sql/engine.go b/embedded/sql/engine.go index ba33f73d9b..b7bd51e39d 100644 --- a/embedded/sql/engine.go +++ b/embedded/sql/engine.go @@ -26,76 +26,84 @@ import ( "github.com/codenotary/immudb/embedded/store" ) -var ErrNoSupported = errors.New("not supported") -var ErrIllegalArguments = store.ErrIllegalArguments -var ErrMultiIndexingNotEnabled = fmt.Errorf("%w: multi-indexing must be enabled", store.ErrIllegalState) -var ErrParsingError = errors.New("parsing error") -var ErrDDLorDMLTxOnly = errors.New("transactions can NOT combine DDL and DML statements") -var ErrUnspecifiedMultiDBHandler = fmt.Errorf("%w: unspecified multidbHanlder", store.ErrIllegalState) -var ErrDatabaseDoesNotExist = errors.New("database does not exist") -var ErrDatabaseAlreadyExists = errors.New("database already exists") -var ErrTableAlreadyExists = errors.New("table already exists") -var ErrTableDoesNotExist = errors.New("table does not exist") -var ErrColumnDoesNotExist = errors.New("column does not exist") -var ErrColumnAlreadyExists = errors.New("column already exists") -var ErrCantDropIndexedColumn = errors.New("can not drop indexed column") -var ErrSameOldAndNewNames = errors.New("same old and new names") -var ErrColumnNotIndexed = errors.New("column is not indexed") -var ErrFunctionDoesNotExist = errors.New("function does not exist") -var ErrLimitedKeyType = errors.New("indexed key of unsupported type or exceeded length") -var ErrLimitedAutoIncrement = errors.New("only INTEGER single-column primary keys can be set as auto incremental") -var ErrLimitedMaxLen = errors.New("only VARCHAR and BLOB types support max length") -var ErrDuplicatedColumn = errors.New("duplicated column") -var ErrInvalidColumn = errors.New("invalid column") -var ErrReservedWord = errors.New("reserved word") -var ErrPKCanNotBeNull = errors.New("primary key can not be null") -var ErrPKCanNotBeUpdated = errors.New("primary key can not be updated") -var ErrNotNullableColumnCannotBeNull = errors.New("not nullable column can not be null") -var ErrNewColumnMustBeNullable = errors.New("new column must be nullable") -var ErrIndexAlreadyExists = errors.New("index already exists") -var ErrMaxNumberOfColumnsInIndexExceeded = errors.New("number of columns in multi-column index exceeded") -var ErrIndexNotFound = errors.New("index not found") -var ErrCannotIndexJson = errors.New("cannot index column of type json") -var ErrInvalidNumberOfValues = errors.New("invalid number of values provided") -var ErrInvalidValue = errors.New("invalid value provided") -var ErrInferredMultipleTypes = errors.New("inferred multiple types") -var ErrExpectingDQLStmt = errors.New("illegal statement. 
DQL statement expected") -var ErrColumnMustAppearInGroupByOrAggregation = errors.New("must appear in the group by clause or be used in an aggregated function") -var ErrIllegalMappedKey = errors.New("error illegal mapped key") -var ErrCorruptedData = store.ErrCorruptedData -var ErrBrokenCatalogColSpecExpirable = fmt.Errorf("%w: catalog column entry set as expirable", ErrCorruptedData) -var ErrNoMoreRows = store.ErrNoMoreEntries -var ErrInvalidTypes = errors.New("invalid types") -var ErrUnsupportedJoinType = errors.New("unsupported join type") -var ErrInvalidCondition = errors.New("invalid condition") -var ErrHavingClauseRequiresGroupClause = errors.New("having clause requires group clause") -var ErrNotComparableValues = errors.New("values are not comparable") -var ErrNumericTypeExpected = errors.New("numeric type expected") -var ErrUnexpected = errors.New("unexpected error") -var ErrMaxKeyLengthExceeded = errors.New("max key length exceeded") -var ErrMaxLengthExceeded = errors.New("max length exceeded") -var ErrColumnIsNotAnAggregation = errors.New("column is not an aggregation") -var ErrLimitedCount = errors.New("only unbounded counting is supported i.e. COUNT(*)") -var ErrTxDoesNotExist = errors.New("tx does not exist") -var ErrNestedTxNotSupported = errors.New("nested tx are not supported") -var ErrNoOngoingTx = errors.New("no ongoing transaction") -var ErrNonTransactionalStmt = errors.New("non transactional statement") -var ErrDivisionByZero = errors.New("division by zero") -var ErrMissingParameter = errors.New("missing parameter") -var ErrUnsupportedParameter = errors.New("unsupported parameter") -var ErrDuplicatedParameters = errors.New("duplicated parameters") -var ErrLimitedIndexCreation = errors.New("unique index creation is only supported on empty tables") -var ErrTooManyRows = errors.New("too many rows") -var ErrAlreadyClosed = store.ErrAlreadyClosed -var ErrAmbiguousSelector = errors.New("ambiguous selector") -var ErrUnsupportedCast = fmt.Errorf("%w: unsupported cast", ErrInvalidValue) -var ErrColumnMismatchInUnionStmt = errors.New("column mismatch in union statement") -var ErrInvalidTxMetadata = errors.New("invalid transaction metadata") +var ( + ErrNoSupported = errors.New("not supported") + ErrIllegalArguments = store.ErrIllegalArguments + ErrMultiIndexingNotEnabled = fmt.Errorf("%w: multi-indexing must be enabled", store.ErrIllegalState) + ErrParsingError = errors.New("parsing error") + ErrDDLorDMLTxOnly = errors.New("transactions can NOT combine DDL and DML statements") + ErrUnspecifiedMultiDBHandler = fmt.Errorf("%w: unspecified multidbHanlder", store.ErrIllegalState) + ErrDatabaseDoesNotExist = errors.New("database does not exist") + ErrDatabaseAlreadyExists = errors.New("database already exists") + ErrTableAlreadyExists = errors.New("table already exists") + ErrTableDoesNotExist = errors.New("table does not exist") + ErrColumnDoesNotExist = errors.New("column does not exist") + ErrColumnAlreadyExists = errors.New("column already exists") + ErrCannotDropColumn = errors.New("cannot drop column") + ErrSameOldAndNewNames = errors.New("same old and new names") + ErrColumnNotIndexed = errors.New("column is not indexed") + ErrFunctionDoesNotExist = errors.New("function does not exist") + ErrLimitedKeyType = errors.New("indexed key of unsupported type or exceeded length") + ErrLimitedAutoIncrement = errors.New("only INTEGER single-column primary keys can be set as auto incremental") + ErrLimitedMaxLen = errors.New("only VARCHAR and BLOB types support max length") + 
ErrDuplicatedColumn = errors.New("duplicated column") + ErrInvalidColumn = errors.New("invalid column") + ErrInvalidCheckConstraint = errors.New("invalid check constraint") + ErrCheckConstraintViolation = errors.New("check constraint violation") + ErrReservedWord = errors.New("reserved word") + ErrPKCanNotBeNull = errors.New("primary key can not be null") + ErrPKCanNotBeUpdated = errors.New("primary key can not be updated") + ErrNotNullableColumnCannotBeNull = errors.New("not nullable column can not be null") + ErrNewColumnMustBeNullable = errors.New("new column must be nullable") + ErrIndexAlreadyExists = errors.New("index already exists") + ErrMaxNumberOfColumnsInIndexExceeded = errors.New("number of columns in multi-column index exceeded") + ErrIndexNotFound = errors.New("index not found") + ErrConstraintNotFound = errors.New("constraint not found") + ErrInvalidNumberOfValues = errors.New("invalid number of values provided") + ErrInvalidValue = errors.New("invalid value provided") + ErrInferredMultipleTypes = errors.New("inferred multiple types") + ErrExpectingDQLStmt = errors.New("illegal statement. DQL statement expected") + ErrColumnMustAppearInGroupByOrAggregation = errors.New("must appear in the group by clause or be used in an aggregated function") + ErrIllegalMappedKey = errors.New("error illegal mapped key") + ErrCorruptedData = store.ErrCorruptedData + ErrBrokenCatalogColSpecExpirable = fmt.Errorf("%w: catalog column entry set as expirable", ErrCorruptedData) + ErrBrokenCatalogCheckConstraintExpirable = fmt.Errorf("%w: catalog check constraint set as expirable", ErrCorruptedData) + ErrNoMoreRows = store.ErrNoMoreEntries + ErrInvalidTypes = errors.New("invalid types") + ErrUnsupportedJoinType = errors.New("unsupported join type") + ErrInvalidCondition = errors.New("invalid condition") + ErrHavingClauseRequiresGroupClause = errors.New("having clause requires group clause") + ErrNotComparableValues = errors.New("values are not comparable") + ErrNumericTypeExpected = errors.New("numeric type expected") + ErrUnexpected = errors.New("unexpected error") + ErrMaxKeyLengthExceeded = errors.New("max key length exceeded") + ErrMaxLengthExceeded = errors.New("max length exceeded") + ErrColumnIsNotAnAggregation = errors.New("column is not an aggregation") + ErrLimitedCount = errors.New("only unbounded counting is supported i.e. 
COUNT(*)") + ErrTxDoesNotExist = errors.New("tx does not exist") + ErrNestedTxNotSupported = errors.New("nested tx are not supported") + ErrNoOngoingTx = errors.New("no ongoing transaction") + ErrNonTransactionalStmt = errors.New("non transactional statement") + ErrDivisionByZero = errors.New("division by zero") + ErrMissingParameter = errors.New("missing parameter") + ErrUnsupportedParameter = errors.New("unsupported parameter") + ErrDuplicatedParameters = errors.New("duplicated parameters") + ErrLimitedIndexCreation = errors.New("unique index creation is only supported on empty tables") + ErrTooManyRows = errors.New("too many rows") + ErrAlreadyClosed = store.ErrAlreadyClosed + ErrAmbiguousSelector = errors.New("ambiguous selector") + ErrUnsupportedCast = fmt.Errorf("%w: unsupported cast", ErrInvalidValue) + ErrColumnMismatchInUnionStmt = errors.New("column mismatch in union statement") + ErrCannotIndexJson = errors.New("cannot index column of type JSON") + ErrInvalidTxMetadata = errors.New("invalid transaction metadata") +) var MaxKeyLen = 512 -const EncIDLen = 4 -const EncLenLen = 4 +const ( + EncIDLen = 4 + EncLenLen = 4 +) const MaxNumberOfColumnsInIndex = 8 @@ -380,7 +388,7 @@ func indexEntryMapperFor(index, primaryIndex *Index) store.EntryMapper { } func (e *Engine) Exec(ctx context.Context, tx *SQLTx, sql string, params map[string]interface{}) (ntx *SQLTx, committedTxs []*SQLTx, err error) { - stmts, err := Parse(strings.NewReader(sql)) + stmts, err := ParseSQL(strings.NewReader(sql)) if err != nil { return nil, nil, fmt.Errorf("%w: %v", ErrParsingError, err) } @@ -520,7 +528,7 @@ func (e *Engine) queryAll(ctx context.Context, tx *SQLTx, sql string, params map } func (e *Engine) Query(ctx context.Context, tx *SQLTx, sql string, params map[string]interface{}) (RowReader, error) { - stmts, err := Parse(strings.NewReader(sql)) + stmts, err := ParseSQL(strings.NewReader(sql)) if err != nil { return nil, fmt.Errorf("%w: %v", ErrParsingError, err) } @@ -594,7 +602,7 @@ func (e *Engine) Catalog(ctx context.Context, tx *SQLTx) (catalog *Catalog, err } func (e *Engine) InferParameters(ctx context.Context, tx *SQLTx, sql string) (params map[string]SQLValueType, err error) { - stmts, err := Parse(strings.NewReader(sql)) + stmts, err := ParseSQL(strings.NewReader(sql)) if err != nil { return nil, fmt.Errorf("%w: %v", ErrParsingError, err) } @@ -654,7 +662,7 @@ func (e *Engine) CopyCatalogToTx(ctx context.Context, tx *store.OngoingTx) error catalog := newCatalog(e.prefix) - err := catalog.addSchemaToTx(ctx, e.prefix, tx) + err := catalog.addSchemaToTx(ctx, tx) if err != nil { return err } diff --git a/embedded/sql/engine_test.go b/embedded/sql/engine_test.go index dc24ea724e..2b47d77eb9 100644 --- a/embedded/sql/engine_test.go +++ b/embedded/sql/engine_test.go @@ -152,6 +152,9 @@ func TestCreateTable(t *testing.T) { _, _, err = engine.Exec(context.Background(), nil, "CREATE TABLE IF NOT EXISTS blob_table (id BLOB[2], PRIMARY KEY id)", nil) require.NoError(t, err) + + _, _, err = engine.Exec(context.Background(), nil, "CREATE TABLE IF NOT EXISTS balances (id INTEGER, balance FLOAT, CHECK (balance + id) >= 0, PRIMARY KEY id)", nil) + require.NoError(t, err) } func TestTimestampType(t *testing.T) { @@ -1373,13 +1376,13 @@ func TestAlterTableDropColumn(t *testing.T) { t.Run("fail to drop indexed columns", func(t *testing.T) { _, _, err = engine.Exec(context.Background(), nil, "ALTER TABLE table1 DROP COLUMN id", nil) - require.ErrorIs(t, err, ErrCantDropIndexedColumn) + require.ErrorIs(t, err, 
ErrCannotDropColumn) _, _, err = engine.Exec(context.Background(), nil, "ALTER TABLE table1 DROP COLUMN name", nil) - require.ErrorIs(t, err, ErrCantDropIndexedColumn) + require.ErrorIs(t, err, ErrCannotDropColumn) _, _, err = engine.Exec(context.Background(), nil, "ALTER TABLE table1 DROP COLUMN surname", nil) - require.ErrorIs(t, err, ErrCantDropIndexedColumn) + require.ErrorIs(t, err, ErrCannotDropColumn) }) t.Run("fail to drop columns that does not exist", func(t *testing.T) { @@ -2870,7 +2873,7 @@ func TestJSON(t *testing.T) { require.Len(t, rows, n/2) for i, row := range rows { - usr, _ := row.ValuesBySelector[EncodeSelector("", "tbl_with_json", "json_data->usr")].RawValue().(map[string]interface{}) + usr, _ := row.ValuesBySelector[EncodeSelector("", "tbl_with_json", "json_data->'usr'")].RawValue().(map[string]interface{}) require.Equal(t, map[string]interface{}{ "name": fmt.Sprintf("name%d", (2*i + 1)), @@ -2954,7 +2957,7 @@ func TestJSON(t *testing.T) { require.Len(t, rows, n) for i, row := range rows { - usr, _ := row.ValuesBySelector[EncodeSelector("", "tbl_with_json", "json_data->usr")].RawValue().(map[string]interface{}) + usr, _ := row.ValuesBySelector[EncodeSelector("", "tbl_with_json", "json_data->'usr'")].RawValue().(map[string]interface{}) name, _ := row.ValuesBySelector[EncodeSelector("", "tbl_with_json", "name")].RawValue().(string) age, _ := row.ValuesBySelector[EncodeSelector("", "tbl_with_json", "age")].RawValue().(float64) city, _ := row.ValuesBySelector[EncodeSelector("", "tbl_with_json", "city")].RawValue().(string) @@ -3124,7 +3127,7 @@ func TestJSON(t *testing.T) { ` CREATE TABLE test ( json_data JSON NOT NULL, - + PRIMARY KEY(json_data) )`, nil) require.ErrorIs(t, err, ErrCannotIndexJson) @@ -5796,7 +5799,7 @@ func TestInferParameters(t *testing.T) { require.NoError(t, err) require.Len(t, params, 0) - pstmt, err := Parse(strings.NewReader(stmt)) + pstmt, err := ParseSQL(strings.NewReader(stmt)) require.NoError(t, err) require.Len(t, pstmt, 1) @@ -5902,7 +5905,7 @@ func TestInferParameters(t *testing.T) { func TestInferParametersPrepared(t *testing.T) { engine := setupCommonTest(t) - stmts, err := Parse(strings.NewReader("CREATE TABLE mytable(id INTEGER, title VARCHAR, active BOOLEAN, PRIMARY KEY id)")) + stmts, err := ParseSQL(strings.NewReader("CREATE TABLE mytable(id INTEGER, title VARCHAR, active BOOLEAN, PRIMARY KEY id)")) require.NoError(t, err) require.Len(t, stmts, 1) @@ -7269,7 +7272,7 @@ func TestSingleDBCatalogQueries(t *testing.T) { _, _, err := engine.Exec(context.Background(), nil, ` CREATE TABLE mytable1(id INTEGER NOT NULL AUTO_INCREMENT, title VARCHAR[256], PRIMARY KEY id); - + CREATE TABLE mytable2(id INTEGER NOT NULL, name VARCHAR[100], active BOOLEAN, PRIMARY KEY id); `, nil) require.NoError(t, err) @@ -7279,7 +7282,7 @@ func TestSingleDBCatalogQueries(t *testing.T) { _, _, err = engine.Exec(context.Background(), tx, ` CREATE INDEX ON mytable1(title); - + CREATE INDEX ON mytable2(name); CREATE UNIQUE INDEX ON mytable2(name, active); `, nil) @@ -8513,6 +8516,136 @@ func (t *BrokenCatalogTestSuite) TestErrorDroppedPrimaryIndexColumn() { t.Require().ErrorIs(err, ErrColumnDoesNotExist) } +func TestCheckConstraints(t *testing.T) { + st, err := store.Open(t.TempDir(), store.DefaultOptions().WithMultiIndexing(true)) + require.NoError(t, err) + defer closeStore(t, st) + + engine, err := NewEngine(st, DefaultOptions()) + require.NoError(t, err) + + _, _, err = engine.Exec( + context.Background(), + nil, + `CREATE TABLE table_with_checks ( + id INTEGER 
AUTO_INCREMENT, + account VARCHAR, + in_balance FLOAT, + out_balance FLOAT, + balance FLOAT, + metadata JSON, + + CONSTRAINT metadata_check CHECK metadata->'usr' IS NOT NULL, + CHECK (account IS NULL) OR (account LIKE '^account_.*'), + CONSTRAINT in_out_balance_sum CHECK (in_balance + out_balance = balance), + CHECK (in_balance >= 0), + CHECK (out_balance <= 0), + CHECK (balance >= 0), + + PRIMARY KEY id + )`, nil, + ) + require.NoError(t, err) + + t.Run("check constraint violation", func(t *testing.T) { + _, _, err = engine.Exec(context.Background(), nil, `INSERT INTO table_with_checks(account, in_balance, out_balance, balance, metadata) VALUES ('account_one', 10, -1.5, 8.5, '{"usr": "user"}')`, nil) + require.NoError(t, err) + + _, _, err = engine.Exec(context.Background(), nil, `INSERT INTO table_with_checks(account, in_balance, out_balance, balance, metadata) VALUES ('account', 20, -1.0, 19.0, '{"usr": "user"}')`, nil) + require.ErrorIs(t, err, ErrCheckConstraintViolation) + + _, _, err = engine.Exec(context.Background(), nil, `INSERT INTO table_with_checks(account, in_balance, out_balance, balance, metadata) VALUES ('account_two', 10, 1.5, 11.5, '{"usr": "user"}')`, nil) + require.ErrorIs(t, err, ErrCheckConstraintViolation) + + _, _, err = engine.Exec(context.Background(), nil, `INSERT INTO table_with_checks(account, in_balance, out_balance, balance, metadata) VALUES ('account_two', -1, 2.5, 1.5, '{"usr": "user"}')`, nil) + require.ErrorIs(t, err, ErrCheckConstraintViolation) + + _, _, err = engine.Exec(context.Background(), nil, `INSERT INTO table_with_checks(account, in_balance, out_balance, balance, metadata) VALUES ('account_two', 10, -1.5, 9.0, '{"usr": "user"}')`, nil) + require.ErrorIs(t, err, ErrCheckConstraintViolation) + + _, _, err = engine.Exec(context.Background(), nil, + `UPDATE table_with_checks + SET + in_balance = in_balance - 1, + out_balance = out_balance + 1 + WHERE id = 1`, nil) + require.NoError(t, err) + + _, _, err = engine.Exec(context.Background(), nil, + `UPDATE table_with_checks + SET + out_balance = out_balance - 1, + balance = balance - 1 + WHERE id = 1`, nil) + require.NoError(t, err) + + _, _, err = engine.Exec(context.Background(), nil, "UPDATE table_with_checks SET in_balance = in_balance + 1 WHERE id = 1", nil) + require.ErrorIs(t, err, ErrCheckConstraintViolation) + + _, _, err = engine.Exec(context.Background(), nil, "UPDATE table_with_checks SET in_balance = NULL", nil) + require.ErrorIs(t, err, ErrCheckConstraintViolation) + }) + + t.Run("drop constraint", func(t *testing.T) { + _, _, err = engine.Exec(context.Background(), nil, "ALTER TABLE table_with_checks DROP CONSTRAINT metadata_check", nil) + require.NoError(t, err) + + _, _, err = engine.Exec(context.Background(), nil, "ALTER TABLE table_with_checks DROP CONSTRAINT in_out_balance_sum", nil) + require.NoError(t, err) + + _, _, err = engine.Exec(context.Background(), nil, "ALTER TABLE table_with_checks DROP CONSTRAINT in_out_balance_sum", nil) + require.ErrorIs(t, err, ErrConstraintNotFound) + + _, _, err = engine.Exec(context.Background(), nil, "INSERT INTO table_with_checks(account, in_balance, out_balance, balance) VALUES (NULL, 10, -1.5, 9.0)", nil) + require.NoError(t, err) + + _, _, err = engine.Exec(context.Background(), nil, "INSERT INTO table_with_checks(account, in_balance, out_balance, balance) VALUES ('account_three', -1, -1.5, 9.0)", nil) + require.ErrorIs(t, err, ErrCheckConstraintViolation) + + _, _, err = engine.Exec(context.Background(), nil, "INSERT INTO 
table_with_checks(account, in_balance, out_balance, balance) VALUES ('account_three', 10, 1.5, 9.0)", nil) + require.ErrorIs(t, err, ErrCheckConstraintViolation) + }) + + t.Run("drop column with constraint", func(t *testing.T) { + _, _, err = engine.Exec(context.Background(), nil, "ALTER TABLE table_with_checks DROP COLUMN account", nil) + require.ErrorIs(t, err, ErrCannotDropColumn) + + _, _, err = engine.Exec(context.Background(), nil, "ALTER TABLE table_with_checks DROP CONSTRAINT table_with_checks_check1", nil) + require.NoError(t, err) + + _, _, err = engine.Exec(context.Background(), nil, "ALTER TABLE table_with_checks DROP COLUMN account", nil) + require.NoError(t, err) + }) + + t.Run("unsupported check expressions", func(t *testing.T) { + _, _, err := engine.Exec( + context.Background(), + nil, + `CREATE TABLE table_with_invalid_checks ( + id INTEGER AUTO_INCREMENT, + + CHECK EXISTS (SELECT * FROM mytable), + + PRIMARY KEY id + )`, nil, + ) + require.ErrorIs(t, err, ErrNoSupported) + + _, _, err = engine.Exec( + context.Background(), + nil, + `CREATE TABLE table_with_invalid_checks ( + id INTEGER AUTO_INCREMENT, + + CHECK id IN (SELECT * FROM mytable), + + PRIMARY KEY id + )`, nil, + ) + require.ErrorIs(t, err, ErrNoSupported) + }) +} + func TestQueryTxMetadata(t *testing.T) { opts := store.DefaultOptions().WithMultiIndexing(true) opts.WithIndexOptions(opts.IndexOpts.WithMaxActiveSnapshots(1)) @@ -8569,7 +8702,7 @@ func TestQueryTxMetadata(t *testing.T) { require.Len(t, rows, 10) for i, row := range rows { - n := row.ValuesBySelector[EncodeSelector("", "mytbl", "_tx_metadata->n")].RawValue() + n := row.ValuesBySelector[EncodeSelector("", "mytbl", "_tx_metadata->'n'")].RawValue() require.Equal(t, float64(i+1), n) } diff --git a/embedded/sql/grouped_row_reader.go b/embedded/sql/grouped_row_reader.go index b2a7d5c9d4..8cf59420a1 100644 --- a/embedded/sql/grouped_row_reader.go +++ b/embedded/sql/grouped_row_reader.go @@ -152,33 +152,21 @@ func allAggregations(selectors []Selector) bool { func zeroForType(t SQLValueType) TypedValue { switch t { case IntegerType: - { - return &Integer{} - } + return &Integer{} case Float64Type: - { - return &Float64{} - } + return &Float64{} case BooleanType: - { - return &Bool{} - } + return &Bool{} case VarcharType: - { - return &Varchar{} - } + return &Varchar{} + case JSONType: + return &JSON{} case UUIDType: - { - return &UUID{} - } + return &UUID{} case BLOBType: - { - return &Blob{} - } + return &Blob{} case TimestampType: - { - return &Timestamp{} - } + return &Timestamp{} } return nil } diff --git a/embedded/sql/json_type.go b/embedded/sql/json_type.go index 1ac19565a9..31e7264b96 100644 --- a/embedded/sql/json_type.go +++ b/embedded/sql/json_type.go @@ -20,6 +20,14 @@ type JSON struct { val interface{} } +func NewJsonFromString(s string) (*JSON, error) { + var val interface{} + if err := json.Unmarshal([]byte(s), &val); err != nil { + return nil, err + } + return &JSON{val: val}, nil +} + func NewJson(val interface{}) *JSON { return &JSON{val: val} } @@ -82,6 +90,10 @@ func (v *JSON) RawValue() interface{} { } func (v *JSON) Compare(val TypedValue) (int, error) { + if val.IsNull() { + return val.Compare(v) + } + tv, ok := v.castToTypedValue() if !ok { return -1, fmt.Errorf("%w: comparison not defined for JSON %s", ErrNotComparableValues, v.primitiveType()) @@ -175,16 +187,16 @@ func (v *JSONSelector) alias() string { if v.ColSelector.as != "" { return v.ColSelector.as } - return v.string() + return v.String() } func (v *JSONSelector) 
resolve(implicitTable string) (string, string, string) { aggFn, table, _ := v.ColSelector.resolve(implicitTable) - return aggFn, table, v.string() + return aggFn, table, v.String() } -func (v *JSONSelector) string() string { - return fmt.Sprintf("%s->%s", v.ColSelector.col, strings.Join(v.fields, "->")) +func (v *JSONSelector) String() string { + return fmt.Sprintf("%s->'%s'", v.ColSelector.col, strings.Join(v.fields, "->")) } func (sel *JSONSelector) reduce(tx *SQLTx, row *Row, implicitTable string) (TypedValue, error) { diff --git a/embedded/sql/parser.go b/embedded/sql/parser.go index 11afcf3f80..046aeae282 100644 --- a/embedded/sql/parser.go +++ b/embedded/sql/parser.go @@ -102,6 +102,8 @@ var reservedWords = map[string]int{ "READ": READ, "READWRITE": READWRITE, "ADMIN": ADMIN, + "CHECK": CHECK, + "CONSTRAINT": CONSTRAINT, } var joinTypes = map[string]JoinType{ @@ -202,11 +204,11 @@ func (ar *aheadByteReader) NextByte() (byte, error) { return ar.nextChar, ar.nextErr } -func ParseString(sql string) ([]SQLStmt, error) { - return Parse(strings.NewReader(sql)) +func ParseSQLString(sql string) ([]SQLStmt, error) { + return ParseSQL(strings.NewReader(sql)) } -func Parse(r io.ByteReader) ([]SQLStmt, error) { +func ParseSQL(r io.ByteReader) ([]SQLStmt, error) { lexer := newLexer(r) yyParse(lexer) @@ -214,6 +216,18 @@ func Parse(r io.ByteReader) ([]SQLStmt, error) { return lexer.result, lexer.err } +func ParseExpFromString(exp string) (ValueExp, error) { + stmt := fmt.Sprintf("SELECT * FROM t WHERE %s", exp) + + res, err := ParseSQLString(stmt) + if err != nil { + return nil, err + } + + s := res[0].(*SelectStmt) + return s.where, nil +} + func newLexer(r io.ByteReader) *lexer { return &lexer{ r: newAheadByteReader(r), diff --git a/embedded/sql/parser_test.go b/embedded/sql/parser_test.go index 70841503b4..c18d83ac44 100644 --- a/embedded/sql/parser_test.go +++ b/embedded/sql/parser_test.go @@ -31,7 +31,7 @@ func init() { } func TestEmptyInput(t *testing.T) { - _, err := ParseString("") + _, err := ParseSQLString("") require.Error(t, err) } @@ -54,7 +54,7 @@ func TestCreateDatabaseStmt(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -82,7 +82,7 @@ func TestUseDatabaseStmt(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -179,7 +179,7 @@ func TestUseSnapshotStmt(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -293,6 +293,29 @@ func TestCreateTableStmt(t *testing.T) { expectedOutput: []SQLStmt{&CreateTableStmt{table: "table1"}}, expectedError: errors.New("syntax error: unexpected ')', expecting IDENTIFIER at position 21"), }, + { + input: "CREATE TABLE table1(id INTEGER, balance FLOAT, CONSTRAINT non_negative_balance CHECK (balance >= 0), PRIMARY KEY id)", + expectedOutput: []SQLStmt{ + &CreateTableStmt{ + table: "table1", + colsSpec: []*ColSpec{ + {colName: "id", colType: IntegerType}, + {colName: "balance", colType: Float64Type}, + }, + checks: []CheckConstraint{ + { + name: "non_negative_balance", + exp: &CmpBoolExp{ + op: GE, + left: 
&ColSelector{col: "balance"}, + right: &Integer{val: 0}, + }, + }, + }, + pkColNames: []string{"id"}, + }}, + expectedError: nil, + }, { input: "DROP TABLE table1", expectedOutput: []SQLStmt{ @@ -304,7 +327,7 @@ func TestCreateTableStmt(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -361,7 +384,7 @@ func TestCreateIndexStmt(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -417,7 +440,7 @@ func TestAlterTable(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -613,7 +636,7 @@ func TestInsertIntoStmt(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -700,7 +723,7 @@ func TestStmtSeparator(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -805,7 +828,7 @@ func TestTxStmt(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -1169,7 +1192,7 @@ func TestSelectStmt(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -1211,7 +1234,7 @@ func TestSelectUnionStmt(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -1262,7 +1285,7 @@ func TestAggFnStmt(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -1562,7 +1585,7 @@ func TestExpressions(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -1659,7 +1682,7 @@ func TestMultiLineStmts(t *testing.T) { } for i, tc := range testCases { - res, err := ParseString(tc.input) + res, err := ParseSQLString(tc.input) require.Equal(t, tc.expectedError, err, fmt.Sprintf("failed on iteration %d", i)) if tc.expectedError == nil { @@ -1683,7 +1706,7 @@ func TestFloatCornerCases(t *testing.T) { {"123" + strings.Repeat("1", 10000) + ".123", true, nil}, } { t.Run(fmt.Sprintf("%+v", d), func(t *testing.T) { - stmt, err := ParseString("INSERT INTO t1(v) VALUES(" + d.s + ")") + stmt, err := ParseSQLString("INSERT INTO t1(v) VALUES(" + 
d.s + ")") if d.invalid { require.Error(t, err) require.Contains(t, err.Error(), "syntax error") @@ -1704,5 +1727,25 @@ func TestFloatCornerCases(t *testing.T) { } }) } +} +func TestExprString(t *testing.T) { + exps := []string{ + "(1 + 1) / (2 * 5 - 10)", + "@param LIKE 'pattern'", + "((col1 AND (col2 < 10)) OR (@param = 3 AND (col4 = TRUE))) AND NOT (col5 = 'value' OR (2 + 2 != 4))", + "CAST (func_call(1, 'two', 2.5) AS TIMESTAMP)", + "col IN (TRUE, 1, 'test', 1.5)", + } + + for i, e := range exps { + t.Run(fmt.Sprintf("test_expression_%d", i+1), func(t *testing.T) { + exp, err := ParseExpFromString(e) + require.NoError(t, err) + + parsedExp, err := ParseExpFromString(exp.String()) + require.NoError(t, err) + require.Equal(t, exp, parsedExp) + }) + } } diff --git a/embedded/sql/sql_grammar.y b/embedded/sql/sql_grammar.y index 88f24aa92a..34a731cf7c 100644 --- a/embedded/sql/sql_grammar.y +++ b/embedded/sql/sql_grammar.y @@ -57,6 +57,7 @@ func setResult(l yyLexer, stmts []SQLStmt) { joins []*JoinSpec join *JoinSpec joinType JoinType + checks []CheckConstraint exp ValueExp binExp ValueExp err error @@ -72,7 +73,7 @@ func setResult(l yyLexer, stmts []SQLStmt) { } %token CREATE DROP USE DATABASE USER WITH PASSWORD READ READWRITE ADMIN SNAPSHOT HISTORY SINCE AFTER BEFORE UNTIL TX OF TIMESTAMP -%token TABLE UNIQUE INDEX ON ALTER ADD RENAME TO COLUMN PRIMARY KEY +%token TABLE UNIQUE INDEX ON ALTER ADD RENAME TO COLUMN CONSTRAINT PRIMARY KEY CHECK %token BEGIN TRANSACTION COMMIT ROLLBACK %token INSERT UPSERT INTO VALUES DELETE UPDATE SET CONFLICT DO NOTHING %token SELECT DISTINCT FROM JOIN HAVING WHERE GROUP BY LIMIT OFFSET ORDER ASC DESC AS UNION ALL @@ -132,6 +133,7 @@ func setResult(l yyLexer, stmts []SQLStmt) { %type opt_joins joins %type join %type opt_join_type +%type opt_checks %type exp opt_where opt_having boundexp %type binExp %type opt_groupby @@ -152,10 +154,10 @@ func setResult(l yyLexer, stmts []SQLStmt) { %% sql: sqlstmts -{ - $$ = $1 - setResult(yylex, $1) -} + { + $$ = $1 + setResult(yylex, $1) + } sqlstmts: sqlstmt opt_separator @@ -213,9 +215,9 @@ ddlstmt: $$ = &UseSnapshotStmt{period: $3} } | - CREATE TABLE opt_if_not_exists IDENTIFIER '(' colsSpec ',' PRIMARY KEY one_or_more_ids ')' + CREATE TABLE opt_if_not_exists IDENTIFIER '(' colsSpec ',' opt_checks PRIMARY KEY one_or_more_ids ')' { - $$ = &CreateTableStmt{ifNotExists: $3, table: $4, colsSpec: $6, pkColNames: $10} + $$ = &CreateTableStmt{ifNotExists: $3, table: $4, colsSpec: $6, checks: $8, pkColNames: $11} } | DROP TABLE IDENTIFIER @@ -262,6 +264,11 @@ ddlstmt: { $$ = &DropColumnStmt{table: $3, colName: $6} } +| + ALTER TABLE IDENTIFIER DROP CONSTRAINT IDENTIFIER + { + $$ = &DropConstraintStmt{table: $3, constraintName: $6} + } | CREATE USER IDENTIFIER WITH PASSWORD VARCHAR permission { @@ -943,6 +950,21 @@ opt_as: $$ = $2 } +opt_checks: + { + $$ = nil + } +| + CHECK exp ',' opt_checks + { + $$ = append([]CheckConstraint{{exp: $2}}, $4...) + } +| + CONSTRAINT IDENTIFIER CHECK exp ',' opt_checks + { + $$ = append([]CheckConstraint{{name: $2, exp: $4}}, $6...) 
+ } + exp: boundexp { diff --git a/embedded/sql/sql_parser.go b/embedded/sql/sql_parser.go index ee081ad5ad..b70588c9fc 100644 --- a/embedded/sql/sql_parser.go +++ b/embedded/sql/sql_parser.go @@ -43,6 +43,7 @@ type yySymType struct { joins []*JoinSpec join *JoinSpec joinType JoinType + checks []CheckConstraint exp ValueExp binExp ValueExp err error @@ -85,69 +86,71 @@ const ADD = 57370 const RENAME = 57371 const TO = 57372 const COLUMN = 57373 -const PRIMARY = 57374 -const KEY = 57375 -const BEGIN = 57376 -const TRANSACTION = 57377 -const COMMIT = 57378 -const ROLLBACK = 57379 -const INSERT = 57380 -const UPSERT = 57381 -const INTO = 57382 -const VALUES = 57383 -const DELETE = 57384 -const UPDATE = 57385 -const SET = 57386 -const CONFLICT = 57387 -const DO = 57388 -const NOTHING = 57389 -const SELECT = 57390 -const DISTINCT = 57391 -const FROM = 57392 -const JOIN = 57393 -const HAVING = 57394 -const WHERE = 57395 -const GROUP = 57396 -const BY = 57397 -const LIMIT = 57398 -const OFFSET = 57399 -const ORDER = 57400 -const ASC = 57401 -const DESC = 57402 -const AS = 57403 -const UNION = 57404 -const ALL = 57405 -const NOT = 57406 -const LIKE = 57407 -const IF = 57408 -const EXISTS = 57409 -const IN = 57410 -const IS = 57411 -const AUTO_INCREMENT = 57412 -const NULL = 57413 -const CAST = 57414 -const SCAST = 57415 -const SHOW = 57416 -const DATABASES = 57417 -const TABLES = 57418 -const USERS = 57419 -const NPARAM = 57420 -const PPARAM = 57421 -const JOINTYPE = 57422 -const LOP = 57423 -const CMPOP = 57424 -const IDENTIFIER = 57425 -const TYPE = 57426 -const INTEGER = 57427 -const FLOAT = 57428 -const VARCHAR = 57429 -const BOOLEAN = 57430 -const BLOB = 57431 -const AGGREGATE_FUNC = 57432 -const ERROR = 57433 -const DOT = 57434 -const ARROW = 57435 -const STMT_SEPARATOR = 57436 +const CONSTRAINT = 57374 +const PRIMARY = 57375 +const KEY = 57376 +const CHECK = 57377 +const BEGIN = 57378 +const TRANSACTION = 57379 +const COMMIT = 57380 +const ROLLBACK = 57381 +const INSERT = 57382 +const UPSERT = 57383 +const INTO = 57384 +const VALUES = 57385 +const DELETE = 57386 +const UPDATE = 57387 +const SET = 57388 +const CONFLICT = 57389 +const DO = 57390 +const NOTHING = 57391 +const SELECT = 57392 +const DISTINCT = 57393 +const FROM = 57394 +const JOIN = 57395 +const HAVING = 57396 +const WHERE = 57397 +const GROUP = 57398 +const BY = 57399 +const LIMIT = 57400 +const OFFSET = 57401 +const ORDER = 57402 +const ASC = 57403 +const DESC = 57404 +const AS = 57405 +const UNION = 57406 +const ALL = 57407 +const NOT = 57408 +const LIKE = 57409 +const IF = 57410 +const EXISTS = 57411 +const IN = 57412 +const IS = 57413 +const AUTO_INCREMENT = 57414 +const NULL = 57415 +const CAST = 57416 +const SCAST = 57417 +const SHOW = 57418 +const DATABASES = 57419 +const TABLES = 57420 +const USERS = 57421 +const NPARAM = 57422 +const PPARAM = 57423 +const JOINTYPE = 57424 +const LOP = 57425 +const CMPOP = 57426 +const IDENTIFIER = 57427 +const TYPE = 57428 +const INTEGER = 57429 +const FLOAT = 57430 +const VARCHAR = 57431 +const BOOLEAN = 57432 +const BLOB = 57433 +const AGGREGATE_FUNC = 57434 +const ERROR = 57435 +const DOT = 57436 +const ARROW = 57437 +const STMT_SEPARATOR = 57438 var yyToknames = [...]string{ "$end", @@ -181,8 +184,10 @@ var yyToknames = [...]string{ "RENAME", "TO", "COLUMN", + "CONSTRAINT", "PRIMARY", "KEY", + "CHECK", "BEGIN", "TRANSACTION", "COMMIT", @@ -267,239 +272,249 @@ var yyExca = [...]int16{ 1, -1, -2, 0, -1, 93, - 65, 164, - 68, 164, - -2, 152, - -1, 231, - 51, 128, - -2, 123, - -1, 272, 
- 51, 128, - -2, 125, + 67, 168, + 70, 168, + -2, 156, + -1, 233, + 53, 129, + -2, 124, + -1, 274, + 53, 129, + -2, 126, } const yyPrivate = 57344 -const yyLast = 452 +const yyLast = 481 var yyAct = [...]int16{ - 92, 364, 98, 78, 265, 129, 225, 171, 295, 299, - 177, 107, 168, 213, 294, 271, 214, 6, 121, 248, - 194, 57, 124, 20, 335, 286, 182, 285, 223, 258, - 223, 346, 223, 223, 339, 223, 340, 320, 318, 95, - 287, 259, 97, 224, 336, 330, 110, 106, 300, 19, - 77, 91, 321, 108, 109, 150, 319, 310, 111, 281, - 101, 102, 103, 104, 105, 79, 301, 148, 149, 279, - 278, 96, 276, 257, 255, 243, 100, 296, 180, 181, - 183, 144, 145, 147, 146, 242, 185, 222, 211, 126, - 135, 254, 143, 247, 159, 95, 154, 155, 97, 159, - 238, 157, 110, 106, 179, 237, 236, 235, 196, 108, - 109, 160, 158, 156, 111, 137, 101, 102, 103, 104, - 105, 79, 134, 173, 120, 119, 22, 96, 363, 150, - 80, 186, 100, 170, 324, 256, 357, 79, 191, 184, - 80, 188, 174, 150, 75, 199, 200, 201, 202, 203, - 204, 133, 150, 122, 190, 148, 149, 147, 146, 212, - 215, 323, 258, 244, 148, 149, 223, 128, 135, 144, - 145, 147, 146, 216, 210, 241, 114, 230, 144, 145, - 147, 146, 228, 217, 80, 231, 90, 80, 317, 221, - 239, 79, 240, 198, 175, 316, 95, 233, 229, 97, - 232, 189, 323, 110, 106, 30, 291, 282, 253, 246, - 108, 109, 31, 245, 131, 111, 150, 101, 102, 103, - 104, 105, 79, 150, 209, 169, 62, 267, 96, 149, - 274, 307, 293, 100, 280, 269, 130, 176, 275, 263, - 125, 261, 144, 145, 147, 146, 220, 112, 219, 144, - 145, 147, 146, 218, 195, 215, 197, 192, 195, 292, - 288, 187, 161, 138, 113, 283, 83, 298, 10, 12, - 11, 290, 289, 81, 302, 141, 142, 73, 41, 297, - 66, 29, 306, 63, 308, 309, 65, 311, 304, 303, - 64, 13, 61, 56, 55, 20, 45, 215, 7, 334, - 8, 9, 14, 15, 153, 252, 16, 17, 234, 333, - 325, 150, 20, 152, 326, 136, 329, 184, 315, 206, - 82, 19, 331, 51, 207, 314, 205, 208, 72, 42, - 365, 366, 337, 349, 266, 226, 345, 344, 19, 356, - 343, 20, 328, 350, 50, 122, 342, 352, 43, 44, - 46, 305, 127, 39, 358, 355, 48, 354, 347, 361, - 359, 362, 338, 70, 178, 264, 367, 19, 262, 368, - 52, 53, 38, 37, 23, 312, 163, 164, 165, 162, - 260, 34, 40, 117, 353, 268, 139, 84, 227, 54, - 36, 2, 24, 28, 172, 277, 32, 166, 33, 85, - 59, 60, 67, 68, 69, 35, 115, 116, 25, 27, - 26, 89, 88, 140, 49, 249, 250, 251, 118, 86, - 21, 322, 123, 151, 313, 332, 348, 360, 284, 327, - 94, 93, 341, 273, 272, 270, 87, 58, 71, 47, - 132, 76, 74, 99, 351, 167, 193, 18, 5, 4, - 3, 1, + 92, 377, 98, 78, 267, 172, 227, 129, 284, 299, + 303, 107, 178, 169, 214, 273, 298, 215, 6, 121, + 195, 250, 57, 124, 343, 20, 290, 183, 289, 225, + 260, 225, 225, 364, 225, 225, 304, 348, 326, 324, + 291, 95, 261, 226, 97, 347, 344, 336, 110, 106, + 77, 19, 327, 325, 305, 108, 109, 91, 150, 314, + 111, 283, 101, 102, 103, 104, 105, 79, 281, 280, + 148, 149, 278, 96, 259, 257, 245, 244, 100, 135, + 224, 181, 182, 184, 144, 145, 147, 146, 159, 186, + 126, 212, 143, 300, 256, 249, 154, 155, 159, 240, + 239, 157, 238, 95, 237, 197, 97, 180, 160, 158, + 110, 106, 156, 137, 134, 120, 119, 108, 109, 22, + 376, 369, 111, 174, 101, 102, 103, 104, 105, 79, + 122, 187, 329, 189, 171, 96, 150, 80, 192, 185, + 100, 150, 260, 175, 150, 200, 201, 202, 203, 204, + 205, 191, 330, 148, 149, 246, 225, 128, 133, 213, + 216, 135, 144, 145, 147, 146, 365, 144, 145, 147, + 146, 176, 147, 146, 217, 211, 114, 243, 232, 80, + 80, 230, 95, 218, 233, 97, 79, 79, 223, 110, + 106, 199, 190, 241, 75, 242, 108, 109, 323, 235, + 231, 111, 234, 101, 102, 103, 104, 105, 79, 255, + 90, 248, 322, 287, 96, 295, 286, 247, 258, 100, + 
210, 150, 329, 80, 30, 62, 150, 170, 318, 269, + 311, 31, 297, 148, 149, 177, 131, 271, 148, 149, + 342, 282, 277, 265, 125, 263, 338, 144, 145, 147, + 146, 222, 144, 145, 147, 146, 221, 216, 130, 220, + 219, 296, 196, 292, 198, 193, 196, 285, 150, 302, + 188, 112, 161, 138, 294, 293, 306, 113, 83, 81, + 148, 149, 276, 301, 63, 150, 310, 317, 312, 313, + 308, 315, 307, 73, 144, 145, 147, 146, 149, 141, + 142, 216, 29, 41, 66, 65, 64, 61, 56, 55, + 20, 144, 145, 147, 146, 331, 153, 254, 341, 332, + 321, 185, 335, 207, 236, 152, 150, 320, 208, 72, + 206, 209, 136, 51, 82, 42, 19, 378, 379, 345, + 356, 352, 359, 268, 353, 45, 228, 355, 354, 368, + 351, 360, 10, 12, 11, 362, 334, 122, 350, 20, + 309, 366, 127, 39, 370, 367, 48, 357, 50, 266, + 346, 374, 372, 375, 371, 13, 70, 264, 179, 380, + 38, 37, 381, 23, 7, 19, 8, 9, 14, 15, + 339, 337, 16, 17, 52, 53, 40, 316, 20, 43, + 44, 46, 287, 165, 166, 286, 163, 164, 162, 117, + 262, 363, 24, 28, 270, 139, 67, 68, 69, 34, + 84, 229, 54, 85, 19, 36, 2, 279, 25, 27, + 26, 167, 115, 116, 32, 118, 33, 89, 88, 140, + 35, 59, 60, 251, 252, 253, 86, 173, 21, 49, + 328, 123, 151, 319, 340, 358, 373, 288, 333, 94, + 93, 349, 275, 274, 272, 87, 58, 71, 47, 132, + 76, 74, 99, 361, 168, 194, 18, 5, 4, 3, + 1, } var yyPact = [...]int16{ - 264, -1000, -1000, 26, -1000, -1000, -1000, 339, -1000, -1000, - 385, 198, 373, 382, 333, 332, 303, 195, 267, 273, - 307, -1000, 264, -1000, 257, 257, 257, 364, 211, -1000, - 210, 384, 209, 200, 207, 203, 197, 195, 195, 195, - 319, -1000, 265, -1000, -1000, 194, -1000, 47, -1000, -1000, - 190, 256, 183, 361, 257, 410, -1000, -1000, 393, 31, - 31, -1000, 181, 84, -1000, 378, 409, 24, 23, 292, - 157, 247, -1000, -1000, 302, -1000, 73, 153, 58, 21, - 76, -1000, 248, 14, 180, 360, 403, -1000, 31, 31, - -1000, 132, 83, 240, -1000, 132, 132, 12, -1000, -1000, - 132, -1000, -1000, -1000, -1000, -1000, 11, -1000, -1000, -1000, - -1000, -2, -1000, 10, 179, 348, 346, 347, 387, 142, - 142, 388, 132, 100, -1000, 155, -1000, 3, 101, -1000, - -1000, 178, 48, 114, 57, 174, -1000, 171, 7, 173, - 106, -1000, -1000, 83, 132, 132, 132, 132, 132, 132, - 255, 259, 140, -1000, 147, 60, 247, -14, 132, 132, - 142, -1000, 171, 170, 165, 163, 102, -15, 72, -1000, - -59, 279, 363, 83, 388, 157, 132, 388, 384, 293, - 6, 5, 4, -1, 153, -7, 153, -1000, 88, -1000, - -17, -27, -1000, 69, -1000, 129, 142, -8, 404, 60, - 60, 242, 242, 147, 154, -1000, 234, 132, -10, -1000, - -28, -1000, 74, -29, 68, 83, -61, -1000, -1000, 350, - -1000, 404, 327, 156, 324, 277, 132, 359, 279, -1000, - 83, 150, 153, -30, 374, -32, -33, 151, -43, -1000, - -1000, -1000, -1000, -1000, 175, -76, -62, 142, -1000, -1000, - -1000, -1000, -1000, 147, -25, -1000, 122, -1000, 132, -1000, - 149, -1000, -24, -1000, -24, -1000, 132, 83, -35, 277, - 292, -1000, 150, 300, -1000, -1000, 153, 148, 153, 153, - -45, 153, 342, -1000, 254, 110, 103, -1000, -64, -46, - -65, -50, 83, -1000, 108, -1000, 132, 67, 83, -1000, - -1000, 142, -1000, 288, -1000, 3, -1000, -57, -1000, -1000, - -1000, -1000, -35, 239, -1000, 228, -80, -58, -1000, -1000, - -1000, -1000, -1000, -24, 317, -68, -66, 294, 285, 388, - 153, -71, -1000, -1000, -1000, -1000, -1000, -1000, 312, -1000, - -1000, 275, 132, 104, 358, -1000, -1000, 310, 279, 284, - 83, 42, -1000, 132, -1000, 277, 101, 104, 83, -1000, - 34, 271, -1000, 101, -1000, -1000, -1000, 271, -1000, + 348, -1000, -1000, 17, -1000, -1000, -1000, 346, -1000, -1000, + 405, 217, 411, 417, 339, 338, 311, 218, 271, 322, + 315, -1000, 348, -1000, 265, 265, 265, 397, 224, 
-1000, + 223, 425, 222, 199, 221, 220, 219, 218, 218, 218, + 330, -1000, 264, -1000, -1000, 208, -1000, 95, -1000, -1000, + 194, 268, 193, 394, 265, 437, -1000, -1000, 419, 37, + 37, -1000, 192, 82, -1000, 404, 426, 13, 12, 302, + 159, 260, -1000, -1000, 310, -1000, 61, 173, 63, 11, + 67, -1000, 263, 10, 188, 389, 429, -1000, 37, 37, + -1000, 116, 197, 250, -1000, 116, 116, 9, -1000, -1000, + 116, -1000, -1000, -1000, -1000, -1000, 6, -1000, -1000, -1000, + -1000, -15, -1000, 5, 187, 377, 376, 372, 421, 142, + 142, 441, 116, 75, -1000, 151, -1000, 4, 94, -1000, + -1000, 185, 38, 103, 52, 180, -1000, 177, 2, 179, + 102, -1000, -1000, 197, 116, 116, 116, 116, 116, 116, + 257, 261, 134, -1000, 214, 73, 260, -13, 116, 116, + 142, -1000, 177, 175, 174, 171, 166, 99, -24, 60, + -1000, -61, 288, 396, 197, 441, 159, 116, 441, 425, + 309, 1, -1, -3, -4, 173, -5, 173, -1000, 88, + -1000, -27, -28, -1000, 59, -1000, 131, 142, -8, 432, + 73, 73, 255, 255, 214, 65, -1000, 244, 116, -9, + -1000, -29, -1000, 155, -30, 46, 197, -62, -1000, -1000, + 380, -1000, -1000, 432, 334, 158, 326, 284, 116, 388, + 288, -1000, 197, 200, 173, -32, 406, -35, -36, 156, + -43, -1000, -1000, -1000, -1000, -1000, 181, -77, -64, 142, + -1000, -1000, -1000, -1000, -1000, 214, -25, -1000, 129, -1000, + 116, -1000, 147, -1000, -10, -1000, -10, -1000, 116, 197, + -49, 284, 302, -1000, 200, 307, -1000, -1000, 173, 145, + 173, 173, -45, 173, 364, -1000, 116, 143, 254, 125, + 111, -1000, -65, -51, -66, -52, 197, -1000, 126, -1000, + 116, 36, 197, -1000, -1000, 142, -1000, 300, -1000, 4, + -1000, -57, -1000, -1000, -1000, -1000, 357, 150, 355, 246, + -1000, 167, -82, -58, -1000, -1000, -1000, -1000, -1000, -10, + 323, -59, -67, 304, 293, 441, 173, -49, 370, 116, + -1000, -1000, -1000, -1000, -1000, -1000, 319, -1000, -1000, 282, + 116, 138, 385, -1000, -71, -1000, 70, 312, 288, 292, + 197, 25, -1000, 116, -1000, 370, -1000, 284, 94, 138, + 197, -1000, -1000, 24, 276, -1000, 94, -1000, -1000, -1000, + 276, -1000, } var yyPgo = [...]int16{ - 0, 451, 391, 450, 449, 448, 17, 447, 446, 20, - 12, 9, 445, 444, 14, 8, 16, 13, 443, 11, - 2, 442, 441, 440, 3, 439, 438, 10, 364, 21, - 437, 436, 186, 435, 15, 434, 433, 0, 18, 432, - 431, 430, 429, 6, 4, 428, 5, 427, 426, 1, - 7, 344, 425, 424, 423, 22, 422, 421, 19, 420, + 0, 480, 426, 479, 478, 477, 18, 476, 475, 20, + 13, 10, 474, 473, 16, 9, 17, 14, 472, 11, + 2, 471, 470, 469, 3, 468, 467, 12, 378, 22, + 466, 465, 210, 464, 15, 463, 462, 8, 0, 19, + 461, 460, 459, 458, 6, 4, 457, 7, 456, 455, + 1, 5, 368, 454, 453, 452, 23, 451, 450, 21, + 448, } var yyR1 = [...]int8{ - 0, 1, 2, 2, 59, 59, 3, 3, 3, 4, + 0, 1, 2, 2, 60, 60, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 58, 58, 58, 58, 51, 51, 11, 11, 5, 5, - 5, 5, 57, 57, 56, 56, 55, 12, 12, 14, - 14, 15, 10, 10, 13, 13, 17, 17, 16, 16, - 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, - 19, 8, 8, 9, 45, 45, 45, 52, 52, 53, - 53, 53, 6, 6, 6, 6, 6, 6, 7, 26, - 26, 25, 25, 21, 21, 22, 22, 20, 20, 20, - 20, 23, 23, 24, 24, 27, 27, 27, 27, 27, - 27, 27, 27, 28, 29, 30, 30, 30, 31, 31, - 31, 32, 32, 33, 33, 34, 34, 35, 36, 36, - 38, 38, 42, 42, 39, 39, 43, 43, 44, 44, - 48, 48, 50, 50, 47, 47, 49, 49, 49, 46, - 46, 46, 37, 37, 37, 37, 37, 37, 37, 37, - 40, 40, 40, 40, 54, 54, 41, 41, 41, 41, - 41, 41, 41, 41, + 4, 59, 59, 59, 59, 52, 52, 11, 11, 5, + 5, 5, 5, 58, 58, 57, 57, 56, 12, 12, + 14, 14, 15, 10, 10, 13, 13, 17, 17, 16, + 16, 18, 18, 18, 18, 18, 18, 18, 18, 18, + 18, 19, 8, 8, 9, 46, 46, 46, 53, 53, + 54, 54, 54, 6, 
6, 6, 6, 6, 6, 7, + 26, 26, 25, 25, 21, 21, 22, 22, 20, 20, + 20, 20, 23, 23, 24, 24, 27, 27, 27, 27, + 27, 27, 27, 27, 28, 29, 30, 30, 30, 31, + 31, 31, 32, 32, 33, 33, 34, 34, 35, 36, + 36, 39, 39, 43, 43, 40, 40, 44, 44, 45, + 45, 49, 49, 51, 51, 48, 48, 50, 50, 50, + 47, 47, 47, 37, 37, 37, 38, 38, 38, 38, + 38, 38, 38, 38, 41, 41, 41, 41, 55, 55, + 42, 42, 42, 42, 42, 42, 42, 42, } var yyR2 = [...]int8{ 0, 1, 2, 3, 0, 1, 1, 1, 1, 2, - 1, 1, 1, 4, 2, 3, 3, 11, 3, 8, - 9, 7, 5, 6, 6, 8, 6, 7, 7, 3, - 0, 1, 1, 1, 0, 3, 1, 3, 9, 8, - 7, 8, 0, 4, 1, 3, 3, 0, 1, 1, - 3, 3, 1, 3, 1, 3, 0, 1, 1, 3, - 1, 1, 1, 1, 1, 6, 1, 1, 1, 1, - 4, 1, 3, 5, 0, 3, 3, 0, 1, 0, - 1, 2, 1, 4, 2, 2, 3, 2, 13, 0, - 1, 0, 1, 1, 1, 2, 4, 1, 2, 4, - 4, 2, 3, 1, 3, 3, 4, 4, 4, 4, - 4, 2, 6, 1, 2, 0, 2, 2, 0, 2, - 2, 2, 1, 0, 1, 1, 2, 6, 0, 1, - 0, 2, 0, 3, 0, 2, 0, 2, 0, 2, - 0, 3, 0, 4, 2, 4, 0, 1, 1, 0, - 1, 2, 1, 1, 2, 2, 4, 4, 6, 6, - 1, 1, 3, 3, 0, 1, 3, 3, 3, 3, - 3, 3, 3, 4, + 1, 1, 1, 4, 2, 3, 3, 12, 3, 8, + 9, 7, 5, 6, 6, 8, 6, 6, 7, 7, + 3, 0, 1, 1, 1, 0, 3, 1, 3, 9, + 8, 7, 8, 0, 4, 1, 3, 3, 0, 1, + 1, 3, 3, 1, 3, 1, 3, 0, 1, 1, + 3, 1, 1, 1, 1, 1, 6, 1, 1, 1, + 1, 4, 1, 3, 5, 0, 3, 3, 0, 1, + 0, 1, 2, 1, 4, 2, 2, 3, 2, 13, + 0, 1, 0, 1, 1, 1, 2, 4, 1, 2, + 4, 4, 2, 3, 1, 3, 3, 4, 4, 4, + 4, 4, 2, 6, 1, 2, 0, 2, 2, 0, + 2, 2, 2, 1, 0, 1, 1, 2, 6, 0, + 1, 0, 2, 0, 3, 0, 2, 0, 2, 0, + 2, 0, 3, 0, 4, 2, 4, 0, 1, 1, + 0, 1, 2, 0, 4, 6, 1, 1, 2, 2, + 4, 4, 6, 6, 1, 1, 3, 3, 0, 1, + 3, 3, 3, 3, 3, 3, 3, 4, } var yyChk = [...]int16{ - -1000, -1, -2, -3, -4, -5, -6, 34, 36, 37, - 4, 6, 5, 27, 38, 39, 42, 43, -7, 74, - 48, -59, 100, 35, 7, 23, 25, 24, 8, 83, - 7, 14, 23, 25, 8, 23, 8, 40, 40, 50, - -28, 83, 62, 75, 76, 23, 77, -25, 49, -2, - -51, 66, -51, -51, 25, 83, 83, -29, -30, 16, - 17, 83, 26, 83, 83, 83, 83, -28, -28, -28, - 44, -26, 63, 83, -21, 97, -22, -20, -24, 90, - 83, 83, 64, 83, 26, -51, 9, -31, 19, 18, - -32, 20, -37, -40, -41, 64, 96, 67, -20, -18, - 101, 85, 86, 87, 88, 89, 72, -19, 78, 79, - 71, 83, -32, 83, 92, 28, 29, 5, 9, 101, - 101, -38, 53, -56, -55, 83, -6, 50, 94, -46, - 83, 61, -23, 93, 101, 92, 67, 101, 83, 26, - 10, -32, -32, -37, 95, 96, 98, 97, 81, 82, - 69, -54, 73, 64, -37, -37, 101, -37, 101, 101, - 101, 83, 31, 30, 31, 31, 10, -12, -10, 83, - -10, -50, 6, -37, -38, 94, 82, -27, -28, 101, - 75, 76, 23, 77, -19, 83, -20, 83, 93, 87, - 97, -24, 83, -8, -9, 83, 101, 83, 87, -37, - -37, -37, -37, -37, -37, 71, 64, 65, 68, 84, - -6, 102, -37, -17, -16, -37, -10, -9, 83, 83, - 83, 87, 102, 94, 102, -43, 56, 25, -50, -55, - -37, -50, -29, -6, 15, 101, 101, 101, 101, -46, - -46, 87, 102, 102, 94, 84, -10, 101, -58, 11, - 12, 13, 71, -37, 101, 102, 61, 102, 94, 102, - 30, -58, 41, 83, 41, -44, 57, -37, 26, -43, - -33, -34, -35, -36, 80, -46, 102, 21, 102, 102, - 83, 102, 32, -9, -45, 103, 101, 102, -10, -6, - -16, 84, -37, 83, -14, -15, 101, -14, -37, -11, - 83, 101, -44, -38, -34, 51, -46, 83, -46, -46, - 102, -46, 33, -53, 71, 64, 85, 85, 102, 102, - 102, 102, -57, 94, 26, -17, -10, -42, 54, -27, - 102, -11, -52, 70, 71, 104, 102, -15, 45, 102, - 102, -39, 52, 55, -50, -46, 102, 46, -48, 58, - -37, -13, -24, 26, 47, -43, 55, 94, -37, -44, - -47, -20, -24, 94, -49, 59, 60, -20, -49, + -1000, -1, -2, -3, -4, -5, -6, 36, 38, 39, + 4, 6, 5, 27, 40, 41, 44, 45, -7, 76, + 50, -60, 102, 37, 7, 23, 25, 24, 8, 85, + 7, 14, 23, 25, 8, 23, 8, 42, 42, 52, + -28, 85, 64, 77, 78, 23, 79, -25, 51, -2, + -52, 68, -52, -52, 25, 85, 85, -29, -30, 16, + 17, 85, 26, 85, 85, 85, 85, -28, -28, -28, + 46, 
-26, 65, 85, -21, 99, -22, -20, -24, 92, + 85, 85, 66, 85, 26, -52, 9, -31, 19, 18, + -32, 20, -38, -41, -42, 66, 98, 69, -20, -18, + 103, 87, 88, 89, 90, 91, 74, -19, 80, 81, + 73, 85, -32, 85, 94, 28, 29, 5, 9, 103, + 103, -39, 55, -57, -56, 85, -6, 52, 96, -47, + 85, 63, -23, 95, 103, 94, 69, 103, 85, 26, + 10, -32, -32, -38, 97, 98, 100, 99, 83, 84, + 71, -55, 75, 66, -38, -38, 103, -38, 103, 103, + 103, 85, 31, 30, 31, 31, 32, 10, -12, -10, + 85, -10, -51, 6, -38, -39, 96, 84, -27, -28, + 103, 77, 78, 23, 79, -19, 85, -20, 85, 95, + 89, 99, -24, 85, -8, -9, 85, 103, 85, 89, + -38, -38, -38, -38, -38, -38, 73, 66, 67, 70, + 86, -6, 104, -38, -17, -16, -38, -10, -9, 85, + 85, 85, 85, 89, 104, 96, 104, -44, 58, 25, + -51, -56, -38, -51, -29, -6, 15, 103, 103, 103, + 103, -47, -47, 89, 104, 104, 96, 86, -10, 103, + -59, 11, 12, 13, 73, -38, 103, 104, 63, 104, + 96, 104, 30, -59, 43, 85, 43, -45, 59, -38, + 26, -44, -33, -34, -35, -36, 82, -47, 104, 21, + 104, 104, 85, 104, -37, -9, 35, 32, -46, 105, + 103, 104, -10, -6, -16, 86, -38, 85, -14, -15, + 103, -14, -38, -11, 85, 103, -45, -39, -34, 53, + -47, 85, -47, -47, 104, -47, 33, -38, 85, -54, + 73, 66, 87, 87, 104, 104, 104, 104, -58, 96, + 26, -17, -10, -43, 56, -27, 104, 34, 96, 35, + -53, 72, 73, 106, 104, -15, 47, 104, 104, -40, + 54, 57, -51, -47, -11, -37, -38, 48, -49, 60, + -38, -13, -24, 26, 104, 96, 49, -44, 57, 96, + -38, -37, -45, -48, -20, -24, 96, -50, 61, 62, + -20, -50, } var yyDef = [...]int16{ 0, -2, 1, 4, 6, 7, 8, 10, 11, 12, - 0, 0, 0, 0, 0, 0, 0, 0, 82, 0, - 91, 2, 5, 9, 34, 34, 34, 0, 0, 14, - 0, 115, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 113, 89, 84, 85, 0, 87, 0, 92, 3, - 0, 0, 0, 0, 34, 0, 15, 16, 118, 0, - 0, 18, 0, 0, 29, 0, 0, 0, 0, 130, - 0, 0, 90, 86, 0, 93, 94, 149, 97, 0, - 103, 13, 0, 0, 0, 0, 0, 114, 0, 0, - 116, 0, 122, -2, 153, 0, 0, 0, 160, 161, - 0, 60, 61, 62, 63, 64, 0, 66, 67, 68, - 69, 103, 117, 0, 0, 0, 0, 0, 0, 47, - 0, 142, 0, 130, 44, 0, 83, 0, 0, 95, - 150, 0, 98, 0, 0, 0, 35, 0, 0, 0, - 0, 119, 120, 121, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 165, 154, 155, 0, 0, 0, 56, - 0, 22, 0, 0, 0, 0, 0, 0, 48, 52, - 0, 136, 0, 131, 142, 0, 0, 142, 115, 0, - 0, 0, 0, 0, 149, 113, 149, 151, 0, 101, - 0, 0, 104, 0, 71, 0, 0, 0, 30, 166, - 167, 168, 169, 170, 171, 172, 0, 0, 0, 163, - 0, 162, 0, 0, 57, 58, 0, 23, 24, 0, - 26, 30, 0, 0, 0, 138, 0, 0, 136, 45, - 46, -2, 149, 0, 0, 0, 0, 0, 0, 111, - 96, 102, 99, 100, 0, 74, 0, 0, 27, 31, - 32, 33, 173, 156, 0, 157, 0, 70, 0, 21, - 0, 28, 0, 53, 0, 40, 0, 137, 0, 138, - 130, 124, -2, 0, 129, 105, 149, 0, 149, 149, - 0, 149, 0, 72, 79, 0, 0, 19, 0, 0, - 0, 0, 59, 25, 42, 49, 56, 39, 139, 143, - 36, 0, 41, 132, 126, 0, 106, 0, 107, 108, - 109, 110, 0, 77, 80, 0, 0, 0, 20, 158, - 159, 65, 38, 0, 0, 0, 0, 134, 0, 142, - 149, 0, 73, 78, 81, 75, 76, 50, 0, 51, - 37, 140, 0, 0, 0, 112, 17, 0, 136, 0, - 135, 133, 54, 0, 43, 138, 0, 0, 127, 88, - 141, 146, 55, 0, 144, 147, 148, 146, 145, + 0, 0, 0, 0, 0, 0, 0, 0, 83, 0, + 92, 2, 5, 9, 35, 35, 35, 0, 0, 14, + 0, 116, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 114, 90, 85, 86, 0, 88, 0, 93, 3, + 0, 0, 0, 0, 35, 0, 15, 16, 119, 0, + 0, 18, 0, 0, 30, 0, 0, 0, 0, 131, + 0, 0, 91, 87, 0, 94, 95, 150, 98, 0, + 104, 13, 0, 0, 0, 0, 0, 115, 0, 0, + 117, 0, 123, -2, 157, 0, 0, 0, 164, 165, + 0, 61, 62, 63, 64, 65, 0, 67, 68, 69, + 70, 104, 118, 0, 0, 0, 0, 0, 0, 48, + 0, 143, 0, 131, 45, 0, 84, 0, 0, 96, + 151, 0, 99, 0, 0, 0, 36, 0, 0, 0, + 0, 120, 121, 122, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 169, 158, 159, 0, 0, 0, 57, + 0, 22, 0, 0, 0, 0, 0, 0, 0, 49, + 53, 0, 
137, 0, 132, 143, 0, 0, 143, 116, + 0, 0, 0, 0, 0, 150, 114, 150, 152, 0, + 102, 0, 0, 105, 0, 72, 0, 0, 0, 31, + 170, 171, 172, 173, 174, 175, 176, 0, 0, 0, + 167, 0, 166, 0, 0, 58, 59, 0, 23, 24, + 0, 26, 27, 31, 0, 0, 0, 139, 0, 0, + 137, 46, 47, -2, 150, 0, 0, 0, 0, 0, + 0, 112, 97, 103, 100, 101, 153, 75, 0, 0, + 28, 32, 33, 34, 177, 160, 0, 161, 0, 71, + 0, 21, 0, 29, 0, 54, 0, 41, 0, 138, + 0, 139, 131, 125, -2, 0, 130, 106, 150, 0, + 150, 150, 0, 150, 0, 73, 0, 0, 80, 0, + 0, 19, 0, 0, 0, 0, 60, 25, 43, 50, + 57, 40, 140, 144, 37, 0, 42, 133, 127, 0, + 107, 0, 108, 109, 110, 111, 0, 0, 0, 78, + 81, 0, 0, 0, 20, 162, 163, 66, 39, 0, + 0, 0, 0, 135, 0, 143, 150, 0, 153, 0, + 74, 79, 82, 76, 77, 51, 0, 52, 38, 141, + 0, 0, 0, 113, 0, 154, 0, 0, 137, 0, + 136, 134, 55, 0, 17, 153, 44, 139, 0, 0, + 128, 155, 89, 142, 147, 56, 0, 145, 148, 149, + 147, 146, } var yyTok1 = [...]int8{ @@ -507,12 +522,12 @@ var yyTok1 = [...]int8{ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 101, 102, 97, 95, 94, 96, 99, 98, 3, 3, + 103, 104, 99, 97, 96, 98, 101, 100, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 103, 3, 104, + 3, 105, 3, 106, } var yyTok2 = [...]int8{ @@ -525,7 +540,7 @@ var yyTok2 = [...]int8{ 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, - 92, 93, 100, + 92, 93, 94, 95, 102, } var yyTok3 = [...]int8{ @@ -928,9 +943,9 @@ yydefault: yyVAL.stmt = &UseSnapshotStmt{period: yyDollar[3].period} } case 17: - yyDollar = yyS[yypt-11 : yypt+1] + yyDollar = yyS[yypt-12 : yypt+1] { - yyVAL.stmt = &CreateTableStmt{ifNotExists: yyDollar[3].boolean, table: yyDollar[4].id, colsSpec: yyDollar[6].colsSpec, pkColNames: yyDollar[10].ids} + yyVAL.stmt = &CreateTableStmt{ifNotExists: yyDollar[3].boolean, table: yyDollar[4].id, colsSpec: yyDollar[6].colsSpec, checks: yyDollar[8].checks, pkColNames: yyDollar[11].ids} } case 18: yyDollar = yyS[yypt-3 : yypt+1] @@ -978,286 +993,291 @@ yydefault: yyVAL.stmt = &DropColumnStmt{table: yyDollar[3].id, colName: yyDollar[6].id} } case 27: + yyDollar = yyS[yypt-6 : yypt+1] + { + yyVAL.stmt = &DropConstraintStmt{table: yyDollar[3].id, constraintName: yyDollar[6].id} + } + case 28: yyDollar = yyS[yypt-7 : yypt+1] { yyVAL.stmt = &CreateUserStmt{username: yyDollar[3].id, password: yyDollar[6].str, permission: yyDollar[7].permission} } - case 28: + case 29: yyDollar = yyS[yypt-7 : yypt+1] { yyVAL.stmt = &AlterUserStmt{username: yyDollar[3].id, password: yyDollar[6].str, permission: yyDollar[7].permission} } - case 29: + case 30: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.stmt = &DropUserStmt{username: yyDollar[3].id} } - case 30: + case 31: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.permission = PermissionReadWrite } - case 31: + case 32: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.permission = PermissionReadOnly } - case 32: + case 33: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.permission = PermissionReadWrite } - case 33: + case 34: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.permission = PermissionAdmin } - case 34: + case 35: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.boolean = false } - case 35: + case 36: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.boolean = true } - case 36: + case 37: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.ids = []string{yyDollar[1].id} } - case 37: + case 38: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.ids = yyDollar[2].ids } - case 38: + case 39: yyDollar = yyS[yypt-9 : 
yypt+1] { yyVAL.stmt = &UpsertIntoStmt{isInsert: true, tableRef: yyDollar[3].tableRef, cols: yyDollar[5].ids, rows: yyDollar[8].rows, onConflict: yyDollar[9].onConflict} } - case 39: + case 40: yyDollar = yyS[yypt-8 : yypt+1] { yyVAL.stmt = &UpsertIntoStmt{tableRef: yyDollar[3].tableRef, cols: yyDollar[5].ids, rows: yyDollar[8].rows} } - case 40: + case 41: yyDollar = yyS[yypt-7 : yypt+1] { yyVAL.stmt = &DeleteFromStmt{tableRef: yyDollar[3].tableRef, where: yyDollar[4].exp, indexOn: yyDollar[5].ids, limit: yyDollar[6].exp, offset: yyDollar[7].exp} } - case 41: + case 42: yyDollar = yyS[yypt-8 : yypt+1] { yyVAL.stmt = &UpdateStmt{tableRef: yyDollar[2].tableRef, updates: yyDollar[4].updates, where: yyDollar[5].exp, indexOn: yyDollar[6].ids, limit: yyDollar[7].exp, offset: yyDollar[8].exp} } - case 42: + case 43: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.onConflict = nil } - case 43: + case 44: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.onConflict = &OnConflictDo{} } - case 44: + case 45: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.updates = []*colUpdate{yyDollar[1].update} } - case 45: + case 46: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.updates = append(yyDollar[1].updates, yyDollar[3].update) } - case 46: + case 47: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.update = &colUpdate{col: yyDollar[1].id, op: yyDollar[2].cmpOp, val: yyDollar[3].exp} } - case 47: + case 48: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.ids = nil } - case 48: + case 49: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.ids = yyDollar[1].ids } - case 49: + case 50: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.rows = []*RowSpec{yyDollar[1].row} } - case 50: + case 51: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.rows = append(yyDollar[1].rows, yyDollar[3].row) } - case 51: + case 52: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.row = &RowSpec{Values: yyDollar[2].values} } - case 52: + case 53: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.ids = []string{yyDollar[1].id} } - case 53: + case 54: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.ids = append(yyDollar[1].ids, yyDollar[3].id) } - case 54: + case 55: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.cols = []*ColSelector{yyDollar[1].col} } - case 55: + case 56: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.cols = append(yyDollar[1].cols, yyDollar[3].col) } - case 56: + case 57: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.values = nil } - case 57: + case 58: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.values = yyDollar[1].values } - case 58: + case 59: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.values = []ValueExp{yyDollar[1].exp} } - case 59: + case 60: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.values = append(yyDollar[1].values, yyDollar[3].exp) } - case 60: + case 61: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.value = &Integer{val: int64(yyDollar[1].integer)} } - case 61: + case 62: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.value = &Float64{val: float64(yyDollar[1].float)} } - case 62: + case 63: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.value = &Varchar{val: yyDollar[1].str} } - case 63: + case 64: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.value = &Bool{val: yyDollar[1].boolean} } - case 64: + case 65: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.value = &Blob{val: yyDollar[1].blob} } - case 65: + case 66: yyDollar = yyS[yypt-6 : yypt+1] { yyVAL.value = &Cast{val: yyDollar[3].exp, t: yyDollar[5].sqlType} } - case 66: + case 67: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.value = yyDollar[1].value } - case 67: + case 68: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.value = &Param{id: yyDollar[1].id} } - case 68: + case 69: yyDollar = yyS[yypt-1 : yypt+1] { 
yyVAL.value = &Param{id: fmt.Sprintf("param%d", yyDollar[1].pparam), pos: yyDollar[1].pparam} } - case 69: + case 70: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.value = &NullValue{t: AnyType} } - case 70: + case 71: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.value = &FnCall{fn: yyDollar[1].id, params: yyDollar[3].values} } - case 71: + case 72: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.colsSpec = []*ColSpec{yyDollar[1].colSpec} } - case 72: + case 73: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.colsSpec = append(yyDollar[1].colsSpec, yyDollar[3].colSpec) } - case 73: + case 74: yyDollar = yyS[yypt-5 : yypt+1] { yyVAL.colSpec = &ColSpec{colName: yyDollar[1].id, colType: yyDollar[2].sqlType, maxLen: int(yyDollar[3].integer), notNull: yyDollar[4].boolean, autoIncrement: yyDollar[5].boolean} } - case 74: + case 75: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.integer = 0 } - case 75: + case 76: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.integer = yyDollar[2].integer } - case 76: + case 77: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.integer = yyDollar[2].integer } - case 77: + case 78: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.boolean = false } - case 78: + case 79: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.boolean = true } - case 79: + case 80: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.boolean = false } - case 80: + case 81: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.boolean = false } - case 81: + case 82: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.boolean = true } - case 82: + case 83: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.stmt = yyDollar[1].stmt } - case 83: + case 84: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.stmt = &UnionStmt{ @@ -1266,35 +1286,35 @@ yydefault: right: yyDollar[4].stmt.(DataSource), } } - case 84: + case 85: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.stmt = &SelectStmt{ ds: &FnDataSourceStmt{fnCall: &FnCall{fn: "databases"}}, } } - case 85: + case 86: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.stmt = &SelectStmt{ ds: &FnDataSourceStmt{fnCall: &FnCall{fn: "tables"}}, } } - case 86: + case 87: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.stmt = &SelectStmt{ ds: &FnDataSourceStmt{fnCall: &FnCall{fn: "table", params: []ValueExp{&Varchar{val: yyDollar[3].id}}}}, } } - case 87: + case 88: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.stmt = &SelectStmt{ ds: &FnDataSourceStmt{fnCall: &FnCall{fn: "users"}}, } } - case 88: + case 89: yyDollar = yyS[yypt-13 : yypt+1] { yyVAL.stmt = &SelectStmt{ @@ -1311,432 +1331,447 @@ yydefault: offset: yyDollar[13].exp, } } - case 89: + case 90: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.distinct = true } - case 90: + case 91: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.distinct = false } - case 91: + case 92: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.distinct = false } - case 92: + case 93: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.distinct = true } - case 93: + case 94: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.sels = nil } - case 94: + case 95: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.sels = yyDollar[1].sels } - case 95: + case 96: yyDollar = yyS[yypt-2 : yypt+1] { yyDollar[1].sel.setAlias(yyDollar[2].id) yyVAL.sels = []Selector{yyDollar[1].sel} } - case 96: + case 97: yyDollar = yyS[yypt-4 : yypt+1] { yyDollar[3].sel.setAlias(yyDollar[4].id) yyVAL.sels = append(yyDollar[1].sels, yyDollar[3].sel) } - case 97: + case 98: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.sel = yyDollar[1].col } - case 98: + case 99: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.sel = &JSONSelector{ColSelector: yyDollar[1].col, fields: yyDollar[2].jsonFields} } - case 99: + case 100: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.sel = 
&AggColSelector{aggFn: yyDollar[1].aggFn, col: "*"} } - case 100: + case 101: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.sel = &AggColSelector{aggFn: yyDollar[1].aggFn, table: yyDollar[3].col.table, col: yyDollar[3].col.col} } - case 101: + case 102: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.jsonFields = []string{yyDollar[2].str} } - case 102: + case 103: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.jsonFields = append(yyVAL.jsonFields, yyDollar[3].str) } - case 103: + case 104: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.col = &ColSelector{col: yyDollar[1].id} } - case 104: + case 105: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.col = &ColSelector{table: yyDollar[1].id, col: yyDollar[3].id} } - case 105: + case 106: yyDollar = yyS[yypt-3 : yypt+1] { yyDollar[1].tableRef.period = yyDollar[2].period yyDollar[1].tableRef.as = yyDollar[3].id yyVAL.ds = yyDollar[1].tableRef } - case 106: + case 107: yyDollar = yyS[yypt-4 : yypt+1] { yyDollar[2].stmt.(*SelectStmt).as = yyDollar[4].id yyVAL.ds = yyDollar[2].stmt.(DataSource) } - case 107: + case 108: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.ds = &FnDataSourceStmt{fnCall: &FnCall{fn: "databases"}, as: yyDollar[4].id} } - case 108: + case 109: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.ds = &FnDataSourceStmt{fnCall: &FnCall{fn: "tables"}, as: yyDollar[4].id} } - case 109: + case 110: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.ds = &FnDataSourceStmt{fnCall: &FnCall{fn: "table", params: []ValueExp{&Varchar{val: yyDollar[3].id}}}} } - case 110: + case 111: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.ds = &FnDataSourceStmt{fnCall: &FnCall{fn: "users"}, as: yyDollar[4].id} } - case 111: + case 112: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.ds = &FnDataSourceStmt{fnCall: yyDollar[1].value.(*FnCall), as: yyDollar[2].id} } - case 112: + case 113: yyDollar = yyS[yypt-6 : yypt+1] { yyVAL.ds = &tableRef{table: yyDollar[4].id, history: true, as: yyDollar[6].id} } - case 113: + case 114: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.tableRef = &tableRef{table: yyDollar[1].id} } - case 114: + case 115: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.period = period{start: yyDollar[1].openPeriod, end: yyDollar[2].openPeriod} } - case 115: + case 116: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.openPeriod = nil } - case 116: + case 117: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.openPeriod = &openPeriod{inclusive: true, instant: yyDollar[2].periodInstant} } - case 117: + case 118: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.openPeriod = &openPeriod{instant: yyDollar[2].periodInstant} } - case 118: + case 119: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.openPeriod = nil } - case 119: + case 120: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.openPeriod = &openPeriod{inclusive: true, instant: yyDollar[2].periodInstant} } - case 120: + case 121: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.openPeriod = &openPeriod{instant: yyDollar[2].periodInstant} } - case 121: + case 122: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.periodInstant = periodInstant{instantType: txInstant, exp: yyDollar[2].exp} } - case 122: + case 123: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.periodInstant = periodInstant{instantType: timeInstant, exp: yyDollar[1].exp} } - case 123: + case 124: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.joins = nil } - case 124: + case 125: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.joins = yyDollar[1].joins } - case 125: + case 126: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.joins = []*JoinSpec{yyDollar[1].join} } - case 126: + case 127: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.joins = append([]*JoinSpec{yyDollar[1].join}, yyDollar[2].joins...) 
} - case 127: + case 128: yyDollar = yyS[yypt-6 : yypt+1] { yyVAL.join = &JoinSpec{joinType: yyDollar[1].joinType, ds: yyDollar[3].ds, indexOn: yyDollar[4].ids, cond: yyDollar[6].exp} } - case 128: + case 129: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.joinType = InnerJoin } - case 129: + case 130: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.joinType = yyDollar[1].joinType } - case 130: + case 131: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.exp = nil } - case 131: + case 132: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.exp = yyDollar[2].exp } - case 132: + case 133: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.cols = nil } - case 133: + case 134: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.cols = yyDollar[3].cols } - case 134: + case 135: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.exp = nil } - case 135: + case 136: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.exp = yyDollar[2].exp } - case 136: + case 137: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.exp = nil } - case 137: + case 138: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.exp = yyDollar[2].exp } - case 138: + case 139: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.exp = nil } - case 139: + case 140: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.exp = yyDollar[2].exp } - case 140: + case 141: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.ordcols = nil } - case 141: + case 142: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.ordcols = yyDollar[3].ordcols } - case 142: + case 143: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.ids = nil } - case 143: + case 144: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.ids = yyDollar[4].ids } - case 144: + case 145: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.ordcols = []*OrdCol{{sel: yyDollar[1].sel, descOrder: yyDollar[2].opt_ord}} } - case 145: + case 146: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.ordcols = append(yyDollar[1].ordcols, &OrdCol{sel: yyDollar[3].sel, descOrder: yyDollar[4].opt_ord}) } - case 146: + case 147: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.opt_ord = false } - case 147: + case 148: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.opt_ord = false } - case 148: + case 149: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.opt_ord = true } - case 149: + case 150: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.id = "" } - case 150: + case 151: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.id = yyDollar[1].id } - case 151: + case 152: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.id = yyDollar[2].id } - case 152: + case 153: + yyDollar = yyS[yypt-0 : yypt+1] + { + yyVAL.checks = nil + } + case 154: + yyDollar = yyS[yypt-4 : yypt+1] + { + yyVAL.checks = append([]CheckConstraint{{exp: yyDollar[2].exp}}, yyDollar[4].checks...) + } + case 155: + yyDollar = yyS[yypt-6 : yypt+1] + { + yyVAL.checks = append([]CheckConstraint{{name: yyDollar[2].id, exp: yyDollar[4].exp}}, yyDollar[6].checks...) 
+ } + case 156: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.exp = yyDollar[1].exp } - case 153: + case 157: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.exp = yyDollar[1].binExp } - case 154: + case 158: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.exp = &NotBoolExp{exp: yyDollar[2].exp} } - case 155: + case 159: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.exp = &NumExp{left: &Integer{val: 0}, op: SUBSOP, right: yyDollar[2].exp} } - case 156: + case 160: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.exp = &LikeBoolExp{val: yyDollar[1].exp, notLike: yyDollar[2].boolean, pattern: yyDollar[4].exp} } - case 157: + case 161: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.exp = &ExistsBoolExp{q: (yyDollar[3].stmt).(DataSource)} } - case 158: + case 162: yyDollar = yyS[yypt-6 : yypt+1] { yyVAL.exp = &InSubQueryExp{val: yyDollar[1].exp, notIn: yyDollar[2].boolean, q: yyDollar[5].stmt.(*SelectStmt)} } - case 159: + case 163: yyDollar = yyS[yypt-6 : yypt+1] { yyVAL.exp = &InListExp{val: yyDollar[1].exp, notIn: yyDollar[2].boolean, values: yyDollar[5].values} } - case 160: + case 164: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.exp = yyDollar[1].sel } - case 161: + case 165: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.exp = yyDollar[1].value } - case 162: + case 166: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.exp = yyDollar[2].exp } - case 163: + case 167: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.exp = &Cast{val: yyDollar[1].exp, t: yyDollar[3].sqlType} } - case 164: + case 168: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.boolean = false } - case 165: + case 169: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.boolean = true } - case 166: + case 170: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.binExp = &NumExp{left: yyDollar[1].exp, op: ADDOP, right: yyDollar[3].exp} } - case 167: + case 171: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.binExp = &NumExp{left: yyDollar[1].exp, op: SUBSOP, right: yyDollar[3].exp} } - case 168: + case 172: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.binExp = &NumExp{left: yyDollar[1].exp, op: DIVOP, right: yyDollar[3].exp} } - case 169: + case 173: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.binExp = &NumExp{left: yyDollar[1].exp, op: MULTOP, right: yyDollar[3].exp} } - case 170: + case 174: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.binExp = &BinBoolExp{left: yyDollar[1].exp, op: yyDollar[2].logicOp, right: yyDollar[3].exp} } - case 171: + case 175: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.binExp = &CmpBoolExp{left: yyDollar[1].exp, op: yyDollar[2].cmpOp, right: yyDollar[3].exp} } - case 172: + case 176: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.binExp = &CmpBoolExp{left: yyDollar[1].exp, op: EQ, right: &NullValue{t: AnyType}} } - case 173: + case 177: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.binExp = &CmpBoolExp{left: yyDollar[1].exp, op: NE, right: &NullValue{t: AnyType}} diff --git a/embedded/sql/stmt.go b/embedded/sql/stmt.go index de7859cae2..0bc2f7b27c 100644 --- a/embedded/sql/stmt.go +++ b/embedded/sql/stmt.go @@ -37,6 +37,7 @@ const ( catalogPrefix = "CTL." catalogTablePrefix = "CTL.TABLE." // (key=CTL.TABLE.{1}{tableID}, value={tableNAME}) catalogColumnPrefix = "CTL.COLUMN." // (key=CTL.COLUMN.{1}{tableID}{colID}{colTYPE}, value={(auto_incremental | nullable){maxLen}{colNAME}}) + catalogCheckPrefix = "CTL.CHECK." // (key=CTL.CHECK.{1}{tableID}{checkID}, value={nameLen}{name}{expText}) catalogIndexPrefix = "CTL.INDEX." // (key=CTL.INDEX.{1}{tableID}{indexID}, value={unique {colID1}(ASC|DESC)...{colIDN}(ASC|DESC)}) RowPrefix = "R." 
// (key=R.{1}{tableID}{0}({null}({pkVal}{padding}{pkValLen})?)+, value={count (colID valLen val)+}) @@ -115,6 +116,24 @@ const ( GE ) +func CmpOperatorToString(op CmpOperator) string { + switch op { + case EQ: + return "=" + case NE: + return "!=" + case LT: + return "<" + case LE: + return "<=" + case GT: + return ">" + case GE: + return ">=" + } + return "" +} + type LogicOperator = int const ( @@ -122,6 +141,13 @@ const ( OR ) +func LogicOperatorToString(op LogicOperator) string { + if op == AND { + return "AND" + } + return "OR" +} + type NumOperator = int const ( @@ -131,6 +157,20 @@ const ( MULTOP ) +func NumOperatorString(op NumOperator) string { + switch op { + case ADDOP: + return "+" + case SUBSOP: + return "-" + case DIVOP: + return "/" + case MULTOP: + return "*" + } + return "" +} + type JoinType = int const ( @@ -335,6 +375,7 @@ type CreateTableStmt struct { table string ifNotExists bool colsSpec []*ColSpec + checks []CheckConstraint pkColNames []string } @@ -346,6 +387,21 @@ func (stmt *CreateTableStmt) inferParameters(ctx context.Context, tx *SQLTx, par return nil } +func zeroRow(tableName string, cols []*ColSpec) *Row { + r := Row{ + ValuesByPosition: make([]TypedValue, len(cols)), + ValuesBySelector: make(map[string]TypedValue, len(cols)), + } + + for i, col := range cols { + v := zeroForType(col.colType) + + r.ValuesByPosition[i] = v + r.ValuesBySelector[EncodeSelector("", tableName, col.colName)] = v + } + return &r +} + func (stmt *CreateTableStmt) execAt(ctx context.Context, tx *SQLTx, params map[string]interface{}) (*SQLTx, error) { if stmt.ifNotExists && tx.catalog.ExistTable(stmt.table) { return tx, nil @@ -356,7 +412,33 @@ func (stmt *CreateTableStmt) execAt(ctx context.Context, tx *SQLTx, params map[s colSpecs[uint32(i)+1] = cs } - table, err := tx.catalog.newTable(stmt.table, colSpecs, uint32(len(colSpecs))) + row := zeroRow(stmt.table, stmt.colsSpec) + for _, check := range stmt.checks { + value, err := check.exp.reduce(tx, row, stmt.table) + if err != nil { + return nil, err + } + + if value.Type() != BooleanType { + return nil, ErrInvalidCheckConstraint + } + } + + nextUnnamedCheck := 0 + checks := make(map[string]CheckConstraint) + for id, check := range stmt.checks { + name := fmt.Sprintf("%s_check%d", stmt.table, nextUnnamedCheck+1) + if check.name != "" { + name = check.name + } else { + nextUnnamedCheck++ + } + check.id = uint32(id) + check.name = name + checks[name] = check + } + + table, err := tx.catalog.newTable(stmt.table, colSpecs, checks, uint32(len(colSpecs))) if err != nil { return nil, err } @@ -380,6 +462,12 @@ func (stmt *CreateTableStmt) execAt(ctx context.Context, tx *SQLTx, params map[s } } + for _, check := range checks { + if err := persistCheck(tx, table, &check); err != nil { + return nil, err + } + } + mappedKey := MapKey(tx.sqlPrefix(), catalogTablePrefix, EncodeID(DatabaseID), EncodeID(table.id)) err = tx.set(mappedKey, nil, []byte(table.name)) @@ -420,6 +508,32 @@ func persistColumn(tx *SQLTx, col *Column) error { return tx.set(mappedKey, nil, v) } +func persistCheck(tx *SQLTx, table *Table, check *CheckConstraint) error { + mappedKey := MapKey( + tx.sqlPrefix(), + catalogCheckPrefix, + EncodeID(DatabaseID), + EncodeID(table.id), + EncodeID(check.id), + ) + + name := check.name + expText := check.exp.String() + + val := make([]byte, 2+len(name)+len(expText)) + + if len(name) > 256 { + return fmt.Errorf("constraint name len: %w", ErrMaxLengthExceeded) + } + + val[0] = byte(len(name)) - 1 + + copy(val[1:], []byte(name)) + 
copy(val[1+len(name):], []byte(expText)) + + return tx.set(mappedKey, nil, val) +} + type ColSpec struct { colName string colType SQLValueType @@ -667,6 +781,11 @@ func (stmt *DropColumnStmt) execAt(ctx context.Context, tx *SQLTx, params map[st return nil, err } + err = canDropColumn(tx, table, col) + if err != nil { + return nil, err + } + err = table.deleteColumn(col) if err != nil { return nil, err @@ -682,6 +801,28 @@ func (stmt *DropColumnStmt) execAt(ctx context.Context, tx *SQLTx, params map[st return tx, nil } +func canDropColumn(tx *SQLTx, table *Table, col *Column) error { + colSpecs := make([]*ColSpec, 0, len(table.Cols())-1) + for _, c := range table.cols { + if c.id != col.id { + colSpecs = append(colSpecs, &ColSpec{colName: c.Name(), colType: c.Type()}) + } + } + + row := zeroRow(table.Name(), colSpecs) + for name, check := range table.checkConstraints { + _, err := check.exp.reduce(tx, row, table.name) + if errors.Is(err, ErrColumnDoesNotExist) { + return fmt.Errorf("%w %s because %s constraint requires it", ErrCannotDropColumn, col.Name(), name) + } + + if err != nil { + return err + } + } + return nil +} + func persistColumnDeletion(ctx context.Context, tx *SQLTx, col *Column) error { mappedKey := MapKey( tx.sqlPrefix(), @@ -695,6 +836,44 @@ func persistColumnDeletion(ctx context.Context, tx *SQLTx, col *Column) error { return tx.delete(ctx, mappedKey) } +type DropConstraintStmt struct { + table string + constraintName string +} + +func (stmt *DropConstraintStmt) execAt(ctx context.Context, tx *SQLTx, params map[string]interface{}) (*SQLTx, error) { + table, err := tx.catalog.GetTableByName(stmt.table) + if err != nil { + return nil, err + } + + id, err := table.deleteCheck(stmt.constraintName) + if err != nil { + return nil, err + } + + err = persistCheckDeletion(ctx, tx, table.id, id) + + tx.mutatedCatalog = true + + return tx, err +} + +func persistCheckDeletion(ctx context.Context, tx *SQLTx, tableID uint32, checkId uint32) error { + mappedKey := MapKey( + tx.sqlPrefix(), + catalogCheckPrefix, + EncodeID(DatabaseID), + EncodeID(tableID), + EncodeID(checkId), + ) + return tx.delete(ctx, mappedKey) +} + +func (stmt *DropConstraintStmt) inferParameters(ctx context.Context, tx *SQLTx, params map[string]SQLValueType) error { + return nil +} + type UpsertIntoStmt struct { isInsert bool tableRef *tableRef @@ -723,10 +902,11 @@ func NewRowSpec(values []ValueExp) *RowSpec { } } -type OnConflictDo struct { -} +type OnConflictDo struct{} func (stmt *UpsertIntoStmt) inferParameters(ctx context.Context, tx *SQLTx, params map[string]SQLValueType) error { + emptyDescriptors := make(map[string]ColDescriptor) + for _, row := range stmt.rows { if len(stmt.cols) != len(row.Values) { return ErrInvalidNumberOfValues @@ -743,13 +923,12 @@ func (stmt *UpsertIntoStmt) inferParameters(ctx context.Context, tx *SQLTx, para return err } - err = val.requiresType(col.colType, make(map[string]ColDescriptor), params, table.name) + err = val.requiresType(col.colType, emptyDescriptors, params, table.name) if err != nil { return err } } } - return nil } @@ -784,6 +963,11 @@ func (stmt *UpsertIntoStmt) execAt(ctx context.Context, tx *SQLTx, params map[st return nil, err } + r := &Row{ + ValuesByPosition: make([]TypedValue, len(table.cols)), + ValuesBySelector: make(map[string]TypedValue), + } + for _, row := range stmt.rows { if len(row.Values) != len(stmt.cols) { return nil, ErrInvalidNumberOfValues @@ -857,6 +1041,28 @@ func (stmt *UpsertIntoStmt) execAt(ctx context.Context, tx *SQLTx, params map[st 
valuesByColID[colID] = rval } + for i, col := range table.cols { + v := valuesByColID[col.id] + + if v == nil { + v = NewNull(AnyType) + } else if len(table.checkConstraints) > 0 && col.Type() == JSONType { + s, _ := v.RawValue().(string) + jsonVal, err := NewJsonFromString(s) + if err != nil { + return nil, err + } + v = jsonVal + } + + r.ValuesByPosition[i] = v + r.ValuesBySelector[EncodeSelector("", table.name, col.colName)] = v + } + + if err := checkConstraints(tx, table.checkConstraints, r, table.name); err != nil { + return nil, err + } + pkEncVals, err := encodedKey(table.primaryIndex, valuesByColID) if err != nil { return nil, err @@ -897,6 +1103,24 @@ func (stmt *UpsertIntoStmt) execAt(ctx context.Context, tx *SQLTx, params map[st return tx, nil } +func checkConstraints(tx *SQLTx, checks map[string]CheckConstraint, row *Row, table string) error { + for _, check := range checks { + val, err := check.exp.reduce(tx, row, table) + if err != nil { + return fmt.Errorf("%w: %s", ErrCheckConstraintViolation, err) + } + + if val.Type() != BooleanType { + return ErrInvalidCheckConstraint + } + + if !val.RawValue().(bool) { + return fmt.Errorf("%w: %s", ErrCheckConstraintViolation, check.exp.String()) + } + } + return nil +} + func (tx *SQLTx) encodeRowValue(valuesByColID map[uint32]TypedValue, table *Table) ([]byte, error) { valbuf := bytes.Buffer{} @@ -1315,6 +1539,17 @@ func (stmt *UpdateStmt) execAt(ctx context.Context, tx *SQLTx, params map[string valuesByColID[col.id] = rval } + for i, col := range table.cols { + v := valuesByColID[col.id] + + row.ValuesByPosition[i] = v + row.ValuesBySelector[EncodeSelector("", table.name, col.colName)] = v + } + + if err := checkConstraints(tx, table.checkConstraints, row, table.name); err != nil { + return nil, err + } + pkEncVals, err := encodedKey(table.primaryIndex, valuesByColID) if err != nil { return nil, err @@ -1463,6 +1698,7 @@ type ValueExp interface { reduceSelectors(row *Row, implicitTable string) ValueExp isConstant() bool selectorRanges(table *Table, asTable string, params map[string]interface{}, rangesByColID map[uint32]*typedValueRange) error + String() string } type typedValueRange struct { @@ -1573,7 +1809,6 @@ type TypedValue interface { RawValue() interface{} Compare(val TypedValue) (int, error) IsNull() bool - String() string } type Tuple []TypedValue @@ -1844,7 +2079,7 @@ func (v *Varchar) IsNull() bool { } func (v *Varchar) String() string { - return v.val + return fmt.Sprintf("'%s'", v.val) } func (v *Varchar) inferType(cols map[string]ColDescriptor, params map[string]SQLValueType, implicitTable string) (SQLValueType, error) { @@ -2323,6 +2558,14 @@ func (v *FnCall) selectorRanges(table *Table, asTable string, params map[string] return nil } +func (v *FnCall) String() string { + params := make([]string, len(v.params)) + for i, p := range v.params { + params[i] = p.String() + } + return v.fn + "(" + strings.Join(params, ",") + ")" +} + type Cast struct { val ValueExp t SQLValueType @@ -2385,6 +2628,10 @@ func (c *Cast) selectorRanges(table *Table, asTable string, params map[string]in return nil } +func (c *Cast) String() string { + return fmt.Sprintf("CAST (%s AS %s)", c.val.String(), c.t) +} + type Param struct { id string pos int @@ -2478,6 +2725,10 @@ func (v *Param) selectorRanges(table *Table, asTable string, params map[string]i return nil } +func (v *Param) String() string { + return "@" + v.id +} + type Comparison int const ( @@ -3266,6 +3517,10 @@ func (sel *ColSelector) selectorRanges(table *Table, asTable string, 
params map[ return nil } +func (sel *ColSelector) String() string { + return sel.col +} + type AggColSelector struct { aggFn AggregateFn table string @@ -3373,6 +3628,10 @@ func (sel *AggColSelector) selectorRanges(table *Table, asTable string, params m return nil } +func (sel *AggColSelector) String() string { + return sel.aggFn + "(" + sel.col + ")" +} + type NumExp struct { op NumOperator left, right ValueExp @@ -3522,6 +3781,10 @@ func (bexp *NumExp) selectorRanges(table *Table, asTable string, params map[stri return nil } +func (bexp *NumExp) String() string { + return fmt.Sprintf("(%s %s %s)", bexp.left.String(), NumOperatorString(bexp.op), bexp.right.String()) +} + type NotBoolExp struct { exp ValueExp } @@ -3582,6 +3845,10 @@ func (bexp *NotBoolExp) selectorRanges(table *Table, asTable string, params map[ return nil } +func (bexp *NotBoolExp) String() string { + return "NOT " + bexp.exp.String() +} + type LikeBoolExp struct { val ValueExp notLike bool @@ -3696,6 +3963,10 @@ func (bexp *LikeBoolExp) selectorRanges(table *Table, asTable string, params map return nil } +func (bexp *LikeBoolExp) String() string { + return fmt.Sprintf("(%s LIKE %s)", bexp.val.String(), bexp.pattern.String()) +} + type CmpBoolExp struct { op CmpOperator left, right ValueExp @@ -3862,6 +4133,11 @@ func (bexp *CmpBoolExp) selectorRanges(table *Table, asTable string, params map[ return updateRangeFor(column.id, rval, bexp.op, rangesByColID) } +func (bexp *CmpBoolExp) String() string { + opStr := CmpOperatorToString(bexp.op) + return fmt.Sprintf("(%s %s %s)", bexp.left.String(), opStr, bexp.right.String()) +} + func updateRangeFor(colID uint32, val TypedValue, cmp CmpOperator, rangesByColID map[uint32]*typedValueRange) error { currRange, ranged := rangesByColID[colID] var newRange *typedValueRange @@ -4022,7 +4298,8 @@ func (bexp *BinBoolExp) reduce(tx *SQLTx, row *Row, implicitTable string) (Typed return nil, fmt.Errorf("%w (expecting boolean value)", ErrInvalidValue) } - if (bexp.op == OR && bl.val) || (bexp.op == AND && !bl.val) { + // short-circuit evaluation + if (bl.val && bexp.op == OR) || (!bl.val && bexp.op == AND) { return &Bool{val: bl.val}, nil } @@ -4102,16 +4379,20 @@ func (bexp *BinBoolExp) selectorRanges(table *Table, asTable string, params map[ return nil } +func (bexp *BinBoolExp) String() string { + return fmt.Sprintf("(%s %s %s)", bexp.left.String(), LogicOperatorToString(bexp.op), bexp.right.String()) +} + type ExistsBoolExp struct { q DataSource } func (bexp *ExistsBoolExp) inferType(cols map[string]ColDescriptor, params map[string]SQLValueType, implicitTable string) (SQLValueType, error) { - return AnyType, errors.New("not yet supported") + return AnyType, fmt.Errorf("error inferring type in 'EXISTS' clause: %w", ErrNoSupported) } func (bexp *ExistsBoolExp) requiresType(t SQLValueType, cols map[string]ColDescriptor, params map[string]SQLValueType, implicitTable string) error { - return errors.New("not yet supported") + return fmt.Errorf("error inferring type in 'EXISTS' clause: %w", ErrNoSupported) } func (bexp *ExistsBoolExp) substitute(params map[string]interface{}) (ValueExp, error) { @@ -4119,7 +4400,7 @@ func (bexp *ExistsBoolExp) substitute(params map[string]interface{}) (ValueExp, } func (bexp *ExistsBoolExp) reduce(tx *SQLTx, row *Row, implicitTable string) (TypedValue, error) { - return nil, errors.New("not yet supported") + return nil, fmt.Errorf("'EXISTS' clause: %w", ErrNoSupported) } func (bexp *ExistsBoolExp) reduceSelectors(row *Row, implicitTable string) ValueExp { @@ 
-4134,6 +4415,10 @@ func (bexp *ExistsBoolExp) selectorRanges(table *Table, asTable string, params m return nil } +func (bexp *ExistsBoolExp) String() string { + return "" +} + type InSubQueryExp struct { val ValueExp notIn bool @@ -4168,6 +4453,10 @@ func (bexp *InSubQueryExp) selectorRanges(table *Table, asTable string, params m return nil } +func (bexp *InSubQueryExp) String() string { + return "" +} + // TODO: once InSubQueryExp is supported, this struct may become obsolete by creating a ListDataSource struct type InListExp struct { val ValueExp @@ -4276,6 +4565,14 @@ func (bexp *InListExp) selectorRanges(table *Table, asTable string, params map[s return nil } +func (bexp *InListExp) String() string { + values := make([]string, len(bexp.values)) + for i, exp := range bexp.values { + values[i] = exp.String() + } + return fmt.Sprintf("%s IN (%s)", bexp.val.String(), strings.Join(values, ",")) +} + type FnDataSourceStmt struct { fnCall *FnCall as string @@ -4761,6 +5058,21 @@ func (stmt *DropTableStmt) execAt(ctx context.Context, tx *SQLTx, params map[str } } + // delete checks + for name := range table.checkConstraints { + key := MapKey( + tx.sqlPrefix(), + catalogCheckPrefix, + EncodeID(DatabaseID), + EncodeID(table.id), + []byte(name), + ) + + if err := tx.delete(ctx, key); err != nil { + return nil, err + } + } + // delete indexes for _, index := range table.indexes { mappedKey := MapKey( diff --git a/embedded/sql/stmt_test.go b/embedded/sql/stmt_test.go index 24d2b7d2d7..c5def0b943 100644 --- a/embedded/sql/stmt_test.go +++ b/embedded/sql/stmt_test.go @@ -1324,7 +1324,7 @@ func TestTypedValueString(t *testing.T) { require.Equal(t, "10", i.String()) s := &Varchar{val: "test"} - require.Equal(t, "test", s.String()) + require.Equal(t, "'test'", s.String()) b := &Bool{val: true} require.Equal(t, "true", b.String()) diff --git a/embedded/sql/type_conversion.go b/embedded/sql/type_conversion.go index 623911b79f..cdf4669d94 100644 --- a/embedded/sql/type_conversion.go +++ b/embedded/sql/type_conversion.go @@ -20,6 +20,7 @@ import ( "encoding/json" "fmt" "strconv" + "strings" "time" "github.com/google/uuid" @@ -307,7 +308,9 @@ func getConverter(src, dst SQLValueType) (converterFunc, error) { return &JSON{val: tv.RawValue()}, nil case VarcharType: var x interface{} - err := json.Unmarshal([]byte(tv.String()), &x) + s := strings.TrimSuffix(strings.TrimPrefix(tv.String(), "'"), "'") + + err := json.Unmarshal([]byte(s), &x) return &JSON{val: x}, err } diff --git a/pkg/database/sql.go b/pkg/database/sql.go index ab1f973197..36f024f4c5 100644 --- a/pkg/database/sql.go +++ b/pkg/database/sql.go @@ -343,7 +343,7 @@ func (d *db) SQLExec(ctx context.Context, tx *sql.SQLTx, req *schema.SQLExecRequ return nil, nil, ErrIllegalArguments } - stmts, err := sql.Parse(strings.NewReader(req.Sql)) + stmts, err := sql.ParseSQL(strings.NewReader(req.Sql)) if err != nil { return nil, nil, err } @@ -377,7 +377,7 @@ func (d *db) SQLQuery(ctx context.Context, tx *sql.SQLTx, req *schema.SQLQueryRe return nil, ErrIllegalArguments } - stmts, err := sql.Parse(strings.NewReader(req.Sql)) + stmts, err := sql.ParseSQL(strings.NewReader(req.Sql)) if err != nil { return nil, err } diff --git a/pkg/database/sql_test.go b/pkg/database/sql_test.go index 27af31dfb0..1f696f7626 100644 --- a/pkg/database/sql_test.go +++ b/pkg/database/sql_test.go @@ -105,7 +105,7 @@ func TestSQLExecAndQuery(t *testing.T) { require.Len(t, inferredParams, 1) require.Equal(t, sql.BooleanType, inferredParams["active"]) - stmts, err := 
sql.ParseString(q) + stmts, err := sql.ParseSQLString(q) require.NoError(t, err) require.Len(t, stmts, 1) diff --git a/pkg/pgsql/server/bmessages/data_row.go b/pkg/pgsql/server/bmessages/data_row.go index 02e4bb1f6f..ceaad30d70 100644 --- a/pkg/pgsql/server/bmessages/data_row.go +++ b/pkg/pgsql/server/bmessages/data_row.go @@ -19,6 +19,7 @@ package bmessages import ( "bytes" "encoding/binary" + "strings" "github.com/codenotary/immudb/embedded/sql" ) @@ -67,7 +68,7 @@ func DataRow(rows []*sql.Row, colNumb int, ResultColumnFormatCodes []int16) []by } case sql.JSONType: { - jsonStr := val.String() + jsonStr := trimQuotes(val.String()) binary.BigEndian.PutUint32(valueLength, uint32(len(jsonStr))) value = []byte(jsonStr) } @@ -120,5 +121,19 @@ func renderValueAsByte(v sql.TypedValue) []byte { if v.IsNull() { return nil } - return []byte(v.String()) + + var s string + switch v.Type() { + case sql.VarcharType: + s, _ = v.RawValue().(string) + case sql.JSONType: + s = trimQuotes(v.String()) + default: + s = v.String() + } + return []byte(s) +} + +func trimQuotes(s string) string { + return strings.TrimSuffix(strings.TrimPrefix(s, "'"), "'") } diff --git a/pkg/pgsql/server/query_machine.go b/pkg/pgsql/server/query_machine.go index c716124e7c..e4bd8cbe9b 100644 --- a/pkg/pgsql/server/query_machine.go +++ b/pkg/pgsql/server/query_machine.go @@ -105,7 +105,7 @@ func (s *session) QueryMachine() error { var stmt sql.SQLStmt if !s.isInBlackList(v.Statements) { - stmts, err := sql.Parse(strings.NewReader(v.Statements)) + stmts, err := sql.ParseSQL(strings.NewReader(v.Statements)) if err != nil { waitForSync = extQueryMode s.HandleError(err) @@ -272,7 +272,7 @@ func (s *session) fetchAndWriteResults(statements string, parameters []*schema.N return err } - stmts, err := sql.Parse(strings.NewReader(statements)) + stmts, err := sql.ParseSQL(strings.NewReader(statements)) if err != nil { return err }
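
For reference, a minimal sketch of the SQL surface this patch enables, expressed through the
renamed ParseSQLString entry point; the accounts table and its columns are hypothetical and
only illustrate the new CONSTRAINT/CHECK grammar rules and DropConstraintStmt:

	package main

	import (
		"fmt"

		"github.com/codenotary/immudb/embedded/sql"
	)

	func main() {
		// Unnamed checks are auto-named <table>_check<N> at CREATE TABLE time;
		// named ones can later be removed with ALTER TABLE ... DROP CONSTRAINT.
		stmts, err := sql.ParseSQLString(`
			CREATE TABLE accounts (
				id INTEGER AUTO_INCREMENT,
				balance INTEGER,
				email VARCHAR,
				CHECK (balance >= 0),
				CONSTRAINT email_not_null CHECK (email IS NOT NULL),
				PRIMARY KEY id
			);
			ALTER TABLE accounts DROP CONSTRAINT email_not_null;
		`)

		// Expected: 2 parsed statements and no error; at execution time, rows that
		// violate a check are rejected with ErrCheckConstraintViolation.
		fmt.Println(len(stmts), err)
	}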