From c9e48b6de9a453b467d58dede8feba7439d5f1cc Mon Sep 17 00:00:00 2001 From: Vibhu Pandey Date: Tue, 8 Jul 2025 00:21:26 +0530 Subject: [PATCH] feat(sqlschema): add sqlschema (#8384) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## 📄 Summary - add sqlschema package - add unique index on email,org_id in users and user_invite --- ee/query-service/main.go | 10 + ee/sqlschema/postgressqlschema/formatter.go | 36 + ee/sqlschema/postgressqlschema/provider.go | 285 ++++++++ pkg/query-service/main.go | 4 + pkg/signoz/config.go | 5 + pkg/signoz/provider.go | 14 +- pkg/signoz/provider_test.go | 4 +- pkg/signoz/signoz.go | 15 +- pkg/sqlmigration/042_update_users.go | 85 +++ pkg/sqlmigration/043_update_user_invite.go | 88 +++ pkg/sqlmigration/044_update_org_domain.go | 85 +++ pkg/sqlmigration/045_add_factor_indexes.go | 75 +++ pkg/sqlmigration/sqlmigration.go | 3 + pkg/sqlschema/column.go | 130 ++++ pkg/sqlschema/column_test.go | 118 ++++ pkg/sqlschema/config.go | 17 + pkg/sqlschema/constraint.go | 325 +++++++++ pkg/sqlschema/constraint_test.go | 72 ++ pkg/sqlschema/formatter.go | 49 ++ pkg/sqlschema/index.go | 116 ++++ pkg/sqlschema/index_test.go | 51 ++ pkg/sqlschema/operator.go | 143 ++++ pkg/sqlschema/operator_test.go | 702 ++++++++++++++++++++ pkg/sqlschema/sqlitesqlschema/ddl.go | 326 +++++++++ pkg/sqlschema/sqlitesqlschema/ddl_test.go | 191 ++++++ pkg/sqlschema/sqlitesqlschema/formatter.go | 28 + pkg/sqlschema/sqlitesqlschema/provider.go | 144 ++++ pkg/sqlschema/sqlschema.go | 69 ++ pkg/sqlschema/sqlschematest/provider.go | 58 ++ pkg/sqlschema/table.go | 156 +++++ pkg/sqlschema/table_test.go | 177 +++++ 31 files changed, 3578 insertions(+), 3 deletions(-) create mode 100644 ee/sqlschema/postgressqlschema/formatter.go create mode 100644 ee/sqlschema/postgressqlschema/provider.go create mode 100644 pkg/sqlmigration/042_update_users.go create mode 100644 pkg/sqlmigration/043_update_user_invite.go create mode 100644 pkg/sqlmigration/044_update_org_domain.go create mode 100644 pkg/sqlmigration/045_add_factor_indexes.go create mode 100644 pkg/sqlschema/column.go create mode 100644 pkg/sqlschema/column_test.go create mode 100644 pkg/sqlschema/config.go create mode 100644 pkg/sqlschema/constraint.go create mode 100644 pkg/sqlschema/constraint_test.go create mode 100644 pkg/sqlschema/formatter.go create mode 100644 pkg/sqlschema/index.go create mode 100644 pkg/sqlschema/index_test.go create mode 100644 pkg/sqlschema/operator.go create mode 100644 pkg/sqlschema/operator_test.go create mode 100644 pkg/sqlschema/sqlitesqlschema/ddl.go create mode 100644 pkg/sqlschema/sqlitesqlschema/ddl_test.go create mode 100644 pkg/sqlschema/sqlitesqlschema/formatter.go create mode 100644 pkg/sqlschema/sqlitesqlschema/provider.go create mode 100644 pkg/sqlschema/sqlschema.go create mode 100644 pkg/sqlschema/sqlschematest/provider.go create mode 100644 pkg/sqlschema/table.go create mode 100644 pkg/sqlschema/table_test.go diff --git a/ee/query-service/main.go b/ee/query-service/main.go index c8a52d7ac48e..c7b6b2d23e6d 100644 --- a/ee/query-service/main.go +++ b/ee/query-service/main.go @@ -9,6 +9,7 @@ import ( "github.com/SigNoz/signoz/ee/licensing" "github.com/SigNoz/signoz/ee/licensing/httplicensing" "github.com/SigNoz/signoz/ee/query-service/app" + "github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema" "github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore" "github.com/SigNoz/signoz/ee/zeus" "github.com/SigNoz/signoz/ee/zeus/httpzeus" @@ -21,6 +22,7 @@ import ( 
"github.com/SigNoz/signoz/pkg/modules/organization" baseconst "github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/pkg/signoz" + "github.com/SigNoz/signoz/pkg/sqlschema" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook" "github.com/SigNoz/signoz/pkg/types/authtypes" @@ -145,6 +147,14 @@ func main() { signoz.NewEmailingProviderFactories(), signoz.NewCacheProviderFactories(), signoz.NewWebProviderFactories(), + func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] { + existingFactories := signoz.NewSQLSchemaProviderFactories(sqlstore) + if err := existingFactories.Add(postgressqlschema.NewFactory(sqlstore)); err != nil { + zap.L().Fatal("Failed to add postgressqlschema factory", zap.Error(err)) + } + + return existingFactories + }, sqlStoreFactories, signoz.NewTelemetryStoreProviderFactories(), ) diff --git a/ee/sqlschema/postgressqlschema/formatter.go b/ee/sqlschema/postgressqlschema/formatter.go new file mode 100644 index 000000000000..e51fd7ac951c --- /dev/null +++ b/ee/sqlschema/postgressqlschema/formatter.go @@ -0,0 +1,36 @@ +package postgressqlschema + +import ( + "strings" + + "github.com/SigNoz/signoz/pkg/sqlschema" +) + +type Formatter struct { + sqlschema.Formatter +} + +func (formatter Formatter) SQLDataTypeOf(dataType sqlschema.DataType) string { + if dataType == sqlschema.DataTypeTimestamp { + return "TIMESTAMPTZ" + } + + return strings.ToUpper(dataType.String()) +} + +func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType { + switch strings.ToUpper(dataType) { + case "TIMESTAMPTZ", "TIMESTAMP", "TIMESTAMP WITHOUT TIME ZONE", "TIMESTAMP WITH TIME ZONE": + return sqlschema.DataTypeTimestamp + case "INT8": + return sqlschema.DataTypeBigInt + case "INT2", "INT4", "SMALLINT", "INTEGER": + return sqlschema.DataTypeInteger + case "BOOL", "BOOLEAN": + return sqlschema.DataTypeBoolean + case "VARCHAR", "CHARACTER VARYING", "CHARACTER": + return sqlschema.DataTypeText + } + + return formatter.Formatter.DataTypeOf(dataType) +} diff --git a/ee/sqlschema/postgressqlschema/provider.go b/ee/sqlschema/postgressqlschema/provider.go new file mode 100644 index 000000000000..af06994b45b3 --- /dev/null +++ b/ee/sqlschema/postgressqlschema/provider.go @@ -0,0 +1,285 @@ +package postgressqlschema + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/uptrace/bun" +) + +type provider struct { + settings factory.ScopedProviderSettings + fmter sqlschema.SQLFormatter + sqlstore sqlstore.SQLStore + operator sqlschema.SQLOperator +} + +func NewFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config] { + return factory.NewProviderFactory(factory.MustNewName("postgres"), func(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config) (sqlschema.SQLSchema, error) { + return New(ctx, providerSettings, config, sqlstore) + }) +} + +func New(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config, sqlstore sqlstore.SQLStore) (sqlschema.SQLSchema, error) { + settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/sqlschema/postgressqlschema") + fmter := Formatter{Formatter: sqlschema.NewFormatter(sqlstore.BunDB().Dialect())} + + return &provider{ + sqlstore: sqlstore, + fmter: fmter, + settings: settings, + operator: 
sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{ + DropConstraint: true, + ColumnIfNotExistsExists: true, + AlterColumnSetNotNull: true, + }), + }, nil +} + +func (provider *provider) Formatter() sqlschema.SQLFormatter { + return provider.fmter +} + +func (provider *provider) Operator() sqlschema.SQLOperator { + return provider.operator +} + +func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) { + rows, err := provider. + sqlstore. + BunDB(). + QueryContext(ctx, ` +SELECT + c.column_name, + c.is_nullable = 'YES', + c.udt_name, + c.column_default +FROM + information_schema.columns AS c +WHERE + c.table_name = ?`, string(tableName)) + if err != nil { + return nil, nil, err + } + + defer func() { + if err := rows.Close(); err != nil { + provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err) + } + }() + + columns := make([]*sqlschema.Column, 0) + for rows.Next() { + var ( + name string + sqlDataType string + nullable bool + defaultVal *string + ) + if err := rows.Scan(&name, &nullable, &sqlDataType, &defaultVal); err != nil { + return nil, nil, err + } + + columnDefault := "" + if defaultVal != nil { + columnDefault = *defaultVal + } + + columns = append(columns, &sqlschema.Column{ + Name: sqlschema.ColumnName(name), + Nullable: nullable, + DataType: provider.fmter.DataTypeOf(sqlDataType), + Default: columnDefault, + }) + } + + constraintsRows, err := provider. + sqlstore. + BunDB(). + QueryContext(ctx, ` +SELECT + c.column_name, + constraint_name, + constraint_type +FROM + information_schema.table_constraints tc + JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_catalog, table_name, constraint_name) + JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema AND tc.table_name = c.table_name AND ccu.column_name = c.column_name +WHERE + c.table_name = ?`, string(tableName)) + if err != nil { + return nil, nil, err + } + + defer func() { + if err := constraintsRows.Close(); err != nil { + provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err) + } + }() + + var primaryKeyConstraint *sqlschema.PrimaryKeyConstraint + uniqueConstraintsMap := make(map[string]*sqlschema.UniqueConstraint) + for constraintsRows.Next() { + var ( + name string + constraintName string + constraintType string + ) + + if err := constraintsRows.Scan(&name, &constraintName, &constraintType); err != nil { + return nil, nil, err + } + + if constraintType == "PRIMARY KEY" { + if primaryKeyConstraint == nil { + primaryKeyConstraint = (&sqlschema.PrimaryKeyConstraint{ + ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(name)}, + }).Named(constraintName).(*sqlschema.PrimaryKeyConstraint) + } else { + primaryKeyConstraint.ColumnNames = append(primaryKeyConstraint.ColumnNames, sqlschema.ColumnName(name)) + } + } + + if constraintType == "UNIQUE" { + if _, ok := uniqueConstraintsMap[constraintName]; !ok { + uniqueConstraintsMap[constraintName] = (&sqlschema.UniqueConstraint{ + ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(name)}, + }).Named(constraintName).(*sqlschema.UniqueConstraint) + } else { + uniqueConstraintsMap[constraintName].ColumnNames = append(uniqueConstraintsMap[constraintName].ColumnNames, sqlschema.ColumnName(name)) + } + } + } + + foreignKeyConstraintsRows, err := provider. + sqlstore. + BunDB(). 
+ QueryContext(ctx, ` +SELECT + tc.constraint_name, + kcu.table_name AS referencing_table, + kcu.column_name AS referencing_column, + ccu.table_name AS referenced_table, + ccu.column_name AS referenced_column +FROM + information_schema.key_column_usage kcu + JOIN information_schema.table_constraints tc ON kcu.constraint_name = tc.constraint_name AND kcu.table_schema = tc.table_schema + JOIN information_schema.constraint_column_usage ccu ON ccu.constraint_name = tc.constraint_name AND ccu.table_schema = tc.table_schema +WHERE + tc.constraint_type = ? + AND kcu.table_name = ?`, "FOREIGN KEY", string(tableName)) + if err != nil { + return nil, nil, err + } + + defer func() { + if err := foreignKeyConstraintsRows.Close(); err != nil { + provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err) + } + }() + + foreignKeyConstraints := make([]*sqlschema.ForeignKeyConstraint, 0) + for foreignKeyConstraintsRows.Next() { + var ( + constraintName string + referencingTable string + referencingColumn string + referencedTable string + referencedColumn string + ) + + if err := foreignKeyConstraintsRows.Scan(&constraintName, &referencingTable, &referencingColumn, &referencedTable, &referencedColumn); err != nil { + return nil, nil, err + } + + foreignKeyConstraints = append(foreignKeyConstraints, (&sqlschema.ForeignKeyConstraint{ + ReferencingColumnName: sqlschema.ColumnName(referencingColumn), + ReferencedTableName: sqlschema.TableName(referencedTable), + ReferencedColumnName: sqlschema.ColumnName(referencedColumn), + }).Named(constraintName).(*sqlschema.ForeignKeyConstraint)) + } + + uniqueConstraints := make([]*sqlschema.UniqueConstraint, 0) + for _, uniqueConstraint := range uniqueConstraintsMap { + uniqueConstraints = append(uniqueConstraints, uniqueConstraint) + } + + return &sqlschema.Table{ + Name: tableName, + Columns: columns, + PrimaryKeyConstraint: primaryKeyConstraint, + ForeignKeyConstraints: foreignKeyConstraints, + }, uniqueConstraints, nil +} + +func (provider *provider) GetIndices(ctx context.Context, name sqlschema.TableName) ([]sqlschema.Index, error) { + rows, err := provider. + sqlstore. + BunDB(). 
+ QueryContext(ctx, ` +SELECT + ct.relname AS table_name, + ci.relname AS index_name, + i.indisunique AS unique, + i.indisprimary AS primary, + a.attname AS column_name +FROM + pg_index i + LEFT JOIN pg_class ct ON ct.oid = i.indrelid + LEFT JOIN pg_class ci ON ci.oid = i.indexrelid + LEFT JOIN pg_attribute a ON a.attrelid = ct.oid + LEFT JOIN pg_constraint con ON con.conindid = i.indexrelid +WHERE + a.attnum = ANY(i.indkey) + AND con.oid IS NULL + AND ct.relkind = 'r' + AND ct.relname = ?`, string(name)) + if err != nil { + return nil, err + } + + defer func() { + if err := rows.Close(); err != nil { + provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err) + } + }() + + uniqueIndicesMap := make(map[string]*sqlschema.UniqueIndex) + for rows.Next() { + var ( + tableName string + indexName string + unique bool + primary bool + columnName string + ) + + if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName); err != nil { + return nil, err + } + + if unique { + if _, ok := uniqueIndicesMap[indexName]; !ok { + uniqueIndicesMap[indexName] = &sqlschema.UniqueIndex{ + TableName: name, + ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(columnName)}, + } + } else { + uniqueIndicesMap[indexName].ColumnNames = append(uniqueIndicesMap[indexName].ColumnNames, sqlschema.ColumnName(columnName)) + } + } + } + + indices := make([]sqlschema.Index, 0) + for _, index := range uniqueIndicesMap { + indices = append(indices, index) + } + + return indices, nil +} + +func (provider *provider) ToggleFKEnforcement(_ context.Context, _ bun.IDB, _ bool) error { + return nil +} diff --git a/pkg/query-service/main.go b/pkg/query-service/main.go index eb41bce8bdd8..90cad098a876 100644 --- a/pkg/query-service/main.go +++ b/pkg/query-service/main.go @@ -17,6 +17,7 @@ import ( "github.com/SigNoz/signoz/pkg/query-service/app" "github.com/SigNoz/signoz/pkg/query-service/constants" "github.com/SigNoz/signoz/pkg/signoz" + "github.com/SigNoz/signoz/pkg/sqlschema" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/types/authtypes" "github.com/SigNoz/signoz/pkg/version" @@ -137,6 +138,9 @@ func main() { signoz.NewEmailingProviderFactories(), signoz.NewCacheProviderFactories(), signoz.NewWebProviderFactories(), + func(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] { + return signoz.NewSQLSchemaProviderFactories(sqlstore) + }, signoz.NewSQLStoreProviderFactories(), signoz.NewTelemetryStoreProviderFactories(), ) diff --git a/pkg/signoz/config.go b/pkg/signoz/config.go index 79aecec424ca..63222c5ef4c2 100644 --- a/pkg/signoz/config.go +++ b/pkg/signoz/config.go @@ -24,6 +24,7 @@ import ( "github.com/SigNoz/signoz/pkg/sharder" "github.com/SigNoz/signoz/pkg/sqlmigration" "github.com/SigNoz/signoz/pkg/sqlmigrator" + "github.com/SigNoz/signoz/pkg/sqlschema" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/statsreporter" "github.com/SigNoz/signoz/pkg/telemetrystore" @@ -57,6 +58,9 @@ type Config struct { // SQLMigrator config SQLMigrator sqlmigrator.Config `mapstructure:"sqlmigrator"` + // SQLSchema config + SQLSchema sqlschema.Config `mapstructure:"sqlschema"` + // API Server config APIServer apiserver.Config `mapstructure:"apiserver"` @@ -111,6 +115,7 @@ func NewConfig(ctx context.Context, resolverConfig config.ResolverConfig, deprec cache.NewConfigFactory(), sqlstore.NewConfigFactory(), sqlmigrator.NewConfigFactory(), + sqlschema.NewConfigFactory(), apiserver.NewConfigFactory(), 
telemetrystore.NewConfigFactory(), prometheus.NewConfigFactory(), diff --git a/pkg/signoz/provider.go b/pkg/signoz/provider.go index a18e05d7c066..af589abe4e12 100644 --- a/pkg/signoz/provider.go +++ b/pkg/signoz/provider.go @@ -26,6 +26,8 @@ import ( "github.com/SigNoz/signoz/pkg/sharder/noopsharder" "github.com/SigNoz/signoz/pkg/sharder/singlesharder" "github.com/SigNoz/signoz/pkg/sqlmigration" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlschema/sqlitesqlschema" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore/sqlitesqlstore" "github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook" @@ -69,7 +71,13 @@ func NewSQLStoreProviderFactories() factory.NamedMap[factory.ProviderFactory[sql ) } -func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlmigration.SQLMigration, sqlmigration.Config]] { +func NewSQLSchemaProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]] { + return factory.MustNewNamedMap( + sqlitesqlschema.NewFactory(sqlstore), + ) +} + +func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.NamedMap[factory.ProviderFactory[sqlmigration.SQLMigration, sqlmigration.Config]] { return factory.MustNewNamedMap( sqlmigration.NewAddDataMigrationsFactory(), sqlmigration.NewAddOrganizationFactory(), @@ -112,6 +120,10 @@ func NewSQLMigrationProviderFactories(sqlstore sqlstore.SQLStore) factory.NamedM sqlmigration.NewDropFeatureSetFactory(), sqlmigration.NewDropDeprecatedTablesFactory(), sqlmigration.NewUpdateAgentsFactory(sqlstore), + sqlmigration.NewUpdateUsersFactory(sqlstore, sqlschema), + sqlmigration.NewUpdateUserInviteFactory(sqlstore, sqlschema), + sqlmigration.NewUpdateOrgDomainFactory(sqlstore, sqlschema), + sqlmigration.NewAddFactorIndexesFactory(sqlstore, sqlschema), ) } diff --git a/pkg/signoz/provider_test.go b/pkg/signoz/provider_test.go index 072a7dd917b1..a034d7296363 100644 --- a/pkg/signoz/provider_test.go +++ b/pkg/signoz/provider_test.go @@ -8,6 +8,8 @@ import ( "github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest" "github.com/SigNoz/signoz/pkg/modules/organization/implorganization" "github.com/SigNoz/signoz/pkg/modules/user/impluser" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlschema/sqlschematest" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest" "github.com/SigNoz/signoz/pkg/statsreporter" @@ -38,7 +40,7 @@ func TestNewProviderFactories(t *testing.T) { }) assert.NotPanics(t, func() { - NewSQLMigrationProviderFactories(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)) + NewSQLMigrationProviderFactories(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual), sqlschematest.New(map[string]*sqlschema.Table{}, map[string][]*sqlschema.UniqueConstraint{}, map[string]sqlschema.Index{})) }) assert.NotPanics(t, func() { diff --git a/pkg/signoz/signoz.go b/pkg/signoz/signoz.go index 30e94eb4e423..7cc9dee2100a 100644 --- a/pkg/signoz/signoz.go +++ b/pkg/signoz/signoz.go @@ -19,6 +19,7 @@ import ( "github.com/SigNoz/signoz/pkg/sharder" "github.com/SigNoz/signoz/pkg/sqlmigration" "github.com/SigNoz/signoz/pkg/sqlmigrator" + "github.com/SigNoz/signoz/pkg/sqlschema" "github.com/SigNoz/signoz/pkg/sqlstore" "github.com/SigNoz/signoz/pkg/statsreporter" "github.com/SigNoz/signoz/pkg/telemetrystore" @@ -61,6 +62,7 @@ 
func New( emailingProviderFactories factory.NamedMap[factory.ProviderFactory[emailing.Emailing, emailing.Config]], cacheProviderFactories factory.NamedMap[factory.ProviderFactory[cache.Cache, cache.Config]], webProviderFactories factory.NamedMap[factory.ProviderFactory[web.Web, web.Config]], + sqlSchemaProviderFactories func(sqlstore.SQLStore) factory.NamedMap[factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config]], sqlstoreProviderFactories factory.NamedMap[factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config]], telemetrystoreProviderFactories factory.NamedMap[factory.ProviderFactory[telemetrystore.TelemetryStore, telemetrystore.Config]], ) (*SigNoz, error) { @@ -183,12 +185,23 @@ func New( return nil, err } + sqlschema, err := factory.NewProviderFromNamedMap( + ctx, + providerSettings, + config.SQLSchema, + sqlSchemaProviderFactories(sqlstore), + config.SQLStore.Provider, + ) + if err != nil { + return nil, err + } + // Run migrations on the sqlstore sqlmigrations, err := sqlmigration.New( ctx, providerSettings, config.SQLMigration, - NewSQLMigrationProviderFactories(sqlstore), + NewSQLMigrationProviderFactories(sqlstore, sqlschema), ) if err != nil { return nil, err diff --git a/pkg/sqlmigration/042_update_users.go b/pkg/sqlmigration/042_update_users.go new file mode 100644 index 000000000000..5220ae3d8875 --- /dev/null +++ b/pkg/sqlmigration/042_update_users.go @@ -0,0 +1,85 @@ +package sqlmigration + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate" +) + +type updateUsers struct { + sqlstore sqlstore.SQLStore + sqlschema sqlschema.SQLSchema +} + +func NewUpdateUsersFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] { + return factory.NewProviderFactory(factory.MustNewName("update_users"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) { + return newUpdateUsers(ctx, providerSettings, config, sqlstore, sqlschema) + }) +} + +func newUpdateUsers(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) (SQLMigration, error) { + return &updateUsers{ + sqlstore: sqlstore, + sqlschema: sqlschema, + }, nil +} + +func (migration *updateUsers) Register(migrations *migrate.Migrations) error { + if err := migrations.Register(migration.Up, migration.Down); err != nil { + return err + } + + return nil +} + +func (migration *updateUsers) Up(ctx context.Context, db *bun.DB) error { + if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil { + return err + } + + tx, err := db.BeginTx(ctx, nil) + if err != nil { + return err + } + + defer func() { + _ = tx.Rollback() + }() + + table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users")) + if err != nil { + return err + } + + sqls := [][]byte{} + + dropSQLs := migration.sqlschema.Operator().DropConstraint(table, uniqueConstraints, &sqlschema.UniqueConstraint{ColumnNames: []sqlschema.ColumnName{"email"}}) + sqls = append(sqls, dropSQLs...) + + indexSQLs := migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "users", ColumnNames: []sqlschema.ColumnName{"email", "org_id"}}) + sqls = append(sqls, indexSQLs...) 
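+
+	// Illustration only: based on Operator.DropConstraint and UniqueIndex.ToCreateSQL in this
+	// patch, the drop above should render as
+	//   ALTER TABLE "users" DROP CONSTRAINT IF EXISTS "<constraint name reported by the database>"
+	// on dialects that support DROP CONSTRAINT (such as Postgres), and is expected to fall back to
+	// recreating the table on SQLite, while the index statement renders as
+	//   CREATE UNIQUE INDEX IF NOT EXISTS "uq_users_email_org_id" ON "users" ("email", "org_id").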
+ + for _, sql := range sqls { + if _, err := tx.ExecContext(ctx, string(sql)); err != nil { + return err + } + } + + if err := tx.Commit(); err != nil { + return err + } + + if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil { + return err + } + + return nil +} + +func (migration *updateUsers) Down(ctx context.Context, db *bun.DB) error { + return nil +} diff --git a/pkg/sqlmigration/043_update_user_invite.go b/pkg/sqlmigration/043_update_user_invite.go new file mode 100644 index 000000000000..0bd2e60850c4 --- /dev/null +++ b/pkg/sqlmigration/043_update_user_invite.go @@ -0,0 +1,88 @@ +package sqlmigration + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate" +) + +type updateUserInvite struct { + sqlstore sqlstore.SQLStore + sqlschema sqlschema.SQLSchema +} + +func NewUpdateUserInviteFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] { + return factory.NewProviderFactory(factory.MustNewName("update_user_invite"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) { + return newUpdateUserInvite(ctx, providerSettings, config, sqlstore, sqlschema) + }) +} + +func newUpdateUserInvite(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) (SQLMigration, error) { + return &updateUserInvite{ + sqlstore: sqlstore, + sqlschema: sqlschema, + }, nil +} + +func (migration *updateUserInvite) Register(migrations *migrate.Migrations) error { + if err := migrations.Register(migration.Up, migration.Down); err != nil { + return err + } + + return nil +} + +func (migration *updateUserInvite) Up(ctx context.Context, db *bun.DB) error { + if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil { + return err + } + + tx, err := db.BeginTx(ctx, nil) + if err != nil { + return err + } + + defer func() { + _ = tx.Rollback() + }() + + table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("user_invite")) + if err != nil { + return err + } + + sqls := [][]byte{} + + dropSQLs := migration.sqlschema.Operator().DropConstraint(table, uniqueConstraints, &sqlschema.UniqueConstraint{ColumnNames: []sqlschema.ColumnName{"email"}}) + sqls = append(sqls, dropSQLs...) + + indexSQLs := migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "user_invite", ColumnNames: []sqlschema.ColumnName{"email", "org_id"}}) + sqls = append(sqls, indexSQLs...) + + indexSQLs = migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "user_invite", ColumnNames: []sqlschema.ColumnName{"token"}}) + sqls = append(sqls, indexSQLs...) 
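+
+	// Illustration only: with the autogenerated names from UniqueIndex.Name, the two index
+	// statements above should render as "uq_user_invite_email_org_id" and "uq_user_invite_token";
+	// the email constraint drop follows the same dialect-dependent path as in the users migration.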
+ + for _, sql := range sqls { + if _, err := tx.ExecContext(ctx, string(sql)); err != nil { + return err + } + } + + if err := tx.Commit(); err != nil { + return err + } + + if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil { + return err + } + + return nil +} + +func (migration *updateUserInvite) Down(ctx context.Context, db *bun.DB) error { + return nil +} diff --git a/pkg/sqlmigration/044_update_org_domain.go b/pkg/sqlmigration/044_update_org_domain.go new file mode 100644 index 000000000000..82cb392eb725 --- /dev/null +++ b/pkg/sqlmigration/044_update_org_domain.go @@ -0,0 +1,85 @@ +package sqlmigration + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate" +) + +type updateOrgDomain struct { + sqlstore sqlstore.SQLStore + sqlschema sqlschema.SQLSchema +} + +func NewUpdateOrgDomainFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] { + return factory.NewProviderFactory(factory.MustNewName("update_org_domain"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) { + return newUpdateOrgDomain(ctx, providerSettings, config, sqlstore, sqlschema) + }) +} + +func newUpdateOrgDomain(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) (SQLMigration, error) { + return &updateOrgDomain{ + sqlstore: sqlstore, + sqlschema: sqlschema, + }, nil +} + +func (migration *updateOrgDomain) Register(migrations *migrate.Migrations) error { + if err := migrations.Register(migration.Up, migration.Down); err != nil { + return err + } + + return nil +} + +func (migration *updateOrgDomain) Up(ctx context.Context, db *bun.DB) error { + if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil { + return err + } + + tx, err := db.BeginTx(ctx, nil) + if err != nil { + return err + } + + defer func() { + _ = tx.Rollback() + }() + + table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("org_domains")) + if err != nil { + return err + } + + sqls := [][]byte{} + + dropSQLs := migration.sqlschema.Operator().DropConstraint(table, uniqueConstraints, &sqlschema.UniqueConstraint{ColumnNames: []sqlschema.ColumnName{"name"}}) + sqls = append(sqls, dropSQLs...) + + indexSQLs := migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "org_domains", ColumnNames: []sqlschema.ColumnName{"name", "org_id"}}) + sqls = append(sqls, indexSQLs...) 
+ + for _, sql := range sqls { + if _, err := tx.ExecContext(ctx, string(sql)); err != nil { + return err + } + } + + if err := tx.Commit(); err != nil { + return err + } + + if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil { + return err + } + + return nil +} + +func (migration *updateOrgDomain) Down(ctx context.Context, db *bun.DB) error { + return nil +} diff --git a/pkg/sqlmigration/045_add_factor_indexes.go b/pkg/sqlmigration/045_add_factor_indexes.go new file mode 100644 index 000000000000..ef1080cb0420 --- /dev/null +++ b/pkg/sqlmigration/045_add_factor_indexes.go @@ -0,0 +1,75 @@ +package sqlmigration + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate" +) + +type addFactorIndexes struct { + sqlstore sqlstore.SQLStore + sqlschema sqlschema.SQLSchema +} + +func NewAddFactorIndexesFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] { + return factory.NewProviderFactory(factory.MustNewName("add_factor_indexes"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) { + return newAddFactorIndexes(ctx, providerSettings, config, sqlstore, sqlschema) + }) +} + +func newAddFactorIndexes(_ context.Context, _ factory.ProviderSettings, _ Config, sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) (SQLMigration, error) { + return &addFactorIndexes{ + sqlstore: sqlstore, + sqlschema: sqlschema, + }, nil +} + +func (migration *addFactorIndexes) Register(migrations *migrate.Migrations) error { + if err := migrations.Register(migration.Up, migration.Down); err != nil { + return err + } + + return nil +} + +func (migration *addFactorIndexes) Up(ctx context.Context, db *bun.DB) error { + tx, err := db.BeginTx(ctx, nil) + if err != nil { + return err + } + + defer func() { + _ = tx.Rollback() + }() + + sqls := [][]byte{} + + indexSQLs := migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "factor_password", ColumnNames: []sqlschema.ColumnName{"user_id"}}) + sqls = append(sqls, indexSQLs...) + + indexSQLs = migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "reset_password_token", ColumnNames: []sqlschema.ColumnName{"password_id"}}) + sqls = append(sqls, indexSQLs...) + + indexSQLs = migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "reset_password_token", ColumnNames: []sqlschema.ColumnName{"token"}}) + sqls = append(sqls, indexSQLs...) + + for _, sql := range sqls { + if _, err := tx.ExecContext(ctx, string(sql)); err != nil { + return err + } + } + + if err := tx.Commit(); err != nil { + return err + } + + return nil +} + +func (migration *addFactorIndexes) Down(ctx context.Context, db *bun.DB) error { + return nil +} diff --git a/pkg/sqlmigration/sqlmigration.go b/pkg/sqlmigration/sqlmigration.go index 5ec189090c8d..3365c50ae15f 100644 --- a/pkg/sqlmigration/sqlmigration.go +++ b/pkg/sqlmigration/sqlmigration.go @@ -14,8 +14,10 @@ type SQLMigration interface { // Register registers the migration with the given migrations. Each migration needs to be registered //in a dedicated `*.go` file so that the correct migration semantics can be detected. Register(*migrate.Migrations) error + // Up runs the migration. Up(context.Context, *bun.DB) error + // Down rolls back the migration. 
Down(context.Context, *bun.DB) error } @@ -66,5 +68,6 @@ func MustNew( if err != nil { panic(err) } + return migrations } diff --git a/pkg/sqlschema/column.go b/pkg/sqlschema/column.go new file mode 100644 index 000000000000..470fbb8c81d8 --- /dev/null +++ b/pkg/sqlschema/column.go @@ -0,0 +1,130 @@ +package sqlschema + +import ( + "time" + + "github.com/SigNoz/signoz/pkg/valuer" +) + +var ( + DataTypeText = DataType{s: valuer.NewString("TEXT"), z: ""} + DataTypeBigInt = DataType{s: valuer.NewString("BIGINT"), z: int64(0)} + DataTypeInteger = DataType{s: valuer.NewString("INTEGER"), z: int64(0)} + DataTypeNumeric = DataType{s: valuer.NewString("NUMERIC"), z: float64(0)} + DataTypeBoolean = DataType{s: valuer.NewString("BOOLEAN"), z: false} + DataTypeTimestamp = DataType{s: valuer.NewString("TIMESTAMP"), z: time.Time{}} +) + +type DataType struct { + s valuer.String + z any +} + +func (d DataType) String() string { + return d.s.String() +} + +type ColumnName string + +type Column struct { + // The name of the column in the table. + Name ColumnName + + // The data type of the column. This will be translated to the the appropriate data type as per the dialect. + DataType DataType + + // Whether the column is nullable. + Nullable bool + + // The default value of the column. + Default string +} + +func (column *Column) ToDefinitionSQL(fmter SQLFormatter) []byte { + sql := []byte{} + + sql = fmter.AppendIdent(sql, string(column.Name)) + sql = append(sql, " "...) + sql = append(sql, fmter.SQLDataTypeOf(column.DataType)...) + + if !column.Nullable { + sql = append(sql, " NOT NULL"...) + } + + if column.Default != "" { + sql = append(sql, " DEFAULT "...) + sql = append(sql, column.Default...) + } + + return sql +} + +func (column *Column) ToAddSQL(fmter SQLFormatter, tableName TableName, ifNotExists bool) []byte { + sql := []byte{} + + sql = append(sql, "ALTER TABLE "...) + sql = fmter.AppendIdent(sql, string(tableName)) + sql = append(sql, " ADD COLUMN "...) + if ifNotExists { + sql = append(sql, "IF NOT EXISTS "...) + } + + if column.Default == "" && !column.Nullable { + adjustedColumn := &Column{ + Name: column.Name, + DataType: column.DataType, + Nullable: true, + Default: column.Default, + } + + sql = append(sql, adjustedColumn.ToDefinitionSQL(fmter)...) + } else { + sql = append(sql, column.ToDefinitionSQL(fmter)...) + } + + return sql +} + +func (column *Column) ToDropSQL(fmter SQLFormatter, tableName TableName, ifExists bool) []byte { + sql := []byte{} + + sql = append(sql, "ALTER TABLE "...) + sql = fmter.AppendIdent(sql, string(tableName)) + sql = append(sql, " DROP COLUMN "...) + if ifExists { + sql = append(sql, "IF EXISTS "...) + } + sql = fmter.AppendIdent(sql, string(column.Name)) + + return sql +} + +func (column *Column) ToUpdateSQL(fmter SQLFormatter, tableName TableName, value any) []byte { + sql := []byte{} + + sql = append(sql, "UPDATE "...) + sql = fmter.AppendIdent(sql, string(tableName)) + sql = append(sql, " SET "...) + sql = fmter.AppendIdent(sql, string(column.Name)) + sql = append(sql, " = "...) + + if v, ok := value.(ColumnName); ok { + sql = fmter.AppendIdent(sql, string(v)) + } else { + sql = fmter.AppendValue(sql, value) + } + + return sql +} + +func (column *Column) ToSetNotNullSQL(fmter SQLFormatter, tableName TableName) []byte { + sql := []byte{} + + sql = append(sql, "ALTER TABLE "...) + sql = fmter.AppendIdent(sql, string(tableName)) + sql = append(sql, " ALTER COLUMN "...) 
+ sql = fmter.AppendIdent(sql, string(column.Name)) + sql = append(sql, " SET NOT NULL"...) + + return sql +} diff --git a/pkg/sqlschema/column_test.go b/pkg/sqlschema/column_test.go new file mode 100644 index 000000000000..c283bae810ed --- /dev/null +++ b/pkg/sqlschema/column_test.go @@ -0,0 +1,118 @@ +package sqlschema + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/uptrace/bun/schema" +) + +func TestColumnToDefinitionSQL(t *testing.T) { + testCases := []struct { + name string + column Column + sql string + }{ + { + name: "TimestampNotNullNoDefault", + column: Column{Name: "created_at", DataType: DataTypeTimestamp, Nullable: false}, + sql: `"created_at" TIMESTAMP NOT NULL`, + }, + { + name: "TimestampNotNullWithDefault", + column: Column{Name: "created_at", DataType: DataTypeTimestamp, Nullable: false, Default: "CURRENT_TIMESTAMP"}, + sql: `"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP`, + }, + { + name: "TimestampNullableNoDefault", + column: Column{Name: "created_at", DataType: DataTypeTimestamp, Nullable: true}, + sql: `"created_at" TIMESTAMP`, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + fmter := Formatter{schema.NewNopFormatter()} + sql := testCase.column.ToDefinitionSQL(fmter) + + assert.Equal(t, testCase.sql, string(sql)) + }) + } +} + +func TestColumnToUpdateSQL(t *testing.T) { + testCases := []struct { + name string + column Column + tableName TableName + value any + sql string + }{ + { + name: "Timestamp", + column: Column{Name: "ts", DataType: DataTypeTimestamp}, + tableName: "test", + value: time.Time{}, + sql: `UPDATE "test" SET "ts" = '0001-01-01 00:00:00+00:00'`, + }, + { + name: "Integer", + column: Column{Name: "i", DataType: DataTypeInteger}, + tableName: "test", + value: 1, + sql: `UPDATE "test" SET "i" = 1`, + }, + { + name: "Text", + column: Column{Name: "t", DataType: DataTypeText}, + tableName: "test", + value: "test", + sql: `UPDATE "test" SET "t" = 'test'`, + }, + { + name: "BigInt", + column: Column{Name: "bi", DataType: DataTypeBigInt}, + tableName: "test", + value: 1, + sql: `UPDATE "test" SET "bi" = 1`, + }, + { + name: "Numeric", + column: Column{Name: "n", DataType: DataTypeNumeric}, + tableName: "test", + value: 1.1, + sql: `UPDATE "test" SET "n" = 1.1`, + }, + { + name: "Boolean", + column: Column{Name: "b", DataType: DataTypeBoolean}, + tableName: "test", + value: true, + sql: `UPDATE "test" SET "b" = TRUE`, + }, + { + name: "Null", + column: Column{Name: "n", DataType: DataTypeNumeric}, + tableName: "test", + value: nil, + sql: `UPDATE "test" SET "n" = NULL`, + }, + { + name: "ColumnName", + column: Column{Name: "n", DataType: DataTypeNumeric}, + tableName: "test", + value: ColumnName("n"), + sql: `UPDATE "test" SET "n" = "n"`, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + fmter := Formatter{schema.NewNopFormatter()} + sql := testCase.column.ToUpdateSQL(fmter, testCase.tableName, testCase.value) + + assert.Equal(t, testCase.sql, string(sql)) + }) + } +} diff --git a/pkg/sqlschema/config.go b/pkg/sqlschema/config.go new file mode 100644 index 000000000000..ed659a56d43e --- /dev/null +++ b/pkg/sqlschema/config.go @@ -0,0 +1,17 @@ +package sqlschema + +import "github.com/SigNoz/signoz/pkg/factory" + +type Config struct{} + +func NewConfigFactory() factory.ConfigFactory { + return factory.NewConfigFactory(factory.MustNewName("sqlschema"), newConfig) +} + +func newConfig() factory.Config { + return Config{} 
+} + +func (c Config) Validate() error { + return nil +} diff --git a/pkg/sqlschema/constraint.go b/pkg/sqlschema/constraint.go new file mode 100644 index 000000000000..3b0b52c2901a --- /dev/null +++ b/pkg/sqlschema/constraint.go @@ -0,0 +1,325 @@ +package sqlschema + +import ( + "strings" + + "github.com/SigNoz/signoz/pkg/valuer" +) + +var ( + ConstraintTypePrimaryKey = ConstraintType{s: valuer.NewString("pk")} + ConstraintTypeForeignKey = ConstraintType{s: valuer.NewString("fk")} + ConstraintTypeCheck = ConstraintType{s: valuer.NewString("ck")} + ConstraintTypeUnique = ConstraintType{s: valuer.NewString("uq")} +) + +type ConstraintType struct{ s valuer.String } + +func (c ConstraintType) String() string { + return c.s.String() +} + +var ( + _ Constraint = (*PrimaryKeyConstraint)(nil) + _ Constraint = (*ForeignKeyConstraint)(nil) + _ Constraint = (*UniqueConstraint)(nil) +) + +type Constraint interface { + // The name of the constraint. This will be autogenerated and should not be set by the user. + // - Primary keys are named as `pk_`. + // - Foreign key constraints are named as `fk__`. + // - Check constraints are named as `ck__`. The name is the name of the check constraint. + Name(tableName TableName) string + + // Add name to the constraint. This is typically used to override the autogenerated name because the database might have a different name. + Named(name string) Constraint + + // The type of the constraint. + Type() ConstraintType + + // The columns that the constraint is applied to. + Columns() []ColumnName + + // Equals returns true if the constraint is equal to the other constraint. + Equals(other Constraint) bool + + // The SQL representation of the constraint. + ToDefinitionSQL(fmter SQLFormatter, tableName TableName) []byte + + // The SQL representation of the constraint. + ToDropSQL(fmter SQLFormatter, tableName TableName) []byte +} + +type PrimaryKeyConstraint struct { + ColumnNames []ColumnName + name string +} + +func (constraint *PrimaryKeyConstraint) Name(tableName TableName) string { + if constraint.name != "" { + return constraint.name + } + + var b strings.Builder + b.WriteString(ConstraintTypePrimaryKey.String()) + b.WriteString("_") + b.WriteString(string(tableName)) + return b.String() +} + +func (constraint *PrimaryKeyConstraint) Named(name string) Constraint { + copyOfColumnNames := make([]ColumnName, len(constraint.ColumnNames)) + copy(copyOfColumnNames, constraint.ColumnNames) + + return &PrimaryKeyConstraint{ + ColumnNames: copyOfColumnNames, + name: name, + } +} + +func (constraint *PrimaryKeyConstraint) Type() ConstraintType { + return ConstraintTypePrimaryKey +} + +func (constraint *PrimaryKeyConstraint) Columns() []ColumnName { + return constraint.ColumnNames +} + +func (constraint *PrimaryKeyConstraint) Equals(other Constraint) bool { + if other.Type() != ConstraintTypePrimaryKey { + return false + } + + if len(constraint.ColumnNames) != len(other.Columns()) { + return false + } + + foundColumns := make(map[ColumnName]bool) + for _, column := range constraint.ColumnNames { + foundColumns[column] = true + } + + for _, column := range other.Columns() { + if !foundColumns[column] { + return false + } + } + + return true +} + +func (constraint *PrimaryKeyConstraint) ToDefinitionSQL(fmter SQLFormatter, tableName TableName) []byte { + sql := []byte{} + + sql = append(sql, "CONSTRAINT "...) + sql = fmter.AppendIdent(sql, constraint.Name(tableName)) + sql = append(sql, " PRIMARY KEY ("...) 
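+	// The loop below appends the quoted column list; for table "test" with columns ("id", "name")
+	// the full definition renders as `CONSTRAINT "pk_test" PRIMARY KEY ("id", "name")`
+	// (see the expectations in constraint_test.go).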
+ + for i, column := range constraint.ColumnNames { + if i > 0 { + sql = append(sql, ", "...) + } + sql = fmter.AppendIdent(sql, string(column)) + } + + sql = append(sql, ")"...) + + return sql +} + +func (constraint *PrimaryKeyConstraint) ToDropSQL(fmter SQLFormatter, tableName TableName) []byte { + sql := []byte{} + + sql = append(sql, "ALTER TABLE "...) + sql = fmter.AppendIdent(sql, string(tableName)) + sql = append(sql, " DROP CONSTRAINT IF EXISTS "...) + sql = fmter.AppendIdent(sql, constraint.Name(tableName)) + + return sql +} + +type ForeignKeyConstraint struct { + ReferencingColumnName ColumnName + ReferencedTableName TableName + ReferencedColumnName ColumnName + name string +} + +func (constraint *ForeignKeyConstraint) Name(tableName TableName) string { + if constraint.name != "" { + return constraint.name + } + + var b strings.Builder + b.WriteString(ConstraintTypeForeignKey.String()) + b.WriteString("_") + b.WriteString(string(tableName)) + b.WriteString("_") + b.WriteString(string(constraint.ReferencingColumnName)) + return b.String() +} + +func (constraint *ForeignKeyConstraint) Named(name string) Constraint { + return &ForeignKeyConstraint{ + ReferencingColumnName: constraint.ReferencingColumnName, + ReferencedTableName: constraint.ReferencedTableName, + ReferencedColumnName: constraint.ReferencedColumnName, + name: name, + } +} + +func (constraint *ForeignKeyConstraint) Type() ConstraintType { + return ConstraintTypeForeignKey +} + +func (constraint *ForeignKeyConstraint) Columns() []ColumnName { + return []ColumnName{constraint.ReferencingColumnName} +} + +func (constraint *ForeignKeyConstraint) Equals(other Constraint) bool { + if other.Type() != ConstraintTypeForeignKey { + return false + } + + otherForeignKeyConstraint, ok := other.(*ForeignKeyConstraint) + if !ok { + return false + } + + return constraint.ReferencingColumnName == otherForeignKeyConstraint.ReferencingColumnName && + constraint.ReferencedTableName == otherForeignKeyConstraint.ReferencedTableName && + constraint.ReferencedColumnName == otherForeignKeyConstraint.ReferencedColumnName +} + +func (constraint *ForeignKeyConstraint) ToDefinitionSQL(fmter SQLFormatter, tableName TableName) []byte { + sql := []byte{} + + sql = append(sql, "CONSTRAINT "...) + sql = fmter.AppendIdent(sql, constraint.Name(tableName)) + sql = append(sql, " FOREIGN KEY ("...) + + sql = fmter.AppendIdent(sql, string(constraint.ReferencingColumnName)) + sql = append(sql, ") REFERENCES "...) + sql = fmter.AppendIdent(sql, string(constraint.ReferencedTableName)) + sql = append(sql, " ("...) + sql = fmter.AppendIdent(sql, string(constraint.ReferencedColumnName)) + sql = append(sql, ")"...) + + return sql +} + +func (constraint *ForeignKeyConstraint) ToDropSQL(fmter SQLFormatter, tableName TableName) []byte { + sql := []byte{} + + sql = append(sql, "ALTER TABLE "...) + sql = fmter.AppendIdent(sql, string(tableName)) + sql = append(sql, " DROP CONSTRAINT IF EXISTS "...) + sql = fmter.AppendIdent(sql, constraint.Name(tableName)) + + return sql +} + +// Do not use this constraint type. Instead create an index with the `UniqueIndex` type. +// The main difference between a Unique Index and a Unique Constraint is mostly semantic, with a constraint focusing more on data integrity, while an index focuses on performance. +// We choose to create unique indices because of sqlite. Dropping a unique index is directly supported whilst dropping a unique constraint requires a recreation of the table with the constraint removed. 
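+// For illustration (exact SQL depends on the dialect): instead of emitting
+//   ALTER TABLE "users" ADD CONSTRAINT "uq_users_email" UNIQUE ("email")
+// the Operator converts existing unique constraints via UniqueConstraint.ToIndex and
+// UniqueIndex.ToCreateSQL (see Operator.RecreateTable), producing
+//   CREATE UNIQUE INDEX IF NOT EXISTS "uq_users_email" ON "users" ("email")
+// which can later be removed with a plain DROP INDEX on both SQLite and Postgres.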
+type UniqueConstraint struct { + ColumnNames []ColumnName + name string +} + +func (constraint *UniqueConstraint) Name(tableName TableName) string { + if constraint.name != "" { + return constraint.name + } + + var b strings.Builder + b.WriteString(ConstraintTypeUnique.String()) + b.WriteString("_") + b.WriteString(string(tableName)) + b.WriteString("_") + for i, column := range constraint.ColumnNames { + if i > 0 { + b.WriteString("_") + } + b.WriteString(string(column)) + } + return b.String() +} + +func (constraint *UniqueConstraint) Named(name string) Constraint { + copyOfColumnNames := make([]ColumnName, len(constraint.ColumnNames)) + copy(copyOfColumnNames, constraint.ColumnNames) + + return &UniqueConstraint{ + ColumnNames: copyOfColumnNames, + name: name, + } +} + +func (constraint *UniqueConstraint) Type() ConstraintType { + return ConstraintTypeUnique +} + +func (constraint *UniqueConstraint) Columns() []ColumnName { + return constraint.ColumnNames +} + +func (constraint *UniqueConstraint) Equals(other Constraint) bool { + if other.Type() != ConstraintTypeUnique { + return false + } + + foundColumns := make(map[ColumnName]bool) + for _, column := range constraint.ColumnNames { + foundColumns[column] = true + } + + for _, column := range other.Columns() { + if !foundColumns[column] { + return false + } + } + + return true +} + +func (constraint *UniqueConstraint) ToIndex(tableName TableName) *UniqueIndex { + copyOfColumnNames := make([]ColumnName, len(constraint.ColumnNames)) + copy(copyOfColumnNames, constraint.ColumnNames) + + return &UniqueIndex{ + TableName: tableName, + ColumnNames: copyOfColumnNames, + } +} + +func (constraint *UniqueConstraint) ToDefinitionSQL(fmter SQLFormatter, tableName TableName) []byte { + sql := []byte{} + + sql = append(sql, "CONSTRAINT "...) + sql = fmter.AppendIdent(sql, constraint.Name(tableName)) + sql = append(sql, " UNIQUE ("...) + + for i, column := range constraint.ColumnNames { + if i > 0 { + sql = append(sql, ", "...) + } + sql = fmter.AppendIdent(sql, string(column)) + } + + sql = append(sql, ")"...) + + return sql +} + +func (constraint *UniqueConstraint) ToDropSQL(fmter SQLFormatter, tableName TableName) []byte { + sql := []byte{} + + sql = append(sql, "ALTER TABLE "...) + sql = fmter.AppendIdent(sql, string(tableName)) + sql = append(sql, " DROP CONSTRAINT IF EXISTS "...) 
+ sql = fmter.AppendIdent(sql, constraint.Name(tableName)) + + return sql +} diff --git a/pkg/sqlschema/constraint_test.go b/pkg/sqlschema/constraint_test.go new file mode 100644 index 000000000000..c0088e3db9d9 --- /dev/null +++ b/pkg/sqlschema/constraint_test.go @@ -0,0 +1,72 @@ +package sqlschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/uptrace/bun/schema" +) + +func TestPrimaryKeyConstraintToDefinitionSQL(t *testing.T) { + testCases := []struct { + name string + tableName TableName + constraint *PrimaryKeyConstraint + sql string + }{ + { + name: "SingleColumn", + tableName: "test", + constraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + sql: `CONSTRAINT "pk_test" PRIMARY KEY ("id")`, + }, + { + name: "MultipleColumns", + tableName: "test", + constraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id", "name"}, + }, + sql: `CONSTRAINT "pk_test" PRIMARY KEY ("id", "name")`, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + fmter := Formatter{schema.NewNopFormatter()} + sql := testCase.constraint.ToDefinitionSQL(fmter, testCase.tableName) + + assert.Equal(t, testCase.sql, string(sql)) + }) + } +} + +func TestForeignKeyConstraintToDefinitionSQL(t *testing.T) { + testCases := []struct { + name string + tableName TableName + constraint *ForeignKeyConstraint + sql string + }{ + { + name: "NoCascade", + tableName: "test", + constraint: &ForeignKeyConstraint{ + ReferencingColumnName: "id", + ReferencedTableName: "test_referenced", + ReferencedColumnName: "id", + }, + sql: `CONSTRAINT "fk_test_id" FOREIGN KEY ("id") REFERENCES "test_referenced" ("id")`, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + fmter := Formatter{schema.NewNopFormatter()} + sql := testCase.constraint.ToDefinitionSQL(fmter, testCase.tableName) + + assert.Equal(t, testCase.sql, string(sql)) + }) + } +} diff --git a/pkg/sqlschema/formatter.go b/pkg/sqlschema/formatter.go new file mode 100644 index 000000000000..0f72d7df8983 --- /dev/null +++ b/pkg/sqlschema/formatter.go @@ -0,0 +1,49 @@ +package sqlschema + +import ( + "strings" + + "github.com/SigNoz/signoz/pkg/valuer" + "github.com/uptrace/bun/schema" +) + +var _ SQLFormatter = (*Formatter)(nil) + +type Formatter struct { + bunf schema.Formatter +} + +func NewFormatter(dialect schema.Dialect) Formatter { + return Formatter{bunf: schema.NewFormatter(dialect)} +} + +func (formatter Formatter) SQLDataTypeOf(dataType DataType) string { + return strings.ToUpper(dataType.String()) +} + +func (formatter Formatter) DataTypeOf(dataType string) DataType { + switch strings.ToUpper(dataType) { + case "TEXT": + return DataTypeText + case "BIGINT": + return DataTypeBigInt + case "INTEGER": + return DataTypeInteger + case "NUMERIC": + return DataTypeNumeric + case "BOOLEAN": + return DataTypeBoolean + case "TIMESTAMP": + return DataTypeTimestamp + default: + return DataType{s: valuer.NewString(dataType)} + } +} + +func (formatter Formatter) AppendIdent(b []byte, ident string) []byte { + return formatter.bunf.AppendIdent(b, ident) +} + +func (formatter Formatter) AppendValue(b []byte, v any) []byte { + return schema.Append(formatter.bunf, b, v) +} diff --git a/pkg/sqlschema/index.go b/pkg/sqlschema/index.go new file mode 100644 index 000000000000..1988b1fd5e21 --- /dev/null +++ b/pkg/sqlschema/index.go @@ -0,0 +1,116 @@ +package sqlschema + +import ( + "strings" + + "github.com/SigNoz/signoz/pkg/valuer" +) + +var ( + 
IndexTypeUnique = IndexType{s: valuer.NewString("uq")} + IndexTypeIndex = IndexType{s: valuer.NewString("ix")} +) + +type IndexType struct{ s valuer.String } + +func (i IndexType) String() string { + return i.s.String() +} + +type Index interface { + // The name of the index. + // - Indexes are named as `ix__`. The column names are separated by underscores. + // - Unique constraints are named as `uq__`. The column names are separated by underscores. + // The name is autogenerated and should not be set by the user. + Name() string + + // Add name to the index. This is typically used to override the autogenerated name because the database might have a different name. + Named(name string) Index + + // The type of the index. + Type() IndexType + + // The columns that the index is applied to. + Columns() []ColumnName + + // The SQL representation of the index. + ToCreateSQL(fmter SQLFormatter) []byte + + // Drop the index. + ToDropSQL(fmter SQLFormatter) []byte +} + +type UniqueIndex struct { + TableName TableName + ColumnNames []ColumnName + name string +} + +func (index *UniqueIndex) Name() string { + if index.name != "" { + return index.name + } + + var b strings.Builder + b.WriteString(IndexTypeUnique.String()) + b.WriteString("_") + b.WriteString(string(index.TableName)) + b.WriteString("_") + for i, column := range index.ColumnNames { + if i > 0 { + b.WriteString("_") + } + b.WriteString(string(column)) + } + return b.String() +} + +func (index *UniqueIndex) Named(name string) Index { + copyOfColumnNames := make([]ColumnName, len(index.ColumnNames)) + copy(copyOfColumnNames, index.ColumnNames) + + return &UniqueIndex{ + TableName: index.TableName, + ColumnNames: copyOfColumnNames, + name: name, + } +} + +func (*UniqueIndex) Type() IndexType { + return IndexTypeUnique +} + +func (index *UniqueIndex) Columns() []ColumnName { + return index.ColumnNames +} + +func (index *UniqueIndex) ToCreateSQL(fmter SQLFormatter) []byte { + sql := []byte{} + + sql = append(sql, "CREATE UNIQUE INDEX IF NOT EXISTS "...) + sql = fmter.AppendIdent(sql, index.Name()) + sql = append(sql, " ON "...) + sql = fmter.AppendIdent(sql, string(index.TableName)) + sql = append(sql, " ("...) + + for i, column := range index.ColumnNames { + if i > 0 { + sql = append(sql, ", "...) + } + + sql = fmter.AppendIdent(sql, string(column)) + } + + sql = append(sql, ")"...) + + return sql +} + +func (index *UniqueIndex) ToDropSQL(fmter SQLFormatter) []byte { + sql := []byte{} + + sql = append(sql, "DROP INDEX IF EXISTS "...) 
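+	// Note: the statement does not reference the table; SQLite and Postgres, the providers
+	// wired up in this patch, both accept DROP INDEX IF EXISTS "<name>" without an ON clause.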
+ sql = fmter.AppendIdent(sql, index.Name()) + + return sql +} diff --git a/pkg/sqlschema/index_test.go b/pkg/sqlschema/index_test.go new file mode 100644 index 000000000000..ae554cb88879 --- /dev/null +++ b/pkg/sqlschema/index_test.go @@ -0,0 +1,51 @@ +package sqlschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/uptrace/bun/schema" +) + +func TestIndexToCreateSQL(t *testing.T) { + testCases := []struct { + name string + index Index + sql string + }{ + { + name: "Unique_1Column", + index: &UniqueIndex{ + TableName: "users", + ColumnNames: []ColumnName{"id"}, + }, + sql: `CREATE UNIQUE INDEX IF NOT EXISTS "uq_users_id" ON "users" ("id")`, + }, + { + name: "Unique_2Columns", + index: &UniqueIndex{ + TableName: "users", + ColumnNames: []ColumnName{"id", "name"}, + }, + sql: `CREATE UNIQUE INDEX IF NOT EXISTS "uq_users_id_name" ON "users" ("id", "name")`, + }, + { + name: "Unique_3Columns_Named", + index: &UniqueIndex{ + TableName: "users", + ColumnNames: []ColumnName{"id", "name", "email"}, + name: "my_index", + }, + sql: `CREATE UNIQUE INDEX IF NOT EXISTS "my_index" ON "users" ("id", "name", "email")`, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + fmter := Formatter{schema.NewNopFormatter()} + sql := testCase.index.ToCreateSQL(fmter) + + assert.Equal(t, testCase.sql, string(sql)) + }) + } +} diff --git a/pkg/sqlschema/operator.go b/pkg/sqlschema/operator.go new file mode 100644 index 000000000000..fe8fddcc6e66 --- /dev/null +++ b/pkg/sqlschema/operator.go @@ -0,0 +1,143 @@ +package sqlschema + +var _ SQLOperator = (*Operator)(nil) + +type OperatorSupport struct { + DropConstraint bool + ColumnIfNotExistsExists bool + AlterColumnSetNotNull bool +} + +type Operator struct { + fmter SQLFormatter + support OperatorSupport +} + +func NewOperator(fmter SQLFormatter, support OperatorSupport) *Operator { + return &Operator{ + fmter: fmter, + support: support, + } +} + +func (operator *Operator) CreateTable(table *Table) [][]byte { + return [][]byte{table.ToCreateSQL(operator.fmter)} +} + +func (operator *Operator) RenameTable(table *Table, newName TableName) [][]byte { + table.Name = newName + return [][]byte{table.ToRenameSQL(operator.fmter, newName)} +} + +func (operator *Operator) RecreateTable(table *Table, uniqueConstraints []*UniqueConstraint) [][]byte { + sqls := [][]byte{} + + sqls = append(sqls, table.ToCreateTempInsertDropAlterSQL(operator.fmter)...) + + for _, uniqueConstraint := range uniqueConstraints { + sqls = append(sqls, uniqueConstraint.ToIndex(table.Name).ToCreateSQL(operator.fmter)) + } + + return sqls +} + +func (operator *Operator) DropTable(table *Table) [][]byte { + return [][]byte{table.ToDropSQL(operator.fmter)} +} + +func (operator *Operator) CreateIndex(index Index) [][]byte { + return [][]byte{index.ToCreateSQL(operator.fmter)} +} + +func (operator *Operator) DropIndex(index Index) [][]byte { + return [][]byte{index.ToDropSQL(operator.fmter)} +} + +func (operator *Operator) AddColumn(table *Table, uniqueConstraints []*UniqueConstraint, column *Column, val any) [][]byte { + // If the column already exists, we do not need to add it. + if index := operator.findColumnByName(table, column.Name); index != -1 { + return [][]byte{} + } + + // Add the column to the table. 
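+	// The in-memory table model is mutated before generating SQL so that, when
+	// ALTER COLUMN ... SET NOT NULL is unsupported and RecreateTable is used below,
+	// the recreated table already contains the new column.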
+ table.Columns = append(table.Columns, column) + + sqls := [][]byte{ + column.ToAddSQL(operator.fmter, table.Name, operator.support.ColumnIfNotExistsExists), + } + + if !column.Nullable { + if val == nil { + val = column.DataType.z + } + sqls = append(sqls, column.ToUpdateSQL(operator.fmter, table.Name, val)) + + if operator.support.AlterColumnSetNotNull { + sqls = append(sqls, column.ToSetNotNullSQL(operator.fmter, table.Name)) + } else { + sqls = append(sqls, operator.RecreateTable(table, uniqueConstraints)...) + } + } + + return sqls +} + +func (operator *Operator) DropColumn(table *Table, column *Column) [][]byte { + index := operator.findColumnByName(table, column.Name) + // If the column does not exist, we do not need to drop it. + if index == -1 { + return [][]byte{} + } + + table.Columns = append(table.Columns[:index], table.Columns[index+1:]...) + + return [][]byte{column.ToDropSQL(operator.fmter, table.Name, operator.support.ColumnIfNotExistsExists)} +} + +func (operator *Operator) DropConstraint(table *Table, uniqueConstraints []*UniqueConstraint, constraint Constraint) [][]byte { + // The name of the input constraint is not guaranteed to be the same as the name of the constraint in the database. + // So we need to find the constraint in the database and drop it. + toDropConstraint, found := table.DropConstraint(constraint) + if !found { + uniqueConstraintIndex := operator.findUniqueConstraint(uniqueConstraints, constraint) + if uniqueConstraintIndex == -1 { + return [][]byte{} + } + + if operator.support.DropConstraint { + return [][]byte{uniqueConstraints[uniqueConstraintIndex].ToDropSQL(operator.fmter, table.Name)} + } + + return operator.RecreateTable(table, append(uniqueConstraints[:uniqueConstraintIndex], uniqueConstraints[uniqueConstraintIndex+1:]...)) + } + + if operator.support.DropConstraint { + return [][]byte{toDropConstraint.ToDropSQL(operator.fmter, table.Name)} + } + + return operator.RecreateTable(table, uniqueConstraints) +} + +func (*Operator) findColumnByName(table *Table, columnName ColumnName) int { + for i, column := range table.Columns { + if column.Name == columnName { + return i + } + } + + return -1 +} + +func (*Operator) findUniqueConstraint(uniqueConstraints []*UniqueConstraint, constraint Constraint) int { + if constraint.Type() != ConstraintTypeUnique { + return -1 + } + + for i, uniqueConstraint := range uniqueConstraints { + if uniqueConstraint.Equals(constraint) { + return i + } + } + + return -1 +} diff --git a/pkg/sqlschema/operator_test.go b/pkg/sqlschema/operator_test.go new file mode 100644 index 000000000000..6ea932ff98cf --- /dev/null +++ b/pkg/sqlschema/operator_test.go @@ -0,0 +1,702 @@ +package sqlschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/uptrace/bun/schema" +) + +// See table_test.go for more test cases on creating tables. 
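+// Illustrative sketch only, not part of the tested API surface: a caller might wire the
+// schema provider and operator together roughly as follows, where schema, db and column
+// are hypothetical placeholders:
+//
+//	table, uniqueConstraints, err := schema.GetTable(ctx, "users")
+//	if err != nil {
+//		return err
+//	}
+//	for _, sql := range schema.Operator().AddColumn(table, uniqueConstraints, column, nil) {
+//		if _, err := db.ExecContext(ctx, string(sql)); err != nil {
+//			return err
+//		}
+//	}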
+func TestOperatorCreateTable(t *testing.T) { + testCases := []struct { + name string + table *Table + expectedSQLs [][]byte + }{ + { + name: "PrimaryKey_ForeignKey_NotNullable", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + PrimaryKeyConstraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + }, + }, + expectedSQLs: [][]byte{ + []byte(`CREATE TABLE IF NOT EXISTS "users" ("id" INTEGER NOT NULL, "name" TEXT NOT NULL, "org_id" INTEGER NOT NULL, CONSTRAINT "pk_users" PRIMARY KEY ("id"), CONSTRAINT "fk_users_org_id" FOREIGN KEY ("org_id") REFERENCES "orgs" ("id"))`), + }, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + fmter := NewFormatter(schema.NewNopFormatter().Dialect()) + operator := NewOperator(fmter, OperatorSupport{}) + + actuals := operator.CreateTable(testCase.table) + assert.Equal(t, testCase.expectedSQLs, actuals) + }) + } +} + +func TestOperatorAddColumn(t *testing.T) { + testCases := []struct { + name string + table *Table + column *Column + val any + uniqueConstraints []*UniqueConstraint + support OperatorSupport + expectedSQLs [][]byte + expectedTable *Table + }{ + { + name: "NullableNoDefault_DoesNotExist", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + column: &Column{Name: "name", DataType: DataTypeText, Nullable: true, Default: ""}, + val: nil, + support: OperatorSupport{ + ColumnIfNotExistsExists: true, + AlterColumnSetNotNull: true, + }, + expectedSQLs: [][]byte{ + []byte(`ALTER TABLE "users" ADD COLUMN IF NOT EXISTS "name" TEXT`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: true, Default: ""}, + }, + }, + }, + { + name: "MismatchingDataType_DoesExist", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: true, Default: ""}, + }, + }, + column: &Column{Name: "name", DataType: DataTypeBigInt, Nullable: true, Default: ""}, + val: nil, + support: OperatorSupport{ + ColumnIfNotExistsExists: true, + AlterColumnSetNotNull: true, + }, + expectedSQLs: [][]byte{}, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: true, Default: ""}, + }, + }, + }, + { + name: "NotNullableNoDefaultNoVal_DoesNotExist", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + column: &Column{Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + val: nil, + support: OperatorSupport{ + ColumnIfNotExistsExists: true, + AlterColumnSetNotNull: true, + }, + expectedSQLs: [][]byte{ + []byte(`ALTER TABLE "users" ADD COLUMN IF NOT EXISTS "name" TEXT`), + []byte(`UPDATE "users" SET "name" = ''`), + []byte(`ALTER TABLE "users" ALTER COLUMN "name" SET NOT NULL`), + }, + 
expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "NotNullableNoDefault_DoesNotExist", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + column: &Column{Name: "num", DataType: DataTypeInteger, Nullable: false, Default: ""}, + val: int64(100), + support: OperatorSupport{ + ColumnIfNotExistsExists: true, + AlterColumnSetNotNull: true, + }, + expectedSQLs: [][]byte{ + []byte(`ALTER TABLE "users" ADD COLUMN IF NOT EXISTS "num" INTEGER`), + []byte(`UPDATE "users" SET "num" = 100`), + []byte(`ALTER TABLE "users" ALTER COLUMN "num" SET NOT NULL`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "num", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "NotNullableNoDefault_DoesNotExist_AlterColumnSetNotNullFalse", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + }, + }, + column: &Column{Name: "num", DataType: DataTypeInteger, Nullable: false, Default: ""}, + val: int64(100), + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"name"}}, + }, + support: OperatorSupport{ + ColumnIfNotExistsExists: true, + AlterColumnSetNotNull: false, + }, + expectedSQLs: [][]byte{ + []byte(`ALTER TABLE "users" ADD COLUMN IF NOT EXISTS "num" INTEGER`), + []byte(`UPDATE "users" SET "num" = 100`), + []byte(`CREATE TABLE IF NOT EXISTS "users__temp" ("id" INTEGER NOT NULL, "name" TEXT NOT NULL, "num" INTEGER NOT NULL)`), + []byte(`INSERT INTO "users__temp" ("id", "name", "num") SELECT "id", "name", "num" FROM "users"`), + []byte(`DROP TABLE IF EXISTS "users"`), + []byte(`ALTER TABLE "users__temp" RENAME TO "users"`), + []byte(`CREATE UNIQUE INDEX IF NOT EXISTS "uq_users_name" ON "users" ("name")`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + {Name: "num", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "MismatchingDataType_DoesExist_AlterColumnSetNotNullFalse", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: true, Default: ""}, + }, + }, + column: &Column{Name: "name", DataType: DataTypeBigInt, Nullable: false, Default: ""}, + val: nil, + support: OperatorSupport{ + ColumnIfNotExistsExists: true, + AlterColumnSetNotNull: false, + }, + expectedSQLs: [][]byte{}, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: true, Default: ""}, + }, + }, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + fmter := NewFormatter(schema.NewNopFormatter().Dialect()) + operator := NewOperator(fmter, testCase.support) + + actuals := operator.AddColumn(testCase.table, testCase.uniqueConstraints, testCase.column, testCase.val) + assert.Equal(t, 
testCase.expectedSQLs, actuals) + assert.Equal(t, testCase.expectedTable, testCase.table) + }) + } +} + +func TestOperatorDropConstraint(t *testing.T) { + testCases := []struct { + name string + table *Table + constraint Constraint + uniqueConstraints []*UniqueConstraint + support OperatorSupport + expectedSQLs [][]byte + expectedTable *Table + }{ + { + name: "PrimaryKeyConstraint_DoesExist_DropConstraintTrue", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + PrimaryKeyConstraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + }, + constraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + support: OperatorSupport{ + DropConstraint: true, + }, + expectedSQLs: [][]byte{ + []byte(`ALTER TABLE "users" DROP CONSTRAINT IF EXISTS "pk_users"`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "PrimaryKeyConstraint_DoesNotExist_DropConstraintTrue", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + constraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + support: OperatorSupport{ + DropConstraint: true, + }, + expectedSQLs: [][]byte{}, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "PrimaryKeyConstraintDifferentName_DoesExist_DropConstraintTrue", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + PrimaryKeyConstraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + name: "pk_users_different_name", + }, + }, + constraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + support: OperatorSupport{ + DropConstraint: true, + }, + expectedSQLs: [][]byte{ + []byte(`ALTER TABLE "users" DROP CONSTRAINT IF EXISTS "pk_users_different_name"`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "PrimaryKeyConstraint_DoesExist_DropConstraintFalse", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + PrimaryKeyConstraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + }, + constraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + support: OperatorSupport{ + DropConstraint: false, + }, + expectedSQLs: [][]byte{ + []byte(`CREATE TABLE IF NOT EXISTS "users__temp" ("id" INTEGER NOT NULL)`), + []byte(`INSERT INTO "users__temp" ("id") SELECT "id" FROM "users"`), + []byte(`DROP TABLE IF EXISTS "users"`), + []byte(`ALTER TABLE "users__temp" RENAME TO "users"`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "PrimaryKeyConstraint_DoesNotExist_DropConstraintFalse", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + constraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + support: OperatorSupport{ + DropConstraint: false, + }, + expectedSQLs: [][]byte{}, + expectedTable: &Table{ + Name: 
"users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "UniqueConstraint_DoesExist_DropConstraintTrue", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + }, + }, + constraint: &UniqueConstraint{ + ColumnNames: []ColumnName{"name"}, + }, + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"name"}}, + }, + support: OperatorSupport{ + DropConstraint: true, + }, + expectedSQLs: [][]byte{ + []byte(`ALTER TABLE "users" DROP CONSTRAINT IF EXISTS "uq_users_name"`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "UniqueConstraint_DoesNotExist_DropConstraintTrue", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + }, + }, + constraint: &UniqueConstraint{ + ColumnNames: []ColumnName{"name"}, + }, + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"id"}}, + }, + support: OperatorSupport{ + DropConstraint: true, + }, + expectedSQLs: [][]byte{}, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "UniqueConstraint_DoesExist_DropConstraintFalse", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + }, + }, + constraint: &UniqueConstraint{ + ColumnNames: []ColumnName{"name"}, + }, + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"name"}}, + }, + support: OperatorSupport{ + DropConstraint: false, + }, + expectedSQLs: [][]byte{ + []byte(`CREATE TABLE IF NOT EXISTS "users__temp" ("id" INTEGER NOT NULL, "name" TEXT NOT NULL)`), + []byte(`INSERT INTO "users__temp" ("id", "name") SELECT "id", "name" FROM "users"`), + []byte(`DROP TABLE IF EXISTS "users"`), + []byte(`ALTER TABLE "users__temp" RENAME TO "users"`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "UniqueConstraint_DoesNotExist_DropConstraintFalse", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + }, + }, + constraint: &UniqueConstraint{ + ColumnNames: []ColumnName{"name"}, + }, + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"id"}}, + }, + support: OperatorSupport{ + DropConstraint: false, + }, + expectedSQLs: [][]byte{}, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "name", DataType: DataTypeText, Nullable: false, Default: ""}, + }, + }, + }, + { + name: "ForeignKeyConstraint_DoesExist_DropConstraintTrue", + table: &Table{ + Name: 
"users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + }, + }, + constraint: &ForeignKeyConstraint{ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"id"}}, + }, + support: OperatorSupport{ + DropConstraint: true, + }, + expectedSQLs: [][]byte{ + []byte(`ALTER TABLE "users" DROP CONSTRAINT IF EXISTS "fk_users_org_id"`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{}, + }, + }, + { + name: "ForeignKeyConstraintDifferentName_DoesExist_DropConstraintTrue", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id", name: "my_fk"}, + }, + }, + constraint: &ForeignKeyConstraint{ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"id"}}, + }, + support: OperatorSupport{ + DropConstraint: true, + }, + expectedSQLs: [][]byte{ + []byte(`ALTER TABLE "users" DROP CONSTRAINT IF EXISTS "my_fk"`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{}, + }, + }, + { + name: "ForeignKeyConstraint_DoesNotExist_DropConstraintTrue", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + }, + }, + // Note that the name of the referencing column is different from the one in the table. 
+ constraint: &ForeignKeyConstraint{ReferencingColumnName: "orgid", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"id"}}, + }, + support: OperatorSupport{ + DropConstraint: true, + }, + expectedSQLs: [][]byte{}, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + }, + }, + }, + { + name: "ForeignKeyConstraint_DoesExist_DropConstraintFalse", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + }, + }, + constraint: &ForeignKeyConstraint{ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"id"}}, + }, + support: OperatorSupport{ + DropConstraint: false, + }, + expectedSQLs: [][]byte{ + []byte(`CREATE TABLE IF NOT EXISTS "users__temp" ("id" INTEGER NOT NULL, "org_id" INTEGER NOT NULL)`), + []byte(`INSERT INTO "users__temp" ("id", "org_id") SELECT "id", "org_id" FROM "users"`), + []byte(`DROP TABLE IF EXISTS "users"`), + []byte(`ALTER TABLE "users__temp" RENAME TO "users"`), + // Note that a unique index is created because a unique constraint already existed for the table. + []byte(`CREATE UNIQUE INDEX IF NOT EXISTS "uq_users_id" ON "users" ("id")`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{}, + }, + }, + { + name: "ForeignKeyConstraintDifferentName_DoesExist_DropConstraintFalse", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id", name: "my_fk"}, + }, + }, + constraint: &ForeignKeyConstraint{ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"id"}}, + }, + support: OperatorSupport{ + DropConstraint: false, + }, + expectedSQLs: [][]byte{ + []byte(`CREATE TABLE IF NOT EXISTS "users__temp" ("id" INTEGER NOT NULL, "org_id" INTEGER NOT NULL)`), + []byte(`INSERT INTO "users__temp" ("id", "org_id") SELECT "id", "org_id" FROM "users"`), + []byte(`DROP TABLE IF EXISTS "users"`), + []byte(`ALTER TABLE "users__temp" RENAME TO "users"`), + // Note that a unique index is created because a unique constraint already existed for the table. 
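+ // RecreateTable re-applies each surviving unique constraint as a unique index on the rebuilt table.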
+ []byte(`CREATE UNIQUE INDEX IF NOT EXISTS "uq_users_id" ON "users" ("id")`), + }, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{}, + }, + }, + { + name: "ForeignKeyConstraint_DoesNotExist_DropConstraintFalse", + table: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + }, + }, + // Note that the name of the referencing column is different from the one in the table. + constraint: &ForeignKeyConstraint{ReferencingColumnName: "orgid", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + uniqueConstraints: []*UniqueConstraint{ + {ColumnNames: []ColumnName{"id"}}, + }, + support: OperatorSupport{ + DropConstraint: false, + }, + expectedSQLs: [][]byte{}, + expectedTable: &Table{ + Name: "users", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "org_id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "orgs", ReferencedColumnName: "id"}, + }, + }, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + fmter := NewFormatter(schema.NewNopFormatter().Dialect()) + operator := NewOperator(fmter, testCase.support) + + actuals := operator.DropConstraint(testCase.table, testCase.uniqueConstraints, testCase.constraint) + assert.Equal(t, testCase.expectedSQLs, actuals) + assert.Equal(t, testCase.expectedTable, testCase.table) + }) + } +} diff --git a/pkg/sqlschema/sqlitesqlschema/ddl.go b/pkg/sqlschema/sqlitesqlschema/ddl.go new file mode 100644 index 000000000000..5f1afd8e1b24 --- /dev/null +++ b/pkg/sqlschema/sqlitesqlschema/ddl.go @@ -0,0 +1,326 @@ +package sqlitesqlschema + +import ( + "errors" + "fmt" + "regexp" + "strings" + + "github.com/SigNoz/signoz/pkg/sqlschema" +) + +// Inspired by https://github.com/go-gorm/sqlite + +var ( + sqliteSeparator = "`|\"|'|\t" + sqliteIdentQuote = "`|\"|'" + uniqueRegexp = regexp.MustCompile(fmt.Sprintf(`^(?:CONSTRAINT [%v]?[\w-]+[%v]? 
)?UNIQUE (.*)$`, sqliteSeparator, sqliteSeparator)) + tableRegexp = regexp.MustCompile(fmt.Sprintf(`(?is)(CREATE TABLE [%v]?[\w\d-]+[%v]?)(?:\s*\((.*)\))?`, sqliteSeparator, sqliteSeparator)) + tableNameRegexp = regexp.MustCompile(fmt.Sprintf(`CREATE TABLE [%v]?([\w-]+)[%v]?`, sqliteSeparator, sqliteSeparator)) + checkRegexp = regexp.MustCompile(`^(?i)CHECK[\s]*\(`) + constraintRegexp = regexp.MustCompile(fmt.Sprintf(`CONSTRAINT\s+[%v]?[\w\d_]+[%v]?[\s]+`, sqliteSeparator, sqliteSeparator)) + foreignKeyRegexp = regexp.MustCompile(fmt.Sprintf(`FOREIGN KEY\s*\(\s*[%v]?(\w+)[%v]?\s*\)\s*REFERENCES\s*[%v]?(\w+)[%v]?\s*\(\s*[%v]?(\w+)[%v]?\s*\)`, sqliteSeparator, sqliteSeparator, sqliteSeparator, sqliteSeparator, sqliteSeparator, sqliteSeparator)) + referencesRegexp = regexp.MustCompile(fmt.Sprintf(`(\w+)\s*(\w+)\s*REFERENCES\s*[%v]?(\w+)[%v]?\s*\(\s*[%v]?(\w+)[%v]?\s*\)`, sqliteSeparator, sqliteSeparator, sqliteSeparator, sqliteSeparator)) + identQuoteRegexp = regexp.MustCompile(fmt.Sprintf("[%v]", sqliteIdentQuote)) + columnRegexp = regexp.MustCompile(fmt.Sprintf(`^[%v]?([\w\d]+)[%v]?\s+([\w\(\)\d]+)(.*)$`, sqliteSeparator, sqliteSeparator)) + defaultValueRegexp = regexp.MustCompile(`(?i) DEFAULT \(?(.+)?\)?( |COLLATE|GENERATED|$)`) +) + +type parseAllColumnsState int + +const ( + parseAllColumnsState_NONE parseAllColumnsState = iota + parseAllColumnsState_Beginning + parseAllColumnsState_ReadingRawName + parseAllColumnsState_ReadingQuotedName + parseAllColumnsState_EndOfName + parseAllColumnsState_State_End +) + +func parseCreateTable(str string, fmter sqlschema.SQLFormatter) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) { + sections := tableRegexp.FindStringSubmatch(str) + if len(sections) == 0 { + return nil, nil, errors.New("invalid DDL") + } + + tableNameSections := tableNameRegexp.FindStringSubmatch(str) + if len(tableNameSections) == 0 { + return nil, nil, errors.New("invalid DDL") + } + + tableName := sqlschema.TableName(tableNameSections[1]) + fields := make([]string, 0) + columns := make([]*sqlschema.Column, 0) + var primaryKeyConstraint *sqlschema.PrimaryKeyConstraint + foreignKeyConstraints := make([]*sqlschema.ForeignKeyConstraint, 0) + uniqueConstraints := make([]*sqlschema.UniqueConstraint, 0) + + var ( + ddlBody = sections[2] + ddlBodyRunes = []rune(ddlBody) + bracketLevel int + quote rune + buf string + ) + ddlBodyRunesLen := len(ddlBodyRunes) + + for idx := 0; idx < ddlBodyRunesLen; idx++ { + var ( + next rune = 0 + c = ddlBodyRunes[idx] + ) + if idx+1 < ddlBodyRunesLen { + next = ddlBodyRunes[idx+1] + } + + if sc := string(c); identQuoteRegexp.MatchString(sc) { + if c == next { + buf += sc // Skip escaped quote + idx++ + } else if quote > 0 { + quote = 0 + } else { + quote = c + } + } else if quote == 0 { + if c == '(' { + bracketLevel++ + } else if c == ')' { + bracketLevel-- + } else if bracketLevel == 0 { + if c == ',' { + fields = append(fields, strings.TrimSpace(buf)) + buf = "" + continue + } + } + } + + if bracketLevel < 0 { + return nil, nil, errors.New("invalid DDL, unbalanced brackets") + } + + buf += string(c) + } + + if bracketLevel != 0 { + return nil, nil, errors.New("invalid DDL, unbalanced brackets") + } + + if buf != "" { + fields = append(fields, strings.TrimSpace(buf)) + } + + for _, f := range fields { + fUpper := strings.ToUpper(f) + if checkRegexp.MatchString(f) { + continue + } + + if strings.Contains(fUpper, "FOREIGN KEY") { + matches := foreignKeyRegexp.FindStringSubmatch(f) + if len(matches) >= 4 { + foreignKeyConstraints = 
append(foreignKeyConstraints, &sqlschema.ForeignKeyConstraint{ + ReferencingColumnName: sqlschema.ColumnName(matches[1]), + ReferencedTableName: sqlschema.TableName(matches[2]), + ReferencedColumnName: sqlschema.ColumnName(matches[3]), + }) + } + + // This can never be a column name, so we can skip it + continue + } + + if strings.Contains(fUpper, "REFERENCES") && !strings.Contains(fUpper, "FOREIGN KEY") { + matches := referencesRegexp.FindStringSubmatch(f) + if len(matches) >= 4 { + foreignKeyConstraints = append(foreignKeyConstraints, &sqlschema.ForeignKeyConstraint{ + ReferencingColumnName: sqlschema.ColumnName(matches[1]), + ReferencedTableName: sqlschema.TableName(matches[3]), + ReferencedColumnName: sqlschema.ColumnName(matches[4]), + }) + } + } + + // Match unique constraints + if matches := uniqueRegexp.FindStringSubmatch(f); matches != nil { + if len(matches) > 0 { + cols, err := parseAllColumns(matches[1]) + if err == nil { + uniqueConstraints = append(uniqueConstraints, &sqlschema.UniqueConstraint{ + ColumnNames: cols, + }) + } + } + // This can never be a column name, so we can skip it + continue + } + + if matches := constraintRegexp.FindStringSubmatch(f); len(matches) > 0 { + if strings.Contains(fUpper, "PRIMARY KEY") { + cols, err := parseAllColumns(f) + if err == nil { + primaryKeyConstraint = &sqlschema.PrimaryKeyConstraint{ + ColumnNames: cols, + } + } + } + + // This can never be a column name, so we can skip it + continue + } + + if strings.HasPrefix(fUpper, "PRIMARY KEY") { + cols, err := parseAllColumns(f) + if err == nil { + primaryKeyConstraint = &sqlschema.PrimaryKeyConstraint{ + ColumnNames: cols, + } + } + } else if matches := columnRegexp.FindStringSubmatch(f); len(matches) > 0 { + column := &sqlschema.Column{ + Name: sqlschema.ColumnName(matches[1]), + DataType: fmter.DataTypeOf(matches[2]), + Nullable: true, + Default: "", + } + + matchUpper := strings.ToUpper(matches[3]) + if strings.Contains(matchUpper, " NOT NULL") { + column.Nullable = false + } else if strings.Contains(matchUpper, " NULL") { + column.Nullable = true + } + + if strings.Contains(matchUpper, " UNIQUE") && !strings.Contains(matchUpper, " PRIMARY") { + uniqueConstraints = append(uniqueConstraints, &sqlschema.UniqueConstraint{ + ColumnNames: []sqlschema.ColumnName{column.Name}, + }) + } + + if strings.Contains(matchUpper, " PRIMARY") { + column.Nullable = false + primaryKeyConstraint = &sqlschema.PrimaryKeyConstraint{ + ColumnNames: []sqlschema.ColumnName{column.Name}, + } + } + + if defaultMatches := defaultValueRegexp.FindStringSubmatch(matches[3]); len(defaultMatches) > 1 { + if strings.ToLower(defaultMatches[1]) != "null" { + column.Default = strings.Trim(defaultMatches[1], `"`) + } + } + + columns = append(columns, column) + } + } + + return &sqlschema.Table{ + Name: tableName, + Columns: columns, + PrimaryKeyConstraint: primaryKeyConstraint, + ForeignKeyConstraints: foreignKeyConstraints, + }, uniqueConstraints, nil +} + +func parseAllColumns(in string) ([]sqlschema.ColumnName, error) { + s := []rune(in) + columns := make([]sqlschema.ColumnName, 0) + state := parseAllColumnsState_NONE + quote := rune(0) + name := make([]rune, 0) + for i := 0; i < len(s); i++ { + switch state { + case parseAllColumnsState_NONE: + if s[i] == '(' { + state = parseAllColumnsState_Beginning + } + case parseAllColumnsState_Beginning: + if isSpace(s[i]) { + continue + } + if isQuote(s[i]) { + state = parseAllColumnsState_ReadingQuotedName + quote = s[i] + continue + } + if s[i] == '[' { + state = 
parseAllColumnsState_ReadingQuotedName + quote = ']' + continue + } else if s[i] == ')' { + return columns, fmt.Errorf("unexpected token: %s", string(s[i])) + } + state = parseAllColumnsState_ReadingRawName + name = append(name, s[i]) + case parseAllColumnsState_ReadingRawName: + if isSeparator(s[i]) { + state = parseAllColumnsState_Beginning + columns = append(columns, sqlschema.ColumnName(name)) + name = make([]rune, 0) + continue + } + if s[i] == ')' { + state = parseAllColumnsState_State_End + columns = append(columns, sqlschema.ColumnName(name)) + } + if isQuote(s[i]) { + return nil, fmt.Errorf("unexpected token: %s", string(s[i])) + } + if isSpace(s[i]) { + state = parseAllColumnsState_EndOfName + columns = append(columns, sqlschema.ColumnName(name)) + name = make([]rune, 0) + continue + } + name = append(name, s[i]) + case parseAllColumnsState_ReadingQuotedName: + if s[i] == quote { + // check if quote character is escaped + if i+1 < len(s) && s[i+1] == quote { + name = append(name, quote) + i++ + continue + } + state = parseAllColumnsState_EndOfName + columns = append(columns, sqlschema.ColumnName(name)) + name = make([]rune, 0) + continue + } + name = append(name, s[i]) + case parseAllColumnsState_EndOfName: + if isSpace(s[i]) { + continue + } + if isSeparator(s[i]) { + state = parseAllColumnsState_Beginning + continue + } + if s[i] == ')' { + state = parseAllColumnsState_State_End + continue + } + return nil, fmt.Errorf("unexpected token: %s", string(s[i])) + case parseAllColumnsState_State_End: + // break is automatic in Go switch statements + } + } + + if state != parseAllColumnsState_State_End { + return nil, errors.New("unexpected end") + } + + return columns, nil +} + +func isSpace(r rune) bool { + return r == ' ' || r == '\t' +} + +func isQuote(r rune) bool { + return r == '`' || r == '"' || r == '\'' +} + +func isSeparator(r rune) bool { + return r == ',' +} diff --git a/pkg/sqlschema/sqlitesqlschema/ddl_test.go b/pkg/sqlschema/sqlitesqlschema/ddl_test.go new file mode 100644 index 000000000000..a8f8c8c553e8 --- /dev/null +++ b/pkg/sqlschema/sqlitesqlschema/ddl_test.go @@ -0,0 +1,191 @@ +package sqlitesqlschema + +import ( + "testing" + + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/stretchr/testify/assert" + "github.com/uptrace/bun/dialect/sqlitedialect" +) + +func TestParseCreateTable(t *testing.T) { + testCases := []struct { + name string + sql string + table *sqlschema.Table + uniqueConstraints []*sqlschema.UniqueConstraint + err error + }{ + { + name: "NewlineAndTabBeforeComma_NoQuotesInColumnNames_InlineConstraints_References", + sql: `CREATE TABLE test ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + email TEXT NOT NULL, + data TEXT + , created_at TIMESTAMP, updated_at TIMESTAMP, org_id TEXT REFERENCES organizations(id) ON DELETE CASCADE)`, + table: &sqlschema.Table{ + Name: "test", + Columns: []*sqlschema.Column{ + {Name: "id", DataType: sqlschema.DataTypeText, Nullable: false}, + {Name: "name", DataType: sqlschema.DataTypeText, Nullable: false}, + {Name: "email", DataType: sqlschema.DataTypeText, Nullable: false}, + {Name: "data", DataType: sqlschema.DataTypeText, Nullable: true}, + {Name: "created_at", DataType: sqlschema.DataTypeTimestamp, Nullable: true}, + {Name: "updated_at", DataType: sqlschema.DataTypeTimestamp, Nullable: true}, + {Name: "org_id", DataType: sqlschema.DataTypeText, Nullable: true}, + }, + PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{ColumnNames: []sqlschema.ColumnName{"id"}}, + ForeignKeyConstraints: 
[]*sqlschema.ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "organizations", ReferencedColumnName: "id"}, + }, + }, + uniqueConstraints: []*sqlschema.UniqueConstraint{}, + err: nil, + }, + { + name: "SingleLine_QuotesInColumnNames_SeparateConstraints_PrimaryAndForeign", + sql: `CREATE TABLE "test" ("id" TEXT NOT NULL, "display_name" TEXT NOT NULL, "org_id" TEXT NOT NULL, CONSTRAINT "pk_users" PRIMARY KEY ("id"), CONSTRAINT "fk_users_org_id" FOREIGN KEY ("org_id") REFERENCES "organizations" ("id"))`, + table: &sqlschema.Table{ + Name: "test", + Columns: []*sqlschema.Column{ + {Name: "id", DataType: sqlschema.DataTypeText, Nullable: false}, + {Name: "display_name", DataType: sqlschema.DataTypeText, Nullable: false}, + {Name: "org_id", DataType: sqlschema.DataTypeText, Nullable: false}, + }, + PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{ColumnNames: []sqlschema.ColumnName{"id"}}, + ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "organizations", ReferencedColumnName: "id"}, + }, + }, + uniqueConstraints: []*sqlschema.UniqueConstraint{}, + err: nil, + }, + { + name: "SingleLine_QuotesInColumnNames_InlineConstraints_UniqueAndForeign", + sql: `CREATE TABLE "test" ("id" text NOT NULL, "created_at" TIMESTAMP, "org_id" text NOT NULL, PRIMARY KEY ("id"), UNIQUE ("org_id"), FOREIGN KEY ("org_id") REFERENCES "organizations" ("id"))`, + table: &sqlschema.Table{ + Name: "test", + Columns: []*sqlschema.Column{ + {Name: "id", DataType: sqlschema.DataTypeText, Nullable: false}, + {Name: "created_at", DataType: sqlschema.DataTypeTimestamp, Nullable: true}, + {Name: "org_id", DataType: sqlschema.DataTypeText, Nullable: false}, + }, + PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{ColumnNames: []sqlschema.ColumnName{"id"}}, + ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "organizations", ReferencedColumnName: "id"}, + }, + }, + uniqueConstraints: []*sqlschema.UniqueConstraint{ + {ColumnNames: []sqlschema.ColumnName{"org_id"}}, + }, + err: nil, + }, + { + name: "SingleLine_NoQuotes_InlineConstraints_2ColumnsInUnique", + sql: `CREATE TABLE "test" ("id" text NOT NULL, "signal" TEXT NOT NULL, "org_id" text NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "org_id_signal" UNIQUE ("org_id", "signal"))`, + table: &sqlschema.Table{ + Name: "test", + Columns: []*sqlschema.Column{ + {Name: "id", DataType: sqlschema.DataTypeText, Nullable: false}, + {Name: "signal", DataType: sqlschema.DataTypeText, Nullable: false}, + {Name: "org_id", DataType: sqlschema.DataTypeText, Nullable: false}, + }, + PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{ColumnNames: []sqlschema.ColumnName{"id"}}, + }, + uniqueConstraints: []*sqlschema.UniqueConstraint{ + {ColumnNames: []sqlschema.ColumnName{"org_id", "signal"}}, + }, + err: nil, + }, + { + name: "Tabbed_BacktickQuotes_Constraints_PrimaryAndUnique", + sql: "CREATE TABLE `test` (id integer primary key unique, dark_mode numeric DEFAULT true)", + table: &sqlschema.Table{ + Name: "test", + Columns: []*sqlschema.Column{ + {Name: "id", DataType: sqlschema.DataTypeInteger, Nullable: false}, + {Name: "dark_mode", DataType: sqlschema.DataTypeNumeric, Nullable: true, Default: "true"}, + }, + PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{ColumnNames: []sqlschema.ColumnName{"id"}}, + }, + uniqueConstraints: []*sqlschema.UniqueConstraint{}, + err: nil, + }, + { + name: 
"SingleLine_BacktickQuotesInteger_NoConstraints", + sql: "CREATE TABLE `test-hyphen` (`field` integer NOT NULL)", + table: &sqlschema.Table{ + Name: "test-hyphen", + Columns: []*sqlschema.Column{ + {Name: "field", DataType: sqlschema.DataTypeInteger, Nullable: false}, + }, + }, + uniqueConstraints: []*sqlschema.UniqueConstraint{}, + err: nil, + }, + { + name: "SingleLine_BacktickQuotesNumeric_NoConstraints", + sql: "CREATE TABLE `test-hyphen` (`field` real NOT NULL)", + table: &sqlschema.Table{ + Name: "test-hyphen", + Columns: []*sqlschema.Column{ + {Name: "field", DataType: sqlschema.DataTypeNumeric, Nullable: false}, + }, + }, + uniqueConstraints: []*sqlschema.UniqueConstraint{}, + err: nil, + }, + { + name: "SingleLine_QuotesAndDefaultInColumnNames_2Constraints_UniqueAndForeign", + sql: `CREATE TABLE "test" ("id" text NOT NULL, "created_at" TIMESTAMP, "updated_at" TIMESTAMP, "status" text NOT NULL DEFAULT 'notstarted', "org_id" text NOT NULL, PRIMARY KEY ("id"), CONSTRAINT "idx" UNIQUE ("org_id", "status", "created_at"), FOREIGN KEY ("org_id") REFERENCES "organizations" ("id"))`, + table: &sqlschema.Table{ + Name: "test", + Columns: []*sqlschema.Column{ + {Name: "id", DataType: sqlschema.DataTypeText, Nullable: false}, + {Name: "created_at", DataType: sqlschema.DataTypeTimestamp, Nullable: true}, + {Name: "updated_at", DataType: sqlschema.DataTypeTimestamp, Nullable: true}, + {Name: "status", DataType: sqlschema.DataTypeText, Nullable: false, Default: "'notstarted'"}, + {Name: "org_id", DataType: sqlschema.DataTypeText, Nullable: false}, + }, + PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{ColumnNames: []sqlschema.ColumnName{"id"}}, + ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "organizations", ReferencedColumnName: "id"}, + }, + }, + uniqueConstraints: []*sqlschema.UniqueConstraint{ + {ColumnNames: []sqlschema.ColumnName{"org_id", "status", "created_at"}}, + }, + err: nil, + }, + { + name: "SingleLine_QuotesAndDefaultInColumnNames_NoConstraints", + sql: `CREATE TABLE "real_default" ("id" INTEGER NOT NULL, "r" REAL DEFAULT 1.0)`, + table: &sqlschema.Table{ + Name: "real_default", + Columns: []*sqlschema.Column{ + {Name: "id", DataType: sqlschema.DataTypeInteger, Nullable: false}, + {Name: "r", DataType: sqlschema.DataTypeNumeric, Nullable: true, Default: "1.0"}, + }, + }, + uniqueConstraints: []*sqlschema.UniqueConstraint{}, + err: nil, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + table, uniqueConstraints, err := parseCreateTable(testCase.sql, Formatter{sqlschema.NewFormatter(sqlitedialect.New())}) + if testCase.err != nil { + assert.Equal(t, testCase.err, err) + return + } + + assert.Equal(t, testCase.table.Name, table.Name) + assert.ElementsMatch(t, testCase.table.Columns, table.Columns) + assert.Equal(t, testCase.table.PrimaryKeyConstraint, table.PrimaryKeyConstraint) + assert.ElementsMatch(t, testCase.table.ForeignKeyConstraints, table.ForeignKeyConstraints) + assert.ElementsMatch(t, testCase.uniqueConstraints, uniqueConstraints) + }) + } +} diff --git a/pkg/sqlschema/sqlitesqlschema/formatter.go b/pkg/sqlschema/sqlitesqlschema/formatter.go new file mode 100644 index 000000000000..8704a13d801d --- /dev/null +++ b/pkg/sqlschema/sqlitesqlschema/formatter.go @@ -0,0 +1,28 @@ +package sqlitesqlschema + +import ( + "strings" + + "github.com/SigNoz/signoz/pkg/sqlschema" +) + +type Formatter struct { + sqlschema.Formatter +} + +func (formatter Formatter) 
SQLDataTypeOf(dataType sqlschema.DataType) string { + if dataType == sqlschema.DataTypeNumeric { + return "REAL" + } + + return strings.ToUpper(dataType.String()) +} + +func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType { + switch strings.ToUpper(dataType) { + case "REAL": + return sqlschema.DataTypeNumeric + } + + return formatter.Formatter.DataTypeOf(dataType) +} diff --git a/pkg/sqlschema/sqlitesqlschema/provider.go b/pkg/sqlschema/sqlitesqlschema/provider.go new file mode 100644 index 000000000000..354df7a0356a --- /dev/null +++ b/pkg/sqlschema/sqlitesqlschema/provider.go @@ -0,0 +1,144 @@ +package sqlitesqlschema + +import ( + "context" + "strconv" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/factory" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/SigNoz/signoz/pkg/sqlstore" + "github.com/uptrace/bun" +) + +type provider struct { + settings factory.ScopedProviderSettings + fmter sqlschema.SQLFormatter + sqlstore sqlstore.SQLStore + operator sqlschema.SQLOperator +} + +func NewFactory(sqlstore sqlstore.SQLStore) factory.ProviderFactory[sqlschema.SQLSchema, sqlschema.Config] { + return factory.NewProviderFactory(factory.MustNewName("sqlite"), func(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config) (sqlschema.SQLSchema, error) { + return New(ctx, providerSettings, config, sqlstore) + }) +} + +func New(ctx context.Context, providerSettings factory.ProviderSettings, config sqlschema.Config, sqlstore sqlstore.SQLStore) (sqlschema.SQLSchema, error) { + settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/sqlschema/sqlitesqlschema") + fmter := Formatter{sqlschema.NewFormatter(sqlstore.BunDB().Dialect())} + + return &provider{ + fmter: fmter, + settings: settings, + sqlstore: sqlstore, + operator: sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{ + DropConstraint: false, + ColumnIfNotExistsExists: false, + AlterColumnSetNotNull: false, + }), + }, nil +} + +func (provider *provider) Formatter() sqlschema.SQLFormatter { + return provider.fmter +} + +func (provider *provider) Operator() sqlschema.SQLOperator { + return provider.operator +} + +func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) { + var sql string + + if err := provider. + sqlstore. + BunDB(). + NewRaw("SELECT sql FROM sqlite_master WHERE type IN (?) AND tbl_name = ? AND sql IS NOT NULL", bun.In([]string{"table"}), string(tableName)). + Scan(ctx, &sql); err != nil { + return nil, nil, err + } + + table, uniqueConstraints, err := parseCreateTable(sql, provider.fmter) + if err != nil { + return nil, nil, err + } + + return table, uniqueConstraints, nil +} + +func (provider *provider) GetIndices(ctx context.Context, tableName sqlschema.TableName) ([]sqlschema.Index, error) { + rows, err := provider. + sqlstore. + BunDB(). 
+ QueryContext(ctx, "SELECT * FROM PRAGMA_index_list(?)", string(tableName)) + if err != nil { + return nil, err + } + + defer func() { + if err := rows.Close(); err != nil { + provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err) + } + }() + + indices := []sqlschema.Index{} + for rows.Next() { + var ( + seq int + name string + unique bool + origin string + partial bool + columns []sqlschema.ColumnName + ) + if err := rows.Scan(&seq, &name, &unique, &origin, &partial); err != nil { + return nil, err + } + + // skip the index that was created by a UNIQUE constraint + if origin == "u" { + continue + } + + // skip the index that was created by primary key constraint + if origin == "pk" { + continue + } + + if err := provider. + sqlstore. + BunDB(). + NewRaw("SELECT name FROM PRAGMA_index_info(?)", string(name)). + Scan(ctx, &columns); err != nil { + return nil, err + } + + if unique { + indices = append(indices, (&sqlschema.UniqueIndex{ + TableName: tableName, + ColumnNames: columns, + }).Named(name).(*sqlschema.UniqueIndex)) + } + } + + return indices, nil +} + +func (provider *provider) ToggleFKEnforcement(ctx context.Context, db bun.IDB, on bool) error { + _, err := db.ExecContext(ctx, "PRAGMA foreign_keys = ?", on) + if err != nil { + return err + } + + var val bool + if err := db.NewRaw("PRAGMA foreign_keys").Scan(ctx, &val); err != nil { + return err + } + + if on == val { + return nil + } + + return errors.NewInternalf(errors.CodeInternal, "foreign_keys(actual: %s, expected: %s), maybe a transaction is in progress?", strconv.FormatBool(val), strconv.FormatBool(on)) +} diff --git a/pkg/sqlschema/sqlschema.go b/pkg/sqlschema/sqlschema.go new file mode 100644 index 000000000000..1fdeab35c853 --- /dev/null +++ b/pkg/sqlschema/sqlschema.go @@ -0,0 +1,69 @@ +package sqlschema + +import ( + "context" + + "github.com/uptrace/bun" +) + +type SQLSchema interface { + // Returns the formatter for the schema. + Formatter() SQLFormatter + + // Returns the operator for the schema. + Operator() SQLOperator + + // Inspects the schema and returns the table with the given name. + GetTable(context.Context, TableName) (*Table, []*UniqueConstraint, error) + + // Inspects the schema and returns the indices for the given table. + GetIndices(context.Context, TableName) ([]Index, error) + + // Toggles foreign key enforcement for the schema for the current session. + ToggleFKEnforcement(context.Context, bun.IDB, bool) error +} + +// SQLOperator performs operations on a table. +type SQLOperator interface { + // Returns a list of SQL statements to create a table. + CreateTable(*Table) [][]byte + + // Returns a list of SQL statements to drop a table. + DropTable(*Table) [][]byte + + // Returns a list of SQL statements to rename a table. + RenameTable(*Table, TableName) [][]byte + + // Returns a list of SQL statements to recreate a table. + RecreateTable(*Table, []*UniqueConstraint) [][]byte + + // Returns a list of SQL statements to create an index. + CreateIndex(Index) [][]byte + + // Returns a list of SQL statements to drop an index. + DropIndex(Index) [][]byte + + // Returns a list of SQL statements to add a column to a table. + // If the column is not nullable, the column is added with the input value, then the column is made non-nullable. + AddColumn(*Table, []*UniqueConstraint, *Column, any) [][]byte + + // Returns a list of SQL statements to drop a column from a table. + DropColumn(*Table, *Column) [][]byte + + // Returns a list of SQL statements to drop a constraint from a table. 
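+ // Dialects that cannot drop a constraint in place fall back to recreating the table without it.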
+ DropConstraint(*Table, []*UniqueConstraint, Constraint) [][]byte +} + +type SQLFormatter interface { + // Returns the SQL data type for the given data type. + SQLDataTypeOf(DataType) string + + // Returns the data type for the given SQL data type. + DataTypeOf(string) DataType + + // Appends an identifier to the given byte slice. + AppendIdent([]byte, string) []byte + + // Appends a value to the given byte slice. + AppendValue([]byte, any) []byte +} diff --git a/pkg/sqlschema/sqlschematest/provider.go b/pkg/sqlschema/sqlschematest/provider.go new file mode 100644 index 000000000000..f0c32578148f --- /dev/null +++ b/pkg/sqlschema/sqlschematest/provider.go @@ -0,0 +1,58 @@ +package sqlschematest + +import ( + "context" + + "github.com/SigNoz/signoz/pkg/errors" + "github.com/SigNoz/signoz/pkg/sqlschema" + "github.com/uptrace/bun" + "github.com/uptrace/bun/schema" +) + +var _ sqlschema.SQLSchema = (*Provider)(nil) + +type Provider struct { + Fmter sqlschema.Formatter + Tables map[string]*sqlschema.Table + UniqueConstraints map[string][]*sqlschema.UniqueConstraint + Indices map[string]sqlschema.Index +} + +func New(tables map[string]*sqlschema.Table, uniqueConstraints map[string][]*sqlschema.UniqueConstraint, indices map[string]sqlschema.Index) *Provider { + return &Provider{ + Fmter: sqlschema.NewFormatter(schema.NewNopFormatter().Dialect()), + Tables: tables, + UniqueConstraints: uniqueConstraints, + Indices: indices, + } +} + +func (provider *Provider) Formatter() sqlschema.SQLFormatter { + return provider.Fmter +} + +func (provider *Provider) Operator() sqlschema.SQLOperator { + return sqlschema.NewOperator(provider.Fmter, sqlschema.OperatorSupport{}) +} + +func (provider *Provider) GetTable(ctx context.Context, name sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) { + table, ok := provider.Tables[string(name)] + if !ok { + return nil, nil, errors.NewNotFoundf(errors.CodeNotFound, "table %s not found", name) + } + + return table, provider.UniqueConstraints[string(name)], nil +} + +func (provider *Provider) GetIndices(ctx context.Context, name sqlschema.TableName) ([]sqlschema.Index, error) { + indices, ok := provider.Indices[string(name)] + if !ok { + return []sqlschema.Index{}, nil + } + + return []sqlschema.Index{indices}, nil +} + +func (provider *Provider) ToggleFKEnforcement(_ context.Context, _ bun.IDB, _ bool) error { + return nil +} diff --git a/pkg/sqlschema/table.go b/pkg/sqlschema/table.go new file mode 100644 index 000000000000..e9d0b3daf16e --- /dev/null +++ b/pkg/sqlschema/table.go @@ -0,0 +1,156 @@ +package sqlschema + +type TableName string + +type Table struct { + // The name of the table. + Name TableName + + // The columns that the table contains. + Columns []*Column + + // The primary key constraint that the table contains. + PrimaryKeyConstraint *PrimaryKeyConstraint + + // The foreign key constraints that the table contains. 
+ ForeignKeyConstraints []*ForeignKeyConstraint +} + +func (table *Table) Clone() *Table { + copyOfColumns := make([]*Column, len(table.Columns)) + copy(copyOfColumns, table.Columns) + + copyOfForeignKeyConstraints := make([]*ForeignKeyConstraint, len(table.ForeignKeyConstraints)) + copy(copyOfForeignKeyConstraints, table.ForeignKeyConstraints) + + return &Table{ + Name: table.Name, + Columns: copyOfColumns, + PrimaryKeyConstraint: table.PrimaryKeyConstraint, + ForeignKeyConstraints: copyOfForeignKeyConstraints, + } +} + +func (table *Table) DropConstraint(constraint Constraint) (Constraint, bool) { + var droppedConstraint Constraint + found := false + + if table.PrimaryKeyConstraint != nil && constraint.Equals(table.PrimaryKeyConstraint) { + droppedConstraint = table.PrimaryKeyConstraint + table.PrimaryKeyConstraint = nil + found = true + } + + if constraint.Type() == ConstraintTypeForeignKey { + for i, fkConstraint := range table.ForeignKeyConstraints { + if constraint.Equals(fkConstraint) { + droppedConstraint = fkConstraint + table.ForeignKeyConstraints = append(table.ForeignKeyConstraints[:i], table.ForeignKeyConstraints[i+1:]...) + found = true + break + } + } + } + + return droppedConstraint, found +} + +func (table *Table) ToDropSQL(fmter SQLFormatter) []byte { + sql := []byte{} + + sql = append(sql, "DROP TABLE IF EXISTS "...) + sql = fmter.AppendIdent(sql, string(table.Name)) + + return sql +} + +func (table *Table) ToRenameSQL(fmter SQLFormatter, newName TableName) []byte { + sql := []byte{} + + sql = append(sql, "ALTER TABLE "...) + sql = fmter.AppendIdent(sql, string(table.Name)) + sql = append(sql, " RENAME TO "...) + sql = fmter.AppendIdent(sql, string(newName)) + + return sql +} + +// Creates a temporary table with the same schema as the input table, +// inserts the data from the input table into the temporary table, drops the input table, +// and then renames the temporary table to the input table name. +// +// It creates constraints with the same name as the input table making it unfit for RDMS systems which will complain about duplicate constraints. +// It is only useful for SQLite. +func (table *Table) ToCreateTempInsertDropAlterSQL(fmter SQLFormatter) [][]byte { + sql := [][]byte{} + + tempTable := table.Clone() + tempTable.Name = table.Name + "__temp" + + if tempTable.PrimaryKeyConstraint != nil { + tempTable.PrimaryKeyConstraint = tempTable.PrimaryKeyConstraint.Named(table.PrimaryKeyConstraint.Name(table.Name)).(*PrimaryKeyConstraint) + } + + for i, constraint := range tempTable.ForeignKeyConstraints { + tempTable.ForeignKeyConstraints[i] = constraint.Named(constraint.Name(table.Name)).(*ForeignKeyConstraint) + } + + sql = append(sql, tempTable.ToCreateSQL(fmter)) + + columns := []byte{} + for i, column := range table.Columns { + if i > 0 { + columns = append(columns, ", "...) + } + + columns = fmter.AppendIdent(columns, string(column.Name)) + } + + insertIntoSelectSQL := []byte{} + insertIntoSelectSQL = append(insertIntoSelectSQL, "INSERT INTO "...) + insertIntoSelectSQL = fmter.AppendIdent(insertIntoSelectSQL, string(tempTable.Name)) + insertIntoSelectSQL = append(insertIntoSelectSQL, " ("...) + + insertIntoSelectSQL = append(insertIntoSelectSQL, columns...) + insertIntoSelectSQL = append(insertIntoSelectSQL, ") SELECT "...) + insertIntoSelectSQL = append(insertIntoSelectSQL, columns...) + insertIntoSelectSQL = append(insertIntoSelectSQL, " FROM "...) 
+ insertIntoSelectSQL = fmter.AppendIdent(insertIntoSelectSQL, string(table.Name)) + + sql = append(sql, insertIntoSelectSQL) + sql = append(sql, table.ToDropSQL(fmter)) + sql = append(sql, tempTable.ToRenameSQL(fmter, table.Name)) + + return sql +} + +func (table *Table) ToCreateSQL(fmter SQLFormatter) []byte { + sql := []byte{} + + sql = append(sql, "CREATE TABLE IF NOT EXISTS "...) + + sql = fmter.AppendIdent(sql, string(table.Name)) + sql = append(sql, " ("...) + + for i, column := range table.Columns { + if i > 0 { + sql = append(sql, ", "...) + } + + sql = append(sql, column.ToDefinitionSQL(fmter)...) + } + + if table.PrimaryKeyConstraint != nil { + sql = append(sql, ", "...) + sql = append(sql, table.PrimaryKeyConstraint.ToDefinitionSQL(fmter, table.Name)...) + } + + for _, constraint := range table.ForeignKeyConstraints { + sql = append(sql, ", "...) + sql = append(sql, constraint.ToDefinitionSQL(fmter, table.Name)...) + } + + sql = append(sql, ")"...) + + return sql +} diff --git a/pkg/sqlschema/table_test.go b/pkg/sqlschema/table_test.go new file mode 100644 index 000000000000..9a1f52412a8f --- /dev/null +++ b/pkg/sqlschema/table_test.go @@ -0,0 +1,177 @@ +package sqlschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/uptrace/bun/schema" +) + +func TestTableToCreateSQL(t *testing.T) { + testCases := []struct { + name string + table *Table + sql string + }{ + { + name: "NoPrimaryKey_NoForeignKey_Nullable_BooleanDefault", + table: &Table{ + Name: "boolean_default", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "b", DataType: DataTypeBoolean, Nullable: true, Default: "false"}, + }, + }, + sql: `CREATE TABLE IF NOT EXISTS "boolean_default" ("id" INTEGER NOT NULL, "b" BOOLEAN DEFAULT false)`, + }, + { + name: "NoPrimaryKey_NoForeignKey_Nullable_TextDefault", + table: &Table{ + Name: "text_default", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "t", DataType: DataTypeText, Nullable: true, Default: "'text'"}, + }, + }, + sql: `CREATE TABLE IF NOT EXISTS "text_default" ("id" INTEGER NOT NULL, "t" TEXT DEFAULT 'text')`, + }, + { + name: "NoPrimaryKey_NoForeignKey_Nullable_IntegerDefault", + table: &Table{ + Name: "integer_default", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "i", DataType: DataTypeInteger, Nullable: true, Default: "1"}, + }, + }, + sql: `CREATE TABLE IF NOT EXISTS "integer_default" ("id" INTEGER NOT NULL, "i" INTEGER DEFAULT 1)`, + }, + { + name: "NoPrimaryKey_NoForeignKey_Nullable_NumericDefault", + table: &Table{ + Name: "numeric_default", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "n", DataType: DataTypeNumeric, Nullable: true, Default: "1.0"}, + }, + }, + sql: `CREATE TABLE IF NOT EXISTS "numeric_default" ("id" INTEGER NOT NULL, "n" NUMERIC DEFAULT 1.0)`, + }, + { + name: "NoPrimaryKey_NoForeignKey_Nullable_TimestampDefault", + table: &Table{ + Name: "timestamp_default", + Columns: []*Column{ + {Name: "id", DataType: DataTypeInteger, Nullable: false, Default: ""}, + {Name: "t", DataType: DataTypeTimestamp, Nullable: true, Default: "CURRENT_TIMESTAMP"}, + }, + }, + sql: `CREATE TABLE IF NOT EXISTS "timestamp_default" ("id" INTEGER NOT NULL, "t" TIMESTAMP DEFAULT CURRENT_TIMESTAMP)`, + }, + { + name: "PrimaryKey_NonNullable", + table: &Table{ + Name: "test", + Columns: []*Column{ + {Name: "id", DataType: 
DataTypeText, Nullable: false}, + {Name: "name", DataType: DataTypeText, Nullable: false}, + {Name: "org_id", DataType: DataTypeText, Nullable: false}, + }, + PrimaryKeyConstraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + }, + sql: `CREATE TABLE IF NOT EXISTS "test" ("id" TEXT NOT NULL, "name" TEXT NOT NULL, "org_id" TEXT NOT NULL, CONSTRAINT "pk_test" PRIMARY KEY ("id"))`, + }, + { + name: "PrimaryKey_ForeignKey_NonNullable", + table: &Table{ + Name: "test", + Columns: []*Column{ + {Name: "id", DataType: DataTypeText, Nullable: false}, + {Name: "name", DataType: DataTypeText, Nullable: false}, + {Name: "org_id", DataType: DataTypeText, Nullable: false}, + }, + PrimaryKeyConstraint: &PrimaryKeyConstraint{ColumnNames: []ColumnName{"id"}}, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "organizations", ReferencedColumnName: "id"}, + }, + }, + sql: `CREATE TABLE IF NOT EXISTS "test" ("id" TEXT NOT NULL, "name" TEXT NOT NULL, "org_id" TEXT NOT NULL, CONSTRAINT "pk_test" PRIMARY KEY ("id"), CONSTRAINT "fk_test_org_id" FOREIGN KEY ("org_id") REFERENCES "organizations" ("id"))`, + }, + { + name: "PrimaryKey_MultipleForeignKeys_NonNullable", + table: &Table{ + Name: "test", + Columns: []*Column{ + {Name: "id", DataType: DataTypeText, Nullable: false}, + {Name: "name", DataType: DataTypeText, Nullable: false}, + {Name: "org_id", DataType: DataTypeText, Nullable: false}, + {Name: "user_id", DataType: DataTypeText, Nullable: false}, + }, + PrimaryKeyConstraint: &PrimaryKeyConstraint{ColumnNames: []ColumnName{"id"}}, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + {ReferencingColumnName: "org_id", ReferencedTableName: "organizations", ReferencedColumnName: "id"}, + {ReferencingColumnName: "user_id", ReferencedTableName: "users", ReferencedColumnName: "id"}, + }, + }, + sql: `CREATE TABLE IF NOT EXISTS "test" ("id" TEXT NOT NULL, "name" TEXT NOT NULL, "org_id" TEXT NOT NULL, "user_id" TEXT NOT NULL, CONSTRAINT "pk_test" PRIMARY KEY ("id"), CONSTRAINT "fk_test_org_id" FOREIGN KEY ("org_id") REFERENCES "organizations" ("id"), CONSTRAINT "fk_test_user_id" FOREIGN KEY ("user_id") REFERENCES "users" ("id"))`, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + fmter := Formatter{schema.NewNopFormatter()} + sql := testCase.table.ToCreateSQL(fmter) + + assert.Equal(t, testCase.sql, string(sql)) + }) + } +} + +func TestTableToCreateTempInsertDropAlterSQL(t *testing.T) { + testCases := []struct { + name string + table Table + sqls []string + }{ + { + name: "PrimaryKey_ForeignKey_NonNullable", + table: Table{ + Name: "test", + Columns: []*Column{ + {Name: "id", DataType: DataTypeText, Nullable: false}, + {Name: "name", DataType: DataTypeText, Nullable: false}, + {Name: "org_id", DataType: DataTypeText, Nullable: false}, + }, + PrimaryKeyConstraint: &PrimaryKeyConstraint{ + ColumnNames: []ColumnName{"id"}, + }, + ForeignKeyConstraints: []*ForeignKeyConstraint{ + { + ReferencingColumnName: "org_id", + ReferencedTableName: "organizations", + ReferencedColumnName: "id", + }, + }, + }, + sqls: []string{ + `CREATE TABLE IF NOT EXISTS "test__temp" ("id" TEXT NOT NULL, "name" TEXT NOT NULL, "org_id" TEXT NOT NULL, CONSTRAINT "pk_test" PRIMARY KEY ("id"), CONSTRAINT "fk_test_org_id" FOREIGN KEY ("org_id") REFERENCES "organizations" ("id"))`, + `INSERT INTO "test__temp" ("id", "name", "org_id") SELECT "id", "name", "org_id" FROM "test"`, + `DROP TABLE IF EXISTS "test"`, + `ALTER TABLE 
"test__temp" RENAME TO "test"`, + }, + }, + } + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + fmter := Formatter{schema.NewNopFormatter()} + sqls := testCase.table.ToCreateTempInsertDropAlterSQL(fmter) + + for i, sql := range sqls { + assert.Equal(t, testCase.sqls[i], string(sql)) + } + }) + } +}