": "NULL",
+}
+var sqlLogPrefixedTableIdentifierPattern = regexp.MustCompile(`\[\[(.+)\.(.+)\]\]`)
+var sqlLogPrefixedColumnIdentifierPattern = regexp.MustCompile(`\{\{(.+)\.(.+)\}\}`)
- dao.BeforeCreateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- e := new(ModelEvent)
- e.Dao = eventDao
- e.Model = m
+// normalizeSQLLog replaces common query builder characters with their plain SQL version for easier debugging.
+// The query is still not suitable for execution and should be used only for log and debug purposes
+// (the normalization is done here to avoid breaking changes in dbx).
+func normalizeSQLLog(sql string) string {
+ sql = sqlLogPrefixedTableIdentifierPattern.ReplaceAllString(sql, "`$1`.`$2`")
- return app.OnModelBeforeCreate().Trigger(e, func(e *ModelEvent) error {
- return action()
- })
+ sql = sqlLogPrefixedColumnIdentifierPattern.ReplaceAllString(sql, "`$1`.`$2`")
+
+ for old, new := range sqlLogReplacements {
+ sql = strings.ReplaceAll(sql, old, new)
}
- dao.AfterCreateFunc = func(eventDao *daos.Dao, m models.Model) error {
- e := new(ModelEvent)
- e.Dao = eventDao
- e.Model = m
-
- return app.OnModelAfterCreate().Trigger(e)
- }
-
- dao.BeforeUpdateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- e := new(ModelEvent)
- e.Dao = eventDao
- e.Model = m
-
- return app.OnModelBeforeUpdate().Trigger(e, func(e *ModelEvent) error {
- return action()
- })
- }
-
- dao.AfterUpdateFunc = func(eventDao *daos.Dao, m models.Model) error {
- e := new(ModelEvent)
- e.Dao = eventDao
- e.Model = m
-
- return app.OnModelAfterUpdate().Trigger(e)
- }
-
- dao.BeforeDeleteFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- e := new(ModelEvent)
- e.Dao = eventDao
- e.Model = m
-
- return app.OnModelBeforeDelete().Trigger(e, func(e *ModelEvent) error {
- return action()
- })
- }
-
- dao.AfterDeleteFunc = func(eventDao *daos.Dao, m models.Model) error {
- e := new(ModelEvent)
- e.Dao = eventDao
- e.Model = m
-
- return app.OnModelAfterDelete().Trigger(e)
- }
-
- return dao
+ return sql
}
-func (app *BaseApp) registerDefaultHooks() {
+func (app *BaseApp) initAuxDB() error {
+ dbPath := filepath.Join(app.DataDir(), "aux.db")
+
+ concurrentDB, err := app.config.DBConnect(dbPath)
+ if err != nil {
+ return err
+ }
+ concurrentDB.DB().SetMaxOpenConns(app.config.AuxMaxOpenConns)
+ concurrentDB.DB().SetMaxIdleConns(app.config.AuxMaxIdleConns)
+ concurrentDB.DB().SetConnMaxIdleTime(3 * time.Minute)
+
+ nonconcurrentDB, err := app.config.DBConnect(dbPath)
+ if err != nil {
+ return err
+ }
+ nonconcurrentDB.DB().SetMaxOpenConns(1)
+ nonconcurrentDB.DB().SetMaxIdleConns(1)
+ nonconcurrentDB.DB().SetConnMaxIdleTime(3 * time.Minute)
+
+ app.auxConcurrentDB = concurrentDB
+ app.auxNonconcurrentDB = nonconcurrentDB
+
+ return nil
+}
+
+func (app *BaseApp) registerBaseHooks() {
deletePrefix := func(prefix string) error {
fs, err := app.NewFilesystem()
if err != nil {
@@ -1160,33 +1192,58 @@ func (app *BaseApp) registerDefaultHooks() {
}
// try to delete the storage files from deleted Collection, Records, etc. model
- app.OnModelAfterDelete().Add(func(e *ModelEvent) error {
- if m, ok := e.Model.(models.FilesManager); ok && m.BaseFilesPath() != "" {
- // ensure that there is a trailing slash so that the list iterator could start walking from the prefix
- // (https://github.com/pocketbase/pocketbase/discussions/5246#discussioncomment-10128955)
- prefix := strings.TrimRight(m.BaseFilesPath(), "/") + "/"
+ app.OnModelAfterDeleteSuccess().Bind(&hook.Handler[*ModelEvent]{
+ Id: "__pbFilesManagerDelete__",
+ Func: func(e *ModelEvent) error {
+ if m, ok := e.Model.(FilesManager); ok && m.BaseFilesPath() != "" {
+ // ensure that there is a trailing slash so that the list iterator could start walking from the prefix dir
+ // (https://github.com/pocketbase/pocketbase/discussions/5246#discussioncomment-10128955)
+ prefix := strings.TrimRight(m.BaseFilesPath(), "/") + "/"
- // run in the background for "optimistic" delete to avoid
- // blocking the delete transaction
- routine.FireAndForget(func() {
- if err := deletePrefix(prefix); err != nil {
- app.Logger().Error(
- "Failed to delete storage prefix (non critical error; usually could happen because of S3 api limits)",
- slog.String("prefix", prefix),
- slog.String("error", err.Error()),
- )
- }
- })
- }
+ // run in the background for "optimistic" delete to avoid
+ // blocking the delete transaction
+ routine.FireAndForget(func() {
+ if err := deletePrefix(prefix); err != nil {
+ app.Logger().Error(
+ "Failed to delete storage prefix (non critical error; usually could happen because of S3 api limits)",
+ slog.String("prefix", prefix),
+ slog.String("error", err.Error()),
+ )
+ }
+ })
+ }
- return nil
+ return e.Next()
+ },
+ Priority: -99,
})
- if err := app.initAutobackupHooks(); err != nil {
- app.Logger().Error("Failed to init auto backup hooks", slog.String("error", err.Error()))
- }
+ app.OnServe().Bind(&hook.Handler[*ServeEvent]{
+ Id: "__pbCronStart__",
+ Func: func(e *ServeEvent) error {
+ app.Cron().Start()
- registerCachedCollectionsAppHooks(app)
+ return e.Next()
+ },
+ Priority: 999,
+ })
+
+ app.Cron().Add("__pbPragmaOptimize__", "0 0 * * *", func() {
+ _, execErr := app.DB().NewQuery("PRAGMA optimize").Execute()
+ if execErr != nil {
+ app.Logger().Warn("Failed to run periodic PRAGMA optimize", slog.String("error", execErr.Error()))
+ }
+ })
+
+ app.registerSettingsHooks()
+ app.registerAutobackupHooks()
+ app.registerCollectionHooks()
+ app.registerRecordHooks()
+ app.registerSuperuserHooks()
+ app.registerExternalAuthHooks()
+ app.registerMFAHooks()
+ app.registerOTPHooks()
+ app.registerAuthOriginHooks()
}
// getLoggerMinLevel returns the logger min level based on the
@@ -1198,11 +1255,11 @@ func (app *BaseApp) registerDefaultHooks() {
// practically all logs to the terminal.
// In this case DB logs are still filtered but the checks for the min level are done
// in the BatchOptions.BeforeAddFunc instead of the slog.Handler.Enabled() method.
-func (app *BaseApp) getLoggerMinLevel() slog.Level {
+func getLoggerMinLevel(app App) slog.Level {
var minLevel slog.Level
if app.IsDev() {
- minLevel = -9999
+ minLevel = -99999
} else if app.Settings() != nil {
minLevel = slog.Level(app.Settings().Logs.MinLevel)
}
@@ -1216,7 +1273,7 @@ func (app *BaseApp) initLogger() error {
done := make(chan bool)
handler := logger.NewBatchHandler(logger.BatchOptions{
- Level: app.getLoggerMinLevel(),
+ Level: getLoggerMinLevel(app),
BatchSize: 200,
BeforeAddFunc: func(ctx context.Context, log *logger.Log) bool {
if app.IsDev() {
@@ -1239,19 +1296,17 @@ func (app *BaseApp) initLogger() error {
// write the accumulated logs
// (note: based on several local tests there is no significant performance difference between small number of separate write queries vs 1 big INSERT)
- app.LogsDao().RunInTransaction(func(txDao *daos.Dao) error {
- model := &models.Log{}
+ app.AuxRunInTransaction(func(txApp App) error {
+ model := &Log{}
for _, l := range logs {
model.MarkAsNew()
- // note: using pseudorandom for a slightly better performance
- model.Id = security.PseudorandomStringWithAlphabet(models.DefaultIdLength, models.DefaultIdAlphabet)
+ model.Id = GenerateDefaultRandomId()
model.Level = int(l.Level)
model.Message = l.Message
model.Data = l.Data
model.Created, _ = types.ParseDateTime(l.Time)
- model.Updated = model.Created
- if err := txDao.SaveLog(model); err != nil {
+ if err := txApp.AuxSave(model); err != nil {
log.Println("Failed to write log", model, err)
}
}
@@ -1259,21 +1314,6 @@ func (app *BaseApp) initLogger() error {
return nil
})
- // @todo replace with cron so that it doesn't rely on the logs write
- //
- // delete old logs
- // ---
- now := time.Now()
- lastLogsDeletedAt := cast.ToTime(app.Store().Get("lastLogsDeletedAt"))
- if now.Sub(lastLogsDeletedAt).Hours() >= 6 {
- deleteErr := app.LogsDao().DeleteOldLogs(now.AddDate(0, 0, -1*app.Settings().Logs.MaxDays))
- if deleteErr == nil {
- app.Store().Set("lastLogsDeletedAt", now)
- } else {
- log.Println("Logs delete failed", deleteErr)
- }
- }
-
return nil
},
})
@@ -1293,15 +1333,66 @@ func (app *BaseApp) initLogger() error {
app.logger = slog.New(handler)
- app.OnTerminate().PreAdd(func(e *TerminateEvent) error {
- // write all remaining logs before ticker.Stop to avoid races with ResetBootstrap user calls
- handler.WriteAll(context.Background())
+ // write all remaining logs before ticker.Stop to avoid races with ResetBootstrap user calls
+ app.OnTerminate().Bind(&hook.Handler[*TerminateEvent]{
+ Id: "__pbAppLoggerOnTerminate__",
+ Func: func(e *TerminateEvent) error {
+ handler.WriteAll(context.Background())
- ticker.Stop()
+ ticker.Stop()
- done <- true
+ done <- true
- return nil
+ return e.Next()
+ },
+ Priority: -999,
+ })
+
+ // reload log handler level (if initialized)
+ app.OnSettingsReload().Bind(&hook.Handler[*SettingsReloadEvent]{
+ Id: "__pbAppLoggerOnSettingsReload__",
+ Func: func(e *SettingsReloadEvent) error {
+ err := e.Next()
+ if err != nil {
+ return err
+ }
+
+ if e.App.Logger() != nil {
+ if h, ok := e.App.Logger().Handler().(*logger.BatchHandler); ok {
+ h.SetLevel(getLoggerMinLevel(e.App))
+ }
+ }
+
+ // try to clear old logs not matching the new settings
+ createdBefore := types.NowDateTime().AddDate(0, 0, -1*e.App.Settings().Logs.MaxDays)
+ expr := dbx.NewExp("[[created]] <= {:date} OR [[level]] < {:level}", dbx.Params{
+ "date": createdBefore.String(),
+ "level": e.App.Settings().Logs.MinLevel,
+ })
+ _, err = e.App.AuxNonconcurrentDB().Delete((&Log{}).TableName(), expr).Execute()
+ if err != nil {
+ e.App.Logger().Debug("Failed to cleanup old logs", "error", err)
+ }
+
+ // no logs are allowed -> try to reclaim preserved disk space after the previous delete operation
+ if e.App.Settings().Logs.MaxDays == 0 {
+ err = e.App.AuxVacuum()
+ if err != nil {
+ e.App.Logger().Debug("Failed to VACUUM aux database", "error", err)
+ }
+ }
+
+ return nil
+ },
+ Priority: -999,
+ })
+
+ // cleanup old logs
+ app.Cron().Add("__pbLogsCleanup__", "0 */6 * * *", func() {
+ deleteErr := app.DeleteOldLogs(time.Now().AddDate(0, 0, -1*app.Settings().Logs.MaxDays))
+ if deleteErr != nil {
+ app.Logger().Warn("Failed to delete old logs", "error", deleteErr)
+ }
})
return nil
diff --git a/core/base_backup.go b/core/base_backup.go
index 65d6e201..2b272bd8 100644
--- a/core/base_backup.go
+++ b/core/base_backup.go
@@ -12,20 +12,16 @@ import (
"sort"
"time"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
"github.com/pocketbase/pocketbase/tools/archive"
- "github.com/pocketbase/pocketbase/tools/cron"
"github.com/pocketbase/pocketbase/tools/filesystem"
"github.com/pocketbase/pocketbase/tools/inflector"
"github.com/pocketbase/pocketbase/tools/osutils"
"github.com/pocketbase/pocketbase/tools/security"
)
-// Deprecated: Replaced with StoreKeyActiveBackup.
-const CacheKeyActiveBackup string = "@activeBackup"
-
-const StoreKeyActiveBackup string = "@activeBackup"
+const (
+ StoreKeyActiveBackup = "@activeBackup"
+)
// CreateBackup creates a new backup of the current app pb_data directory.
//
@@ -50,61 +46,67 @@ func (app *BaseApp) CreateBackup(ctx context.Context, name string) error {
return errors.New("try again later - another backup/restore operation has already been started")
}
- if name == "" {
- name = app.generateBackupName("pb_backup_")
- }
-
app.Store().Set(StoreKeyActiveBackup, name)
defer app.Store().Remove(StoreKeyActiveBackup)
- // root dir entries to exclude from the backup generation
- exclude := []string{LocalBackupsDirName, LocalTempDirName}
+ event := new(BackupEvent)
+ event.App = app
+ event.Context = ctx
+ event.Name = name
+ // default root dir entries to exclude from the backup generation
+ event.Exclude = []string{LocalBackupsDirName, LocalTempDirName, LocalAutocertCacheDirName}
- // make sure that the special temp directory exists
- // note: it needs to be inside the current pb_data to avoid "cross-device link" errors
- localTempDir := filepath.Join(app.DataDir(), LocalTempDirName)
- if err := os.MkdirAll(localTempDir, os.ModePerm); err != nil {
- return fmt.Errorf("failed to create a temp dir: %w", err)
- }
+ return app.OnBackupCreate().Trigger(event, func(e *BackupEvent) error {
+ // generate a default name if missing
+ if e.Name == "" {
+ e.Name = generateBackupName(e.App, "pb_backup_")
+ }
- // Archive pb_data in a temp directory, exluding the "backups" and the temp dirs.
- //
- // Run in transaction to temporary block other writes (transactions uses the NonconcurrentDB connection).
- // ---
- tempPath := filepath.Join(localTempDir, "pb_backup_"+security.PseudorandomString(4))
- createErr := app.Dao().RunInTransaction(func(dataTXDao *daos.Dao) error {
- return app.LogsDao().RunInTransaction(func(logsTXDao *daos.Dao) error {
- // @todo consider experimenting with temp switching the readonly pragma after the db interface change
- return archive.Create(app.DataDir(), tempPath, exclude...)
+ // make sure that the special temp directory exists
+ // note: it needs to be inside the current pb_data to avoid "cross-device link" errors
+ localTempDir := filepath.Join(e.App.DataDir(), LocalTempDirName)
+ if err := os.MkdirAll(localTempDir, os.ModePerm); err != nil {
+ return fmt.Errorf("failed to create a temp dir: %w", err)
+ }
+
+	// archive pb_data in a temp directory, excluding the "backups" and the temp dirs
+ //
+	// Run in transaction to temporarily block other writes (transactions use the NonconcurrentDB connection).
+ // ---
+ tempPath := filepath.Join(localTempDir, "pb_backup_"+security.PseudorandomString(6))
+ createErr := e.App.RunInTransaction(func(txApp App) error {
+ return txApp.AuxRunInTransaction(func(txApp App) error {
+ return archive.Create(txApp.DataDir(), tempPath, e.Exclude...)
+ })
})
+ if createErr != nil {
+ return createErr
+ }
+ defer os.Remove(tempPath)
+
+ // persist the backup in the backups filesystem
+ // ---
+ fsys, err := e.App.NewBackupsFilesystem()
+ if err != nil {
+ return err
+ }
+ defer fsys.Close()
+
+ fsys.SetContext(e.Context)
+
+ file, err := filesystem.NewFileFromPath(tempPath)
+ if err != nil {
+ return err
+ }
+ file.OriginalName = e.Name
+ file.Name = file.OriginalName
+
+ if err := fsys.UploadFile(file, file.Name); err != nil {
+ return err
+ }
+
+ return nil
})
- if createErr != nil {
- return createErr
- }
- defer os.Remove(tempPath)
-
- // Persist the backup in the backups filesystem.
- // ---
- fsys, err := app.NewBackupsFilesystem()
- if err != nil {
- return err
- }
- defer fsys.Close()
-
- fsys.SetContext(ctx)
-
- file, err := filesystem.NewFileFromPath(tempPath)
- if err != nil {
- return err
- }
- file.OriginalName = name
- file.Name = file.OriginalName
-
- if err := fsys.UploadFile(file, file.Name); err != nil {
- return err
- }
-
- return nil
}
// RestoreBackup restores the backup with the specified name and restarts
@@ -136,10 +138,6 @@ func (app *BaseApp) CreateBackup(ctx context.Context, name string) error {
// If a failure occure during the restore process the dir changes are reverted.
// If for whatever reason the revert is not possible, it panics.
func (app *BaseApp) RestoreBackup(ctx context.Context, name string) error {
- if runtime.GOOS == "windows" {
- return errors.New("restore is not supported on windows")
- }
-
if app.Store().Has(StoreKeyActiveBackup) {
return errors.New("try again later - another backup/restore operation has already been started")
}
@@ -147,131 +145,129 @@ func (app *BaseApp) RestoreBackup(ctx context.Context, name string) error {
app.Store().Set(StoreKeyActiveBackup, name)
defer app.Store().Remove(StoreKeyActiveBackup)
- fsys, err := app.NewBackupsFilesystem()
- if err != nil {
- return err
- }
- defer fsys.Close()
+ event := new(BackupEvent)
+ event.App = app
+ event.Context = ctx
+ event.Name = name
+ // default root dir entries to exclude from the backup restore
+ event.Exclude = []string{LocalBackupsDirName, LocalTempDirName, LocalAutocertCacheDirName}
- fsys.SetContext(ctx)
-
- // fetch the backup file in a temp location
- br, err := fsys.GetFile(name)
- if err != nil {
- return err
- }
- defer br.Close()
-
- // make sure that the special temp directory exists
- // note: it needs to be inside the current pb_data to avoid "cross-device link" errors
- localTempDir := filepath.Join(app.DataDir(), LocalTempDirName)
- if err := os.MkdirAll(localTempDir, os.ModePerm); err != nil {
- return fmt.Errorf("failed to create a temp dir: %w", err)
- }
-
- // create a temp zip file from the blob.Reader and try to extract it
- tempZip, err := os.CreateTemp(localTempDir, "pb_restore_zip")
- if err != nil {
- return err
- }
- defer os.Remove(tempZip.Name())
-
- if _, err := io.Copy(tempZip, br); err != nil {
- return err
- }
-
- extractedDataDir := filepath.Join(localTempDir, "pb_restore_"+security.PseudorandomString(4))
- defer os.RemoveAll(extractedDataDir)
- if err := archive.Extract(tempZip.Name(), extractedDataDir); err != nil {
- return err
- }
-
- // ensure that a database file exists
- extractedDB := filepath.Join(extractedDataDir, "data.db")
- if _, err := os.Stat(extractedDB); err != nil {
- return fmt.Errorf("data.db file is missing or invalid: %w", err)
- }
-
- // remove the extracted zip file since we no longer need it
- // (this is in case the app restarts and the defer calls are not called)
- if err := os.Remove(tempZip.Name()); err != nil {
- app.Logger().Debug(
- "[RestoreBackup] Failed to remove the temp zip backup file",
- slog.String("file", tempZip.Name()),
- slog.String("error", err.Error()),
- )
- }
-
- // root dir entries to exclude from the backup restore
- exclude := []string{LocalBackupsDirName, LocalTempDirName}
-
- // move the current pb_data content to a special temp location
- // that will hold the old data between dirs replace
- // (the temp dir will be automatically removed on the next app start)
- oldTempDataDir := filepath.Join(localTempDir, "old_pb_data_"+security.PseudorandomString(4))
- if err := osutils.MoveDirContent(app.DataDir(), oldTempDataDir, exclude...); err != nil {
- return fmt.Errorf("failed to move the current pb_data content to a temp location: %w", err)
- }
-
- // move the extracted archive content to the app's pb_data
- if err := osutils.MoveDirContent(extractedDataDir, app.DataDir(), exclude...); err != nil {
- return fmt.Errorf("failed to move the extracted archive content to pb_data: %w", err)
- }
-
- revertDataDirChanges := func() error {
- if err := osutils.MoveDirContent(app.DataDir(), extractedDataDir, exclude...); err != nil {
- return fmt.Errorf("failed to revert the extracted dir change: %w", err)
+ return app.OnBackupRestore().Trigger(event, func(e *BackupEvent) error {
+ if runtime.GOOS == "windows" {
+ return errors.New("restore is not supported on Windows")
}
- if err := osutils.MoveDirContent(oldTempDataDir, app.DataDir(), exclude...); err != nil {
- return fmt.Errorf("failed to revert old pb_data dir change: %w", err)
+ fsys, err := e.App.NewBackupsFilesystem()
+ if err != nil {
+ return err
+ }
+ defer fsys.Close()
+
+ fsys.SetContext(e.Context)
+
+ // fetch the backup file in a temp location
+ br, err := fsys.GetFile(name)
+ if err != nil {
+ return err
+ }
+ defer br.Close()
+
+ // make sure that the special temp directory exists
+ // note: it needs to be inside the current pb_data to avoid "cross-device link" errors
+ localTempDir := filepath.Join(e.App.DataDir(), LocalTempDirName)
+ if err := os.MkdirAll(localTempDir, os.ModePerm); err != nil {
+ return fmt.Errorf("failed to create a temp dir: %w", err)
}
- return nil
- }
+ // create a temp zip file from the blob.Reader and try to extract it
+ tempZip, err := os.CreateTemp(localTempDir, "pb_restore_zip")
+ if err != nil {
+ return err
+ }
+ defer os.Remove(tempZip.Name())
- // restart the app
- if err := app.Restart(); err != nil {
- if revertErr := revertDataDirChanges(); revertErr != nil {
- panic(revertErr)
+ if _, err := io.Copy(tempZip, br); err != nil {
+ return err
}
- return fmt.Errorf("failed to restart the app process: %w", err)
- }
+ extractedDataDir := filepath.Join(localTempDir, "pb_restore_"+security.PseudorandomString(4))
+ defer os.RemoveAll(extractedDataDir)
+ if err := archive.Extract(tempZip.Name(), extractedDataDir); err != nil {
+ return err
+ }
- return nil
-}
+ // ensure that a database file exists
+ extractedDB := filepath.Join(extractedDataDir, "data.db")
+ if _, err := os.Stat(extractedDB); err != nil {
+ return fmt.Errorf("data.db file is missing or invalid: %w", err)
+ }
-// initAutobackupHooks registers the autobackup app serve hooks.
-func (app *BaseApp) initAutobackupHooks() error {
- c := cron.New()
- isServe := false
-
- loadJob := func() {
- c.Stop()
-
- // make sure that app.Settings() is always up to date
- //
- // @todo remove with the refactoring as core.App and daos.Dao will be one.
- if err := app.RefreshSettings(); err != nil {
- app.Logger().Debug(
- "[Backup cron] Failed to get the latest app settings",
+ // remove the extracted zip file since we no longer need it
+ // (this is in case the app restarts and the defer calls are not called)
+ if err := os.Remove(tempZip.Name()); err != nil {
+ e.App.Logger().Debug(
+ "[RestoreBackup] Failed to remove the temp zip backup file",
+ slog.String("file", tempZip.Name()),
slog.String("error", err.Error()),
)
}
+ // move the current pb_data content to a special temp location
+ // that will hold the old data between dirs replace
+ // (the temp dir will be automatically removed on the next app start)
+ oldTempDataDir := filepath.Join(localTempDir, "old_pb_data_"+security.PseudorandomString(4))
+ if err := osutils.MoveDirContent(e.App.DataDir(), oldTempDataDir, e.Exclude...); err != nil {
+ return fmt.Errorf("failed to move the current pb_data content to a temp location: %w", err)
+ }
+
+ // move the extracted archive content to the app's pb_data
+ if err := osutils.MoveDirContent(extractedDataDir, e.App.DataDir(), e.Exclude...); err != nil {
+ return fmt.Errorf("failed to move the extracted archive content to pb_data: %w", err)
+ }
+
+ revertDataDirChanges := func() error {
+ if err := osutils.MoveDirContent(e.App.DataDir(), extractedDataDir, e.Exclude...); err != nil {
+ return fmt.Errorf("failed to revert the extracted dir change: %w", err)
+ }
+
+ if err := osutils.MoveDirContent(oldTempDataDir, e.App.DataDir(), e.Exclude...); err != nil {
+ return fmt.Errorf("failed to revert old pb_data dir change: %w", err)
+ }
+
+ return nil
+ }
+
+ // restart the app
+ if err := e.App.Restart(); err != nil {
+ if revertErr := revertDataDirChanges(); revertErr != nil {
+ panic(revertErr)
+ }
+
+ return fmt.Errorf("failed to restart the app process: %w", err)
+ }
+
+ return nil
+ })
+}
+
+// registerAutobackupHooks registers the autobackup app serve hooks.
+func (app *BaseApp) registerAutobackupHooks() {
+ const jobId = "__auto_pb_backup__"
+
+ loadJob := func() {
rawSchedule := app.Settings().Backups.Cron
- if rawSchedule == "" || !isServe || !app.IsBootstrapped() {
+ if rawSchedule == "" {
+ app.Cron().Remove(jobId)
return
}
- c.Add("@autobackup", rawSchedule, func() {
+ app.Cron().Add(jobId, rawSchedule, func() {
const autoPrefix = "@auto_pb_backup_"
- name := app.generateBackupName(autoPrefix)
+ name := generateBackupName(app, autoPrefix)
if err := app.CreateBackup(context.Background(), name); err != nil {
- app.Logger().Debug(
+ app.Logger().Error(
"[Backup cron] Failed to create backup",
slog.String("name", name),
slog.String("error", err.Error()),
@@ -286,7 +282,7 @@ func (app *BaseApp) initAutobackupHooks() error {
fsys, err := app.NewBackupsFilesystem()
if err != nil {
- app.Logger().Debug(
+ app.Logger().Error(
"[Backup cron] Failed to initialize the backup filesystem",
slog.String("error", err.Error()),
)
@@ -296,7 +292,7 @@ func (app *BaseApp) initAutobackupHooks() error {
files, err := fsys.List(autoPrefix)
if err != nil {
- app.Logger().Debug(
+ app.Logger().Error(
"[Backup cron] Failed to list autogenerated backups",
slog.String("error", err.Error()),
)
@@ -317,7 +313,7 @@ func (app *BaseApp) initAutobackupHooks() error {
for _, f := range toRemove {
if err := fsys.Delete(f.Key); err != nil {
- app.Logger().Debug(
+ app.Logger().Error(
"[Backup cron] Failed to remove old autogenerated backup",
slog.String("key", f.Key),
slog.String("error", err.Error()),
@@ -325,29 +321,11 @@ func (app *BaseApp) initAutobackupHooks() error {
}
}
})
-
- // restart the ticker
- c.Start()
}
- // load on app serve
- app.OnBeforeServe().Add(func(e *ServeEvent) error {
- isServe = true
- loadJob()
- return nil
- })
-
- // stop the ticker on app termination
- app.OnTerminate().Add(func(e *TerminateEvent) error {
- c.Stop()
- return nil
- })
-
- // reload on app settings change
- app.OnModelAfterUpdate((&models.Param{}).TableName()).Add(func(e *ModelEvent) error {
- p := e.Model.(*models.Param)
- if p == nil || p.Key != models.ParamAppSettings {
- return nil
+ app.OnBootstrap().BindFunc(func(e *BootstrapEvent) error {
+ if err := e.Next(); err != nil {
+ return err
}
loadJob()
@@ -355,10 +333,18 @@ func (app *BaseApp) initAutobackupHooks() error {
return nil
})
- return nil
+ app.OnSettingsReload().BindFunc(func(e *SettingsReloadEvent) error {
+ if err := e.Next(); err != nil {
+ return err
+ }
+
+ loadJob()
+
+ return nil
+ })
}
-func (app *BaseApp) generateBackupName(prefix string) string {
+func generateBackupName(app App, prefix string) string {
appName := inflector.Snakecase(app.Settings().Meta.AppName)
if len(appName) > 50 {
appName = appName[:50]
diff --git a/core/base_backup_test.go b/core/base_backup_test.go
index da4f93b5..496585ee 100644
--- a/core/base_backup_test.go
+++ b/core/base_backup_test.go
@@ -128,9 +128,9 @@ func verifyBackupContent(app core.App, path string) error {
"data.db",
"data.db-shm",
"data.db-wal",
- "logs.db",
- "logs.db-shm",
- "logs.db-wal",
+ "aux.db",
+ "aux.db-shm",
+ "aux.db-wal",
".gitignore",
}
diff --git a/core/base_settings_test.go b/core/base_settings_test.go
deleted file mode 100644
index 4c38a829..00000000
--- a/core/base_settings_test.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package core_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestBaseAppRefreshSettings(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // cleanup all stored settings
- if _, err := app.DB().NewQuery("DELETE from _params;").Execute(); err != nil {
- t.Fatalf("Failed to delete all test settings: %v", err)
- }
-
- // check if the new settings are saved in the db
- app.ResetEventCalls()
- if err := app.RefreshSettings(); err != nil {
- t.Fatalf("Failed to refresh the settings after delete: %v", err)
- }
- testEventCalls(t, app, map[string]int{
- "OnModelBeforeCreate": 1,
- "OnModelAfterCreate": 1,
- })
- param, err := app.Dao().FindParamByKey(models.ParamAppSettings)
- if err != nil {
- t.Fatalf("Expected new settings to be persisted, got %v", err)
- }
-
- // change the db entry and refresh the app settings (ensure that there was no db update)
- param.Value = types.JsonRaw([]byte(`{"example": 123}`))
- if err := app.Dao().SaveParam(param.Key, param.Value); err != nil {
- t.Fatalf("Failed to update the test settings: %v", err)
- }
- app.ResetEventCalls()
- if err := app.RefreshSettings(); err != nil {
- t.Fatalf("Failed to refresh the app settings: %v", err)
- }
- testEventCalls(t, app, nil)
-
- // try to refresh again without doing any changes
- app.ResetEventCalls()
- if err := app.RefreshSettings(); err != nil {
- t.Fatalf("Failed to refresh the app settings without change: %v", err)
- }
- testEventCalls(t, app, nil)
-}
-
-func testEventCalls(t *testing.T, app *tests.TestApp, events map[string]int) {
- if len(events) != len(app.EventCalls) {
- t.Fatalf("Expected events doesn't match: \n%v, \ngot \n%v", events, app.EventCalls)
- }
-
- for name, total := range events {
- if v, ok := app.EventCalls[name]; !ok || v != total {
- t.Fatalf("Expected events doesn't exist or match: \n%v, \ngot \n%v", events, app.EventCalls)
- }
- }
-}
diff --git a/core/base_test.go b/core/base_test.go
index 6b9c59f5..a64a94b4 100644
--- a/core/base_test.go
+++ b/core/base_test.go
@@ -1,59 +1,56 @@
-package core
+package core_test
import (
"context"
- "database/sql"
- "fmt"
"log/slog"
"os"
- "strings"
"testing"
"time"
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/migrations"
- "github.com/pocketbase/pocketbase/migrations/logs"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/list"
+ _ "unsafe"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
"github.com/pocketbase/pocketbase/tools/logger"
"github.com/pocketbase/pocketbase/tools/mailer"
- "github.com/pocketbase/pocketbase/tools/migrate"
- "github.com/pocketbase/pocketbase/tools/types"
)
func TestNewBaseApp(t *testing.T) {
const testDataDir = "./pb_base_app_test_data_dir/"
defer os.RemoveAll(testDataDir)
- app := NewBaseApp(BaseAppConfig{
+ app := core.NewBaseApp(core.BaseAppConfig{
DataDir: testDataDir,
EncryptionEnv: "test_env",
IsDev: true,
})
- if app.dataDir != testDataDir {
- t.Fatalf("expected dataDir %q, got %q", testDataDir, app.dataDir)
+ if app.DataDir() != testDataDir {
+ t.Fatalf("expected DataDir %q, got %q", testDataDir, app.DataDir())
}
- if app.encryptionEnv != "test_env" {
- t.Fatalf("expected encryptionEnv test_env, got %q", app.dataDir)
+ if app.EncryptionEnv() != "test_env" {
+ t.Fatalf("expected EncryptionEnv test_env, got %q", app.EncryptionEnv())
}
- if !app.isDev {
- t.Fatalf("expected isDev true, got %v", app.isDev)
+ if !app.IsDev() {
+ t.Fatalf("expected IsDev true, got %v", app.IsDev())
}
- if app.store == nil {
- t.Fatal("expected store to be set, got nil")
+ if app.Store() == nil {
+ t.Fatal("expected Store to be set, got nil")
}
- if app.settings == nil {
- t.Fatal("expected settings to be set, got nil")
+ if app.Settings() == nil {
+ t.Fatal("expected Settings to be set, got nil")
}
- if app.subscriptionsBroker == nil {
- t.Fatal("expected subscriptionsBroker to be set, got nil")
+ if app.SubscriptionsBroker() == nil {
+ t.Fatal("expected SubscriptionsBroker to be set, got nil")
+ }
+
+ if app.Cron() == nil {
+ t.Fatal("expected Cron to be set, got nil")
}
}
@@ -61,9 +58,8 @@ func TestBaseAppBootstrap(t *testing.T) {
const testDataDir = "./pb_base_app_test_data_dir/"
defer os.RemoveAll(testDataDir)
- app := NewBaseApp(BaseAppConfig{
- DataDir: testDataDir,
- EncryptionEnv: "pb_test_env",
+ app := core.NewBaseApp(core.BaseAppConfig{
+ DataDir: testDataDir,
})
defer app.ResetBootstrapState()
@@ -83,72 +79,59 @@ func TestBaseAppBootstrap(t *testing.T) {
t.Fatal("Expected test data directory to be created.")
}
- if app.dao == nil {
- t.Fatal("Expected app.dao to be initialized, got nil.")
+ type nilCheck struct {
+ name string
+ value any
+ expectNil bool
}
- if app.dao.BeforeCreateFunc == nil {
- t.Fatal("Expected app.dao.BeforeCreateFunc to be set, got nil.")
+ runNilChecks := func(checks []nilCheck) {
+ for _, check := range checks {
+ t.Run(check.name, func(t *testing.T) {
+ isNil := check.value == nil
+ if isNil != check.expectNil {
+ t.Fatalf("Expected isNil %v, got %v", check.expectNil, isNil)
+ }
+ })
+ }
}
- if app.dao.AfterCreateFunc == nil {
- t.Fatal("Expected app.dao.AfterCreateFunc to be set, got nil.")
+ nilChecksBeforeReset := []nilCheck{
+ {"[before] concurrentDB", app.DB(), false},
+ {"[before] nonconcurrentDB", app.NonconcurrentDB(), false},
+ {"[before] auxConcurrentDB", app.AuxDB(), false},
+ {"[before] auxNonconcurrentDB", app.AuxNonconcurrentDB(), false},
+ {"[before] settings", app.Settings(), false},
+ {"[before] logger", app.Logger(), false},
+ {"[before] cached collections", app.Store().Get(core.StoreKeyCachedCollections), false},
}
- if app.dao.BeforeUpdateFunc == nil {
- t.Fatal("Expected app.dao.BeforeUpdateFunc to be set, got nil.")
- }
-
- if app.dao.AfterUpdateFunc == nil {
- t.Fatal("Expected app.dao.AfterUpdateFunc to be set, got nil.")
- }
-
- if app.dao.BeforeDeleteFunc == nil {
- t.Fatal("Expected app.dao.BeforeDeleteFunc to be set, got nil.")
- }
-
- if app.dao.AfterDeleteFunc == nil {
- t.Fatal("Expected app.dao.AfterDeleteFunc to be set, got nil.")
- }
-
- if app.logsDao == nil {
- t.Fatal("Expected app.logsDao to be initialized, got nil.")
- }
-
- if app.settings == nil {
- t.Fatal("Expected app.settings to be initialized, got nil.")
- }
-
- if app.logger == nil {
- t.Fatal("Expected app.logger to be initialized, got nil.")
- }
-
- if _, ok := app.logger.Handler().(*logger.BatchHandler); !ok {
- t.Fatal("Expected app.logger handler to be initialized.")
- }
+ runNilChecks(nilChecksBeforeReset)
// reset
if err := app.ResetBootstrapState(); err != nil {
t.Fatal(err)
}
- if app.dao != nil {
- t.Fatalf("Expected app.dao to be nil, got %v.", app.dao)
+ nilChecksAfterReset := []nilCheck{
+ {"[after] concurrentDB", app.DB(), true},
+ {"[after] nonconcurrentDB", app.NonconcurrentDB(), true},
+ {"[after] auxConcurrentDB", app.AuxDB(), true},
+ {"[after] auxNonconcurrentDB", app.AuxNonconcurrentDB(), true},
+ {"[after] settings", app.Settings(), false},
+ {"[after] logger", app.Logger(), false},
+ {"[after] cached collections", app.Store().Get(core.StoreKeyCachedCollections), false},
}
- if app.logsDao != nil {
- t.Fatalf("Expected app.logsDao to be nil, got %v.", app.logsDao)
- }
+ runNilChecks(nilChecksAfterReset)
}
-func TestBaseAppGetters(t *testing.T) {
+func TestNewBaseAppIsTransactional(t *testing.T) {
const testDataDir = "./pb_base_app_test_data_dir/"
defer os.RemoveAll(testDataDir)
- app := NewBaseApp(BaseAppConfig{
- DataDir: testDataDir,
- EncryptionEnv: "pb_test_env",
- IsDev: true,
+ app := core.NewBaseApp(core.BaseAppConfig{
+ DataDir: testDataDir,
})
defer app.ResetBootstrapState()
@@ -156,81 +139,58 @@ func TestBaseAppGetters(t *testing.T) {
t.Fatal(err)
}
- if app.dao != app.Dao() {
- t.Fatalf("Expected app.Dao %v, got %v", app.Dao(), app.dao)
+ if app.IsTransactional() {
+ t.Fatalf("Didn't expect the app to be transactional")
}
- if app.dao.ConcurrentDB() != app.DB() {
- t.Fatalf("Expected app.DB %v, got %v", app.DB(), app.dao.ConcurrentDB())
- }
+ app.RunInTransaction(func(txApp core.App) error {
+ if !txApp.IsTransactional() {
+ t.Fatalf("Expected the app to be transactional")
+ }
- if app.logsDao != app.LogsDao() {
- t.Fatalf("Expected app.LogsDao %v, got %v", app.LogsDao(), app.logsDao)
- }
-
- if app.logsDao.ConcurrentDB() != app.LogsDB() {
- t.Fatalf("Expected app.LogsDB %v, got %v", app.LogsDB(), app.logsDao.ConcurrentDB())
- }
-
- if app.dataDir != app.DataDir() {
- t.Fatalf("Expected app.DataDir %v, got %v", app.DataDir(), app.dataDir)
- }
-
- if app.encryptionEnv != app.EncryptionEnv() {
- t.Fatalf("Expected app.EncryptionEnv %v, got %v", app.EncryptionEnv(), app.encryptionEnv)
- }
-
- if app.isDev != app.IsDev() {
- t.Fatalf("Expected app.IsDev %v, got %v", app.IsDev(), app.isDev)
- }
-
- if app.settings != app.Settings() {
- t.Fatalf("Expected app.Settings %v, got %v", app.Settings(), app.settings)
- }
-
- if app.store != app.Store() {
- t.Fatalf("Expected app.Store %v, got %v", app.Store(), app.store)
- }
-
- if app.logger != app.Logger() {
- t.Fatalf("Expected app.Logger %v, got %v", app.Logger(), app.logger)
- }
-
- if app.subscriptionsBroker != app.SubscriptionsBroker() {
- t.Fatalf("Expected app.SubscriptionsBroker %v, got %v", app.SubscriptionsBroker(), app.subscriptionsBroker)
- }
-
- if app.onBeforeServe != app.OnBeforeServe() || app.OnBeforeServe() == nil {
- t.Fatalf("Getter app.OnBeforeServe does not match or nil (%v vs %v)", app.OnBeforeServe(), app.onBeforeServe)
- }
+ return nil
+ })
}
func TestBaseAppNewMailClient(t *testing.T) {
- app, cleanup, err := initTestBaseApp()
- if err != nil {
- t.Fatal(err)
- }
- defer cleanup()
+ const testDataDir = "./pb_base_app_test_data_dir/"
+ defer os.RemoveAll(testDataDir)
+
+ app := core.NewBaseApp(core.BaseAppConfig{
+ DataDir: testDataDir,
+ EncryptionEnv: "pb_test_env",
+ })
+ defer app.ResetBootstrapState()
client1 := app.NewMailClient()
- if val, ok := client1.(*mailer.Sendmail); !ok {
- t.Fatalf("Expected mailer.Sendmail instance, got %v", val)
+ m1, ok := client1.(*mailer.Sendmail)
+ if !ok {
+ t.Fatalf("Expected mailer.Sendmail instance, got %v", m1)
+ }
+ if m1.OnSend() == nil || m1.OnSend().Length() == 0 {
+ t.Fatal("Expected OnSend hook to be registered")
}
- app.Settings().Smtp.Enabled = true
+ app.Settings().SMTP.Enabled = true
client2 := app.NewMailClient()
- if val, ok := client2.(*mailer.SmtpClient); !ok {
- t.Fatalf("Expected mailer.SmtpClient instance, got %v", val)
+ m2, ok := client2.(*mailer.SMTPClient)
+ if !ok {
+ t.Fatalf("Expected mailer.SMTPClient instance, got %v", m2)
+ }
+ if m2.OnSend() == nil || m2.OnSend().Length() == 0 {
+ t.Fatal("Expected OnSend hook to be registered")
}
}
func TestBaseAppNewFilesystem(t *testing.T) {
- app, cleanup, err := initTestBaseApp()
- if err != nil {
- t.Fatal(err)
- }
- defer cleanup()
+ const testDataDir = "./pb_base_app_test_data_dir/"
+ defer os.RemoveAll(testDataDir)
+
+ app := core.NewBaseApp(core.BaseAppConfig{
+ DataDir: testDataDir,
+ })
+ defer app.ResetBootstrapState()
// local
local, localErr := app.NewFilesystem()
@@ -253,11 +213,13 @@ func TestBaseAppNewFilesystem(t *testing.T) {
}
func TestBaseAppNewBackupsFilesystem(t *testing.T) {
- app, cleanup, err := initTestBaseApp()
- if err != nil {
- t.Fatal(err)
- }
- defer cleanup()
+ const testDataDir = "./pb_base_app_test_data_dir/"
+ defer os.RemoveAll(testDataDir)
+
+ app := core.NewBaseApp(core.BaseAppConfig{
+ DataDir: testDataDir,
+ })
+ defer app.ResetBootstrapState()
// local
local, localErr := app.NewBackupsFilesystem()
@@ -280,18 +242,22 @@ func TestBaseAppNewBackupsFilesystem(t *testing.T) {
}
func TestBaseAppLoggerWrites(t *testing.T) {
- app, cleanup, err := initTestBaseApp()
- if err != nil {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ // reset
+ if err := app.DeleteOldLogs(time.Now()); err != nil {
t.Fatal(err)
}
- defer cleanup()
const logsThreshold = 200
- totalLogs := func(app App, t *testing.T) int {
+ totalLogs := func(app core.App, t *testing.T) int {
var total int
- err := app.LogsDao().LogQuery().Select("count(*)").Row(&total)
+ err := app.LogQuery().Select("count(*)").Row(&total)
if err != nil {
t.Fatalf("Failed to fetch total logs: %v", err)
}
@@ -338,106 +304,9 @@ func TestBaseAppLoggerWrites(t *testing.T) {
t.Fatalf("Expected %d logs, got %d", logsThreshold+1, total)
}
})
-
- t.Run("test batch logs delete", func(t *testing.T) {
- app.Settings().Logs.MaxDays = 2
-
- deleteQueries := 0
-
- // reset
- app.Store().Set("lastLogsDeletedAt", time.Now())
- if err := app.LogsDao().DeleteOldLogs(time.Now()); err != nil {
- t.Fatal(err)
- }
-
- db := app.LogsDao().NonconcurrentDB().(*dbx.DB)
- db.ExecLogFunc = func(ctx context.Context, t time.Duration, sql string, result sql.Result, err error) {
- if strings.Contains(sql, "DELETE") {
- deleteQueries++
- }
- }
-
- // trigger batch write (A)
- expectedLogs := logsThreshold
- for i := 0; i < expectedLogs; i++ {
- app.Logger().Error("testA")
- }
-
- if total := totalLogs(app, t); total != expectedLogs {
- t.Fatalf("[batch write A] Expected %d logs, got %d", expectedLogs, total)
- }
-
- // mark the A inserted logs as 2-day expired
- aExpiredDate, err := types.ParseDateTime(time.Now().AddDate(0, 0, -2))
- if err != nil {
- t.Fatal(err)
- }
- _, err = app.LogsDao().NonconcurrentDB().NewQuery("UPDATE _logs SET created={:date}, updated={:date}").Bind(dbx.Params{
- "date": aExpiredDate.String(),
- }).Execute()
- if err != nil {
- t.Fatalf("Failed to mock logs timestamp fields: %v", err)
- }
-
- // simulate recently deleted logs
- app.Store().Set("lastLogsDeletedAt", time.Now().Add(-5*time.Hour))
-
- // trigger batch write (B)
- for i := 0; i < logsThreshold; i++ {
- app.Logger().Error("testB")
- }
-
- expectedLogs = 2 * logsThreshold
-
- // note: even though there are expired logs it shouldn't perform the delete operation because of the lastLogsDeledAt time
- if total := totalLogs(app, t); total != expectedLogs {
- t.Fatalf("[batch write B] Expected %d logs, got %d", expectedLogs, total)
- }
-
- // mark the B inserted logs as 1-day expired to ensure that they will not be deleted
- bExpiredDate, err := types.ParseDateTime(time.Now().AddDate(0, 0, -1))
- if err != nil {
- t.Fatal(err)
- }
- _, err = app.LogsDao().NonconcurrentDB().NewQuery("UPDATE _logs SET created={:date}, updated={:date} where message='testB'").Bind(dbx.Params{
- "date": bExpiredDate.String(),
- }).Execute()
- if err != nil {
- t.Fatalf("Failed to mock logs timestamp fields: %v", err)
- }
-
- // should trigger delete on the next batch write
- app.Store().Set("lastLogsDeletedAt", time.Now().Add(-6*time.Hour))
-
- // trigger batch write (C)
- for i := 0; i < logsThreshold; i++ {
- app.Logger().Error("testC")
- }
-
- expectedLogs = 2 * logsThreshold // only B and C logs should remain
-
- if total := totalLogs(app, t); total != expectedLogs {
- t.Fatalf("[batch write C] Expected %d logs, got %d", expectedLogs, total)
- }
-
- if deleteQueries != 1 {
- t.Fatalf("Expected DeleteOldLogs to be called %d, got %d", 1, deleteQueries)
- }
- })
}
func TestBaseAppRefreshSettingsLoggerMinLevelEnabled(t *testing.T) {
- app, cleanup, err := initTestBaseApp()
- if err != nil {
- t.Fatal(err)
- }
- defer cleanup()
-
- handler, ok := app.Logger().Handler().(*logger.BatchHandler)
- if !ok {
- t.Fatalf("Expected BatchHandler, got %v", app.Logger().Handler())
- }
-
scenarios := []struct {
name string
isDev bool
@@ -469,173 +338,35 @@ func TestBaseAppRefreshSettingsLoggerMinLevelEnabled(t *testing.T) {
for _, s := range scenarios {
t.Run(s.name, func(t *testing.T) {
- app.isDev = s.isDev
+ const testDataDir = "./pb_base_app_test_data_dir/"
+ defer os.RemoveAll(testDataDir)
+
+ app := core.NewBaseApp(core.BaseAppConfig{
+ DataDir: testDataDir,
+ IsDev: s.isDev,
+ })
+ defer app.ResetBootstrapState()
+
+ if err := app.Bootstrap(); err != nil {
+ t.Fatal(err)
+ }
+
+ handler, ok := app.Logger().Handler().(*logger.BatchHandler)
+ if !ok {
+ t.Fatalf("Expected BatchHandler, got %v", app.Logger().Handler())
+ }
app.Settings().Logs.MinLevel = s.level
- if err := app.Dao().SaveSettings(app.Settings()); err != nil {
+ if err := app.Save(app.Settings()); err != nil {
t.Fatalf("Failed to save settings: %v", err)
}
- if err := app.RefreshSettings(); err != nil {
- t.Fatalf("Failed to refresh app settings: %v", err)
- }
-
for level, enabled := range s.expectations {
- if v := handler.Enabled(nil, slog.Level(level)); v != enabled {
+ if v := handler.Enabled(context.Background(), slog.Level(level)); v != enabled {
t.Fatalf("Expected level %d Enabled() to be %v, got %v", level, enabled, v)
}
}
})
}
}
-
-func TestBaseAppLoggerLevelDevPrint(t *testing.T) {
- app, cleanup, err := initTestBaseApp()
- if err != nil {
- t.Fatal(err)
- }
- defer cleanup()
-
- testLogLevel := 4
-
- app.Settings().Logs.MinLevel = testLogLevel
- if err := app.Dao().SaveSettings(app.Settings()); err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- name string
- isDev bool
- levels []int
- printedLevels []int
- persistedLevels []int
- }{
- {
- "dev mode",
- true,
- []int{testLogLevel - 1, testLogLevel, testLogLevel + 1},
- []int{testLogLevel - 1, testLogLevel, testLogLevel + 1},
- []int{testLogLevel, testLogLevel + 1},
- },
- {
- "nondev mode",
- false,
- []int{testLogLevel - 1, testLogLevel, testLogLevel + 1},
- []int{},
- []int{testLogLevel, testLogLevel + 1},
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- var printedLevels []int
- var persistedLevels []int
-
- app.isDev = s.isDev
-
- // trigger slog handler min level refresh
- if err := app.RefreshSettings(); err != nil {
- t.Fatal(err)
- }
-
- // track printed logs
- originalPrintLog := printLog
- defer func() {
- printLog = originalPrintLog
- }()
- printLog = func(log *logger.Log) {
- printedLevels = append(printedLevels, int(log.Level))
- }
-
- // track persisted logs
- app.LogsDao().AfterCreateFunc = func(eventDao *daos.Dao, m models.Model) error {
- l, ok := m.(*models.Log)
- if ok {
- persistedLevels = append(persistedLevels, l.Level)
- }
- return nil
- }
-
- // write and persist logs
- for _, l := range s.levels {
- app.Logger().Log(nil, slog.Level(l), "test")
- }
- handler, ok := app.Logger().Handler().(*logger.BatchHandler)
- if !ok {
- t.Fatalf("Expected BatchHandler, got %v", app.Logger().Handler())
- }
- if err := handler.WriteAll(nil); err != nil {
- t.Fatalf("Failed to write all logs: %v", err)
- }
-
- // check persisted log levels
- if len(s.persistedLevels) != len(persistedLevels) {
- t.Fatalf("Expected persisted levels \n%v\ngot\n%v", s.persistedLevels, persistedLevels)
- }
- for _, l := range persistedLevels {
- if !list.ExistInSlice(l, s.persistedLevels) {
- t.Fatalf("Missing expected persisted level %v in %v", l, persistedLevels)
- }
- }
-
- // check printed log levels
- if len(s.printedLevels) != len(printedLevels) {
- t.Fatalf("Expected printed levels \n%v\ngot\n%v", s.printedLevels, printedLevels)
- }
- for _, l := range printedLevels {
- if !list.ExistInSlice(l, s.printedLevels) {
- t.Fatalf("Missing expected printed level %v in %v", l, printedLevels)
- }
- }
- })
- }
-}
-
-// -------------------------------------------------------------------
-
-// note: make sure to call `defer cleanup()` when the app is no longer needed.
-func initTestBaseApp() (app *BaseApp, cleanup func(), err error) {
- testDataDir, err := os.MkdirTemp("", "test_base_app")
- if err != nil {
- return nil, nil, err
- }
-
- cleanup = func() {
- os.RemoveAll(testDataDir)
- }
-
- app = NewBaseApp(BaseAppConfig{
- DataDir: testDataDir,
- })
-
- initErr := func() error {
- if err := app.Bootstrap(); err != nil {
- return fmt.Errorf("bootstrap error: %w", err)
- }
-
- logsRunner, err := migrate.NewRunner(app.LogsDB(), logs.LogsMigrations)
- if err != nil {
- return fmt.Errorf("logsRunner error: %w", err)
- }
- if _, err := logsRunner.Up(); err != nil {
- return fmt.Errorf("logsRunner migrations execution error: %w", err)
- }
-
- dataRunner, err := migrate.NewRunner(app.DB(), migrations.AppMigrations)
- if err != nil {
- return fmt.Errorf("logsRunner error: %w", err)
- }
- if _, err := dataRunner.Up(); err != nil {
- return fmt.Errorf("dataRunner migrations execution error: %w", err)
- }
-
- return nil
- }()
- if initErr != nil {
- cleanup()
- return nil, nil, initErr
- }
-
- return app, cleanup, nil
-}
diff --git a/core/collection_import.go b/core/collection_import.go
new file mode 100644
index 00000000..39a155b9
--- /dev/null
+++ b/core/collection_import.go
@@ -0,0 +1,194 @@
+package core
+
+import (
+ "cmp"
+ "context"
+ "database/sql"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "slices"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/spf13/cast"
+)
+
+// ImportCollectionsByMarshaledJSON is the same as [ImportCollections]
+// but accepts a marshaled JSON array as import data (usually used for the autogenerated snapshots).
+func (app *BaseApp) ImportCollectionsByMarshaledJSON(rawSliceOfMaps []byte, deleteMissing bool) error {
+ data := []map[string]any{}
+
+ err := json.Unmarshal(rawSliceOfMaps, &data)
+ if err != nil {
+ return err
+ }
+
+ return app.ImportCollections(data, deleteMissing)
+}
+
+// ImportCollections imports the provided collections data in a single transaction.
+//
+// For existing matching collections, the imported data is unmarshaled on top of the existing model.
+//
+// NB! If deleteMissing is true, ALL NON-SYSTEM COLLECTIONS AND SCHEMA FIELDS,
+// that are not present in the imported configuration, WILL BE DELETED
+// (this includes their related records data).
+func (app *BaseApp) ImportCollections(toImport []map[string]any, deleteMissing bool) error {
+ if len(toImport) == 0 {
+ // prevent accidentally deleting all collections
+ return errors.New("no collections to import")
+ }
+
+ importedCollections := make([]*Collection, len(toImport))
+ mappedImported := make(map[string]*Collection, len(toImport))
+
+ // normalize imported collections data to ensure that all
+ // collection fields are present and properly initialized
+ for i, data := range toImport {
+ var imported *Collection
+
+ identifier := cast.ToString(data["id"])
+ if identifier == "" {
+ identifier = cast.ToString(data["name"])
+ }
+
+ existing, err := app.FindCollectionByNameOrId(identifier)
+ if err != nil && !errors.Is(err, sql.ErrNoRows) {
+ return err
+ }
+
+ if existing != nil {
+ // refetch for deep copy
+ imported, err = app.FindCollectionByNameOrId(existing.Id)
+ if err != nil {
+ return err
+ }
+
+ // ensure that the fields will be cleared
+ if data["fields"] == nil && deleteMissing {
+ data["fields"] = []map[string]any{}
+ }
+
+ rawData, err := json.Marshal(data)
+ if err != nil {
+ return err
+ }
+
+ // load the imported data
+ err = json.Unmarshal(rawData, imported)
+ if err != nil {
+ return err
+ }
+
+ // extend with the existing fields if necessary
+ for _, f := range existing.Fields {
+ if !f.GetSystem() && deleteMissing {
+ continue
+ }
+ if imported.Fields.GetById(f.GetId()) == nil {
+ imported.Fields.Add(f)
+ }
+ }
+ } else {
+ imported = &Collection{}
+
+ rawData, err := json.Marshal(data)
+ if err != nil {
+ return err
+ }
+
+ // load the imported data
+ err = json.Unmarshal(rawData, imported)
+ if err != nil {
+ return err
+ }
+ }
+
+ imported.IntegrityChecks(false)
+
+ importedCollections[i] = imported
+ mappedImported[imported.Id] = imported
+ }
+
+ // reorder views last since the view query could depend on some of the other collections
+ slices.SortStableFunc(importedCollections, func(a, b *Collection) int {
+ cmpA := -1
+ if a.IsView() {
+ cmpA = 1
+ }
+
+ cmpB := -1
+ if b.IsView() {
+ cmpB = 1
+ }
+
+ res := cmp.Compare(cmpA, cmpB)
+ if res == 0 {
+ res = a.Created.Compare(b.Created)
+ if res == 0 {
+ res = a.Updated.Compare(b.Updated)
+ }
+ }
+ return res
+ })
+
+ return app.RunInTransaction(func(txApp App) error {
+ existingCollections := []*Collection{}
+ if err := txApp.CollectionQuery().OrderBy("updated ASC").All(&existingCollections); err != nil {
+ return err
+ }
+ mappedExisting := make(map[string]*Collection, len(existingCollections))
+ for _, existing := range existingCollections {
+ existing.IntegrityChecks(false)
+ mappedExisting[existing.Id] = existing
+ }
+
+ // delete old collections not available in the new configuration
+ // (before saving the imports in case a deleted collection name is being reused)
+ if deleteMissing {
+ for _, existing := range existingCollections {
+ if mappedImported[existing.Id] != nil || existing.System {
+ continue // still present in the import or a system collection
+ }
+
+ // delete collection
+ if err := txApp.Delete(existing); err != nil {
+ return err
+ }
+ }
+ }
+
+ // upsert imported collections
+ for _, imported := range importedCollections {
+ if err := txApp.SaveNoValidate(imported); err != nil {
+ return fmt.Errorf("failed to save collection %q: %w", imported.Name, err)
+ }
+ }
+
+ // run validations
+ for _, imported := range importedCollections {
+ original := mappedExisting[imported.Id]
+ if original == nil {
+ original = imported
+ }
+
+ validator := newCollectionValidator(
+ context.Background(),
+ txApp,
+ imported,
+ original,
+ )
+ if err := validator.run(); err != nil {
+ // serialize the validation error(s)
+ serializedErr, _ := json.MarshalIndent(err, "", " ")
+
+ return validation.Errors{"collections": validation.NewError(
+ "validation_collections_import_failure",
+ fmt.Sprintf("Data validations failed for collection %q (%s):\n%s", imported.Name, imported.Id, serializedErr),
+ )}
+ }
+ }
+
+ return nil
+ })
+}
diff --git a/core/collection_import_test.go b/core/collection_import_test.go
new file mode 100644
index 00000000..74c1201f
--- /dev/null
+++ b/core/collection_import_test.go
@@ -0,0 +1,476 @@
+package core_test
+
+import (
+ "encoding/json"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestImportCollections(t *testing.T) {
+ t.Parallel()
+
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ var regularCollections []*core.Collection
+ err := testApp.CollectionQuery().AndWhere(dbx.HashExp{"system": false}).All(®ularCollections)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ var systemCollections []*core.Collection
+ err = testApp.CollectionQuery().AndWhere(dbx.HashExp{"system": true}).All(&systemCollections)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ totalRegularCollections := len(regularCollections)
+ totalSystemCollections := len(systemCollections)
+ totalCollections := totalRegularCollections + totalSystemCollections
+
+ scenarios := []struct {
+ name string
+ data []map[string]any
+ deleteMissing bool
+ expectError bool
+ expectCollectionsCount int
+ afterTestFunc func(testApp *tests.TestApp, resultCollections []*core.Collection)
+ }{
+ {
+ name: "empty collections",
+ data: []map[string]any{},
+ expectError: true,
+ expectCollectionsCount: totalCollections,
+ },
+ {
+ name: "minimal collection import (with missing system fields)",
+ data: []map[string]any{
+ {"name": "import_test1", "type": "auth"},
+ {
+ "name": "import_test2", "fields": []map[string]any{
+ {"name": "test", "type": "text"},
+ },
+ },
+ },
+ deleteMissing: false,
+ expectError: false,
+ expectCollectionsCount: totalCollections + 2,
+ },
+ {
+ name: "minimal collection import (trigger collection model validations)",
+ data: []map[string]any{
+ {"name": ""},
+ {
+ "name": "import_test2", "fields": []map[string]any{
+ {"name": "test", "type": "text"},
+ },
+ },
+ },
+ deleteMissing: false,
+ expectError: true,
+ expectCollectionsCount: totalCollections,
+ },
+ {
+ name: "minimal collection import (trigger field settings validation)",
+ data: []map[string]any{
+ {"name": "import_test", "fields": []map[string]any{{"name": "test", "type": "text", "min": -1}}},
+ },
+ deleteMissing: false,
+ expectError: true,
+ expectCollectionsCount: totalCollections,
+ },
+ {
+ name: "new + update + delete (system collections delete should be ignored)",
+ data: []map[string]any{
+ {
+ "id": "wsmn24bux7wo113",
+ "name": "demo",
+ "fields": []map[string]any{
+ {
+ "id": "_2hlxbmp",
+ "name": "title",
+ "type": "text",
+ "system": false,
+ "required": true,
+ "min": 3,
+ "max": nil,
+ "pattern": "",
+ },
+ },
+ "indexes": []string{},
+ },
+ {
+ "name": "import1",
+ "fields": []map[string]any{
+ {
+ "name": "active",
+ "type": "bool",
+ },
+ },
+ },
+ },
+ deleteMissing: true,
+ expectError: false,
+ expectCollectionsCount: totalSystemCollections + 2,
+ },
+ {
+ name: "test with deleteMissing: false",
+ data: []map[string]any{
+ {
+ // "id": "wsmn24bux7wo113", // test update with only name as identifier
+ "name": "demo1",
+ "fields": []map[string]any{
+ {
+ "id": "_2hlxbmp",
+ "name": "title",
+ "type": "text",
+ "system": false,
+ "required": true,
+ "min": 3,
+ "max": nil,
+ "pattern": "",
+ },
+ {
+ "id": "_2hlxbmp",
+ "name": "field_with_duplicate_id",
+ "type": "text",
+ "system": false,
+ "required": true,
+ "unique": false,
+ "min": 4,
+ "max": nil,
+ "pattern": "",
+ },
+ {
+ "id": "abcd_import",
+ "name": "new_field",
+ "type": "text",
+ },
+ },
+ },
+ {
+ "name": "new_import",
+ "fields": []map[string]any{
+ {
+ "id": "abcd_import",
+ "name": "active",
+ "type": "bool",
+ },
+ },
+ },
+ },
+ deleteMissing: false,
+ expectError: false,
+ expectCollectionsCount: totalCollections + 1,
+ afterTestFunc: func(testApp *tests.TestApp, resultCollections []*core.Collection) {
+ expectedCollectionFields := map[string]int{
+ core.CollectionNameAuthOrigins: 6,
+ "nologin": 10,
+ "demo1": 18,
+ "demo2": 5,
+ "demo3": 5,
+ "demo4": 16,
+ "demo5": 9,
+ "new_import": 2,
+ }
+ for name, expectedCount := range expectedCollectionFields {
+ collection, err := testApp.FindCollectionByNameOrId(name)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if totalFields := len(collection.Fields); totalFields != expectedCount {
+ t.Errorf("Expected %d %q fields, got %d", expectedCount, collection.Name, totalFields)
+ }
+ }
+ },
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ err := testApp.ImportCollections(s.data, s.deleteMissing)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ // check collections count
+ collections := []*core.Collection{}
+ if err := testApp.CollectionQuery().All(&collections); err != nil {
+ t.Fatal(err)
+ }
+ if len(collections) != s.expectCollectionsCount {
+ t.Fatalf("Expected %d collections, got %d", s.expectCollectionsCount, len(collections))
+ }
+
+ if s.afterTestFunc != nil {
+ s.afterTestFunc(testApp, collections)
+ }
+ })
+ }
+}
+
+func TestImportCollectionsByMarshaledJSON(t *testing.T) {
+ t.Parallel()
+
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ var regularCollections []*core.Collection
+ err := testApp.CollectionQuery().AndWhere(dbx.HashExp{"system": false}).All(®ularCollections)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ var systemCollections []*core.Collection
+ err = testApp.CollectionQuery().AndWhere(dbx.HashExp{"system": true}).All(&systemCollections)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ totalRegularCollections := len(regularCollections)
+ totalSystemCollections := len(systemCollections)
+ totalCollections := totalRegularCollections + totalSystemCollections
+
+ scenarios := []struct {
+ name string
+ data string
+ deleteMissing bool
+ expectError bool
+ expectCollectionsCount int
+ afterTestFunc func(testApp *tests.TestApp, resultCollections []*core.Collection)
+ }{
+ {
+ name: "invalid json array",
+ data: `{"test":123}`,
+ expectError: true,
+ expectCollectionsCount: totalCollections,
+ },
+ {
+ name: "new + update + delete (system collections delete should be ignored)",
+ data: `[
+ {
+ "id": "wsmn24bux7wo113",
+ "name": "demo",
+ "fields": [
+ {
+ "id": "_2hlxbmp",
+ "name": "title",
+ "type": "text",
+ "system": false,
+ "required": true,
+ "min": 3,
+ "max": null,
+ "pattern": ""
+ }
+ ],
+ "indexes": []
+ },
+ {
+ "name": "import1",
+ "fields": [
+ {
+ "name": "active",
+ "type": "bool"
+ }
+ ]
+ }
+ ]`,
+ deleteMissing: true,
+ expectError: false,
+ expectCollectionsCount: totalSystemCollections + 2,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ err := testApp.ImportCollectionsByMarshaledJSON([]byte(s.data), s.deleteMissing)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ // check collections count
+ collections := []*core.Collection{}
+ if err := testApp.CollectionQuery().All(&collections); err != nil {
+ t.Fatal(err)
+ }
+ if len(collections) != s.expectCollectionsCount {
+ t.Fatalf("Expected %d collections, got %d", s.expectCollectionsCount, len(collections))
+ }
+
+ if s.afterTestFunc != nil {
+ s.afterTestFunc(testApp, collections)
+ }
+ })
+ }
+}
+
+func TestImportCollectionsUpdateRules(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ data map[string]any
+ deleteMissing bool
+ }{
+ {
+ "extend existing by name (without deleteMissing)",
+ map[string]any{"name": "clients", "authToken": map[string]any{"duration": 100}, "fields": []map[string]any{{"name": "test", "type": "text"}}},
+ false,
+ },
+ {
+ "extend existing by id (without deleteMissing)",
+ map[string]any{"id": "v851q4r790rhknl", "authToken": map[string]any{"duration": 100}, "fields": []map[string]any{{"name": "test", "type": "text"}}},
+ false,
+ },
+ {
+ "extend with delete missing",
+ map[string]any{
+ "id": "v851q4r790rhknl",
+ "authToken": map[string]any{"duration": 100},
+ "fields": []map[string]any{{"name": "test", "type": "text"}},
+ "passwordAuth": map[string]any{"identityFields": []string{"email"}},
+ "indexes": []string{
+ // min required system fields indexes
+ "CREATE UNIQUE INDEX `_v851q4r790rhknl_email_idx` ON `clients` (email) WHERE email != ''",
+ "CREATE UNIQUE INDEX `_v851q4r790rhknl_tokenKey_idx` ON `clients` (tokenKey)",
+ },
+ },
+ true,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ beforeCollection, err := testApp.FindCollectionByNameOrId("clients")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ err = testApp.ImportCollections([]map[string]any{s.data}, s.deleteMissing)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ afterCollection, err := testApp.FindCollectionByNameOrId("clients")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if afterCollection.AuthToken.Duration != 100 {
+ t.Fatalf("Expected AuthToken duration to be %d, got %d", 100, afterCollection.AuthToken.Duration)
+ }
+ if beforeCollection.AuthToken.Secret != afterCollection.AuthToken.Secret {
+ t.Fatalf("Expected AuthToken secrets to remain the same, got\n%q\nVS\n%q", beforeCollection.AuthToken.Secret, afterCollection.AuthToken.Secret)
+ }
+ if beforeCollection.Name != afterCollection.Name {
+ t.Fatalf("Expected Name to remain the same, got\n%q\nVS\n%q", beforeCollection.Name, afterCollection.Name)
+ }
+ if beforeCollection.Id != afterCollection.Id {
+ t.Fatalf("Expected Id to remain the same, got\n%q\nVS\n%q", beforeCollection.Id, afterCollection.Id)
+ }
+
+ if !s.deleteMissing {
+ totalExpectedFields := len(beforeCollection.Fields) + 1
+ if v := len(afterCollection.Fields); v != totalExpectedFields {
+ t.Fatalf("Expected %d total fields, got %d", totalExpectedFields, v)
+ }
+
+ if afterCollection.Fields.GetByName("test") == nil {
+ t.Fatalf("Missing new field %q", "test")
+ }
+
+ // ensure that the old fields still exist
+ oldFields := beforeCollection.Fields.FieldNames()
+ for _, name := range oldFields {
+ if afterCollection.Fields.GetByName(name) == nil {
+ t.Fatalf("Missing expected old field %q", name)
+ }
+ }
+ } else {
+ totalExpectedFields := 1
+ for _, f := range beforeCollection.Fields {
+ if f.GetSystem() {
+ totalExpectedFields++
+ }
+ }
+
+ if v := len(afterCollection.Fields); v != totalExpectedFields {
+ t.Fatalf("Expected %d total fields, got %d", totalExpectedFields, v)
+ }
+
+ if afterCollection.Fields.GetByName("test") == nil {
+ t.Fatalf("Missing new field %q", "test")
+ }
+
+ // ensure that the old system fields still exist
+ for _, f := range beforeCollection.Fields {
+ if f.GetSystem() && afterCollection.Fields.GetByName(f.GetName()) == nil {
+ t.Fatalf("Missing expected old field %q", f.GetName())
+ }
+ }
+ }
+ })
+ }
+}
+
+func TestImportCollectionsCreateRules(t *testing.T) {
+ t.Parallel()
+
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ err := testApp.ImportCollections([]map[string]any{
+ {"name": "new_test", "type": "auth", "authToken": map[string]any{"duration": 123}, "fields": []map[string]any{{"name": "test", "type": "text"}}},
+ }, false)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ collection, err := testApp.FindCollectionByNameOrId("new_test")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ raw, err := json.Marshal(collection)
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+
+ expectedParts := []string{
+ `"name":"new_test"`,
+ `"fields":[`,
+ `"name":"id"`,
+ `"name":"email"`,
+ `"name":"tokenKey"`,
+ `"name":"password"`,
+ `"name":"test"`,
+ `"indexes":[`,
+ `CREATE UNIQUE INDEX`,
+ `"duration":123`,
+ }
+
+ for _, part := range expectedParts {
+ if !strings.Contains(rawStr, part) {
+ t.Errorf("Missing %q in\n%s", part, rawStr)
+ }
+ }
+}
diff --git a/core/collection_model.go b/core/collection_model.go
new file mode 100644
index 00000000..2abb2017
--- /dev/null
+++ b/core/collection_model.go
@@ -0,0 +1,949 @@
+package core
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+
+ "github.com/pocketbase/pocketbase/tools/dbutils"
+ "github.com/pocketbase/pocketbase/tools/hook"
+ "github.com/pocketbase/pocketbase/tools/security"
+ "github.com/pocketbase/pocketbase/tools/types"
+ "github.com/spf13/cast"
+)
+
+// Compile-time interface implementation checks.
+var (
+	_ Model        = (*Collection)(nil)
+	_ DBExporter   = (*Collection)(nil)
+	_ FilesManager = (*Collection)(nil)
+)
+
+// Supported collection type values.
+const (
+	CollectionTypeBase = "base"
+	CollectionTypeAuth = "auth"
+	CollectionTypeView = "view"
+)
+
+// systemHookIdCollection is the shared id assigned to all system
+// collection hook handlers registered by registerCollectionHooks
+// (allows identifying and eventually unbinding them as a group).
+const systemHookIdCollection = "__pbCollectionSystemHook__"
+
+// registerCollectionHooks wires the Collection model lifecycle into the app:
+//
+//  1. proxies the generic OnModel* events to their typed OnCollection*
+//     counterparts whenever the event model is a Collection;
+//  2. binds the default collection validate/save/delete handlers;
+//  3. registers collections cache maintenance (reload on save/delete
+//     error and on bootstrap).
+func (app *BaseApp) registerCollectionHooks() {
+	// ---------------------------------------------------------------
+	// OnModel* -> OnCollection* event proxies
+	// (priority -99 so that the typed hooks fire before most other
+	// user-registered model handlers)
+	// ---------------------------------------------------------------
+
+	app.OnModelValidate().Bind(&hook.Handler[*ModelEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelEvent) error {
+			if ce, ok := newCollectionEventFromModelEvent(me); ok {
+				return me.App.OnCollectionValidate().Trigger(ce, func(ce *CollectionEvent) error {
+					syncModelEventWithCollectionEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelCreate().Bind(&hook.Handler[*ModelEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelEvent) error {
+			if ce, ok := newCollectionEventFromModelEvent(me); ok {
+				return me.App.OnCollectionCreate().Trigger(ce, func(ce *CollectionEvent) error {
+					syncModelEventWithCollectionEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelCreateExecute().Bind(&hook.Handler[*ModelEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelEvent) error {
+			if ce, ok := newCollectionEventFromModelEvent(me); ok {
+				return me.App.OnCollectionCreateExecute().Trigger(ce, func(ce *CollectionEvent) error {
+					syncModelEventWithCollectionEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelAfterCreateSuccess().Bind(&hook.Handler[*ModelEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelEvent) error {
+			if ce, ok := newCollectionEventFromModelEvent(me); ok {
+				return me.App.OnCollectionAfterCreateSuccess().Trigger(ce, func(ce *CollectionEvent) error {
+					syncModelEventWithCollectionEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelAfterCreateError().Bind(&hook.Handler[*ModelErrorEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelErrorEvent) error {
+			if ce, ok := newCollectionErrorEventFromModelErrorEvent(me); ok {
+				return me.App.OnCollectionAfterCreateError().Trigger(ce, func(ce *CollectionErrorEvent) error {
+					syncModelErrorEventWithCollectionErrorEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelUpdate().Bind(&hook.Handler[*ModelEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelEvent) error {
+			if ce, ok := newCollectionEventFromModelEvent(me); ok {
+				return me.App.OnCollectionUpdate().Trigger(ce, func(ce *CollectionEvent) error {
+					syncModelEventWithCollectionEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelUpdateExecute().Bind(&hook.Handler[*ModelEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelEvent) error {
+			if ce, ok := newCollectionEventFromModelEvent(me); ok {
+				return me.App.OnCollectionUpdateExecute().Trigger(ce, func(ce *CollectionEvent) error {
+					syncModelEventWithCollectionEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelAfterUpdateSuccess().Bind(&hook.Handler[*ModelEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelEvent) error {
+			if ce, ok := newCollectionEventFromModelEvent(me); ok {
+				return me.App.OnCollectionAfterUpdateSuccess().Trigger(ce, func(ce *CollectionEvent) error {
+					syncModelEventWithCollectionEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelAfterUpdateError().Bind(&hook.Handler[*ModelErrorEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelErrorEvent) error {
+			if ce, ok := newCollectionErrorEventFromModelErrorEvent(me); ok {
+				return me.App.OnCollectionAfterUpdateError().Trigger(ce, func(ce *CollectionErrorEvent) error {
+					syncModelErrorEventWithCollectionErrorEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelDelete().Bind(&hook.Handler[*ModelEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelEvent) error {
+			if ce, ok := newCollectionEventFromModelEvent(me); ok {
+				return me.App.OnCollectionDelete().Trigger(ce, func(ce *CollectionEvent) error {
+					syncModelEventWithCollectionEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelDeleteExecute().Bind(&hook.Handler[*ModelEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelEvent) error {
+			if ce, ok := newCollectionEventFromModelEvent(me); ok {
+				return me.App.OnCollectionDeleteExecute().Trigger(ce, func(ce *CollectionEvent) error {
+					syncModelEventWithCollectionEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelAfterDeleteSuccess().Bind(&hook.Handler[*ModelEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelEvent) error {
+			if ce, ok := newCollectionEventFromModelEvent(me); ok {
+				return me.App.OnCollectionAfterDeleteSuccess().Trigger(ce, func(ce *CollectionEvent) error {
+					syncModelEventWithCollectionEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	app.OnModelAfterDeleteError().Bind(&hook.Handler[*ModelErrorEvent]{
+		Id: systemHookIdCollection,
+		Func: func(me *ModelErrorEvent) error {
+			if ce, ok := newCollectionErrorEventFromModelErrorEvent(me); ok {
+				return me.App.OnCollectionAfterDeleteError().Trigger(ce, func(ce *CollectionErrorEvent) error {
+					syncModelErrorEventWithCollectionErrorEvent(me, ce)
+					return me.Next()
+				})
+			}
+
+			return me.Next()
+		},
+		Priority: -99,
+	})
+
+	// --------------------------------------------------------------
+	// default collection handlers
+	// --------------------------------------------------------------
+
+	app.OnCollectionValidate().Bind(&hook.Handler[*CollectionEvent]{
+		Id:       systemHookIdCollection,
+		Func:     onCollectionValidate,
+		Priority: 99,
+	})
+
+	app.OnCollectionCreate().Bind(&hook.Handler[*CollectionEvent]{
+		Id:       systemHookIdCollection,
+		Func:     onCollectionSave,
+		Priority: -99,
+	})
+
+	app.OnCollectionUpdate().Bind(&hook.Handler[*CollectionEvent]{
+		Id:       systemHookIdCollection,
+		Func:     onCollectionSave,
+		Priority: -99,
+	})
+
+	app.OnCollectionCreateExecute().Bind(&hook.Handler[*CollectionEvent]{
+		Id:   systemHookIdCollection,
+		Func: onCollectionSaveExecute,
+		// execute as latest as possible, aka. closer to the db action to minimize the transactions lock time
+		Priority: 99,
+	})
+
+	app.OnCollectionUpdateExecute().Bind(&hook.Handler[*CollectionEvent]{
+		Id:       systemHookIdCollection,
+		Func:     onCollectionSaveExecute,
+		Priority: 99, // execute as latest as possible, aka. closer to the db action to minimize the transactions lock time
+	})
+
+	app.OnCollectionDeleteExecute().Bind(&hook.Handler[*CollectionEvent]{
+		Id:       systemHookIdCollection,
+		Func:     onCollectionDeleteExecute,
+		Priority: 99, // execute as latest as possible, aka. closer to the db action to minimize the transactions lock time
+	})
+
+	// reload cache on failure
+	// ---
+	onErrorReloadCachedCollections := func(ce *CollectionErrorEvent) error {
+		if err := ce.App.ReloadCachedCollections(); err != nil {
+			ce.App.Logger().Warn("Failed to reload collections cache", "error", err)
+		}
+
+		return ce.Next()
+	}
+	app.OnCollectionAfterCreateError().Bind(&hook.Handler[*CollectionErrorEvent]{
+		Id:       systemHookIdCollection,
+		Func:     onErrorReloadCachedCollections,
+		Priority: -99,
+	})
+	app.OnCollectionAfterUpdateError().Bind(&hook.Handler[*CollectionErrorEvent]{
+		Id:       systemHookIdCollection,
+		Func:     onErrorReloadCachedCollections,
+		Priority: -99,
+	})
+	app.OnCollectionAfterDeleteError().Bind(&hook.Handler[*CollectionErrorEvent]{
+		Id:       systemHookIdCollection,
+		Func:     onErrorReloadCachedCollections,
+		Priority: -99,
+	})
+	// ---
+
+	// warm up the collections cache once the app is fully bootstrapped
+	app.OnBootstrap().Bind(&hook.Handler[*BootstrapEvent]{
+		Id: systemHookIdCollection,
+		Func: func(e *BootstrapEvent) error {
+			if err := e.Next(); err != nil {
+				return err
+			}
+
+			if err := e.App.ReloadCachedCollections(); err != nil {
+				return fmt.Errorf("failed to load collections cache: %w", err)
+			}
+
+			return nil
+		},
+		Priority: 99, // execute as latest as possible
+	})
+}
+
+// @todo experiment eventually replacing the rules *string with a struct?
+//
+// baseCollection holds the fields shared by all collection types.
+// NOTE(review): the *string rules presumably distinguish "unset" (nil)
+// from "empty rule" ("") — confirm against the rule resolvers.
+type baseCollection struct {
+	BaseModel
+
+	// disableIntegrityChecks skips the reference/view dependency checks
+	// on delete (toggled via Collection.IntegrityChecks).
+	disableIntegrityChecks bool
+
+	ListRule   *string `db:"listRule" json:"listRule" form:"listRule"`
+	ViewRule   *string `db:"viewRule" json:"viewRule" form:"viewRule"`
+	CreateRule *string `db:"createRule" json:"createRule" form:"createRule"`
+	UpdateRule *string `db:"updateRule" json:"updateRule" form:"updateRule"`
+	DeleteRule *string `db:"deleteRule" json:"deleteRule" form:"deleteRule"`
+
+	// RawOptions represents the raw serialized collection option loaded from the DB.
+	// NB! This field shouldn't be modified manually. It is automatically updated
+	// with the collection type specific option before save.
+	RawOptions types.JSONRaw `db:"options" json:"-" xml:"-" form:"-"`
+
+	Name    string                  `db:"name" json:"name" form:"name"`
+	Type    string                  `db:"type" json:"type" form:"type"`
+	Fields  FieldsList              `db:"fields" json:"fields" form:"fields"`
+	Indexes types.JSONArray[string] `db:"indexes" json:"indexes" form:"indexes"`
+	System  bool                    `db:"system" json:"system" form:"system"`
+	Created types.DateTime          `db:"created" json:"created"`
+	Updated types.DateTime          `db:"updated" json:"updated"`
+}
+
+// Collection defines the table, fields and various options related to a set of records.
+//
+// The type specific auth and view options are embedded by value and are
+// serialized into the single "options" db column on save (see DBExport);
+// only the options matching the collection type are (un)marshaled.
+type Collection struct {
+	baseCollection
+	collectionAuthOptions
+	collectionViewOptions
+}
+
+// NewCollection initializes and returns a new Collection model with the specified type and name.
+//
+// Unknown types fall back to the "base" collection factory.
+func NewCollection(typ, name string) *Collection {
+	if typ == CollectionTypeAuth {
+		return NewAuthCollection(name)
+	}
+
+	if typ == CollectionTypeView {
+		return NewViewCollection(name)
+	}
+
+	return NewBaseCollection(name)
+}
+
+// NewBaseCollection initializes and returns a new "base" Collection model.
+//
+// Note: Name is assigned before initDefaultId because the default id is
+// derived from the collection name.
+func NewBaseCollection(name string) *Collection {
+	m := &Collection{}
+	m.Name = name
+	m.Type = CollectionTypeBase
+	m.initDefaultId()
+	m.initDefaultFields()
+	return m
+}
+
+// NewViewCollection initializes and returns a new "view" Collection model.
+//
+// Note: Name is assigned before initDefaultId because the default id is
+// derived from the collection name (view fields themselves are
+// autogenerated later from the view query).
+func NewViewCollection(name string) *Collection {
+	m := &Collection{}
+	m.Name = name
+	m.Type = CollectionTypeView
+	m.initDefaultId()
+	m.initDefaultFields()
+	return m
+}
+
+// NewAuthCollection initializes and returns a new "auth" Collection model
+// with the default auth options (tokens, templates, etc.) applied.
+//
+// Note: Name is assigned before initDefaultId because the default id is
+// derived from the collection name.
+func NewAuthCollection(name string) *Collection {
+	m := &Collection{}
+	m.Name = name
+	m.Type = CollectionTypeAuth
+	m.initDefaultId()
+	m.initDefaultFields()
+	m.setDefaultAuthOptions()
+	return m
+}
+
+// TableName returns the Collection model SQL table name.
+func (m *Collection) TableName() string {
+ return "_collections"
+}
+
+// BaseFilesPath returns the storage dir path used by the collection
+// (the collection id is used as the directory prefix).
+func (m *Collection) BaseFilesPath() string {
+	return m.Id
+}
+
+// IsBase checks if the current collection has "base" type
+// (see CollectionTypeBase).
+func (m *Collection) IsBase() bool {
+	return m.Type == CollectionTypeBase
+}
+
+// IsAuth checks if the current collection has "auth" type
+// (see CollectionTypeAuth).
+func (m *Collection) IsAuth() bool {
+	return m.Type == CollectionTypeAuth
+}
+
+// IsView checks if the current collection has "view" type
+// (see CollectionTypeView).
+func (m *Collection) IsView() bool {
+	return m.Type == CollectionTypeView
+}
+
+// IntegrityChecks toggles the current collection integrity checks (ex. checking references on delete).
+//
+// Disabling the checks also skips the dependent views resave on delete
+// (see onCollectionDeleteExecute).
+func (m *Collection) IntegrityChecks(enable bool) {
+	m.disableIntegrityChecks = !enable
+}
+
+// PostScan implements the [dbx.PostScanner] interface to auto unmarshal
+// the raw serialized options into the concrete type specific fields.
+func (m *Collection) PostScan() error {
+	// let the embedded BaseModel finish its own post-scan first
+	if err := m.BaseModel.PostScan(); err != nil {
+		return err
+	}
+
+	// populate the type specific options from the scanned RawOptions
+	return m.unmarshalRawOptions()
+}
+
+// unmarshalRawOptions loads the serialized m.RawOptions value into the
+// corresponding type specific options struct (auth or view).
+//
+// For "base" collections (or any unknown type) it is a no-op.
+func (m *Collection) unmarshalRawOptions() error {
+	raw, err := m.RawOptions.MarshalJSON()
+	if err != nil {
+		// propagate instead of silently returning nil
+		// (previously the error was swallowed, leaving the type specific
+		// options unpopulated without any indication to the caller)
+		return err
+	}
+
+	switch m.Type {
+	case CollectionTypeView:
+		return json.Unmarshal(raw, &m.collectionViewOptions)
+	case CollectionTypeAuth:
+		return json.Unmarshal(raw, &m.collectionAuthOptions)
+	}
+
+	return nil
+}
+
+// UnmarshalJSON implements the [json.Unmarshaler] interface.
+//
+// For new/"blank" Collection models it replaces the model with a factory
+// instance and then unmarshal the provided data one on top of it.
+func (m *Collection) UnmarshalJSON(b []byte) error {
+	// alias is a defined pointer type with an empty method set, so the
+	// json.Unmarshal call below doesn't recursively invoke this method
+	type alias *Collection
+
+	// initialize the default fields
+	// (e.g. in case the collection was NOT created using the designated factories)
+	if m.IsNew() && m.Type == "" {
+		// peek only at the type and name to pick the correct factory
+		minimal := &struct {
+			Type string `json:"type"`
+			Name string `json:"name"`
+		}{}
+		if err := json.Unmarshal(b, minimal); err != nil {
+			return err
+		}
+
+		blank := NewCollection(minimal.Type, minimal.Name)
+		*m = *blank
+	}
+
+	return json.Unmarshal(b, alias(m))
+}
+
+// MarshalJSON implements the [json.Marshaler] interface.
+//
+// Note that non-type related fields are ignored from the serialization
+// (ex. for "view" collections the "auth" fields are skipped) and all
+// secret keys are hidden.
+func (m Collection) MarshalJSON() ([]byte, error) {
+	switch m.Type {
+	case CollectionTypeView:
+		return json.Marshal(struct {
+			baseCollection
+			collectionViewOptions
+		}{m.baseCollection, m.collectionViewOptions})
+	case CollectionTypeAuth:
+		alias := struct {
+			baseCollection
+			collectionAuthOptions
+		}{m.baseCollection, m.collectionAuthOptions}
+
+		// ensure that it is always returned as array
+		if alias.OAuth2.Providers == nil {
+			alias.OAuth2.Providers = []OAuth2ProviderConfig{}
+		} else {
+			// clone the providers slice before zeroing the client secrets -
+			// the struct copy above copies only the slice header, so writing
+			// through alias.OAuth2.Providers[i] would otherwise mutate the
+			// receiver's shared backing array and wipe the in-memory secrets
+			clone := make([]OAuth2ProviderConfig, len(alias.OAuth2.Providers))
+			copy(clone, alias.OAuth2.Providers)
+			alias.OAuth2.Providers = clone
+		}
+
+		// hide secret keys from the serialization
+		// (the token configs are value fields of the local copy, so these
+		// writes don't affect the original collection)
+		alias.AuthToken.Secret = ""
+		alias.FileToken.Secret = ""
+		alias.PasswordResetToken.Secret = ""
+		alias.EmailChangeToken.Secret = ""
+		alias.VerificationToken.Secret = ""
+		for i := range alias.OAuth2.Providers {
+			alias.OAuth2.Providers[i].ClientSecret = ""
+		}
+
+		return json.Marshal(alias)
+	default:
+		return json.Marshal(m.baseCollection)
+	}
+}
+
+// String returns a string representation of the current collection.
+//
+// The marshal error is deliberately ignored - on failure an empty
+// string is returned.
+func (m Collection) String() string {
+	raw, _ := json.Marshal(m)
+	return string(raw)
+}
+
+// DBExport prepares and exports the current collection data for db persistence.
+//
+// The type specific options (auth/view) are serialized into the single
+// "options" column; for "base" collections it defaults to "{}".
+func (m *Collection) DBExport(app App) (map[string]any, error) {
+	result := map[string]any{
+		"id":         m.Id,
+		"type":       m.Type,
+		"listRule":   m.ListRule,
+		"viewRule":   m.ViewRule,
+		"createRule": m.CreateRule,
+		"updateRule": m.UpdateRule,
+		"deleteRule": m.DeleteRule,
+		"name":       m.Name,
+		"fields":     m.Fields,
+		"indexes":    m.Indexes,
+		"system":     m.System,
+		"created":    m.Created,
+		"updated":    m.Updated,
+		"options":    `{}`,
+	}
+
+	// overwrite the default options with the serialized type specific ones
+	switch m.Type {
+	case CollectionTypeView:
+		if raw, err := types.ParseJSONRaw(m.collectionViewOptions); err == nil {
+			result["options"] = raw
+		} else {
+			return nil, err
+		}
+	case CollectionTypeAuth:
+		if raw, err := types.ParseJSONRaw(m.collectionAuthOptions); err == nil {
+			result["options"] = raw
+		} else {
+			return nil, err
+		}
+	}
+
+	return result, nil
+}
+
+// GetIndex returns a single Collection index expression by its name
+// (case-insensitive match), or an empty string if there is no match.
+func (m *Collection) GetIndex(name string) string {
+	for _, idx := range m.Indexes {
+		if strings.EqualFold(dbutils.ParseIndex(idx).IndexName, name) {
+			return idx
+		}
+	}
+
+	return ""
+}
+
+// AddIndex adds a new index into the current collection.
+//
+// If the collection has an existing index matching the new name it will be replaced with the new one.
+func (m *Collection) AddIndex(name string, unique bool, columnsExpr string, optWhereExpr string) {
+ m.RemoveIndex(name)
+
+ var idx strings.Builder
+
+ idx.WriteString("CREATE ")
+ if unique {
+ idx.WriteString("UNIQUE ")
+ }
+ idx.WriteString("INDEX `")
+ idx.WriteString(name)
+ idx.WriteString("` ")
+ idx.WriteString("ON `")
+ idx.WriteString(m.Name)
+ idx.WriteString("` (")
+ idx.WriteString(columnsExpr)
+ idx.WriteString(")")
+ if optWhereExpr != "" {
+ idx.WriteString(" WHERE ")
+ idx.WriteString(optWhereExpr)
+ }
+
+ m.Indexes = append(m.Indexes, idx.String())
+}
+
+// RemoveIndex removes a single index with the specified name from the
+// current collection (case-insensitive name match).
+func (m *Collection) RemoveIndex(name string) {
+	for i, expr := range m.Indexes {
+		if !strings.EqualFold(dbutils.ParseIndex(expr).IndexName, name) {
+			continue
+		}
+
+		// drop the matched element, preserving the order of the rest
+		m.Indexes = append(m.Indexes[:i], m.Indexes[i+1:]...)
+		return
+	}
+}
+
+// delete hook
+// -------------------------------------------------------------------
+
+// onCollectionDeleteExecute is the default OnCollectionDeleteExecute handler.
+//
+// It rejects system collections, guards against dangling relation
+// references and drops the collection's table (or view) together with
+// the model row inside a single transaction.
+func onCollectionDeleteExecute(e *CollectionEvent) error {
+	if e.Collection.System {
+		return fmt.Errorf("[%s] system collections cannot be deleted", e.Collection.Name)
+	}
+
+	// always refresh the collections cache, even on failure
+	defer func() {
+		if err := e.App.ReloadCachedCollections(); err != nil {
+			e.App.Logger().Warn("Failed to reload collections cache", "error", err)
+		}
+	}()
+
+	if !e.Collection.disableIntegrityChecks {
+		// ensure that there aren't any existing references.
+		// note: the select is outside of the transaction to prevent SQLITE_LOCKED error when mixing read&write in a single transaction
+		references, err := e.App.FindCollectionReferences(e.Collection, e.Collection.Id)
+		if err != nil {
+			return fmt.Errorf("[%s] failed to check collection references: %w", e.Collection.Name, err)
+		}
+		if total := len(references); total > 0 {
+			names := make([]string, 0, len(references))
+			for ref := range references {
+				names = append(names, ref.Name)
+			}
+			return fmt.Errorf("[%s] failed to delete due to existing relation references: %s", e.Collection.Name, strings.Join(names, ", "))
+		}
+	}
+
+	// temporarily swap e.App with the tx app so that the nested handlers
+	// run within the same transaction (restored after RunInTransaction)
+	originalApp := e.App
+
+	txErr := e.App.RunInTransaction(func(txApp App) error {
+		e.App = txApp
+
+		// delete the related view or records table
+		if e.Collection.IsView() {
+			if err := txApp.DeleteView(e.Collection.Name); err != nil {
+				return err
+			}
+		} else {
+			if err := txApp.DeleteTable(e.Collection.Name); err != nil {
+				return err
+			}
+		}
+
+		if !e.Collection.disableIntegrityChecks {
+			// trigger views resave to check for dependencies
+			if err := resaveViewsWithChangedFields(txApp, e.Collection.Id); err != nil {
+				return fmt.Errorf("[%s] failed to delete due to existing view dependency: %w", e.Collection.Name, err)
+			}
+		}
+
+		// delete
+		return e.Next()
+	})
+
+	e.App = originalApp
+
+	return txErr
+}
+
+// save hook
+// -------------------------------------------------------------------
+
+// initDefaultId sets a deterministic default collection id derived from
+// the collection name ("_pbc_" + crc32 checksum of the name).
+// It is a no-op if the id is already set or the name is still empty.
+func (c *Collection) initDefaultId() {
+	if c.Id == "" && c.Name != "" {
+		c.Id = "_pbc_" + crc32Checksum(c.Name)
+	}
+}
+
+// savePrepare normalizes the collection model before persisting:
+// defaults the type to "base", initializes the id/created timestamp for
+// new models, bumps the updated timestamp and (re)applies the default
+// system fields for the collection type.
+func (c *Collection) savePrepare() error {
+	if c.Type == "" {
+		c.Type = CollectionTypeBase
+	}
+
+	if c.IsNew() {
+		c.initDefaultId()
+		c.Created = types.NowDateTime()
+	}
+
+	c.Updated = types.NowDateTime()
+
+	// recreate the fields list to ensure that all normalizations
+	// like default field id are applied
+	c.Fields = NewFieldsList(c.Fields...)
+
+	c.initDefaultFields()
+
+	if c.IsAuth() {
+		c.unsetMissingOAuth2MappedFields()
+	}
+
+	return nil
+}
+
+// onCollectionSave is the default OnCollectionCreate/OnCollectionUpdate
+// handler that normalizes the collection before it is persisted.
+func onCollectionSave(e *CollectionEvent) error {
+	err := e.Collection.savePrepare()
+	if err != nil {
+		return err
+	}
+
+	return e.Next()
+}
+
+// onCollectionSaveExecute is the default OnCollectionCreateExecute/
+// OnCollectionUpdateExecute handler that persists the collection model
+// and syncs the related records table (or view) inside a transaction.
+func onCollectionSaveExecute(e *CollectionEvent) error {
+	// always refresh the collections cache, even on failure
+	defer func() {
+		if err := e.App.ReloadCachedCollections(); err != nil {
+			e.App.Logger().Warn("Failed to reload collections cache", "error", err)
+		}
+	}()
+
+	var oldCollection *Collection
+	if !e.Collection.IsNew() {
+		var err error
+		oldCollection, err = e.App.FindCachedCollectionByNameOrId(e.Collection.Id)
+		if err != nil {
+			return err
+		}
+
+		// invalidate previously issued auth tokens on auth rule change
+		// (the pointer inequality is checked first as a cheap short-circuit)
+		if oldCollection.AuthRule != e.Collection.AuthRule &&
+			cast.ToString(oldCollection.AuthRule) != cast.ToString(e.Collection.AuthRule) {
+			e.Collection.AuthToken.Secret = security.RandomString(50)
+		}
+	}
+
+	// temporarily swap e.App with the tx app so that the nested handlers
+	// run within the same transaction (restored after RunInTransaction)
+	originalApp := e.App
+	txErr := e.App.RunInTransaction(func(txApp App) error {
+		e.App = txApp
+
+		isView := e.Collection.IsView()
+
+		// ensures that the view collection schema is properly loaded
+		if isView {
+			query := e.Collection.ViewQuery
+
+			// generate collection fields list from the query
+			viewFields, err := e.App.CreateViewFields(query)
+			if err != nil {
+				return err
+			}
+
+			// delete old renamed view
+			if oldCollection != nil {
+				if err := e.App.DeleteView(oldCollection.Name); err != nil {
+					return err
+				}
+			}
+
+			// wrap view query if necessary
+			query, err = normalizeViewQueryId(e.App, query)
+			if err != nil {
+				return fmt.Errorf("failed to normalize view query id: %w", err)
+			}
+
+			// (re)create the view
+			if err := e.App.SaveView(e.Collection.Name, query); err != nil {
+				return err
+			}
+
+			// updates newCollection.Fields based on the generated view table info and query
+			e.Collection.Fields = viewFields
+		}
+
+		// save the Collection model
+		if err := e.Next(); err != nil {
+			return err
+		}
+
+		// sync the changes with the related records table
+		if !isView {
+			if err := e.App.SyncRecordTableSchema(e.Collection, oldCollection); err != nil {
+				// note: don't wrap to allow propagating indexes validation.Errors
+				return err
+			}
+		}
+
+		return nil
+	})
+	e.App = originalApp
+
+	if txErr != nil {
+		return txErr
+	}
+
+	// trigger an update for all views with changed fields as a result of the current collection save
+	// (ignoring view errors to allow users to update the query from the UI)
+	resaveViewsWithChangedFields(e.App, e.Collection.Id)
+
+	return nil
+}
+
+// initDefaultFields (re)initializes the default system fields based on
+// the collection type (no-op for "view" collections).
+func (m *Collection) initDefaultFields() {
+	switch m.Type {
+	case CollectionTypeBase:
+		m.initIdField()
+	case CollectionTypeAuth:
+		m.initIdField()
+		m.initPasswordField()
+		m.initTokenKeyField()
+		m.initEmailField()
+		m.initEmailVisibilityField()
+		m.initVerifiedField()
+	case CollectionTypeView:
+		// view fields are autogenerated
+	}
+}
+
+// initIdField ensures that the collection has a system "id" primary key
+// text field as its first field, enforcing the system defaults if a
+// user-provided "id" text field already exists.
+func (m *Collection) initIdField() {
+	field, _ := m.Fields.GetByName(FieldNameId).(*TextField)
+	if field == nil {
+		// create default field
+		// (15 chars, lowercase alphanumeric, autogenerated when empty)
+		field = &TextField{
+			Name:                FieldNameId,
+			System:              true,
+			PrimaryKey:          true,
+			Required:            true,
+			Min:                 15,
+			Max:                 15,
+			Pattern:             `^[a-z0-9]+$`,
+			AutogeneratePattern: `[a-z0-9]{15}`,
+		}
+
+		// prepend it
+		m.Fields = NewFieldsList(append([]Field{field}, m.Fields...)...)
+	} else {
+		// enforce system defaults
+		field.System = true
+		field.Required = true
+		field.PrimaryKey = true
+		field.Hidden = false
+	}
+}
+
+// initPasswordField ensures that the auth collection has a hidden system
+// "password" field with enforced defaults.
+func (m *Collection) initPasswordField() {
+	field, ok := m.Fields.GetByName(FieldNamePassword).(*PasswordField)
+	if !ok || field == nil {
+		// register the default system password field
+		m.Fields.Add(&PasswordField{
+			Name:     FieldNamePassword,
+			System:   true,
+			Hidden:   true,
+			Required: true,
+			Min:      8,
+		})
+		return
+	}
+
+	// enforce system defaults on the existing field
+	field.System = true
+	field.Hidden = true
+	field.Required = true
+}
+
+// initTokenKeyField ensures that the auth collection has a hidden system
+// "tokenKey" text field and a unique index on it.
+func (m *Collection) initTokenKeyField() {
+	field, _ := m.Fields.GetByName(FieldNameTokenKey).(*TextField)
+	if field == nil {
+		// load default field
+		m.Fields.Add(&TextField{
+			Name:                FieldNameTokenKey,
+			System:              true,
+			Hidden:              true,
+			Min:                 30,
+			Max:                 60,
+			Required:            true,
+			AutogeneratePattern: `[a-zA-Z0-9]{50}`,
+		})
+	} else {
+		// enforce system defaults
+		field.System = true
+		field.Hidden = true
+		field.Required = true
+	}
+
+	// ensure that there is a unique index for the field
+	if !dbutils.HasSingleColumnUniqueIndex(FieldNameTokenKey, m.Indexes) {
+		m.Indexes = append(m.Indexes, fmt.Sprintf(
+			"CREATE UNIQUE INDEX `%s` ON `%s` (`%s`)",
+			m.fieldIndexName(FieldNameTokenKey),
+			m.Name,
+			FieldNameTokenKey,
+		))
+	}
+}
+
+// initEmailField ensures that the auth collection has a system "email"
+// field and a unique index on it (the index excludes empty emails so
+// that multiple records without an email are allowed).
+func (m *Collection) initEmailField() {
+	field, _ := m.Fields.GetByName(FieldNameEmail).(*EmailField)
+	if field == nil {
+		// load default field
+		m.Fields.Add(&EmailField{
+			Name:     FieldNameEmail,
+			System:   true,
+			Required: true,
+		})
+	} else {
+		// enforce system defaults
+		field.System = true
+		field.Hidden = false // managed by the emailVisibility flag
+	}
+
+	// ensure that there is a unique index for the email field
+	if !dbutils.HasSingleColumnUniqueIndex(FieldNameEmail, m.Indexes) {
+		m.Indexes = append(m.Indexes, fmt.Sprintf(
+			"CREATE UNIQUE INDEX `%s` ON `%s` (`%s`) WHERE `%s` != ''",
+			m.fieldIndexName(FieldNameEmail),
+			m.Name,
+			FieldNameEmail,
+			FieldNameEmail,
+		))
+	}
+}
+
+// initEmailVisibilityField ensures that the auth collection has a system
+// "emailVisibility" bool field.
+func (m *Collection) initEmailVisibilityField() {
+	field, ok := m.Fields.GetByName(FieldNameEmailVisibility).(*BoolField)
+	if !ok || field == nil {
+		// register the default system field
+		m.Fields.Add(&BoolField{
+			Name:   FieldNameEmailVisibility,
+			System: true,
+		})
+		return
+	}
+
+	// enforce system defaults on the existing field
+	field.System = true
+}
+
+// initVerifiedField ensures that the auth collection has a system
+// "verified" bool field.
+func (m *Collection) initVerifiedField() {
+	field, ok := m.Fields.GetByName(FieldNameVerified).(*BoolField)
+	if !ok || field == nil {
+		// register the default system field
+		m.Fields.Add(&BoolField{
+			Name:   FieldNameVerified,
+			System: true,
+		})
+		return
+	}
+
+	// enforce system defaults on the existing field
+	field.System = true
+}
+
+func (m *Collection) fieldIndexName(field string) string {
+ name := "idx_" + field + "_"
+
+ if m.Id != "" {
+ name += m.Id
+ } else if m.Name != "" {
+ name += m.Name
+ } else {
+ name += security.PseudorandomString(10)
+ }
+
+ if len(name) > 64 {
+ return name[:64]
+ }
+
+ return name
+}
diff --git a/core/collection_model_auth_options.go b/core/collection_model_auth_options.go
new file mode 100644
index 00000000..75626f97
--- /dev/null
+++ b/core/collection_model_auth_options.go
@@ -0,0 +1,535 @@
+package core
+
+import (
+ "strconv"
+ "strings"
+ "time"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/go-ozzo/ozzo-validation/v4/is"
+ "github.com/pocketbase/pocketbase/tools/auth"
+ "github.com/pocketbase/pocketbase/tools/list"
+ "github.com/pocketbase/pocketbase/tools/security"
+ "github.com/pocketbase/pocketbase/tools/types"
+ "github.com/spf13/cast"
+)
+
+// unsetMissingOAuth2MappedFields resets every OAuth2 mapped field value
+// that no longer has a matching collection field (e.g. after a field
+// rename or removal) to avoid pointing to a non-existing field.
+//
+// It is a no-op for non-auth collections.
+func (m *Collection) unsetMissingOAuth2MappedFields() {
+	if !m.IsAuth() {
+		return
+	}
+
+	// note: keep in sync with the OAuth2KnownFields members
+	mapped := []*string{
+		&m.OAuth2.MappedFields.Id,
+		&m.OAuth2.MappedFields.Name,
+		&m.OAuth2.MappedFields.Username,
+		&m.OAuth2.MappedFields.AvatarURL,
+	}
+
+	for _, name := range mapped {
+		if *name != "" && m.Fields.GetByName(*name) == nil {
+			*name = ""
+		}
+	}
+}
+
+// setDefaultAuthOptions loads the default auth collection options
+// (token secrets and durations, default email templates, etc.).
+//
+// Note that it assigns an entirely new collectionAuthOptions value,
+// discarding any previously set auth options.
+func (m *Collection) setDefaultAuthOptions() {
+ m.collectionAuthOptions = collectionAuthOptions{
+ VerificationTemplate: defaultVerificationTemplate,
+ ResetPasswordTemplate: defaultResetPasswordTemplate,
+ ConfirmEmailChangeTemplate: defaultConfirmEmailChangeTemplate,
+ AuthRule: types.Pointer(""),
+ AuthAlert: AuthAlertConfig{
+ Enabled: true,
+ EmailTemplate: defaultAuthAlertTemplate,
+ },
+ PasswordAuth: PasswordAuthConfig{
+ Enabled: true,
+ IdentityFields: []string{FieldNameEmail},
+ },
+ MFA: MFAConfig{
+ Enabled: false,
+ Duration: 1800, // 30min
+ },
+ OTP: OTPConfig{
+ Enabled: false,
+ Duration: 180, // 3min
+ Length: 8,
+ EmailTemplate: defaultOTPTemplate,
+ },
+ AuthToken: TokenConfig{
+ Secret: security.RandomString(50),
+ Duration: 604800, // 7 days
+ },
+ PasswordResetToken: TokenConfig{
+ Secret: security.RandomString(50),
+ Duration: 1800, // 30min
+ },
+ EmailChangeToken: TokenConfig{
+ Secret: security.RandomString(50),
+ Duration: 1800, // 30min
+ },
+ VerificationToken: TokenConfig{
+ Secret: security.RandomString(50),
+ Duration: 259200, // 3days
+ },
+ FileToken: TokenConfig{
+ Secret: security.RandomString(50),
+ Duration: 180, // 3min
+ },
+ }
+}
+
+// interface guard (ensures that collectionAuthOptions implements optionsValidator)
+var _ optionsValidator = (*collectionAuthOptions)(nil)
+
+// collectionAuthOptions defines the options for the "auth" type collection.
+type collectionAuthOptions struct {
+ // AuthRule could be used to specify additional record constraints
+ // applied after record authentication and right before returning the
+ // auth token response to the client.
+ //
+ // For example, to allow only verified users you could set it to
+ // "verified = true".
+ //
+ // Set it to empty string to allow any Auth collection record to authenticate.
+ //
+ // Set it to nil to disallow authentication altogether for the collection
+ // (that includes password, OAuth2, etc.).
+ AuthRule *string `form:"authRule" json:"authRule"`
+
+ // ManageRule gives admin-like permissions to allow fully managing
+ // the auth record(s), eg. changing the password without requiring
+ // to enter the old one, directly updating the verified state and email, etc.
+ //
+ // This rule is executed in addition to the Create and Update API rules.
+ ManageRule *string `form:"manageRule" json:"manageRule"`
+
+ // AuthAlert defines options related to the auth alerts on new device login.
+ AuthAlert AuthAlertConfig `form:"authAlert" json:"authAlert"`
+
+ // OAuth2 specifies whether OAuth2 auth is enabled for the collection
+ // and which OAuth2 providers are allowed.
+ OAuth2 OAuth2Config `form:"oauth2" json:"oauth2"`
+
+ // PasswordAuth defines options related to the collection password authentication.
+ PasswordAuth PasswordAuthConfig `form:"passwordAuth" json:"passwordAuth"`
+
+ // MFA defines options related to the Multi-factor authentication.
+ MFA MFAConfig `form:"mfa" json:"mfa"`
+
+ // OTP defines options related to the One-time password authentication.
+ OTP OTPConfig `form:"otp" json:"otp"`
+
+ // Various token configurations
+ // ---
+ AuthToken TokenConfig `form:"authToken" json:"authToken"`
+ PasswordResetToken TokenConfig `form:"passwordResetToken" json:"passwordResetToken"`
+ EmailChangeToken TokenConfig `form:"emailChangeToken" json:"emailChangeToken"`
+ VerificationToken TokenConfig `form:"verificationToken" json:"verificationToken"`
+ FileToken TokenConfig `form:"fileToken" json:"fileToken"`
+
+ // default email templates
+ // ---
+ VerificationTemplate EmailTemplate `form:"verificationTemplate" json:"verificationTemplate"`
+ ResetPasswordTemplate EmailTemplate `form:"resetPasswordTemplate" json:"resetPasswordTemplate"`
+ ConfirmEmailChangeTemplate EmailTemplate `form:"confirmEmailChangeTemplate" json:"confirmEmailChangeTemplate"`
+}
+
+// validate implements the optionsValidator interface and checks the
+// auth collection options (rules, auth methods, tokens and templates).
+//
+// The per-field struct validations run first; the cross-field checks
+// (MFA auth methods count, MFA rule, identity fields uniqueness) run
+// only after those pass.
+func (o *collectionAuthOptions) validate(cv *collectionValidator) error {
+ err := validation.ValidateStruct(o,
+ validation.Field(
+ &o.AuthRule,
+ validation.By(cv.checkRule),
+ validation.By(cv.ensureNoSystemRuleChange(cv.original.AuthRule)),
+ ),
+ validation.Field(
+ &o.ManageRule,
+ validation.NilOrNotEmpty,
+ validation.By(cv.checkRule),
+ validation.By(cv.ensureNoSystemRuleChange(cv.original.ManageRule)),
+ ),
+ validation.Field(&o.AuthAlert),
+ validation.Field(&o.PasswordAuth),
+ validation.Field(&o.OAuth2),
+ validation.Field(&o.OTP),
+ validation.Field(&o.MFA),
+ validation.Field(&o.AuthToken),
+ validation.Field(&o.PasswordResetToken),
+ validation.Field(&o.EmailChangeToken),
+ validation.Field(&o.VerificationToken),
+ validation.Field(&o.FileToken),
+ validation.Field(&o.VerificationTemplate, validation.Required),
+ validation.Field(&o.ResetPasswordTemplate, validation.Required),
+ validation.Field(&o.ConfirmEmailChangeTemplate, validation.Required),
+ )
+ if err != nil {
+ return err
+ }
+
+ if o.MFA.Enabled {
+ // if MFA is enabled require at least 2 auth methods
+ //
+ // @todo maybe consider disabling the check because if custom auth methods
+ // are registered it may fail since we don't have mechanism to detect them at the moment
+ authsEnabled := 0
+ if o.PasswordAuth.Enabled {
+ authsEnabled++
+ }
+ if o.OAuth2.Enabled {
+ authsEnabled++
+ }
+ if o.OTP.Enabled {
+ authsEnabled++
+ }
+ if authsEnabled < 2 {
+ return validation.Errors{
+ "mfa": validation.Errors{
+ "enabled": validation.NewError("validation_mfa_not_enough_auths", "MFA requires at least 2 auth methods to be enabled."),
+ },
+ }
+ }
+
+ // the MFA rule is validated only when MFA is enabled
+ // (the rule errors are manually wrapped to keep the "mfa.rule" error key path)
+ if o.MFA.Rule != "" {
+ mfaRuleValidators := []validation.RuleFunc{
+ cv.checkRule,
+ cv.ensureNoSystemRuleChange(&cv.original.MFA.Rule),
+ }
+
+ for _, validator := range mfaRuleValidators {
+ err := validator(&o.MFA.Rule)
+ if err != nil {
+ return validation.Errors{
+ "mfa": validation.Errors{
+ "rule": err,
+ },
+ }
+ }
+ }
+ }
+ }
+
+ // extra check to ensure that only unique identity fields are used
+ if o.PasswordAuth.Enabled {
+ err = validation.Validate(o.PasswordAuth.IdentityFields, validation.By(cv.checkFieldsForUniqueIndex))
+ if err != nil {
+ return validation.Errors{
+ "passwordAuth": validation.Errors{
+ "identityFields": err,
+ },
+ }
+ }
+ }
+
+ return nil
+}
+
+// -------------------------------------------------------------------
+
+// EmailTemplate defines the subject and body of a single email template.
+type EmailTemplate struct {
+ Subject string `form:"subject" json:"subject"`
+ Body string `form:"body" json:"body"`
+}
+
+// Validate makes EmailTemplate validatable by implementing [validation.Validatable] interface.
+//
+// Both the subject and the body are required to be non-empty.
+func (t EmailTemplate) Validate() error {
+ return validation.ValidateStruct(&t,
+ validation.Field(&t.Subject, validation.Required),
+ validation.Field(&t.Body, validation.Required),
+ )
+}
+
+// Resolve replaces the provided placeholder parameters in the current
+// email template and returns its subject and body as ready-to-use strings.
+//
+// Placeholders that don't occur in the template are simply ignored.
+func (t EmailTemplate) Resolve(placeholders map[string]any) (subject, body string) {
+	subject = t.Subject
+	body = t.Body
+
+	for placeholder, raw := range placeholders {
+		value := cast.ToString(raw)
+
+		// substitute in both components (no-op when the placeholder is absent)
+		subject = strings.ReplaceAll(subject, placeholder, value)
+		body = strings.ReplaceAll(body, placeholder, value)
+	}
+
+	return subject, body
+}
+
+// -------------------------------------------------------------------
+
+// AuthAlertConfig defines options related to the auth alerts on new device login.
+type AuthAlertConfig struct {
+ Enabled bool `form:"enabled" json:"enabled"`
+ EmailTemplate EmailTemplate `form:"emailTemplate" json:"emailTemplate"`
+}
+
+// Validate makes AuthAlertConfig validatable by implementing [validation.Validatable] interface.
+func (c AuthAlertConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ // note: for now always run the email template validations even
+ // if not enabled since it could be used separately
+ validation.Field(&c.EmailTemplate),
+ )
+}
+
+// -------------------------------------------------------------------
+
+// TokenConfig defines the secret and the validity duration of a single token type.
+type TokenConfig struct {
+ // Secret specifies the token secret
+ // (excluded from the JSON serialization when empty).
+ Secret string `form:"secret" json:"secret,omitempty"`
+
+ // Duration specifies how long an issued token to be valid (in seconds)
+ Duration int64 `form:"duration" json:"duration"`
+}
+
+// Validate makes TokenConfig validatable by implementing [validation.Validatable] interface.
+func (c TokenConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.Secret, validation.Required, validation.Length(30, 255)),
+ validation.Field(&c.Duration, validation.Required, validation.Min(10), validation.Max(94670856)), // ~3y max
+ )
+}
+
+// DurationTime returns the current Duration as [time.Duration].
+func (c TokenConfig) DurationTime() time.Duration {
+ return time.Duration(c.Duration) * time.Second
+}
+
+// -------------------------------------------------------------------
+
+// OTPConfig defines the "One-time password" auth option settings.
+type OTPConfig struct {
+ Enabled bool `form:"enabled" json:"enabled"`
+
+ // Duration specifies how long the OTP to be valid (in seconds)
+ Duration int64 `form:"duration" json:"duration"`
+
+ // Length specifies the auto generated password length.
+ Length int `form:"length" json:"length"`
+
+ // EmailTemplate is the default OTP email template that will be sent to the auth record.
+ //
+ // In addition to the system placeholders you can also make use of
+ // [core.EmailPlaceholderOTPId] and [core.EmailPlaceholderOTP].
+ EmailTemplate EmailTemplate `form:"emailTemplate" json:"emailTemplate"`
+}
+
+// Validate makes OTPConfig validatable by implementing [validation.Validatable] interface.
+//
+// Duration and Length are validated only when the option is enabled.
+func (c OTPConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.Duration, validation.When(c.Enabled, validation.Required, validation.Min(10), validation.Max(86400))),
+ validation.Field(&c.Length, validation.When(c.Enabled, validation.Required, validation.Min(4))),
+ // note: for now always run the email template validations even
+ // if not enabled since it could be used separately
+ validation.Field(&c.EmailTemplate),
+ )
+}
+
+// DurationTime returns the current Duration as [time.Duration].
+func (c OTPConfig) DurationTime() time.Duration {
+ return time.Duration(c.Duration) * time.Second
+}
+
+// -------------------------------------------------------------------
+
+// MFAConfig defines the "Multi-factor authentication" auth option settings.
+type MFAConfig struct {
+ Enabled bool `form:"enabled" json:"enabled"`
+
+ // Duration specifies how long an issued MFA to be valid (in seconds)
+ Duration int64 `form:"duration" json:"duration"`
+
+ // Rule is an optional field to restrict MFA only for the records that satisfy the rule.
+ //
+ // Leave it empty to enable MFA for everyone.
+ Rule string `form:"rule" json:"rule"`
+}
+
+// Validate makes MFAConfig validatable by implementing [validation.Validatable] interface.
+//
+// Duration is validated only when the option is enabled.
+// (the Rule is validated separately as part of the collection auth options)
+func (c MFAConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.Duration, validation.When(c.Enabled, validation.Required, validation.Min(10), validation.Max(86400))),
+ )
+}
+
+// DurationTime returns the current Duration as [time.Duration].
+func (c MFAConfig) DurationTime() time.Duration {
+ return time.Duration(c.Duration) * time.Second
+}
+
+// -------------------------------------------------------------------
+
+// PasswordAuthConfig defines the "identity + password" auth option settings.
+type PasswordAuthConfig struct {
+ Enabled bool `form:"enabled" json:"enabled"`
+
+ // IdentityFields is a list of field names that could be used as
+ // identity during password authentication.
+ //
+ // Usually only fields that has single column UNIQUE index are accepted as values.
+ IdentityFields []string `form:"identityFields" json:"identityFields"`
+}
+
+// Validate makes PasswordAuthConfig validatable by implementing [validation.Validatable] interface.
+func (c PasswordAuthConfig) Validate() error {
+ // strip duplicated values
+ // (note: because of the value receiver this operates on a copy, so the
+ // deduplication affects only the Required check below and is not
+ // persisted back to the caller's config)
+ c.IdentityFields = list.ToUniqueStringSlice(c.IdentityFields)
+
+ if !c.Enabled {
+ return nil // no need to validate
+ }
+
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.IdentityFields, validation.Required),
+ )
+}
+
+// -------------------------------------------------------------------
+
+// OAuth2KnownFields lists the collection field names that the
+// well-known OAuth2 user fields could be mapped to
+// (see also Collection.unsetMissingOAuth2MappedFields).
+type OAuth2KnownFields struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ Username string `form:"username" json:"username"`
+ AvatarURL string `form:"avatarURL" json:"avatarURL"`
+}
+
+// OAuth2Config defines the OAuth2 auth option settings of a single collection.
+type OAuth2Config struct {
+ // Providers lists the configured OAuth2 providers.
+ Providers []OAuth2ProviderConfig `form:"providers" json:"providers"`
+
+ // MappedFields defines the collection fields that the known OAuth2
+ // user fields are mapped to.
+ MappedFields OAuth2KnownFields `form:"mappedFields" json:"mappedFields"`
+
+ Enabled bool `form:"enabled" json:"enabled"`
+}
+
+// GetProviderConfig returns the first OAuth2ProviderConfig that matches the specified name.
+//
+// Returns false and zero config if no such provider is available in c.Providers.
+func (c OAuth2Config) GetProviderConfig(name string) (config OAuth2ProviderConfig, exists bool) {
+ for _, p := range c.Providers {
+ if p.Name == name {
+ return p, true
+ }
+ }
+ return
+}
+
+// Validate makes OAuth2Config validatable by implementing [validation.Validatable] interface.
+func (c OAuth2Config) Validate() error {
+ if !c.Enabled {
+ return nil // no need to validate
+ }
+
+ return validation.ValidateStruct(&c,
+ // note: don't require providers for now as they could be externally registered/removed
+ validation.Field(&c.Providers, validation.By(checkForDuplicatedProviders)),
+ )
+}
+
+// checkForDuplicatedProviders returns a validation error keyed by the
+// slice index of the first provider whose name repeats an earlier entry.
+func checkForDuplicatedProviders(value any) error {
+	configs, _ := value.([]OAuth2ProviderConfig)
+
+	seen := make(map[string]struct{}, len(configs))
+
+	for i, config := range configs {
+		name := config.Name
+		if name == "" {
+			continue // the name nonempty state is validated separately
+		}
+
+		if _, duplicated := seen[name]; !duplicated {
+			seen[name] = struct{}{}
+			continue
+		}
+
+		return validation.Errors{
+			strconv.Itoa(i): validation.Errors{
+				"name": validation.NewError("validation_duplicated_provider", "The provider "+name+" is already registered.").
+					SetParams(map[string]any{"name": name}),
+			},
+		}
+	}
+
+	return nil
+}
+
+// OAuth2ProviderConfig defines the configuration of a single OAuth2 provider.
+type OAuth2ProviderConfig struct {
+ // PKCE overwrites the default provider PKCE config option.
+ //
+ // This usually shouldn't be needed but some OAuth2 vendors, like the LinkedIn OIDC,
+ // may require manual adjustment due to returning error if extra parameters are added to the request
+ // (https://github.com/pocketbase/pocketbase/discussions/3799#discussioncomment-7640312)
+ PKCE *bool `form:"pkce" json:"pkce"`
+
+ Name string `form:"name" json:"name"`
+ ClientId string `form:"clientId" json:"clientId"`
+ // ClientSecret is excluded from the JSON serialization when empty.
+ ClientSecret string `form:"clientSecret" json:"clientSecret,omitempty"`
+ AuthURL string `form:"authURL" json:"authURL"`
+ TokenURL string `form:"tokenURL" json:"tokenURL"`
+ UserInfoURL string `form:"userInfoURL" json:"userInfoURL"`
+ DisplayName string `form:"displayName" json:"displayName"`
+}
+
+// Validate makes OAuth2ProviderConfig validatable by implementing [validation.Validatable] interface.
+//
+// The URL fields are optional and validated only for valid URL format.
+func (c OAuth2ProviderConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.Name, validation.Required, validation.By(checkProviderName)),
+ validation.Field(&c.ClientId, validation.Required),
+ validation.Field(&c.ClientSecret, validation.Required),
+ validation.Field(&c.AuthURL, is.URL),
+ validation.Field(&c.TokenURL, is.URL),
+ validation.Field(&c.UserInfoURL, is.URL),
+ )
+}
+
+// checkProviderName verifies that the provided value matches the name of
+// a registered OAuth2 provider (empty names are skipped).
+func checkProviderName(value any) error {
+	name, _ := value.(string)
+	if name == "" {
+		return nil // nothing to check
+	}
+
+	_, err := auth.NewProviderByName(name)
+	if err == nil {
+		return nil
+	}
+
+	return validation.NewError("validation_missing_provider", "Invalid or missing provider with name "+name+".").
+		SetParams(map[string]any{"name": name})
+}
+
+// InitProvider returns a new auth.Provider instance loaded with the current OAuth2ProviderConfig options.
+//
+// Returns an error if the provider name is not registered.
+func (c OAuth2ProviderConfig) InitProvider() (auth.Provider, error) {
+ provider, err := auth.NewProviderByName(c.Name)
+ if err != nil {
+ return nil, err
+ }
+
+ // overwrite the provider defaults only with the explicitly set
+ // (non-zero) config options
+ if c.ClientId != "" {
+ provider.SetClientId(c.ClientId)
+ }
+
+ if c.ClientSecret != "" {
+ provider.SetClientSecret(c.ClientSecret)
+ }
+
+ if c.AuthURL != "" {
+ provider.SetAuthURL(c.AuthURL)
+ }
+
+ if c.UserInfoURL != "" {
+ provider.SetUserInfoURL(c.UserInfoURL)
+ }
+
+ if c.TokenURL != "" {
+ provider.SetTokenURL(c.TokenURL)
+ }
+
+ if c.DisplayName != "" {
+ provider.SetDisplayName(c.DisplayName)
+ }
+
+ if c.PKCE != nil {
+ provider.SetPKCE(*c.PKCE)
+ }
+
+ return provider, nil
+}
diff --git a/core/collection_model_auth_options_test.go b/core/collection_model_auth_options_test.go
new file mode 100644
index 00000000..3a87869d
--- /dev/null
+++ b/core/collection_model_auth_options_test.go
@@ -0,0 +1,1016 @@
+package core_test
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/auth"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+// TestCollectionAuthOptionsValidate verifies the auth collection options
+// validations (rules, auth methods, tokens and email templates).
+func TestCollectionAuthOptionsValidate(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ collection func(app core.App) (*core.Collection, error)
+ expectedErrors []string
+ }{
+ // authRule
+ {
+ name: "nil authRule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.AuthRule = nil
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "empty authRule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.AuthRule = types.Pointer("")
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "invalid authRule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.AuthRule = types.Pointer("missing != ''")
+ return c, nil
+ },
+ expectedErrors: []string{"authRule"},
+ },
+ {
+ name: "valid authRule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.AuthRule = types.Pointer("id != ''")
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+
+ // manageRule
+ {
+ name: "nil manageRule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.ManageRule = nil
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "empty manageRule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.ManageRule = types.Pointer("")
+ return c, nil
+ },
+ expectedErrors: []string{"manageRule"},
+ },
+ {
+ name: "invalid manageRule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.ManageRule = types.Pointer("missing != ''")
+ return c, nil
+ },
+ expectedErrors: []string{"manageRule"},
+ },
+ {
+ name: "valid manageRule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.ManageRule = types.Pointer("id != ''")
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+
+ // passwordAuth
+ {
+ name: "trigger passwordAuth validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.PasswordAuth = core.PasswordAuthConfig{
+ Enabled: true,
+ }
+ return c, nil
+ },
+ expectedErrors: []string{"passwordAuth"},
+ },
+ {
+ name: "passwordAuth with non-unique identity fields",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(&core.TextField{Name: "test"})
+ c.PasswordAuth = core.PasswordAuthConfig{
+ Enabled: true,
+ IdentityFields: []string{"email", "test"},
+ }
+ return c, nil
+ },
+ expectedErrors: []string{"passwordAuth"},
+ },
+ {
+ // note: renamed from the duplicated "non-unique" name since this
+ // scenario covers the success case with a unique index on "test"
+ name: "passwordAuth with unique identity fields",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(&core.TextField{Name: "test"})
+ c.AddIndex("auth_test_idx", true, "test", "")
+ c.PasswordAuth = core.PasswordAuthConfig{
+ Enabled: true,
+ IdentityFields: []string{"email", "test"},
+ }
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+
+ // oauth2
+ {
+ name: "trigger oauth2 validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.OAuth2 = core.OAuth2Config{
+ Enabled: true,
+ Providers: []core.OAuth2ProviderConfig{
+ {Name: "missing"},
+ },
+ }
+ return c, nil
+ },
+ expectedErrors: []string{"oauth2"},
+ },
+
+ // otp
+ {
+ name: "trigger otp validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.OTP = core.OTPConfig{
+ Enabled: true,
+ Duration: -10,
+ }
+ return c, nil
+ },
+ expectedErrors: []string{"otp"},
+ },
+
+ // mfa
+ {
+ name: "trigger mfa validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.MFA = core.MFAConfig{
+ Enabled: true,
+ Duration: -10,
+ }
+ return c, nil
+ },
+ expectedErrors: []string{"mfa"},
+ },
+ {
+ name: "mfa enabled with < 2 auth methods",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.MFA.Enabled = true
+ c.PasswordAuth.Enabled = true
+ c.OTP.Enabled = false
+ c.OAuth2.Enabled = false
+ return c, nil
+ },
+ expectedErrors: []string{"mfa"},
+ },
+ {
+ name: "mfa enabled with >= 2 auth methods",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.MFA.Enabled = true
+ c.PasswordAuth.Enabled = true
+ c.OTP.Enabled = true
+ c.OAuth2.Enabled = false
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "mfa disabled with invalid rule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.PasswordAuth.Enabled = true
+ c.OTP.Enabled = true
+ c.MFA.Enabled = false
+ c.MFA.Rule = "invalid"
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "mfa enabled with invalid rule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.PasswordAuth.Enabled = true
+ c.OTP.Enabled = true
+ c.MFA.Enabled = true
+ c.MFA.Rule = "invalid"
+ return c, nil
+ },
+ expectedErrors: []string{"mfa"},
+ },
+ {
+ name: "mfa enabled with valid rule",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.PasswordAuth.Enabled = true
+ c.OTP.Enabled = true
+ c.MFA.Enabled = true
+ c.MFA.Rule = "1=1"
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+
+ // tokens
+ {
+ name: "trigger authToken validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.AuthToken.Secret = ""
+ return c, nil
+ },
+ expectedErrors: []string{"authToken"},
+ },
+ {
+ name: "trigger passwordResetToken validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.PasswordResetToken.Secret = ""
+ return c, nil
+ },
+ expectedErrors: []string{"passwordResetToken"},
+ },
+ {
+ name: "trigger emailChangeToken validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.EmailChangeToken.Secret = ""
+ return c, nil
+ },
+ expectedErrors: []string{"emailChangeToken"},
+ },
+ {
+ name: "trigger verificationToken validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.VerificationToken.Secret = ""
+ return c, nil
+ },
+ expectedErrors: []string{"verificationToken"},
+ },
+ {
+ name: "trigger fileToken validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.FileToken.Secret = ""
+ return c, nil
+ },
+ expectedErrors: []string{"fileToken"},
+ },
+
+ // templates
+ {
+ name: "trigger verificationTemplate validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.VerificationTemplate.Body = ""
+ return c, nil
+ },
+ expectedErrors: []string{"verificationTemplate"},
+ },
+ {
+ name: "trigger resetPasswordTemplate validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.ResetPasswordTemplate.Body = ""
+ return c, nil
+ },
+ expectedErrors: []string{"resetPasswordTemplate"},
+ },
+ {
+ name: "trigger confirmEmailChangeTemplate validations",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.ConfirmEmailChangeTemplate.Body = ""
+ return c, nil
+ },
+ expectedErrors: []string{"confirmEmailChangeTemplate"},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection, err := s.collection(app)
+ if err != nil {
+ t.Fatalf("Failed to retrieve test collection: %v", err)
+ }
+
+ result := app.Validate(collection)
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+// TestEmailTemplateValidate checks that both the subject and the body
+// are required.
+func TestEmailTemplateValidate(t *testing.T) {
+	scenarios := []struct {
+		name           string
+		template       core.EmailTemplate
+		expectedErrors []string
+	}{
+		{
+			name:           "zero value",
+			template:       core.EmailTemplate{},
+			expectedErrors: []string{"subject", "body"},
+		},
+		{
+			name: "non-empty data",
+			template: core.EmailTemplate{
+				Subject: "a",
+				Body:    "b",
+			},
+			expectedErrors: []string{},
+		},
+	}
+
+	for _, s := range scenarios {
+		t.Run(s.name, func(t *testing.T) {
+			tests.TestValidationErrors(t, s.template.Validate(), s.expectedErrors)
+		})
+	}
+}
+
+// TestEmailTemplateResolve checks the placeholders replacement in both
+// the subject and the body components (incl. repeated and unknown placeholders).
+func TestEmailTemplateResolve(t *testing.T) {
+ template := core.EmailTemplate{
+ Subject: "test_subject {PARAM3} {PARAM1}-{PARAM2} repeat-{PARAM1}",
+ Body: "test_body {PARAM3} {PARAM2}-{PARAM1} repeat-{PARAM2}",
+ }
+
+ scenarios := []struct {
+ name string
+ placeholders map[string]any
+ template core.EmailTemplate
+ expectedSubject string
+ expectedBody string
+ }{
+ {
+ "no placeholders",
+ nil,
+ template,
+ template.Subject,
+ template.Body,
+ },
+ {
+ "no matching placeholders",
+ map[string]any{"{A}": "abc", "{B}": 456},
+ template,
+ template.Subject,
+ template.Body,
+ },
+ {
+ "at least one matching placeholder",
+ map[string]any{"{PARAM1}": "abc", "{PARAM2}": 456},
+ template,
+ "test_subject {PARAM3} abc-456 repeat-abc",
+ "test_body {PARAM3} 456-abc repeat-456",
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ subject, body := s.template.Resolve(s.placeholders)
+
+ if subject != s.expectedSubject {
+ t.Fatalf("Expected subject\n%v\ngot\n%v", s.expectedSubject, subject)
+ }
+
+ if body != s.expectedBody {
+ t.Fatalf("Expected body\n%v\ngot\n%v", s.expectedBody, body)
+ }
+ })
+ }
+}
+
+// TestTokenConfigValidate checks the secret length and duration bounds validations.
+func TestTokenConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.TokenConfig
+ expectedErrors []string
+ }{
+ {
+ "zero value",
+ core.TokenConfig{},
+ []string{"secret", "duration"},
+ },
+ {
+ // both values are right below the allowed minimums
+ "invalid data",
+ core.TokenConfig{
+ Secret: strings.Repeat("a", 29),
+ Duration: 9,
+ },
+ []string{"secret", "duration"},
+ },
+ {
+ // both values are at the allowed minimums
+ "valid data",
+ core.TokenConfig{
+ Secret: strings.Repeat("a", 30),
+ Duration: 10,
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+// TestTokenConfigDurationTime checks the seconds -> time.Duration conversion.
+func TestTokenConfigDurationTime(t *testing.T) {
+	scenarios := []struct {
+		config   core.TokenConfig
+		expected time.Duration
+	}{
+		{core.TokenConfig{}, 0 * time.Second},
+		{core.TokenConfig{Duration: 1234}, 1234 * time.Second},
+	}
+
+	for i, sc := range scenarios {
+		t.Run(fmt.Sprintf("%d_%d", i, sc.config.Duration), func(t *testing.T) {
+			if got := sc.config.DurationTime(); got != sc.expected {
+				t.Fatalf("Expected duration %d, got %d", sc.expected, got)
+			}
+		})
+	}
+}
+
+// TestAuthAlertConfigValidate checks that the email template is always
+// validated regardless of the Enabled state.
+func TestAuthAlertConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.AuthAlertConfig
+ expectedErrors []string
+ }{
+ {
+ "zero value (disabled)",
+ core.AuthAlertConfig{},
+ []string{"emailTemplate"},
+ },
+ {
+ "zero value (enabled)",
+ core.AuthAlertConfig{Enabled: true},
+ []string{"emailTemplate"},
+ },
+ {
+ "invalid template",
+ core.AuthAlertConfig{
+ EmailTemplate: core.EmailTemplate{Body: "", Subject: "b"},
+ },
+ []string{"emailTemplate"},
+ },
+ {
+ "valid data",
+ core.AuthAlertConfig{
+ EmailTemplate: core.EmailTemplate{Body: "a", Subject: "b"},
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+// TestOTPConfigValidate checks the OTP duration/length bounds and that
+// the email template is always validated regardless of the Enabled state.
+func TestOTPConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.OTPConfig
+ expectedErrors []string
+ }{
+ {
+ "zero value (disabled)",
+ core.OTPConfig{},
+ []string{"emailTemplate"},
+ },
+ {
+ "zero value (enabled)",
+ core.OTPConfig{Enabled: true},
+ []string{"duration", "length", "emailTemplate"},
+ },
+ {
+ // note: renamed from "(< 3)" - the length rule is Min(4),
+ // so 3 is invalid for being below 4
+ "invalid length (< 4)",
+ core.OTPConfig{
+ Enabled: true,
+ EmailTemplate: core.EmailTemplate{Body: "a", Subject: "b"},
+ Duration: 100,
+ Length: 3,
+ },
+ []string{"length"},
+ },
+ {
+ "invalid duration (< 10)",
+ core.OTPConfig{
+ Enabled: true,
+ EmailTemplate: core.EmailTemplate{Body: "a", Subject: "b"},
+ Duration: 9,
+ Length: 100,
+ },
+ []string{"duration"},
+ },
+ {
+ "invalid duration (> 86400)",
+ core.OTPConfig{
+ Enabled: true,
+ EmailTemplate: core.EmailTemplate{Body: "a", Subject: "b"},
+ Duration: 86401,
+ Length: 100,
+ },
+ []string{"duration"},
+ },
+ {
+ "invalid template (triggering EmailTemplate validations)",
+ core.OTPConfig{
+ Enabled: true,
+ EmailTemplate: core.EmailTemplate{Body: "", Subject: "b"},
+ Duration: 86400,
+ Length: 4,
+ },
+ []string{"emailTemplate"},
+ },
+ {
+ "valid data",
+ core.OTPConfig{
+ Enabled: true,
+ EmailTemplate: core.EmailTemplate{Body: "a", Subject: "b"},
+ Duration: 86400,
+ Length: 4,
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+// TestOTPConfigDurationTime checks the seconds -> time.Duration conversion.
+func TestOTPConfigDurationTime(t *testing.T) {
+	scenarios := []struct {
+		config   core.OTPConfig
+		expected time.Duration
+	}{
+		{core.OTPConfig{}, 0 * time.Second},
+		{core.OTPConfig{Duration: 1234}, 1234 * time.Second},
+	}
+
+	for i, sc := range scenarios {
+		t.Run(fmt.Sprintf("%d_%d", i, sc.config.Duration), func(t *testing.T) {
+			if got := sc.config.DurationTime(); got != sc.expected {
+				t.Fatalf("Expected duration %d, got %d", sc.expected, got)
+			}
+		})
+	}
+}
+
+// TestMFAConfigValidate checks that the duration bounds are enforced
+// only when MFA is enabled.
+func TestMFAConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.MFAConfig
+ expectedErrors []string
+ }{
+ {
+ "zero value (disabled)",
+ core.MFAConfig{},
+ []string{},
+ },
+ {
+ "zero value (enabled)",
+ core.MFAConfig{Enabled: true},
+ []string{"duration"},
+ },
+ {
+ "invalid duration (< 10)",
+ core.MFAConfig{Enabled: true, Duration: 9},
+ []string{"duration"},
+ },
+ {
+ "invalid duration (> 86400)",
+ core.MFAConfig{Enabled: true, Duration: 86401},
+ []string{"duration"},
+ },
+ {
+ "valid data",
+ core.MFAConfig{Enabled: true, Duration: 86400},
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+// TestMFAConfigDurationTime checks that MFAConfig.DurationTime converts the
+// numeric Duration field (seconds) into the equivalent time.Duration.
+func TestMFAConfigDurationTime(t *testing.T) {
+ scenarios := []struct {
+ config core.MFAConfig
+ expected time.Duration
+ }{
+ {core.MFAConfig{}, 0 * time.Second},
+ {core.MFAConfig{Duration: 1234}, 1234 * time.Second},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%d", i, s.config.Duration), func(t *testing.T) {
+ result := s.config.DurationTime()
+
+ if result != s.expected {
+ t.Fatalf("Expected duration %d, got %d", s.expected, result)
+ }
+ })
+ }
+}
+
+// TestPasswordAuthConfigValidate verifies PasswordAuthConfig.Validate:
+// no errors while disabled; when enabled, IdentityFields must contain
+// at least one non-empty field name.
+func TestPasswordAuthConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.PasswordAuthConfig
+ expectedErrors []string
+ }{
+ {
+ "zero value (disabled)",
+ core.PasswordAuthConfig{},
+ []string{},
+ },
+ {
+ "zero value (enabled)",
+ core.PasswordAuthConfig{Enabled: true},
+ []string{"identityFields"},
+ },
+ {
+ "empty values",
+ core.PasswordAuthConfig{Enabled: true, IdentityFields: []string{"", ""}},
+ []string{"identityFields"},
+ },
+ {
+ "valid data",
+ core.PasswordAuthConfig{Enabled: true, IdentityFields: []string{"abc"}},
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+// TestOAuth2ConfigGetProviderConfig verifies OAuth2Config.GetProviderConfig
+// lookup by provider name: a found provider returns its config and true,
+// a missing one returns a zero-value config and false.
+func TestOAuth2ConfigGetProviderConfig(t *testing.T) {
+ scenarios := []struct {
+ name string
+ providerName string
+ config core.OAuth2Config
+ expectedExists bool
+ }{
+ {
+ "zero value",
+ "gitlab",
+ core.OAuth2Config{},
+ false,
+ },
+ {
+ "empty config with valid provider",
+ "gitlab",
+ core.OAuth2Config{},
+ false,
+ },
+ {
+ "non-empty config with missing provider",
+ "gitlab",
+ core.OAuth2Config{Providers: []core.OAuth2ProviderConfig{{Name: "google"}, {Name: "github"}}},
+ false,
+ },
+ {
+ "config with existing provider",
+ "github",
+ core.OAuth2Config{Providers: []core.OAuth2ProviderConfig{{Name: "google"}, {Name: "github"}}},
+ true,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ config, exists := s.config.GetProviderConfig(s.providerName)
+
+ if exists != s.expectedExists {
+ t.Fatalf("Expected exists %v, got %v", s.expectedExists, exists)
+ }
+
+ if exists {
+ if config.Name != s.providerName {
+ t.Fatalf("Expected config with name %q, got %q", s.providerName, config.Name)
+ }
+ } else {
+ if config.Name != "" {
+ t.Fatalf("Expected empty config, got %v", config)
+ }
+ }
+ })
+ }
+}
+
+// TestOAuth2ConfigValidate verifies OAuth2Config.Validate: unknown provider
+// names, invalid provider data and duplicated provider entries must all
+// report errors under the "providers" key.
+func TestOAuth2ConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.OAuth2Config
+ expectedErrors []string
+ }{
+ {
+ "zero value (disabled)",
+ core.OAuth2Config{},
+ []string{},
+ },
+ {
+ "zero value (enabled)",
+ core.OAuth2Config{Enabled: true},
+ []string{},
+ },
+ {
+ "unknown provider",
+ core.OAuth2Config{Enabled: true, Providers: []core.OAuth2ProviderConfig{
+ {Name: "missing", ClientId: "abc", ClientSecret: "456"},
+ }},
+ []string{"providers"},
+ },
+ {
+ "known provider with invalid data",
+ core.OAuth2Config{Enabled: true, Providers: []core.OAuth2ProviderConfig{
+ {Name: "gitlab", ClientId: "abc", TokenURL: "!invalid!"},
+ }},
+ []string{"providers"},
+ },
+ {
+ "known provider with valid data",
+ core.OAuth2Config{Enabled: true, Providers: []core.OAuth2ProviderConfig{
+ {Name: "gitlab", ClientId: "abc", ClientSecret: "456", TokenURL: "https://example.com"},
+ }},
+ []string{},
+ },
+ {
+ "known provider with valid data (duplicated)",
+ core.OAuth2Config{Enabled: true, Providers: []core.OAuth2ProviderConfig{
+ {Name: "gitlab", ClientId: "abc1", ClientSecret: "1", TokenURL: "https://example1.com"},
+ {Name: "google", ClientId: "abc2", ClientSecret: "2", TokenURL: "https://example2.com"},
+ {Name: "gitlab", ClientId: "abc3", ClientSecret: "3", TokenURL: "https://example3.com"},
+ }},
+ []string{"providers"},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+// TestOAuth2ProviderConfigValidate verifies OAuth2ProviderConfig.Validate:
+// name/clientId/clientSecret are required, the name must be a registered
+// provider, and the optional URLs must be well-formed.
+func TestOAuth2ProviderConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.OAuth2ProviderConfig
+ expectedErrors []string
+ }{
+ {
+ "zero value",
+ core.OAuth2ProviderConfig{},
+ []string{"name", "clientId", "clientSecret"},
+ },
+ {
+ "minimum valid data",
+ core.OAuth2ProviderConfig{Name: "gitlab", ClientId: "abc", ClientSecret: "456"},
+ []string{},
+ },
+ {
+ "non-existing provider",
+ core.OAuth2ProviderConfig{Name: "missing", ClientId: "abc", ClientSecret: "456"},
+ []string{"name"},
+ },
+ {
+ "invalid urls",
+ core.OAuth2ProviderConfig{
+ Name: "gitlab",
+ ClientId: "abc",
+ ClientSecret: "456",
+ AuthURL: "!invalid!",
+ TokenURL: "!invalid!",
+ UserInfoURL: "!invalid!",
+ },
+ []string{"authURL", "tokenURL", "userInfoURL"},
+ },
+ {
+ "valid urls",
+ core.OAuth2ProviderConfig{
+ Name: "gitlab",
+ ClientId: "abc",
+ ClientSecret: "456",
+ AuthURL: "https://example.com/a",
+ TokenURL: "https://example.com/b",
+ UserInfoURL: "https://example.com/c",
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+// TestOAuth2ProviderConfigInitProvider verifies OAuth2ProviderConfig.InitProvider:
+// unknown names fail with a nil provider, while known providers are instantiated
+// from the auth.Providers factory with the config values applied (falling back
+// to the provider's own defaults for unset fields).
+func TestOAuth2ProviderConfigInitProvider(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.OAuth2ProviderConfig
+ expectedConfig core.OAuth2ProviderConfig
+ expectedError bool
+ }{
+ {
+ "empty config",
+ core.OAuth2ProviderConfig{},
+ core.OAuth2ProviderConfig{},
+ true,
+ },
+ {
+ "missing provider",
+ core.OAuth2ProviderConfig{
+ Name: "missing",
+ ClientId: "test_ClientId",
+ ClientSecret: "test_ClientSecret",
+ AuthURL: "test_AuthURL",
+ TokenURL: "test_TokenURL",
+ UserInfoURL: "test_UserInfoURL",
+ DisplayName: "test_DisplayName",
+ PKCE: types.Pointer(true),
+ },
+ core.OAuth2ProviderConfig{
+ Name: "missing",
+ ClientId: "test_ClientId",
+ ClientSecret: "test_ClientSecret",
+ AuthURL: "test_AuthURL",
+ TokenURL: "test_TokenURL",
+ UserInfoURL: "test_UserInfoURL",
+ DisplayName: "test_DisplayName",
+ PKCE: types.Pointer(true),
+ },
+ true,
+ },
+ {
+ "existing provider minimal",
+ core.OAuth2ProviderConfig{
+ Name: "gitlab",
+ },
+ core.OAuth2ProviderConfig{
+ Name: "gitlab",
+ ClientId: "",
+ ClientSecret: "",
+ AuthURL: "https://gitlab.com/oauth/authorize",
+ TokenURL: "https://gitlab.com/oauth/token",
+ UserInfoURL: "https://gitlab.com/api/v4/user",
+ DisplayName: "GitLab",
+ PKCE: types.Pointer(true),
+ },
+ false,
+ },
+ {
+ "existing provider with all fields",
+ core.OAuth2ProviderConfig{
+ Name: "gitlab",
+ ClientId: "test_ClientId",
+ ClientSecret: "test_ClientSecret",
+ AuthURL: "test_AuthURL",
+ TokenURL: "test_TokenURL",
+ UserInfoURL: "test_UserInfoURL",
+ DisplayName: "test_DisplayName",
+ PKCE: types.Pointer(true),
+ },
+ core.OAuth2ProviderConfig{
+ Name: "gitlab",
+ ClientId: "test_ClientId",
+ ClientSecret: "test_ClientSecret",
+ AuthURL: "test_AuthURL",
+ TokenURL: "test_TokenURL",
+ UserInfoURL: "test_UserInfoURL",
+ DisplayName: "test_DisplayName",
+ PKCE: types.Pointer(true),
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ provider, err := s.config.InitProvider()
+
+ hasErr := err != nil
+ if hasErr != s.expectedError {
+ t.Fatalf("Expected hasErr %v, got %v", s.expectedError, hasErr)
+ }
+
+ // on error the provider must be nil (no partially initialized value)
+ if hasErr {
+ if provider != nil {
+ t.Fatalf("Expected nil provider, got %v", provider)
+ }
+ return
+ }
+
+ factory, ok := auth.Providers[s.expectedConfig.Name]
+ if !ok {
+ t.Fatalf("Missing factory for provider %q", s.expectedConfig.Name)
+ }
+
+ // compare the dynamic type against a freshly constructed provider
+ expectedType := fmt.Sprintf("%T", factory())
+ providerType := fmt.Sprintf("%T", provider)
+ if expectedType != providerType {
+ t.Fatalf("Expected provider instanceof %q, got %q", expectedType, providerType)
+ }
+
+ if provider.ClientId() != s.expectedConfig.ClientId {
+ t.Fatalf("Expected ClientId %q, got %q", s.expectedConfig.ClientId, provider.ClientId())
+ }
+
+ if provider.ClientSecret() != s.expectedConfig.ClientSecret {
+ t.Fatalf("Expected ClientSecret %q, got %q", s.expectedConfig.ClientSecret, provider.ClientSecret())
+ }
+
+ if provider.AuthURL() != s.expectedConfig.AuthURL {
+ t.Fatalf("Expected AuthURL %q, got %q", s.expectedConfig.AuthURL, provider.AuthURL())
+ }
+
+ if provider.UserInfoURL() != s.expectedConfig.UserInfoURL {
+ t.Fatalf("Expected UserInfoURL %q, got %q", s.expectedConfig.UserInfoURL, provider.UserInfoURL())
+ }
+
+ if provider.TokenURL() != s.expectedConfig.TokenURL {
+ t.Fatalf("Expected TokenURL %q, got %q", s.expectedConfig.TokenURL, provider.TokenURL())
+ }
+
+ if provider.DisplayName() != s.expectedConfig.DisplayName {
+ t.Fatalf("Expected DisplayName %q, got %q", s.expectedConfig.DisplayName, provider.DisplayName())
+ }
+
+ if provider.PKCE() != *s.expectedConfig.PKCE {
+ t.Fatalf("Expected PKCE %v, got %v", *s.expectedConfig.PKCE, provider.PKCE())
+ }
+ })
+ }
+}
diff --git a/core/collection_model_auth_templates.go b/core/collection_model_auth_templates.go
new file mode 100644
index 00000000..eede2714
--- /dev/null
+++ b/core/collection_model_auth_templates.go
@@ -0,0 +1,75 @@
+package core
+
+// Common settings placeholder tokens substituted when rendering the
+// system email templates below.
+const (
+ EmailPlaceholderAppName string = "{APP_NAME}"
+ EmailPlaceholderAppURL string = "{APP_URL}"
+ EmailPlaceholderToken string = "{TOKEN}"
+ EmailPlaceholderOTP string = "{OTP}"
+ EmailPlaceholderOTPId string = "{OTP_ID}"
+)
+
+// defaultVerificationTemplate is the default email sent to verify a new
+// auth record's email address.
+// NOTE(review): the template body appears to have lost its HTML markup in
+// this patch view — confirm against the upstream file before relying on it.
+var defaultVerificationTemplate = EmailTemplate{
+ Subject: "Verify your " + EmailPlaceholderAppName + " email",
+ Body: `Hello,
+Thank you for joining us at ` + EmailPlaceholderAppName + `.
+Click on the button below to verify your email address.
+
+ Verify
+
+
+ Thanks,
+ ` + EmailPlaceholderAppName + ` team
+
`,
+}
+
+// defaultResetPasswordTemplate is the default password reset email.
+// NOTE(review): the template body appears to have lost its HTML markup in
+// this patch view — confirm against the upstream file before relying on it.
+var defaultResetPasswordTemplate = EmailTemplate{
+ Subject: "Reset your " + EmailPlaceholderAppName + " password",
+ Body: `Hello,
+Click on the button below to reset your password.
+
+ Reset password
+
+If you didn't ask to reset your password, you can ignore this email.
+
+ Thanks,
+ ` + EmailPlaceholderAppName + ` team
+
`,
+}
+
+// defaultConfirmEmailChangeTemplate is the default email-change confirmation email.
+// NOTE(review): the template body appears to have lost its HTML markup in
+// this patch view — confirm against the upstream file before relying on it.
+var defaultConfirmEmailChangeTemplate = EmailTemplate{
+ Subject: "Confirm your " + EmailPlaceholderAppName + " new email address",
+ Body: `Hello,
+Click on the button below to confirm your new email address.
+
+ Confirm new email
+
+If you didn't ask to change your email address, you can ignore this email.
+
+ Thanks,
+ ` + EmailPlaceholderAppName + ` team
+
`,
+}
+
+// defaultOTPTemplate is the default email carrying a one-time password.
+// NOTE(review): the template body appears to have lost its HTML markup in
+// this patch view — confirm against the upstream file before relying on it.
+var defaultOTPTemplate = EmailTemplate{
+ Subject: "OTP for " + EmailPlaceholderAppName,
+ Body: `Hello,
+Your one-time password is: ` + EmailPlaceholderOTP + `
+If you didn't ask for the one-time password, you can ignore this email.
+
+ Thanks,
+ ` + EmailPlaceholderAppName + ` team
+
`,
+}
+
+// defaultAuthAlertTemplate is the default "login from a new location" alert email.
+// NOTE(review): the template body appears to have lost its HTML markup in
+// this patch view — confirm against the upstream file before relying on it.
+var defaultAuthAlertTemplate = EmailTemplate{
+ Subject: "Login from a new location",
+ Body: `Hello,
+We noticed a login to your ` + EmailPlaceholderAppName + ` account from a new location.
+If this was you, you may disregard this email.
+If this wasn't you, you should immediately change your ` + EmailPlaceholderAppName + ` account password to revoke access from all other locations.
+
+ Thanks,
+ ` + EmailPlaceholderAppName + ` team
+
`,
+}
diff --git a/core/collection_model_base_options.go b/core/collection_model_base_options.go
new file mode 100644
index 00000000..924312bf
--- /dev/null
+++ b/core/collection_model_base_options.go
@@ -0,0 +1,11 @@
+package core
+
+var _ optionsValidator = (*collectionBaseOptions)(nil) // compile-time interface conformance check
+
+// collectionBaseOptions defines the options for the "base" type collection.
+// Base collections currently carry no extra options.
+type collectionBaseOptions struct {
+}
+
+// validate implements optionsValidator; there is nothing to validate for base collections.
+func (o *collectionBaseOptions) validate(cv *collectionValidator) error {
+ return nil
+}
diff --git a/core/collection_model_test.go b/core/collection_model_test.go
new file mode 100644
index 00000000..879f583a
--- /dev/null
+++ b/core/collection_model_test.go
@@ -0,0 +1,1335 @@
+package core_test
+
+import (
+ "encoding/json"
+ "fmt"
+ "slices"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/dbutils"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+// TestNewCollection verifies that core.NewCollection initializes a collection
+// of the requested type with the expected defaults (unknown/empty types fall
+// back to "base"), checking the serialized output for the expected fragments.
+func TestNewCollection(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ typ string
+ name string
+ expected []string
+ }{
+ {
+ "",
+ "",
+ []string{
+ `"id":""`,
+ `"name":""`,
+ `"type":"base"`,
+ `"system":false`,
+ `"indexes":[]`,
+ `"fields":[{`,
+ `"name":"id"`,
+ `"type":"text"`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ },
+ },
+ {
+ "unknown",
+ "test",
+ []string{
+ `"id":"_pbc_3632233996"`,
+ `"name":"test"`,
+ `"type":"base"`,
+ `"system":false`,
+ `"indexes":[]`,
+ `"fields":[{`,
+ `"name":"id"`,
+ `"type":"text"`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ },
+ },
+ {
+ "base",
+ "test",
+ []string{
+ `"id":"_pbc_3632233996"`,
+ `"name":"test"`,
+ `"type":"base"`,
+ `"system":false`,
+ `"indexes":[]`,
+ `"fields":[{`,
+ `"name":"id"`,
+ `"type":"text"`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ },
+ },
+ {
+ "view",
+ "test",
+ []string{
+ `"id":"_pbc_3632233996"`,
+ `"name":"test"`,
+ `"type":"view"`,
+ `"indexes":[]`,
+ `"fields":[]`,
+ `"system":false`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ },
+ },
+ {
+ "auth",
+ "test",
+ []string{
+ `"id":"_pbc_3632233996"`,
+ `"name":"test"`,
+ `"type":"auth"`,
+ `"fields":[{`,
+ `"system":false`,
+ `"type":"text"`,
+ `"type":"email"`,
+ `"name":"id"`,
+ `"name":"email"`,
+ `"name":"password"`,
+ `"name":"tokenKey"`,
+ `"name":"emailVisibility"`,
+ `"name":"verified"`,
+ `idx_email`,
+ `idx_tokenKey`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ `"identityFields":["email"]`,
+ },
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s_%s", i, s.typ, s.name), func(t *testing.T) {
+ result := core.NewCollection(s.typ, s.name).String()
+
+ for _, part := range s.expected {
+ if !strings.Contains(result, part) {
+ t.Fatalf("Missing part %q in\n%v", part, result)
+ }
+ }
+ })
+ }
+}
+
+// TestNewBaseCollection verifies the defaults produced by core.NewBaseCollection
+// by checking the serialized output for the expected fragments.
+func TestNewBaseCollection(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ expected []string
+ }{
+ {
+ "",
+ []string{
+ `"id":""`,
+ `"name":""`,
+ `"type":"base"`,
+ `"system":false`,
+ `"indexes":[]`,
+ `"fields":[{`,
+ `"name":"id"`,
+ `"type":"text"`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ },
+ },
+ {
+ "test",
+ []string{
+ `"id":"_pbc_3632233996"`,
+ `"name":"test"`,
+ `"type":"base"`,
+ `"system":false`,
+ `"indexes":[]`,
+ `"fields":[{`,
+ `"name":"id"`,
+ `"type":"text"`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ },
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.name), func(t *testing.T) {
+ result := core.NewBaseCollection(s.name).String()
+
+ for _, part := range s.expected {
+ if !strings.Contains(result, part) {
+ t.Fatalf("Missing part %q in\n%v", part, result)
+ }
+ }
+ })
+ }
+}
+
+// TestNewViewCollection verifies the defaults produced by core.NewViewCollection
+// (no fields/indexes) by checking the serialized output for the expected fragments.
+func TestNewViewCollection(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ expected []string
+ }{
+ {
+ "",
+ []string{
+ `"id":""`,
+ `"name":""`,
+ `"type":"view"`,
+ `"indexes":[]`,
+ `"fields":[]`,
+ `"system":false`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ },
+ },
+ {
+ "test",
+ []string{
+ `"id":"_pbc_3632233996"`,
+ `"name":"test"`,
+ `"type":"view"`,
+ `"indexes":[]`,
+ `"fields":[]`,
+ `"system":false`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ },
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.name), func(t *testing.T) {
+ result := core.NewViewCollection(s.name).String()
+
+ for _, part := range s.expected {
+ if !strings.Contains(result, part) {
+ t.Fatalf("Missing part %q in\n%v", part, result)
+ }
+ }
+ })
+ }
+}
+
+// TestNewAuthCollection verifies the defaults produced by core.NewAuthCollection
+// (system auth fields, email/tokenKey indexes, email identity field) by checking
+// the serialized output for the expected fragments.
+func TestNewAuthCollection(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ expected []string
+ }{
+ {
+ "",
+ []string{
+ `"id":""`,
+ `"name":""`,
+ `"type":"auth"`,
+ `"fields":[{`,
+ `"system":false`,
+ `"type":"text"`,
+ `"type":"email"`,
+ `"name":"id"`,
+ `"name":"email"`,
+ `"name":"password"`,
+ `"name":"tokenKey"`,
+ `"name":"emailVisibility"`,
+ `"name":"verified"`,
+ `idx_email`,
+ `idx_tokenKey`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ `"identityFields":["email"]`,
+ },
+ },
+ {
+ "test",
+ []string{
+ `"id":"_pbc_3632233996"`,
+ `"name":"test"`,
+ `"type":"auth"`,
+ `"fields":[{`,
+ `"system":false`,
+ `"type":"text"`,
+ `"type":"email"`,
+ `"name":"id"`,
+ `"name":"email"`,
+ `"name":"password"`,
+ `"name":"tokenKey"`,
+ `"name":"emailVisibility"`,
+ `"name":"verified"`,
+ `idx_email`,
+ `idx_tokenKey`,
+ `"listRule":null`,
+ `"viewRule":null`,
+ `"createRule":null`,
+ `"updateRule":null`,
+ `"deleteRule":null`,
+ `"identityFields":["email"]`,
+ },
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.name), func(t *testing.T) {
+ result := core.NewAuthCollection(s.name).String()
+
+ for _, part := range s.expected {
+ if !strings.Contains(result, part) {
+ t.Fatalf("Missing part %q in\n%v", part, result)
+ }
+ }
+ })
+ }
+}
+
+// TestCollectionTableName checks that every Collection maps to the shared
+// "_collections" system table.
+func TestCollectionTableName(t *testing.T) {
+ t.Parallel()
+
+ c := core.NewBaseCollection("test")
+ if c.TableName() != "_collections" {
+ t.Fatalf("Expected tableName %q, got %q", "_collections", c.TableName())
+ }
+}
+
+// TestCollectionBaseFilesPath checks that BaseFilesPath returns the collection
+// id (and an empty string when the collection has no id yet).
+func TestCollectionBaseFilesPath(t *testing.T) {
+ t.Parallel()
+
+ c := core.Collection{}
+
+ if c.BaseFilesPath() != "" {
+ t.Fatalf("Expected empty string, got %q", c.BaseFilesPath())
+ }
+
+ c.Id = "test"
+
+ if c.BaseFilesPath() != c.Id {
+ t.Fatalf("Expected %q, got %q", c.Id, c.BaseFilesPath())
+ }
+}
+
+// TestCollectionIsBase checks IsBase against every collection type.
+func TestCollectionIsBase(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ typ string
+ expected bool
+ }{
+ {"unknown", false},
+ {core.CollectionTypeBase, true},
+ {core.CollectionTypeView, false},
+ {core.CollectionTypeAuth, false},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.typ, func(t *testing.T) {
+ c := core.Collection{}
+ c.Type = s.typ
+
+ if v := c.IsBase(); v != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, v)
+ }
+ })
+ }
+}
+
+// TestCollectionIsView checks IsView against every collection type.
+func TestCollectionIsView(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ typ string
+ expected bool
+ }{
+ {"unknown", false},
+ {core.CollectionTypeBase, false},
+ {core.CollectionTypeView, true},
+ {core.CollectionTypeAuth, false},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.typ, func(t *testing.T) {
+ c := core.Collection{}
+ c.Type = s.typ
+
+ if v := c.IsView(); v != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, v)
+ }
+ })
+ }
+}
+
+// TestCollectionIsAuth checks IsAuth against every collection type.
+func TestCollectionIsAuth(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ typ string
+ expected bool
+ }{
+ {"unknown", false},
+ {core.CollectionTypeBase, false},
+ {core.CollectionTypeView, false},
+ {core.CollectionTypeAuth, true},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.typ, func(t *testing.T) {
+ c := core.Collection{}
+ c.Type = s.typ
+
+ if v := c.IsAuth(); v != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, v)
+ }
+ })
+ }
+}
+
+// TestCollectionPostScan checks that PostScan unmarshals RawOptions only into
+// the option fields relevant to the collection's type (viewQuery for views,
+// authRule for auth, nothing for base) and marks the model as not new.
+func TestCollectionPostScan(t *testing.T) {
+ t.Parallel()
+
+ rawOptions := types.JSONRaw(`{
+ "viewQuery":"select 1",
+ "authRule":"1=2"
+ }`)
+
+ scenarios := []struct {
+ typ string
+ rawOptions types.JSONRaw
+ expected []string
+ }{
+ {
+ core.CollectionTypeBase,
+ rawOptions,
+ []string{
+ `lastSavedPK:"test"`,
+ `ViewQuery:""`,
+ `AuthRule:(*string)(nil)`,
+ },
+ },
+ {
+ core.CollectionTypeView,
+ rawOptions,
+ []string{
+ `lastSavedPK:"test"`,
+ `ViewQuery:"select 1"`,
+ `AuthRule:(*string)(nil)`,
+ },
+ },
+ {
+ core.CollectionTypeAuth,
+ rawOptions,
+ []string{
+ `lastSavedPK:"test"`,
+ `ViewQuery:""`,
+ `AuthRule:(*string)(0x`,
+ },
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.typ), func(t *testing.T) {
+ c := core.Collection{}
+ c.Id = "test"
+ c.Type = s.typ
+ c.RawOptions = s.rawOptions
+
+ err := c.PostScan()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if c.IsNew() {
+ t.Fatal("Expected the collection to be marked as not new")
+ }
+
+ // %#v exposes unexported fields (e.g. lastSavedPK) for the substring checks
+ rawModel := fmt.Sprintf("%#v", c)
+
+ for _, part := range s.expected {
+ if !strings.Contains(rawModel, part) {
+ t.Fatalf("Missing part %q in\n%v", part, rawModel)
+ }
+ }
+ })
+ }
+}
+
+// TestCollectionUnmarshalJSON checks that unmarshaling into a Collection loads
+// the type defaults only for brand new collections without a preset type,
+// while existing or pre-typed collections just overlay the raw values.
+// NOTE(review): not parallel — presumably because the scenarios share the
+// same test app instance; confirm before adding t.Parallel().
+func TestCollectionUnmarshalJSON(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ name string
+ raw string
+ collection func() *core.Collection
+ expectedCollection func() *core.Collection
+ }{
+ {
+ "base new empty",
+ `{"type":"base","name":"test","listRule":"1=2","authRule":"1=3","viewQuery":"abc"}`,
+ func() *core.Collection {
+ return &core.Collection{}
+ },
+ func() *core.Collection {
+ c := core.NewBaseCollection("test")
+ c.ListRule = types.Pointer("1=2")
+ c.AuthRule = types.Pointer("1=3")
+ c.ViewQuery = "abc"
+ return c
+ },
+ },
+ {
+ "view new empty",
+ `{"type":"view","name":"test","listRule":"1=2","authRule":"1=3","viewQuery":"abc"}`,
+ func() *core.Collection {
+ return &core.Collection{}
+ },
+ func() *core.Collection {
+ c := core.NewViewCollection("test")
+ c.ListRule = types.Pointer("1=2")
+ c.AuthRule = types.Pointer("1=3")
+ c.ViewQuery = "abc"
+ return c
+ },
+ },
+ {
+ "auth new empty",
+ `{"type":"auth","name":"test","listRule":"1=2","authRule":"1=3","viewQuery":"abc"}`,
+ func() *core.Collection {
+ return &core.Collection{}
+ },
+ func() *core.Collection {
+ c := core.NewAuthCollection("test")
+ c.ListRule = types.Pointer("1=2")
+ c.AuthRule = types.Pointer("1=3")
+ c.ViewQuery = "abc"
+ return c
+ },
+ },
+ {
+ "new but with set type (no default fields load)",
+ `{"type":"base","name":"test","listRule":"1=2","authRule":"1=3","viewQuery":"abc"}`,
+ func() *core.Collection {
+ c := &core.Collection{}
+ c.Type = core.CollectionTypeBase
+ return c
+ },
+ func() *core.Collection {
+ c := &core.Collection{}
+ c.Type = core.CollectionTypeBase
+ c.Name = "test"
+ c.ListRule = types.Pointer("1=2")
+ c.AuthRule = types.Pointer("1=3")
+ c.ViewQuery = "abc"
+ return c
+ },
+ },
+ {
+ "existing (no default fields load)",
+ `{"type":"auth","name":"test","listRule":"1=2","authRule":"1=3","viewQuery":"abc"}`,
+ func() *core.Collection {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ return c
+ },
+ func() *core.Collection {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ c.Type = core.CollectionTypeAuth
+ c.Name = "test"
+ c.ListRule = types.Pointer("1=2")
+ c.AuthRule = types.Pointer("1=3")
+ c.ViewQuery = "abc"
+ return c
+ },
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ collection := s.collection()
+
+ err := json.Unmarshal([]byte(s.raw), collection)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // compare via the marshaled form to cover all exported state at once
+ rawResult, err := json.Marshal(collection)
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawResultStr := string(rawResult)
+
+ rawExpected, err := json.Marshal(s.expectedCollection())
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawExpectedStr := string(rawExpected)
+
+ if rawResultStr != rawExpectedStr {
+ t.Fatalf("Expected collection\n%s\ngot\n%s", rawExpectedStr, rawResultStr)
+ }
+ })
+ }
+}
+
+// TestCollectionSerialize checks the public JSON serialization per collection
+// type: type-irrelevant options and sensitive values (secrets, client secrets)
+// must be omitted, and MarshalJSON must agree with String().
+func TestCollectionSerialize(t *testing.T) {
+ scenarios := []struct {
+ name string
+ collection func() *core.Collection
+ expected []string
+ notExpected []string
+ }{
+ {
+ "base",
+ func() *core.Collection {
+ c := core.NewCollection(core.CollectionTypeBase, "test")
+ c.ViewQuery = "1=1"
+ c.OAuth2.Providers = []core.OAuth2ProviderConfig{
+ {Name: "test1", ClientId: "test_client_id1", ClientSecret: "test_client_secret1"},
+ {Name: "test2", ClientId: "test_client_id2", ClientSecret: "test_client_secret2"},
+ }
+
+ return c
+ },
+ []string{
+ `"id":"_pbc_3632233996"`,
+ `"name":"test"`,
+ `"type":"base"`,
+ },
+ []string{
+ "verificationTemplate",
+ "manageRule",
+ "authRule",
+ "secret",
+ "oauth2",
+ "clientId",
+ "clientSecret",
+ "viewQuery",
+ },
+ },
+ {
+ "view",
+ func() *core.Collection {
+ c := core.NewCollection(core.CollectionTypeView, "test")
+ c.ViewQuery = "1=1"
+ c.OAuth2.Providers = []core.OAuth2ProviderConfig{
+ {Name: "test1", ClientId: "test_client_id1", ClientSecret: "test_client_secret1"},
+ {Name: "test2", ClientId: "test_client_id2", ClientSecret: "test_client_secret2"},
+ }
+
+ return c
+ },
+ []string{
+ `"id":"_pbc_3632233996"`,
+ `"name":"test"`,
+ `"type":"view"`,
+ `"viewQuery":"1=1"`,
+ },
+ []string{
+ "verificationTemplate",
+ "manageRule",
+ "authRule",
+ "secret",
+ "oauth2",
+ "clientId",
+ "clientSecret",
+ },
+ },
+ {
+ "auth",
+ func() *core.Collection {
+ c := core.NewCollection(core.CollectionTypeAuth, "test")
+ c.ViewQuery = "1=1"
+ c.OAuth2.Providers = []core.OAuth2ProviderConfig{
+ {Name: "test1", ClientId: "test_client_id1", ClientSecret: "test_client_secret1"},
+ {Name: "test2", ClientId: "test_client_id2", ClientSecret: "test_client_secret2"},
+ }
+
+ return c
+ },
+ []string{
+ `"id":"_pbc_3632233996"`,
+ `"name":"test"`,
+ `"type":"auth"`,
+ `"oauth2":{`,
+ `"providers":[{`,
+ `"clientId":"test_client_id1"`,
+ `"clientId":"test_client_id2"`,
+ },
+ []string{
+ "viewQuery",
+ "secret",
+ "clientSecret",
+ },
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ collection := s.collection()
+
+ raw, err := collection.MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+
+ if rawStr != collection.String() {
+ t.Fatalf("Expected the same serialization, got\n%v\nVS\n%v", collection.String(), rawStr)
+ }
+
+ for _, part := range s.expected {
+ if !strings.Contains(rawStr, part) {
+ t.Fatalf("Missing part %q in\n%v", part, rawStr)
+ }
+ }
+
+ for _, part := range s.notExpected {
+ if strings.Contains(rawStr, part) {
+ t.Fatalf("Didn't expect part %q in\n%v", part, rawStr)
+ }
+ }
+ })
+ }
+}
+
+// TestCollectionDBExport verifies the DB column export of a Collection per
+// type: the "options" column must contain only the type-specific options
+// (viewQuery for views, the auth option set for auth collections, "{}" for
+// base/unknown) and the stale RawOptions value must be ignored.
+func TestCollectionDBExport(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ date, err := types.ParseDateTime("2024-07-01 01:02:03.456Z")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ typ string
+ expected string
+ }{
+ {
+ "unknown",
+ `{"createRule":"1=3","created":"2024-07-01 01:02:03.456Z","deleteRule":"1=5","fields":[{"hidden":false,"id":"bool597745380","name":"f1","presentable":false,"required":false,"system":true,"type":"bool"},{"hidden":false,"id":"bool3131674462","name":"f2","presentable":false,"required":true,"system":false,"type":"bool"}],"id":"test_id","indexes":["CREATE INDEX idx1 on test_name(id)","CREATE INDEX idx2 on test_name(id)"],"listRule":"1=1","name":"test_name","options":"{}","system":true,"type":"unknown","updateRule":"1=4","updated":"2024-07-01 01:02:03.456Z","viewRule":"1=7"}`,
+ },
+ {
+ core.CollectionTypeBase,
+ `{"createRule":"1=3","created":"2024-07-01 01:02:03.456Z","deleteRule":"1=5","fields":[{"hidden":false,"id":"bool597745380","name":"f1","presentable":false,"required":false,"system":true,"type":"bool"},{"hidden":false,"id":"bool3131674462","name":"f2","presentable":false,"required":true,"system":false,"type":"bool"}],"id":"test_id","indexes":["CREATE INDEX idx1 on test_name(id)","CREATE INDEX idx2 on test_name(id)"],"listRule":"1=1","name":"test_name","options":"{}","system":true,"type":"base","updateRule":"1=4","updated":"2024-07-01 01:02:03.456Z","viewRule":"1=7"}`,
+ },
+ {
+ core.CollectionTypeView,
+ `{"createRule":"1=3","created":"2024-07-01 01:02:03.456Z","deleteRule":"1=5","fields":[{"hidden":false,"id":"bool597745380","name":"f1","presentable":false,"required":false,"system":true,"type":"bool"},{"hidden":false,"id":"bool3131674462","name":"f2","presentable":false,"required":true,"system":false,"type":"bool"}],"id":"test_id","indexes":["CREATE INDEX idx1 on test_name(id)","CREATE INDEX idx2 on test_name(id)"],"listRule":"1=1","name":"test_name","options":{"viewQuery":"select 1"},"system":true,"type":"view","updateRule":"1=4","updated":"2024-07-01 01:02:03.456Z","viewRule":"1=7"}`,
+ },
+ {
+ core.CollectionTypeAuth,
+ `{"createRule":"1=3","created":"2024-07-01 01:02:03.456Z","deleteRule":"1=5","fields":[{"hidden":false,"id":"bool597745380","name":"f1","presentable":false,"required":false,"system":true,"type":"bool"},{"hidden":false,"id":"bool3131674462","name":"f2","presentable":false,"required":true,"system":false,"type":"bool"}],"id":"test_id","indexes":["CREATE INDEX idx1 on test_name(id)","CREATE INDEX idx2 on test_name(id)"],"listRule":"1=1","name":"test_name","options":{"authRule":null,"manageRule":"1=6","authAlert":{"enabled":false,"emailTemplate":{"subject":"","body":""}},"oauth2":{"providers":null,"mappedFields":{"id":"","name":"","username":"","avatarURL":""},"enabled":false},"passwordAuth":{"enabled":false,"identityFields":null},"mfa":{"enabled":false,"duration":0,"rule":""},"otp":{"enabled":false,"duration":0,"length":0,"emailTemplate":{"subject":"","body":""}},"authToken":{"duration":0},"passwordResetToken":{"duration":0},"emailChangeToken":{"duration":0},"verificationToken":{"duration":0},"fileToken":{"duration":0},"verificationTemplate":{"subject":"","body":""},"resetPasswordTemplate":{"subject":"","body":""},"confirmEmailChangeTemplate":{"subject":"","body":""}},"system":true,"type":"auth","updateRule":"1=4","updated":"2024-07-01 01:02:03.456Z","viewRule":"1=7"}`,
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.typ), func(t *testing.T) {
+ c := core.Collection{}
+ c.Type = s.typ
+ c.Id = "test_id"
+ c.Name = "test_name"
+ c.System = true
+ c.ListRule = types.Pointer("1=1")
+ c.CreateRule = types.Pointer("1=3")
+ c.UpdateRule = types.Pointer("1=4")
+ c.DeleteRule = types.Pointer("1=5")
+ c.ManageRule = types.Pointer("1=6")
+ c.ViewRule = types.Pointer("1=7") // matches "viewRule":"1=7" in the expected output
+ c.Created = date
+ c.Updated = date
+ c.Indexes = types.JSONArray[string]{"CREATE INDEX idx1 on test_name(id)", "CREATE INDEX idx2 on test_name(id)"}
+ c.ViewQuery = "select 1"
+ c.Fields.Add(&core.BoolField{Name: "f1", System: true})
+ c.Fields.Add(&core.BoolField{Name: "f2", Required: true})
+ c.RawOptions = types.JSONRaw(`{"viewQuery": "select 2"}`) // should be ignored
+
+ result, err := c.DBExport(app)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ raw, err := json.Marshal(result)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if str := string(raw); str != s.expected {
+ t.Fatalf("Expected\n%v\ngot\n%v", s.expected, str)
+ }
+ })
+ }
+}
+
+// TestCollectionIndexHelpers exercises AddIndex / RemoveIndex / GetIndex:
+// re-adding an index with the same name overwrites it, and name matching
+// for removal/lookup is case-insensitive.
+func TestCollectionIndexHelpers(t *testing.T) {
+ t.Parallel()
+
+ // checkIndexes asserts that indexes contains exactly expectedIndexes (order-insensitive).
+ checkIndexes := func(t *testing.T, indexes, expectedIndexes []string) {
+ if len(indexes) != len(expectedIndexes) {
+ t.Fatalf("Expected %d indexes, got %d\n%v", len(expectedIndexes), len(indexes), indexes)
+ }
+
+ for _, idx := range expectedIndexes {
+ if !slices.Contains(indexes, idx) {
+ t.Fatalf("Missing index\n%v\nin\n%v", idx, indexes)
+ }
+ }
+ }
+
+ c := core.NewBaseCollection("test")
+ checkIndexes(t, c.Indexes, nil)
+
+ c.AddIndex("idx1", false, "colA,colB", "colA != 1")
+ c.AddIndex("idx2", true, "colA", "")
+ c.AddIndex("idx3", false, "colA", "")
+ c.AddIndex("idx3", false, "colB", "") // should overwrite the previous one
+
+ idx1 := "CREATE INDEX `idx1` ON `test` (colA,colB) WHERE colA != 1"
+ idx2 := "CREATE UNIQUE INDEX `idx2` ON `test` (colA)"
+ idx3 := "CREATE INDEX `idx3` ON `test` (colB)"
+
+ checkIndexes(t, c.Indexes, []string{idx1, idx2, idx3})
+
+ c.RemoveIndex("iDx2") // case-insensitive
+ c.RemoveIndex("missing") // noop
+
+ checkIndexes(t, c.Indexes, []string{idx1, idx3})
+
+ expectedIndexes := map[string]string{
+ "missing": "",
+ "idx1": idx1,
+ // the name is case insensitive
+ "iDX3": idx3,
+ }
+ for key, expectedIdx := range expectedIndexes {
+ idx := c.GetIndex(key)
+ if idx != expectedIdx {
+ t.Errorf("Expected index %q to be\n%v\ngot\n%v", key, expectedIdx, idx)
+ }
+ }
+}
+
+// -------------------------------------------------------------------
+
+func TestCollectionDelete(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ collection string
+ disableIntegrityChecks bool
+ expectError bool
+ }{
+ {
+ name: "unsaved",
+ collection: "",
+ expectError: true,
+ },
+ {
+ name: "system",
+ collection: core.CollectionNameSuperusers,
+ expectError: true,
+ },
+ {
+ name: "base with references",
+ collection: "demo1",
+ expectError: true,
+ },
+ {
+ name: "base with references with disabled integrity checks",
+ collection: "demo1",
+ disableIntegrityChecks: true,
+ expectError: false,
+ },
+ {
+ name: "base without references",
+ collection: "demo1",
+ expectError: true,
+ },
+ {
+ name: "view with reference",
+ collection: "view1",
+ expectError: true,
+ },
+ {
+ name: "view with references with disabled integrity checks",
+ collection: "view1",
+ disableIntegrityChecks: true,
+ expectError: false,
+ },
+ {
+ name: "view without references",
+ collection: "view2",
+ disableIntegrityChecks: true,
+ expectError: false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ var col *core.Collection
+
+ if s.collection == "" {
+ col = core.NewBaseCollection("test")
+ } else {
+ var err error
+ col, err = app.FindCollectionByNameOrId(s.collection)
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+
+ if s.disableIntegrityChecks {
+ col.IntegrityChecks(!s.disableIntegrityChecks)
+ }
+
+ err := app.Delete(col)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ exists := app.HasTable(col.Name)
+
+ if !col.IsNew() && exists != hasErr {
+ t.Fatalf("Expected HasTable %v, got %v", hasErr, exists)
+ }
+
+ if !hasErr {
+ cache, _ := app.FindCachedCollectionByNameOrId(col.Id)
+ if cache != nil {
+ t.Fatal("Expected the collection to be removed from the cache.")
+ }
+ }
+ })
+ }
+}
+
+func TestCollectionSaveModel(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ collection func(app core.App) (*core.Collection, error)
+ expectError bool
+ expectColumns []string
+ }{
+ // trigger validators
+ {
+ name: "create - trigger validators",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("!invalid")
+ c.Fields.Add(&core.TextField{Name: "example"})
+ c.AddIndex("test_save_idx", false, "example", "")
+ return c, nil
+ },
+ expectError: true,
+ },
+ {
+ name: "update - trigger validators",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo5")
+ c.Name = "demo1"
+ c.Fields.Add(&core.TextField{Name: "example"})
+ c.Fields.RemoveByName("file")
+ c.AddIndex("test_save_idx", false, "example", "")
+ return c, nil
+ },
+ expectError: true,
+ },
+
+ // create
+ {
+ name: "create base collection",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("new")
+ c.Type = "" // should be auto set to "base"
+ c.Fields.RemoveByName("id") // ensure that the default fields will be loaded
+ c.Fields.Add(&core.TextField{Name: "example"})
+ c.AddIndex("test_save_idx", false, "example", "")
+ return c, nil
+ },
+ expectError: false,
+ expectColumns: []string{
+ "id", "example",
+ },
+ },
+ {
+ name: "create auth collection",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new")
+ c.Fields.RemoveByName("id") // ensure that the default fields will be loaded
+ c.Fields.RemoveByName("email") // ensure that the default fields will be loaded
+ c.Fields.Add(&core.TextField{Name: "example"})
+ c.AddIndex("test_save_idx", false, "example", "")
+ return c, nil
+ },
+ expectError: false,
+ expectColumns: []string{
+ "id", "email", "tokenKey", "password",
+ "verified", "emailVisibility", "example",
+ },
+ },
+ {
+ name: "create view collection",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewViewCollection("new")
+ c.Fields.Add(&core.TextField{Name: "ignored"}) // should be ignored
+ c.ViewQuery = "select 1 as id, 2 as example"
+ return c, nil
+ },
+ expectError: false,
+ expectColumns: []string{
+ "id", "example",
+ },
+ },
+
+ // update
+ {
+ name: "update base collection",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo5")
+ c.Fields.Add(&core.TextField{Name: "example"})
+ c.Fields.RemoveByName("file")
+ c.Fields.GetByName("total").SetName("total_updated")
+ c.AddIndex("test_save_idx", false, "example", "")
+ return c, nil
+ },
+ expectError: false,
+ expectColumns: []string{
+ "id", "select_one", "select_many", "rel_one", "rel_many",
+ "total_updated", "created", "updated", "example",
+ },
+ },
+ {
+ name: "update auth collection",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("clients")
+ c.Fields.Add(&core.TextField{Name: "example"})
+ c.Fields.RemoveByName("file")
+ c.Fields.GetByName("name").SetName("name_updated")
+ c.AddIndex("test_save_idx", false, "example", "")
+ return c, nil
+ },
+ expectError: false,
+ expectColumns: []string{
+ "id", "email", "emailVisibility", "password", "tokenKey",
+ "verified", "username", "name_updated", "created", "updated", "example",
+ },
+ },
+ {
+ name: "update view collection",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("view2")
+ c.Fields.Add(&core.TextField{Name: "example"}) // should be ignored
+ c.ViewQuery = "select 1 as id, 2 as example"
+ return c, nil
+ },
+ expectError: false,
+ expectColumns: []string{
+ "id", "example",
+ },
+ },
+
+ // auth normalization
+ {
+ name: "unset missing oauth2 mapped fields",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new")
+ c.OAuth2.Enabled = true
+ // shouldn't fail
+ c.OAuth2.MappedFields = core.OAuth2KnownFields{
+ Id: "missing",
+ Name: "missing",
+ Username: "missing",
+ AvatarURL: "missing",
+ }
+ return c, nil
+ },
+ expectError: false,
+ expectColumns: []string{
+ "id", "email", "emailVisibility", "password", "tokenKey", "verified",
+ },
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection, err := s.collection(app)
+ if err != nil {
+ t.Fatalf("Failed to retrieve test collection: %v", err)
+ }
+
+ saveErr := app.Save(collection)
+
+ hasErr := saveErr != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, saveErr)
+ }
+
+ if hasErr {
+ return
+ }
+
+ // the collection should always have an id after successful Save
+ if collection.Id == "" {
+ t.Fatal("Expected collection id to be set")
+ }
+
+ // the timestamp fields should be non-empty after successful Save
+ if collection.Created.String() == "" {
+ t.Fatal("Expected collection created to be set")
+ }
+ if collection.Updated.String() == "" {
+ t.Fatal("Expected collection updated to be set")
+ }
+
+ // check if the records table was synced
+ hasTable := app.HasTable(collection.Name)
+ if !hasTable {
+ t.Fatalf("Expected records table %s to be created", collection.Name)
+ }
+
+ // check if the records table has the expected field columns
+ columns, err := app.TableColumns(collection.Name)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(columns) != len(s.expectColumns) {
+ t.Fatalf("Expected columns\n%v\ngot\n%v", s.expectColumns, columns)
+ }
+ for i, c := range columns {
+ if !slices.Contains(s.expectColumns, c) {
+ t.Fatalf("[%d] Didn't expect record column %q", i, c)
+ }
+ }
+
+ // make sure that all collection indexes exists
+ indexes, err := app.TableIndexes(collection.Name)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if len(indexes) != len(collection.Indexes) {
+ t.Fatalf("Expected %d indexes, got %d", len(collection.Indexes), len(indexes))
+ }
+ for _, idx := range collection.Indexes {
+ parsed := dbutils.ParseIndex(idx)
+ if _, ok := indexes[parsed.IndexName]; !ok {
+ t.Fatalf("Missing index %q in\n%v", idx, indexes)
+ }
+ }
+ })
+ }
+}
+
+// indirect update of a field used in view should cause view(s) update
+func TestCollectionSaveIndirectViewsUpdate(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection, err := app.FindCollectionByNameOrId("demo1")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // update MaxSelect fields
+ {
+ relMany := collection.Fields.GetByName("rel_many").(*core.RelationField)
+ relMany.MaxSelect = 1
+
+ fileOne := collection.Fields.GetByName("file_one").(*core.FileField)
+ fileOne.MaxSelect = 10
+
+ if err := app.Save(collection); err != nil {
+ t.Fatal(err)
+ }
+ }
+
+ // check view1 fields
+ {
+ view1, err := app.FindCollectionByNameOrId("view1")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ relMany := view1.Fields.GetByName("rel_many").(*core.RelationField)
+ if relMany.MaxSelect != 1 {
+ t.Fatalf("Expected view1.rel_many MaxSelect to be %d, got %v", 1, relMany.MaxSelect)
+ }
+
+ fileOne := view1.Fields.GetByName("file_one").(*core.FileField)
+ if fileOne.MaxSelect != 10 {
+ t.Fatalf("Expected view1.file_one MaxSelect to be %d, got %v", 10, fileOne.MaxSelect)
+ }
+ }
+
+ // check view2 fields
+ {
+ view2, err := app.FindCollectionByNameOrId("view2")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ relMany := view2.Fields.GetByName("rel_many").(*core.RelationField)
+ if relMany.MaxSelect != 1 {
+ t.Fatalf("Expected view2.rel_many MaxSelect to be %d, got %v", 1, relMany.MaxSelect)
+ }
+ }
+}
+
+func TestCollectionSaveViewWrapping(t *testing.T) {
+ t.Parallel()
+
+ viewName := "test_wrapping"
+
+ scenarios := []struct {
+ name string
+ query string
+ expected string
+ }{
+ {
+ "no wrapping - text field",
+ "select text as id, bool from demo1",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (select text as id, bool from demo1)",
+ },
+ {
+ "no wrapping - id field",
+ "select text as id, bool from demo1",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (select text as id, bool from demo1)",
+ },
+ {
+ "no wrapping - relation field",
+ "select rel_one as id, bool from demo1",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (select rel_one as id, bool from demo1)",
+ },
+ {
+ "no wrapping - select field",
+ "select select_many as id, bool from demo1",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (select select_many as id, bool from demo1)",
+ },
+ {
+ "no wrapping - email field",
+ "select email as id, bool from demo1",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (select email as id, bool from demo1)",
+ },
+ {
+ "no wrapping - datetime field",
+ "select datetime as id, bool from demo1",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (select datetime as id, bool from demo1)",
+ },
+ {
+ "no wrapping - url field",
+ "select url as id, bool from demo1",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (select url as id, bool from demo1)",
+ },
+ {
+ "wrapping - bool field",
+ "select bool as id, text as txt, url from demo1",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (SELECT CAST(`id` as TEXT) `id`,`txt`,`url` FROM (select bool as id, text as txt, url from demo1))",
+ },
+ {
+ "wrapping - bool field (different order)",
+ "select text as txt, url, bool as id from demo1",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (SELECT `txt`,`url`,CAST(`id` as TEXT) `id` FROM (select text as txt, url, bool as id from demo1))",
+ },
+ {
+ "wrapping - json field",
+ "select json as id, text, url from demo1",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (SELECT CAST(`id` as TEXT) `id`,`text`,`url` FROM (select json as id, text, url from demo1))",
+ },
+ {
+ "wrapping - numeric id",
+ "select 1 as id",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (SELECT CAST(`id` as TEXT) `id` FROM (select 1 as id))",
+ },
+ {
+ "wrapping - expression",
+ "select ('test') as id",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (SELECT CAST(`id` as TEXT) `id` FROM (select ('test') as id))",
+ },
+ {
+ "no wrapping - cast as text",
+ "select cast('test' as text) as id",
+ "CREATE VIEW `test_wrapping` AS SELECT * FROM (select cast('test' as text) as id)",
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewViewCollection(viewName)
+ collection.ViewQuery = s.query
+
+ err := app.Save(collection)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ var sql string
+
+ rowErr := app.DB().NewQuery("SELECT sql FROM sqlite_master WHERE type='view' AND name={:name}").
+ Bind(dbx.Params{"name": viewName}).
+ Row(&sql)
+ if rowErr != nil {
+ t.Fatalf("Failed to retrieve view sql: %v", rowErr)
+ }
+
+ if sql != s.expected {
+ t.Fatalf("Expected query \n%v, \ngot \n%v", s.expected, sql)
+ }
+ })
+ }
+}
diff --git a/core/collection_model_view_options.go b/core/collection_model_view_options.go
new file mode 100644
index 00000000..ffe2859a
--- /dev/null
+++ b/core/collection_model_view_options.go
@@ -0,0 +1,18 @@
+package core
+
+import (
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+)
+
+var _ optionsValidator = (*collectionViewOptions)(nil)
+
+// collectionViewOptions defines the options for the "view" type collection.
+type collectionViewOptions struct {
+ ViewQuery string `form:"viewQuery" json:"viewQuery"`
+}
+
+func (o *collectionViewOptions) validate(cv *collectionValidator) error {
+ return validation.ValidateStruct(o,
+ validation.Field(&o.ViewQuery, validation.Required, validation.By(cv.checkViewQuery)),
+ )
+}
diff --git a/core/collection_model_view_options_test.go b/core/collection_model_view_options_test.go
new file mode 100644
index 00000000..c7da1eff
--- /dev/null
+++ b/core/collection_model_view_options_test.go
@@ -0,0 +1,79 @@
+package core_test
+
+import (
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestCollectionViewOptionsValidate(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ collection func(app core.App) (*core.Collection, error)
+ expectedErrors []string
+ }{
+ {
+ name: "view with empty query",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewViewCollection("new_auth")
+ return c, nil
+ },
+ expectedErrors: []string{"fields", "viewQuery"},
+ },
+ {
+ name: "view with invalid query",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewViewCollection("new_auth")
+ c.ViewQuery = "invalid"
+ return c, nil
+ },
+ expectedErrors: []string{"fields", "viewQuery"},
+ },
+ {
+ name: "view with valid query but missing id",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewViewCollection("new_auth")
+ c.ViewQuery = "select 1"
+ return c, nil
+ },
+ expectedErrors: []string{"fields", "viewQuery"},
+ },
+ {
+ name: "view with valid query",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewViewCollection("new_auth")
+ c.ViewQuery = "select demo1.id, text as example from demo1"
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "update view query",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("view2")
+ c.ViewQuery = "select demo1.id, text as example from demo1"
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection, err := s.collection(app)
+ if err != nil {
+ t.Fatalf("Failed to retrieve test collection: %v", err)
+ }
+
+ result := app.Validate(collection)
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
diff --git a/core/collection_query.go b/core/collection_query.go
new file mode 100644
index 00000000..f5019899
--- /dev/null
+++ b/core/collection_query.go
@@ -0,0 +1,344 @@
+package core
+
+import (
+ "bytes"
+ "database/sql"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "strings"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/tools/list"
+)
+
+const StoreKeyCachedCollections = "pbAppCachedCollections"
+
+// CollectionQuery returns a new Collection select query.
+func (app *BaseApp) CollectionQuery() *dbx.SelectQuery {
+ return app.ModelQuery(&Collection{})
+}
+
+// FindAllCollections finds all collections by the given type(s).
+//
+// If collectionTypes is not set, it returns all collections.
+//
+// Example:
+//
+// app.FindAllCollections() // all collections
+// app.FindAllCollections("auth", "view") // only auth and view collections
+func (app *BaseApp) FindAllCollections(collectionTypes ...string) ([]*Collection, error) {
+ collections := []*Collection{}
+
+ q := app.CollectionQuery()
+
+ types := list.NonzeroUniques(collectionTypes)
+ if len(types) > 0 {
+ q.AndWhere(dbx.In("type", list.ToInterfaceSlice(types)...))
+ }
+
+ err := q.OrderBy("created ASC").All(&collections)
+ if err != nil {
+ return nil, err
+ }
+
+ return collections, nil
+}
+
+// ReloadCachedCollections fetches all collections and caches them into the app store.
+func (app *BaseApp) ReloadCachedCollections() error {
+ collections, err := app.FindAllCollections()
+ if err != nil {
+ return err
+ }
+
+ app.Store().Set(StoreKeyCachedCollections, collections)
+
+ return nil
+}
+
+// FindCollectionByNameOrId finds a single collection by its name (case insensitive) or id.
+func (app *BaseApp) FindCollectionByNameOrId(nameOrId string) (*Collection, error) {
+ m := &Collection{}
+
+ err := app.CollectionQuery().
+ AndWhere(dbx.NewExp("[[id]]={:id} OR LOWER([[name]])={:name}", dbx.Params{
+ "id": nameOrId,
+ "name": strings.ToLower(nameOrId),
+ })).
+ Limit(1).
+ One(m)
+ if err != nil {
+ return nil, err
+ }
+
+ return m, nil
+}
+
+// FindCachedCollectionByNameOrId is similar to [App.FindCollectionByNameOrId]
+// but retrieves the Collection from the app cache instead of making a db call.
+//
+// NB! This method is suitable for read-only Collection operations.
+//
+// Returns [sql.ErrNoRows] if no Collection is found for consistency
+// with the [App.FindCollectionByNameOrId] method.
+//
+// If you plan making changes to the returned Collection model,
+// use [App.FindCollectionByNameOrId] instead.
+//
+// Caveats:
+//
+// - The returned Collection should be used only for read-only operations.
+// Avoid directly modifying the returned cached Collection as it will affect
+// the global cached value even if you don't persist the changes in the database!
+// - If you are updating a Collection in a transaction and then call this method before commit,
+// it'll return the cached Collection state and not the one from the uncommitted transaction.
+// - The cache is automatically updated on collections db change (create/update/delete).
+// To manually reload the cache you can call [App.ReloadCachedCollections()]
+func (app *BaseApp) FindCachedCollectionByNameOrId(nameOrId string) (*Collection, error) {
+ collections, _ := app.Store().Get(StoreKeyCachedCollections).([]*Collection)
+ if collections == nil {
+ // cache is not initialized yet (eg. run in a system migration)
+ return app.FindCollectionByNameOrId(nameOrId)
+ }
+
+ for _, c := range collections {
+ if strings.EqualFold(c.Name, nameOrId) || c.Id == nameOrId {
+ return c, nil
+ }
+ }
+
+ return nil, sql.ErrNoRows
+}
+
+// IsCollectionNameUnique checks that there is no existing collection
+// with the provided name (case insensitive!).
+//
+// Note: case insensitive check because the name is used also as
+// table name for the records.
+func (app *BaseApp) IsCollectionNameUnique(name string, excludeIds ...string) bool {
+ if name == "" {
+ return false
+ }
+
+ query := app.CollectionQuery().
+ Select("count(*)").
+ AndWhere(dbx.NewExp("LOWER([[name]])={:name}", dbx.Params{"name": strings.ToLower(name)})).
+ Limit(1)
+
+ if uniqueExcludeIds := list.NonzeroUniques(excludeIds); len(uniqueExcludeIds) > 0 {
+ query.AndWhere(dbx.NotIn("id", list.ToInterfaceSlice(uniqueExcludeIds)...))
+ }
+
+ var exists bool
+
+ return query.Row(&exists) == nil && !exists
+}
+
+// FindCollectionReferences returns information for all relation fields
+// referencing the provided collection.
+//
+// If the provided collection has reference to itself then it will be
+// also included in the result. To exclude it, pass the collection id
+// as the excludeIds argument.
+func (app *BaseApp) FindCollectionReferences(collection *Collection, excludeIds ...string) (map[*Collection][]Field, error) {
+ collections := []*Collection{}
+
+ query := app.CollectionQuery()
+
+ if uniqueExcludeIds := list.NonzeroUniques(excludeIds); len(uniqueExcludeIds) > 0 {
+ query.AndWhere(dbx.NotIn("id", list.ToInterfaceSlice(uniqueExcludeIds)...))
+ }
+
+ if err := query.All(&collections); err != nil {
+ return nil, err
+ }
+
+ result := map[*Collection][]Field{}
+
+ for _, c := range collections {
+ for _, rawField := range c.Fields {
+ f, ok := rawField.(*RelationField)
+ if ok && f.CollectionId == collection.Id {
+ result[c] = append(result[c], f)
+ }
+ }
+ }
+
+ return result, nil
+}
+
+// TruncateCollection deletes all records associated with the provided collection.
+//
+// The truncate operation is executed in a single transaction,
+// aka. either everything is deleted or none.
+//
+// Note that this method will also trigger the records related
+// cascade and file delete actions.
+func (app *BaseApp) TruncateCollection(collection *Collection) error {
+ return app.RunInTransaction(func(txApp App) error {
+ records := make([]*Record, 0, 500)
+
+ for {
+ err := txApp.RecordQuery(collection).Limit(500).All(&records)
+ if err != nil {
+ return err
+ }
+
+ if len(records) == 0 {
+ return nil
+ }
+
+ for _, record := range records {
+ err = txApp.Delete(record)
+ if err != nil && !errors.Is(err, sql.ErrNoRows) {
+ return err
+ }
+ }
+
+ records = records[:0]
+ }
+ })
+}
+
+// -------------------------------------------------------------------
+
+// saveViewCollection persists the provided View collection changes:
+// - deletes the old related SQL view (if any)
+// - creates a new SQL view with the latest newCollection.ViewQuery
+// - generates new fields list based on newCollection.ViewQuery
+// - updates newCollection.Fields based on the generated view table info and query
+// - saves the newCollection
+//
+// This method returns an error if newCollection is not a "view".
+func saveViewCollection(app App, newCollection, oldCollection *Collection) error {
+ if !newCollection.IsView() {
+ return errors.New("not a view collection")
+ }
+
+ return app.RunInTransaction(func(txApp App) error {
+ query := newCollection.ViewQuery
+
+ // generate collection fields from the query
+ viewFields, err := txApp.CreateViewFields(query)
+ if err != nil {
+ return err
+ }
+
+ // delete old renamed view
+ if oldCollection != nil {
+ if err := txApp.DeleteView(oldCollection.Name); err != nil {
+ return err
+ }
+ }
+
+ // wrap view query if necessary
+ query, err = normalizeViewQueryId(txApp, query)
+ if err != nil {
+ return fmt.Errorf("failed to normalize view query id: %w", err)
+ }
+
+ // (re)create the view
+ if err := txApp.SaveView(newCollection.Name, query); err != nil {
+ return err
+ }
+
+ newCollection.Fields = viewFields
+
+ return txApp.Save(newCollection)
+ })
+}
+
+// normalizeViewQueryId wraps (if necessary) the provided view query
+// with a subselect to ensure that the id column is a text since
+// currently we don't support non-string model ids
+// (see https://github.com/pocketbase/pocketbase/issues/3110).
+func normalizeViewQueryId(app App, query string) (string, error) {
+ query = strings.Trim(strings.TrimSpace(query), ";")
+
+ info, err := getQueryTableInfo(app, query)
+ if err != nil {
+ return "", err
+ }
+
+ for _, row := range info {
+ if strings.EqualFold(row.Name, FieldNameId) && strings.EqualFold(row.Type, "TEXT") {
+ return query, nil // no wrapping needed
+ }
+ }
+
+ // raw parse to preserve the columns order
+ rawParsed := new(identifiersParser)
+ if err := rawParsed.parse(query); err != nil {
+ return "", err
+ }
+
+ columns := make([]string, 0, len(rawParsed.columns))
+ for _, col := range rawParsed.columns {
+ if col.alias == FieldNameId {
+ columns = append(columns, fmt.Sprintf("CAST([[%s]] as TEXT) [[%s]]", col.alias, col.alias))
+ } else {
+ columns = append(columns, "[["+col.alias+"]]")
+ }
+ }
+
+ query = fmt.Sprintf("SELECT %s FROM (%s)", strings.Join(columns, ","), query)
+
+ return query, nil
+}
+
+// resaveViewsWithChangedFields updates all view collections with changed fields.
+func resaveViewsWithChangedFields(app App, excludeIds ...string) error {
+ collections, err := app.FindAllCollections(CollectionTypeView)
+ if err != nil {
+ return err
+ }
+
+ return app.RunInTransaction(func(txApp App) error {
+ for _, collection := range collections {
+ if len(excludeIds) > 0 && list.ExistInSlice(collection.Id, excludeIds) {
+ continue
+ }
+
+ // clone the existing fields for temp modifications
+ oldFields, err := collection.Fields.Clone()
+ if err != nil {
+ return err
+ }
+
+ // generate new fields from the query
+ newFields, err := txApp.CreateViewFields(collection.ViewQuery)
+ if err != nil {
+ return err
+ }
+
+ // unset the fields' ids to exclude from the comparison
+ for _, f := range oldFields {
+ f.SetId("")
+ }
+ for _, f := range newFields {
+ f.SetId("")
+ }
+
+ encodedNewFields, err := json.Marshal(newFields)
+ if err != nil {
+ return err
+ }
+
+ encodedOldFields, err := json.Marshal(oldFields)
+ if err != nil {
+ return err
+ }
+
+ if bytes.EqualFold(encodedNewFields, encodedOldFields) {
+ continue // no changes
+ }
+
+ if err := saveViewCollection(txApp, collection, nil); err != nil {
+ return err
+ }
+ }
+
+ return nil
+ })
+}
diff --git a/core/collection_query_test.go b/core/collection_query_test.go
new file mode 100644
index 00000000..f612ff96
--- /dev/null
+++ b/core/collection_query_test.go
@@ -0,0 +1,381 @@
+package core_test
+
+import (
+ "context"
+ "database/sql"
+ "fmt"
+ "os"
+ "path/filepath"
+ "slices"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/list"
+)
+
+func TestCollectionQuery(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ expected := "SELECT {{_collections}}.* FROM `_collections`"
+
+ sql := app.CollectionQuery().Build().SQL()
+ if sql != expected {
+ t.Errorf("Expected sql %s, got %s", expected, sql)
+ }
+}
+
+func TestReloadCachedCollections(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ err := app.ReloadCachedCollections()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ cached := app.Store().Get(core.StoreKeyCachedCollections)
+
+ cachedCollections, ok := cached.([]*core.Collection)
+ if !ok {
+ t.Fatalf("Expected []*core.Collection, got %T", cached)
+ }
+
+ collections, err := app.FindAllCollections()
+ if err != nil {
+ t.Fatalf("Failed to retrieve all collections: %v", err)
+ }
+
+ if len(cachedCollections) != len(collections) {
+ t.Fatalf("Expected %d collections, got %d", len(collections), len(cachedCollections))
+ }
+
+ for _, c := range collections {
+ var exists bool
+ for _, cc := range cachedCollections {
+ if cc.Id == c.Id {
+ exists = true
+ break
+ }
+ }
+ if !exists {
+ t.Fatalf("The collections cache is missing collection %q", c.Name)
+ }
+ }
+}
+
+func TestFindAllCollections(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ collectionTypes []string
+ expectTotal int
+ }{
+ {nil, 16},
+ {[]string{}, 16},
+ {[]string{""}, 16},
+ {[]string{"unknown"}, 0},
+ {[]string{"unknown", core.CollectionTypeAuth}, 4},
+ {[]string{core.CollectionTypeAuth, core.CollectionTypeView}, 7},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, strings.Join(s.collectionTypes, "_")), func(t *testing.T) {
+ collections, err := app.FindAllCollections(s.collectionTypes...)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(collections) != s.expectTotal {
+ t.Fatalf("Expected %d collections, got %d", s.expectTotal, len(collections))
+ }
+
+ expectedTypes := list.NonzeroUniques(s.collectionTypes)
+ if len(expectedTypes) > 0 {
+ for _, c := range collections {
+ if !slices.Contains(expectedTypes, c.Type) {
+ t.Fatalf("Unexpected collection type %s\n%v", c.Type, c)
+ }
+ }
+ }
+ })
+ }
+}
+
+func TestFindCollectionByNameOrId(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ nameOrId string
+ expectError bool
+ }{
+ {"", true},
+ {"missing", true},
+ {"wsmn24bux7wo113", false},
+ {"demo1", false},
+ {"DEMO1", false}, // case insensitive
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.nameOrId), func(t *testing.T) {
+ model, err := app.FindCollectionByNameOrId(s.nameOrId)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if model != nil && model.Id != s.nameOrId && !strings.EqualFold(model.Name, s.nameOrId) {
+ t.Fatalf("Expected model with identifier %s, got %v", s.nameOrId, model)
+ }
+ })
+ }
+}
+
+func TestFindCachedCollectionByNameOrId(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ totalQueries := 0
+ app.DB().(*dbx.DB).QueryLogFunc = func(ctx context.Context, t time.Duration, sql string, rows *sql.Rows, err error) {
+ totalQueries++
+ }
+
+ run := func(withCache bool) {
+ scenarios := []struct {
+ nameOrId string
+ expectError bool
+ }{
+ {"", true},
+ {"missing", true},
+ {"wsmn24bux7wo113", false},
+ {"demo1", false},
+ {"DEMO1", false}, // case insensitive
+ }
+
+ var expectedTotalQueries int
+
+ if withCache {
+ err := app.ReloadCachedCollections()
+ if err != nil {
+ t.Fatal(err)
+ }
+ } else {
+ app.Store().Reset(nil)
+ expectedTotalQueries = len(scenarios)
+ }
+
+ totalQueries = 0
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.nameOrId), func(t *testing.T) {
+ model, err := app.FindCachedCollectionByNameOrId(s.nameOrId)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if model != nil && model.Id != s.nameOrId && !strings.EqualFold(model.Name, s.nameOrId) {
+ t.Fatalf("Expected model with identifier %s, got %v", s.nameOrId, model)
+ }
+ })
+ }
+
+ if totalQueries != expectedTotalQueries {
+ t.Fatalf("Expected %d totalQueries, got %d", expectedTotalQueries, totalQueries)
+ }
+ }
+
+ run(true)
+
+ run(false)
+}
+
+func TestIsCollectionNameUnique(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ name string
+ excludeId string
+ expected bool
+ }{
+ {"", "", false},
+ {"demo1", "", false},
+ {"Demo1", "", false},
+ {"new", "", true},
+ {"demo1", "wsmn24bux7wo113", true},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.name), func(t *testing.T) {
+ result := app.IsCollectionNameUnique(s.name, s.excludeId)
+ if result != s.expected {
+ t.Errorf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
+
+func TestFindCollectionReferences(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection, err := app.FindCollectionByNameOrId("demo3")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ result, err := app.FindCollectionReferences(
+ collection,
+ collection.Id,
+ // test whether "nonempty" exclude ids condition will be skipped
+ "",
+ "",
+ )
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(result) != 1 {
+ t.Fatalf("Expected 1 collection, got %d: %v", len(result), result)
+ }
+
+ expectedFields := []string{
+ "rel_one_no_cascade",
+ "rel_one_no_cascade_required",
+ "rel_one_cascade",
+ "rel_one_unique",
+ "rel_many_no_cascade",
+ "rel_many_no_cascade_required",
+ "rel_many_cascade",
+ "rel_many_unique",
+ }
+
+ for col, fields := range result {
+ if col.Name != "demo4" {
+ t.Fatalf("Expected collection demo4, got %s", col.Name)
+ }
+ if len(fields) != len(expectedFields) {
+ t.Fatalf("Expected fields %v, got %v", expectedFields, fields)
+ }
+ for i, f := range fields {
+ if !slices.Contains(expectedFields, f.GetName()) {
+ t.Fatalf("[%d] Didn't expect field %v", i, f)
+ }
+ }
+ }
+}
+
+func TestFindCollectionTruncate(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ countFiles := func(collectionId string) (int, error) {
+ entries, err := os.ReadDir(filepath.Join(app.DataDir(), "storage", collectionId))
+ return len(entries), err
+ }
+
+ t.Run("truncate failure", func(t *testing.T) {
+ demo3, err := app.FindCollectionByNameOrId("demo3")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ originalTotalRecords, err := app.CountRecords(demo3)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ originalTotalFiles, err := countFiles(demo3.Id)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ err = app.TruncateCollection(demo3)
+ if err == nil {
+ t.Fatalf("Expected truncate to fail due to cascade delete failed required constraint")
+ }
+
+ // short delay to ensure that the file delete goroutine has been executed
+ time.Sleep(100 * time.Millisecond)
+
+ totalRecords, err := app.CountRecords(demo3)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if totalRecords != originalTotalRecords {
+ t.Fatalf("Expected %d records, got %d", originalTotalRecords, totalRecords)
+ }
+
+ totalFiles, err := countFiles(demo3.Id)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if totalFiles != originalTotalFiles {
+ t.Fatalf("Expected %d files, got %d", originalTotalFiles, totalFiles)
+ }
+ })
+
+ t.Run("truncate success", func(t *testing.T) {
+ demo5, err := app.FindCollectionByNameOrId("demo5")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ err = app.TruncateCollection(demo5)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // short delay to ensure that the file delete goroutine has been executed
+ time.Sleep(100 * time.Millisecond)
+
+ total, err := app.CountRecords(demo5)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if total != 0 {
+ t.Fatalf("Expected all records to be deleted, got %v", total)
+ }
+
+ totalFiles, err := countFiles(demo5.Id)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if totalFiles != 0 {
+ t.Fatalf("Expected truncated record files to be deleted, got %d", totalFiles)
+ }
+
+ // try to truncate again (shouldn't return an error)
+ err = app.TruncateCollection(demo5)
+ if err != nil {
+ t.Fatal(err)
+ }
+ })
+}
diff --git a/core/collection_record_table_sync.go b/core/collection_record_table_sync.go
new file mode 100644
index 00000000..ef6f42df
--- /dev/null
+++ b/core/collection_record_table_sync.go
@@ -0,0 +1,346 @@
+package core
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/tools/dbutils"
+ "github.com/pocketbase/pocketbase/tools/security"
+)
+
+// SyncRecordTableSchema compares the two provided collections
+// and applies the necessary related record table changes.
+//
+// If oldCollection is null, then only newCollection is used to create the record table.
+//
+// This method is automatically invoked as part of a collection create/update/delete operation.
+func (app *BaseApp) SyncRecordTableSchema(newCollection *Collection, oldCollection *Collection) error {
+ if newCollection.IsView() {
+ return nil // nothing to sync since views don't have records table
+ }
+
+ txErr := app.RunInTransaction(func(txApp App) error {
+ // create
+ // -----------------------------------------------------------
+ if oldCollection == nil || !app.HasTable(oldCollection.Name) {
+ tableName := newCollection.Name
+
+ fields := newCollection.Fields
+
+ cols := make(map[string]string, len(fields))
+
+ // add fields definition
+ for _, field := range fields {
+ cols[field.GetName()] = field.ColumnType(app)
+ }
+
+ // create table
+ if _, err := txApp.DB().CreateTable(tableName, cols).Execute(); err != nil {
+ return err
+ }
+
+ return createCollectionIndexes(txApp, newCollection)
+ }
+
+ // update
+ // -----------------------------------------------------------
+ oldTableName := oldCollection.Name
+ newTableName := newCollection.Name
+ oldFields := oldCollection.Fields
+ newFields := newCollection.Fields
+
+ needTableRename := !strings.EqualFold(oldTableName, newTableName)
+
+ var needIndexesUpdate bool
+ if needTableRename ||
+ oldFields.String() != newFields.String() ||
+ oldCollection.Indexes.String() != newCollection.Indexes.String() {
+ needIndexesUpdate = true
+ }
+
+ if needIndexesUpdate {
+ // drop old indexes (if any)
+ if err := dropCollectionIndexes(txApp, oldCollection); err != nil {
+ return err
+ }
+ }
+
+ // check for renamed table
+ if needTableRename {
+ _, err := txApp.DB().RenameTable("{{"+oldTableName+"}}", "{{"+newTableName+"}}").Execute()
+ if err != nil {
+ return err
+ }
+ }
+
+ // check for deleted columns
+ for _, oldField := range oldFields {
+ if f := newFields.GetById(oldField.GetId()); f != nil {
+ continue // exist
+ }
+
+ _, err := txApp.DB().DropColumn(newTableName, oldField.GetName()).Execute()
+ if err != nil {
+ return fmt.Errorf("failed to drop column %s - %w", oldField.GetName(), err)
+ }
+ }
+
+ // check for new or renamed columns
+ toRename := map[string]string{}
+ for _, field := range newFields {
+ oldField := oldFields.GetById(field.GetId())
+ // Note:
+ // We are using a temporary column name when adding or renaming columns
+ // to ensure that there are no name collisions in case there is
+ // names switch/reuse of existing columns (eg. name, title -> title, name).
+ // This way we are always doing 1 more rename operation but it provides better less ambiguous experience.
+
+ if oldField == nil {
+ tempName := field.GetName() + security.PseudorandomString(5)
+ toRename[tempName] = field.GetName()
+
+ // add
+ _, err := txApp.DB().AddColumn(newTableName, tempName, field.ColumnType(txApp)).Execute()
+ if err != nil {
+ return fmt.Errorf("failed to add column %s - %w", field.GetName(), err)
+ }
+ } else if oldField.GetName() != field.GetName() {
+ tempName := field.GetName() + security.PseudorandomString(5)
+ toRename[tempName] = field.GetName()
+
+ // rename
+ _, err := txApp.DB().RenameColumn(newTableName, oldField.GetName(), tempName).Execute()
+ if err != nil {
+ return fmt.Errorf("failed to rename column %s - %w", oldField.GetName(), err)
+ }
+ }
+ }
+
+ // set the actual columns name
+ for tempName, actualName := range toRename {
+ _, err := txApp.DB().RenameColumn(newTableName, tempName, actualName).Execute()
+ if err != nil {
+ return err
+ }
+ }
+
+ if err := normalizeSingleVsMultipleFieldChanges(txApp, newCollection, oldCollection); err != nil {
+ return err
+ }
+
+ if needIndexesUpdate {
+ return createCollectionIndexes(txApp, newCollection)
+ }
+
+ return nil
+ })
+ if txErr != nil {
+ return txErr
+ }
+
+ // run optimize per the SQLite recommendations
+ // (https://www.sqlite.org/pragma.html#pragma_optimize)
+ _, optimizeErr := app.DB().NewQuery("PRAGMA optimize").Execute()
+ if optimizeErr != nil {
+ return fmt.Errorf("failed to run optimize after the fields changes: %w", optimizeErr)
+ }
+
+ return nil
+}
+
+// normalizeSingleVsMultipleFieldChanges rebuilds the table column of every
+// field whose MultiValuer single/multiple state differs between oldCollection
+// and newCollection, normalizing the stored values in the process
+// (single -> JSON array; multiple -> last array element).
+//
+// It is a no-op for view collections or when oldCollection is nil (ie. not an update).
+func normalizeSingleVsMultipleFieldChanges(app App, newCollection *Collection, oldCollection *Collection) error {
+ if newCollection.IsView() || oldCollection == nil {
+ return nil // view or not an update
+ }
+
+ return app.RunInTransaction(func(txApp App) error {
+ // temporary disable the schema error checks to prevent view and trigger errors
+ // when "altering" (aka. deleting and recreating) the non-normalized columns
+ if _, err := txApp.DB().NewQuery("PRAGMA writable_schema = ON").Execute(); err != nil {
+ return err
+ }
+ // executed with defer to make sure that the pragma is always reverted
+ // in case of an error and when nested transactions are used
+ defer txApp.DB().NewQuery("PRAGMA writable_schema = RESET").Execute()
+
+ for _, newField := range newCollection.Fields {
+ // allow to continue even if there is no old field for the cases
+ // when a new field is added and there are already inserted data
+ var isOldMultiple bool
+ if oldField := oldCollection.Fields.GetById(newField.GetId()); oldField != nil {
+ if mv, ok := oldField.(MultiValuer); ok {
+ isOldMultiple = mv.IsMultiple()
+ }
+ }
+
+ var isNewMultiple bool
+ if mv, ok := newField.(MultiValuer); ok {
+ isNewMultiple = mv.IsMultiple()
+ }
+
+ if isOldMultiple == isNewMultiple {
+ continue // no change
+ }
+
+ // update the column definition by:
+ // 1. inserting a new column with the new definition
+ // 2. copy normalized values from the original column to the new one
+ // 3. drop the original column
+ // 4. rename the new column to the original column
+ // -------------------------------------------------------
+
+ originalName := newField.GetName()
+ // leading underscore + random suffix to avoid collisions with user defined columns
+ tempName := "_" + newField.GetName() + security.PseudorandomString(5)
+
+ _, err := txApp.DB().AddColumn(newCollection.Name, tempName, newField.ColumnType(txApp)).Execute()
+ if err != nil {
+ return err
+ }
+
+ var copyQuery *dbx.Query
+
+ if !isOldMultiple && isNewMultiple {
+ // single -> multiple (convert to array)
+ copyQuery = txApp.DB().NewQuery(fmt.Sprintf(
+ `UPDATE {{%s}} set [[%s]] = (
+ CASE
+ WHEN COALESCE([[%s]], '') = ''
+ THEN '[]'
+ ELSE (
+ CASE
+ WHEN json_valid([[%s]]) AND json_type([[%s]]) == 'array'
+ THEN [[%s]]
+ ELSE json_array([[%s]])
+ END
+ )
+ END
+ )`,
+ newCollection.Name,
+ tempName,
+ originalName,
+ originalName,
+ originalName,
+ originalName,
+ originalName,
+ ))
+ } else {
+ // multiple -> single (keep only the last element)
+ // (the JSON1 path '$[#-1]' selects the last array element)
+ //
+ // note: for file fields the actual file objects are not
+ // deleted allowing additional custom handling via migration
+ copyQuery = txApp.DB().NewQuery(fmt.Sprintf(
+ `UPDATE {{%s}} set [[%s]] = (
+ CASE
+ WHEN COALESCE([[%s]], '[]') = '[]'
+ THEN ''
+ ELSE (
+ CASE
+ WHEN json_valid([[%s]]) AND json_type([[%s]]) == 'array'
+ THEN COALESCE(json_extract([[%s]], '$[#-1]'), '')
+ ELSE [[%s]]
+ END
+ )
+ END
+ )`,
+ newCollection.Name,
+ tempName,
+ originalName,
+ originalName,
+ originalName,
+ originalName,
+ originalName,
+ ))
+ }
+
+ // copy the normalized values
+ if _, err := copyQuery.Execute(); err != nil {
+ return err
+ }
+
+ // drop the original column
+ if _, err := txApp.DB().DropColumn(newCollection.Name, originalName).Execute(); err != nil {
+ return err
+ }
+
+ // rename the new column back to the original
+ if _, err := txApp.DB().RenameColumn(newCollection.Name, tempName, originalName).Execute(); err != nil {
+ return err
+ }
+ }
+
+ // revert the pragma and reload the schema
+ _, revertErr := txApp.DB().NewQuery("PRAGMA writable_schema = RESET").Execute()
+
+ return revertErr
+ })
+}
+
+// dropCollectionIndexes drops all valid indexes of the provided collection.
+//
+// The operation is a no-op for view collections since they don't have indexes.
+func dropCollectionIndexes(app App, collection *Collection) error {
+	if collection.IsView() {
+		return nil // views don't have indexes
+	}
+
+	return app.RunInTransaction(func(txApp App) error {
+		for _, raw := range collection.Indexes {
+			parsed := dbutils.ParseIndex(raw)
+
+			if !parsed.IsValid() {
+				continue
+			}
+
+			// note: must use txApp (and not the outer app) so that the
+			// DROP INDEX statement runs inside the started transaction
+			if _, err := txApp.DB().NewQuery(fmt.Sprintf("DROP INDEX IF EXISTS [[%s]]", parsed.IndexName)).Execute(); err != nil {
+				return err
+			}
+		}
+
+		return nil
+	})
+}
+
+// createCollectionIndexes (re)creates all of the collection indexes,
+// collecting any failures as validation errors keyed by the index position.
+//
+// The operation is a no-op for view collections since they don't have indexes.
+func createCollectionIndexes(app App, collection *Collection) error {
+	if collection.IsView() {
+		return nil // views don't have indexes
+	}
+
+	return app.RunInTransaction(func(txApp App) error {
+		// upsert new indexes
+		//
+		// note: we are returning validation errors because the indexes cannot be
+		// easily validated in a form, aka. before persisting the related
+		// collection record table changes
+		indexErrs := make(validation.Errors, len(collection.Indexes))
+
+		for i, idx := range collection.Indexes {
+			key := strconv.Itoa(i)
+
+			parsed := dbutils.ParseIndex(idx)
+
+			// ensure that the index is always for the current collection
+			parsed.TableName = collection.Name
+
+			if !parsed.IsValid() {
+				indexErrs[key] = validation.NewError(
+					"validation_invalid_index_expression",
+					"Invalid CREATE INDEX expression.",
+				)
+				continue
+			}
+
+			_, err := txApp.DB().NewQuery(parsed.Build()).Execute()
+			if err != nil {
+				indexErrs[key] = validation.NewError(
+					"validation_invalid_index_expression",
+					fmt.Sprintf("Failed to create index %s - %v.", parsed.IndexName, err.Error()),
+				).SetParams(map[string]any{
+					"indexName": parsed.IndexName,
+				})
+			}
+		}
+
+		if len(indexErrs) > 0 {
+			return validation.Errors{"indexes": indexErrs}
+		}
+
+		return nil
+	})
+}
diff --git a/daos/record_table_sync_test.go b/core/collection_record_table_sync_test.go
similarity index 51%
rename from daos/record_table_sync_test.go
rename to core/collection_record_table_sync_test.go
index e092f167..bc72da86 100644
--- a/daos/record_table_sync_test.go
+++ b/core/collection_record_table_sync_test.go
@@ -1,4 +1,4 @@
-package daos_test
+package core_test
import (
"bytes"
@@ -6,8 +6,7 @@ import (
"testing"
"github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
+ "github.com/pocketbase/pocketbase/core"
"github.com/pocketbase/pocketbase/tests"
"github.com/pocketbase/pocketbase/tools/list"
"github.com/pocketbase/pocketbase/tools/types"
@@ -19,73 +18,55 @@ func TestSyncRecordTableSchema(t *testing.T) {
app, _ := tests.NewTestApp()
defer app.Cleanup()
- oldCollection, err := app.Dao().FindCollectionByNameOrId("demo2")
+ oldCollection, err := app.FindCollectionByNameOrId("demo2")
if err != nil {
t.Fatal(err)
}
- updatedCollection, err := app.Dao().FindCollectionByNameOrId("demo2")
+ updatedCollection, err := app.FindCollectionByNameOrId("demo2")
if err != nil {
t.Fatal(err)
}
updatedCollection.Name = "demo_renamed"
- updatedCollection.Schema.RemoveField(updatedCollection.Schema.GetFieldByName("active").Id)
- updatedCollection.Schema.AddField(
- &schema.SchemaField{
- Name: "new_field",
- Type: schema.FieldTypeEmail,
- },
- )
- updatedCollection.Schema.AddField(
- &schema.SchemaField{
- Id: updatedCollection.Schema.GetFieldByName("title").Id,
- Name: "title_renamed",
- Type: schema.FieldTypeEmail,
- },
- )
- updatedCollection.Indexes = types.JsonArray[string]{"create index idx_title_renamed on anything (title_renamed)"}
+ updatedCollection.Fields.RemoveByName("active")
+ updatedCollection.Fields.Add(&core.EmailField{
+ Name: "new_field",
+ })
+ updatedCollection.Fields.Add(&core.EmailField{
+ Id: updatedCollection.Fields.GetByName("title").GetId(),
+ Name: "title_renamed",
+ })
+ updatedCollection.Indexes = types.JSONArray[string]{"create index idx_title_renamed on anything (title_renamed)"}
+
+ baseCol := core.NewBaseCollection("new_base")
+ baseCol.Fields.Add(&core.TextField{Name: "test"})
+
+ authCol := core.NewAuthCollection("new_auth")
+ authCol.Fields.Add(&core.TextField{Name: "test"})
+ authCol.AddIndex("idx_auth_test", false, "email, id", "")
scenarios := []struct {
name string
- newCollection *models.Collection
- oldCollection *models.Collection
+ newCollection *core.Collection
+ oldCollection *core.Collection
expectedColumns []string
expectedIndexesCount int
}{
{
"new base collection",
- &models.Collection{
- Name: "new_table",
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "test",
- Type: schema.FieldTypeText,
- },
- ),
- },
+ baseCol,
nil,
- []string{"id", "created", "updated", "test"},
+ []string{"id", "test"},
0,
},
{
"new auth collection",
- &models.Collection{
- Name: "new_table_auth",
- Type: models.CollectionTypeAuth,
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "test",
- Type: schema.FieldTypeText,
- },
- ),
- Indexes: types.JsonArray[string]{"create index idx_auth_test on anything (email, username)"},
- },
+ authCol,
nil,
[]string{
- "id", "created", "updated", "test",
- "username", "email", "verified", "emailVisibility",
- "tokenKey", "passwordHash", "lastResetSentAt", "lastVerificationSentAt", "lastLoginAlertSentAt",
+ "id", "test", "email", "verified",
+ "emailVisibility", "tokenKey", "password",
},
- 4,
+ 3,
},
{
"no changes",
@@ -104,32 +85,33 @@ func TestSyncRecordTableSchema(t *testing.T) {
}
for _, s := range scenarios {
- err := app.Dao().SyncRecordTableSchema(s.newCollection, s.oldCollection)
- if err != nil {
- t.Errorf("[%s] %v", s.name, err)
- continue
- }
-
- if !app.Dao().HasTable(s.newCollection.Name) {
- t.Errorf("[%s] Expected table %s to exist", s.name, s.newCollection.Name)
- }
-
- cols, _ := app.Dao().TableColumns(s.newCollection.Name)
- if len(cols) != len(s.expectedColumns) {
- t.Errorf("[%s] Expected columns %v, got %v", s.name, s.expectedColumns, cols)
- }
-
- for _, c := range cols {
- if !list.ExistInSlice(c, s.expectedColumns) {
- t.Errorf("[%s] Couldn't find column %s in %v", s.name, c, s.expectedColumns)
+ t.Run(s.name, func(t *testing.T) {
+ err := app.SyncRecordTableSchema(s.newCollection, s.oldCollection)
+ if err != nil {
+ t.Fatal(err)
}
- }
- indexes, _ := app.Dao().TableIndexes(s.newCollection.Name)
+ if !app.HasTable(s.newCollection.Name) {
+ t.Fatalf("Expected table %s to exist", s.newCollection.Name)
+ }
- if totalIndexes := len(indexes); totalIndexes != s.expectedIndexesCount {
- t.Errorf("[%s] Expected %d indexes, got %d:\n%v", s.name, s.expectedIndexesCount, totalIndexes, indexes)
- }
+ cols, _ := app.TableColumns(s.newCollection.Name)
+ if len(cols) != len(s.expectedColumns) {
+ t.Fatalf("Expected columns %v, got %v", s.expectedColumns, cols)
+ }
+
+ for _, col := range cols {
+ if !list.ExistInSlice(col, s.expectedColumns) {
+ t.Fatalf("Couldn't find column %s in %v", col, s.expectedColumns)
+ }
+ }
+
+ indexes, _ := app.TableIndexes(s.newCollection.Name)
+
+ if totalIndexes := len(indexes); totalIndexes != s.expectedIndexesCount {
+ t.Fatalf("Expected %d indexes, got %d:\n%v", s.expectedIndexesCount, totalIndexes, indexes)
+ }
+ })
}
}
@@ -139,69 +121,41 @@ func TestSingleVsMultipleValuesNormalization(t *testing.T) {
app, _ := tests.NewTestApp()
defer app.Cleanup()
- collection, err := app.Dao().FindCollectionByNameOrId("demo1")
+ collection, err := app.FindCollectionByNameOrId("demo1")
if err != nil {
t.Fatal(err)
}
// mock field changes
- {
- selectOneField := collection.Schema.GetFieldByName("select_one")
- opt := selectOneField.Options.(*schema.SelectOptions)
- opt.MaxSelect = 2
- }
- {
- selectManyField := collection.Schema.GetFieldByName("select_many")
- opt := selectManyField.Options.(*schema.SelectOptions)
- opt.MaxSelect = 1
- }
- {
- fileOneField := collection.Schema.GetFieldByName("file_one")
- opt := fileOneField.Options.(*schema.FileOptions)
- opt.MaxSelect = 2
- }
- {
- fileManyField := collection.Schema.GetFieldByName("file_many")
- opt := fileManyField.Options.(*schema.FileOptions)
- opt.MaxSelect = 1
- }
- {
- relOneField := collection.Schema.GetFieldByName("rel_one")
- opt := relOneField.Options.(*schema.RelationOptions)
- opt.MaxSelect = types.Pointer(2)
- }
- {
- relManyField := collection.Schema.GetFieldByName("rel_many")
- opt := relManyField.Options.(*schema.RelationOptions)
- opt.MaxSelect = types.Pointer(1)
- }
- {
- // new multivaluer field to check whether the array normalization
- // will be applied for already inserted data
- collection.Schema.AddField(&schema.SchemaField{
- Name: "new_multiple",
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{
- Values: []string{"a", "b", "c"},
- MaxSelect: 3,
- },
- })
- }
+ collection.Fields.GetByName("select_one").(*core.SelectField).MaxSelect = 2
+ collection.Fields.GetByName("select_many").(*core.SelectField).MaxSelect = 1
+ collection.Fields.GetByName("file_one").(*core.FileField).MaxSelect = 2
+ collection.Fields.GetByName("file_many").(*core.FileField).MaxSelect = 1
+ collection.Fields.GetByName("rel_one").(*core.RelationField).MaxSelect = 2
+ collection.Fields.GetByName("rel_many").(*core.RelationField).MaxSelect = 1
- if err := app.Dao().SaveCollection(collection); err != nil {
+ // new multivaluer field to check whether the array normalization
+ // will be applied for already inserted data
+ collection.Fields.Add(&core.SelectField{
+ Name: "new_multiple",
+ Values: []string{"a", "b", "c"},
+ MaxSelect: 3,
+ })
+
+ if err := app.Save(collection); err != nil {
t.Fatal(err)
}
// ensures that the writable schema was reverted to its expected default
var writableSchema bool
- app.Dao().DB().NewQuery("PRAGMA writable_schema").Row(&writableSchema)
+ app.DB().NewQuery("PRAGMA writable_schema").Row(&writableSchema)
if writableSchema == true {
t.Fatalf("Expected writable_schema to be OFF, got %v", writableSchema)
}
// check whether the columns DEFAULT definition was updated
// ---------------------------------------------------------------
- tableInfo, err := app.Dao().TableInfo(collection.Name)
+ tableInfo, err := app.TableInfo(collection.Name)
if err != nil {
t.Fatal(err)
}
@@ -217,7 +171,7 @@ func TestSingleVsMultipleValuesNormalization(t *testing.T) {
}
for col, dflt := range tableInfoExpectations {
t.Run("check default for "+col, func(t *testing.T) {
- var row *models.TableInfoRow
+ var row *core.TableInfoRow
for _, r := range tableInfo {
if r.Name == col {
row = r
@@ -228,7 +182,7 @@ func TestSingleVsMultipleValuesNormalization(t *testing.T) {
t.Fatalf("Missing info for column %q", col)
}
- if v := row.DefaultValue.String(); v != dflt {
+ if v := row.DefaultValue.String; v != dflt {
t.Fatalf("Expected default value %q, got %q", dflt, v)
}
})
@@ -292,7 +246,7 @@ func TestSingleVsMultipleValuesNormalization(t *testing.T) {
t.Run("check fields for record "+s.recordId, func(t *testing.T) {
result := new(fieldsExpectation)
- err := app.Dao().DB().Select(
+ err := app.DB().Select(
"select_one",
"select_many",
"file_one",
diff --git a/core/collection_validate.go b/core/collection_validate.go
new file mode 100644
index 00000000..9da95f69
--- /dev/null
+++ b/core/collection_validate.go
@@ -0,0 +1,658 @@
+package core
+
+import (
+ "context"
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tools/dbutils"
+ "github.com/pocketbase/pocketbase/tools/list"
+ "github.com/pocketbase/pocketbase/tools/search"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+// collectionNameRegex matches the characters allowed in a collection name (word characters only).
+var collectionNameRegex = regexp.MustCompile(`^\w+$`)
+
+// onCollectionValidate loads the last persisted collection state (for updates)
+// and runs the collection validator against it.
+func onCollectionValidate(e *CollectionEvent) error {
+	var original *Collection
+
+	// for updates, fetch the previously persisted state so that the
+	// validator can compare the new values against it
+	if !e.Collection.IsNew() {
+		original = &Collection{}
+		err := e.App.ModelQuery(original).Model(e.Collection.LastSavedPK(), original)
+		if err != nil {
+			return fmt.Errorf("failed to fetch old collection state: %w", err)
+		}
+	}
+
+	return newCollectionValidator(e.Context, e.App, e.Collection, original).run()
+}
+
+// newCollectionValidator creates a new collection validator for the provided
+// new and original collection states (original may be nil on create).
+func newCollectionValidator(ctx context.Context, app App, new, original *Collection) *collectionValidator {
+	cv := &collectionValidator{
+		ctx:      ctx,
+		app:      app,
+		new:      new,
+		original: original,
+	}
+
+	// fallback to an empty collection of the same type when there is no original state
+	if cv.original == nil {
+		cv.original = NewCollection(cv.new.Type, "")
+	}
+
+	return cv
+}
+
+// collectionValidator validates a single new/updated Collection model
+// against its original (last persisted) state.
+type collectionValidator struct {
+ original *Collection
+ new *Collection
+ app App
+ ctx context.Context
+}
+
+// optionsValidator is implemented by the collection type specific options
+// to run their extra validations as part of the collection validator.
+type optionsValidator interface {
+ validate(cv *collectionValidator) error
+}
+
+// run executes the collection validations and returns the combined
+// base fields and collection type specific options errors (if any).
+func (validator *collectionValidator) run() error {
+ // generate fields from the query (overwriting any explicit user defined fields)
+ if validator.new.IsView() {
+ validator.new.Fields, _ = validator.app.CreateViewFields(validator.new.ViewQuery)
+ }
+
+ // validate base fields
+ baseErr := validation.ValidateStruct(validator.new,
+ validation.Field(
+ &validator.new.Id,
+ validation.Required,
+ validation.When(
+ validator.original.IsNew(),
+ validation.Length(1, 100),
+ validation.Match(DefaultIdRegex),
+ validation.By(validators.UniqueId(validator.app.DB(), validator.new.TableName())),
+ ).Else(
+ // the id of an existing collection cannot be changed
+ validation.By(validators.Equal(validator.original.Id)),
+ ),
+ ),
+ validation.Field(
+ &validator.new.System,
+ validation.By(validator.ensureNoSystemFlagChange),
+ ),
+ validation.Field(
+ &validator.new.Type,
+ validation.Required,
+ validation.In(
+ CollectionTypeBase,
+ CollectionTypeAuth,
+ CollectionTypeView,
+ ),
+ validation.By(validator.ensureNoTypeChange),
+ ),
+ validation.Field(
+ &validator.new.Name,
+ validation.Required,
+ validation.Length(1, 255),
+ validation.By(checkForVia),
+ validation.Match(collectionNameRegex),
+ validation.By(validator.ensureNoSystemNameChange),
+ validation.By(validator.checkUniqueName),
+ ),
+ validation.Field(
+ &validator.new.Fields,
+ validation.By(validator.checkFieldDuplicates),
+ validation.By(validator.checkMinFields),
+ validation.When(
+ !validator.new.IsView(),
+ validation.By(validator.ensureNoSystemFieldsChange),
+ validation.By(validator.ensureNoFieldsTypeChange),
+ ),
+ validation.When(validator.new.IsAuth(), validation.By(validator.checkReservedAuthKeys)),
+ validation.By(validator.checkFieldValidators),
+ ),
+ // API rules (Create/Update/Delete must be nil for view collections)
+ validation.Field(
+ &validator.new.ListRule,
+ validation.By(validator.checkRule),
+ validation.By(validator.ensureNoSystemRuleChange(validator.original.ListRule)),
+ ),
+ validation.Field(
+ &validator.new.ViewRule,
+ validation.By(validator.checkRule),
+ validation.By(validator.ensureNoSystemRuleChange(validator.original.ViewRule)),
+ ),
+ validation.Field(
+ &validator.new.CreateRule,
+ validation.When(validator.new.IsView(), validation.Nil),
+ validation.By(validator.checkRule),
+ validation.By(validator.ensureNoSystemRuleChange(validator.original.CreateRule)),
+ ),
+ validation.Field(
+ &validator.new.UpdateRule,
+ validation.When(validator.new.IsView(), validation.Nil),
+ validation.By(validator.checkRule),
+ validation.By(validator.ensureNoSystemRuleChange(validator.original.UpdateRule)),
+ ),
+ validation.Field(
+ &validator.new.DeleteRule,
+ validation.When(validator.new.IsView(), validation.Nil),
+ validation.By(validator.checkRule),
+ validation.By(validator.ensureNoSystemRuleChange(validator.original.DeleteRule)),
+ ),
+ validation.Field(&validator.new.Indexes, validation.By(validator.checkIndexes)),
+ )
+
+ // run the collection type specific options validations
+ optionsErr := validator.validateOptions()
+
+ return validators.JoinValidationErrors(baseErr, optionsErr)
+}
+
+// checkUniqueName verifies that the new collection name is unique and that it
+// doesn't conflict with an existing collection id or internal table name.
+func (cv *collectionValidator) checkUniqueName(value any) error {
+	name, _ := value.(string)
+
+	// the name must not be taken by another collection (case insensitive)
+	if !cv.app.IsCollectionNameUnique(name, cv.original.Id) {
+		return validation.NewError("validation_collection_name_exists", "Collection name must be unique (case insensitive).")
+	}
+
+	// the name must not match the id of any existing collection
+	existing := &Collection{}
+	if cv.app.ModelQuery(existing).Model(name, existing) == nil {
+		return validation.NewError("validation_collection_name_id_duplicate", "The name must not match an existing collection id.")
+	}
+
+	// when changed, the name must not clash with an internal (non-collection) table
+	// (the extra IsCollectionNameUnique call excludes presaved collections)
+	nameChanged := cv.original.Name != name
+	if nameChanged && cv.app.IsCollectionNameUnique(name) && cv.app.HasTable(name) {
+		return validation.NewError("validation_collection_name_invalid", "The name shouldn't match with an existing internal table.")
+	}
+
+	return nil
+}
+
+// ensureNoSystemNameChange disallows renaming existing system collections.
+func (cv *collectionValidator) ensureNoSystemNameChange(value any) error {
+	name, _ := value.(string)
+
+	if cv.original.IsNew() || !cv.original.System || name == cv.original.Name {
+		return nil
+	}
+
+	return validation.NewError("validation_collection_system_name_change", "System collection name cannot be changed.")
+}
+
+// ensureNoSystemFlagChange disallows toggling the System flag of existing collections.
+func (cv *collectionValidator) ensureNoSystemFlagChange(value any) error {
+	system, _ := value.(bool)
+
+	if cv.original.IsNew() || system == cv.original.System {
+		return nil
+	}
+
+	return validation.NewError("validation_collection_system_flag_change", "System collection state cannot be changed.")
+}
+
+// ensureNoTypeChange disallows changing the type of existing collections.
+func (cv *collectionValidator) ensureNoTypeChange(value any) error {
+	typ, _ := value.(string)
+
+	if cv.original.IsNew() || typ == cv.original.Type {
+		return nil
+	}
+
+	return validation.NewError("validation_collection_type_change", "Collection type cannot be changed.")
+}
+
+// ensureNoFieldsTypeChange disallows changing the type of an existing field
+// (old and new fields are matched by their id).
+func (cv *collectionValidator) ensureNoFieldsTypeChange(value any) error {
+	fields, ok := value.(FieldsList)
+	if !ok {
+		return validators.ErrUnsupportedValueType
+	}
+
+	errs := validation.Errors{}
+
+	for i, field := range fields {
+		old := cv.original.Fields.GetById(field.GetId())
+		if old == nil || old.Type() == field.Type() {
+			continue
+		}
+
+		errs[strconv.Itoa(i)] = validation.NewError(
+			"validation_field_type_change",
+			"Field type cannot be changed.",
+		)
+	}
+
+	if len(errs) == 0 {
+		return nil
+	}
+
+	return errs
+}
+
+// checkFieldDuplicates ensures that all fields have unique ids and unique
+// case insensitive names, returning on the first duplicate found.
+func (cv *collectionValidator) checkFieldDuplicates(value any) error {
+	fields, ok := value.(FieldsList)
+	if !ok {
+		return validators.ErrUnsupportedValueType
+	}
+
+	seenIds := make(map[string]struct{}, len(fields))
+	seenNames := make(map[string]struct{}, len(fields))
+
+	for i, field := range fields {
+		id := field.GetId()
+		if _, exists := seenIds[id]; exists {
+			return validation.Errors{
+				strconv.Itoa(i): validation.Errors{
+					"id": validation.NewError(
+						"validation_duplicated_field_id",
+						fmt.Sprintf("Duplicated or invalid field id %q", id),
+					),
+				},
+			}
+		}
+
+		// field names are used as db columns and should be case insensitive
+		nameLower := strings.ToLower(field.GetName())
+		if _, exists := seenNames[nameLower]; exists {
+			return validation.Errors{
+				strconv.Itoa(i): validation.Errors{
+					"name": validation.NewError(
+						"validation_duplicated_field_name",
+						fmt.Sprintf("Duplicated or invalid field name %q", field.GetName()),
+					).SetParams(map[string]any{
+						"fieldName": field.GetName(),
+					}),
+				},
+			}
+		}
+
+		seenIds[id] = struct{}{}
+		seenNames[nameLower] = struct{}{}
+	}
+
+	return nil
+}
+
+// checkFieldValidators runs the per-field settings validations and
+// aggregates any errors keyed by the field index.
+func (cv *collectionValidator) checkFieldValidators(value any) error {
+	fields, ok := value.(FieldsList)
+	if !ok {
+		return validators.ErrUnsupportedValueType
+	}
+
+	errs := validation.Errors{}
+
+	for i, field := range fields {
+		err := field.ValidateSettings(cv.ctx, cv.app, cv.new)
+		if err != nil {
+			errs[strconv.Itoa(i)] = err
+		}
+	}
+
+	if len(errs) == 0 {
+		return nil
+	}
+
+	return errs
+}
+
+// checkViewQuery verifies that the provided view SQL query
+// can be resolved into collection fields.
+func (cv *collectionValidator) checkViewQuery(value any) error {
+	query, _ := value.(string)
+	if query == "" {
+		return nil // nothing to check
+	}
+
+	_, err := cv.app.CreateViewFields(query)
+	if err == nil {
+		return nil
+	}
+
+	return validation.NewError(
+		"validation_invalid_view_query",
+		fmt.Sprintf("Invalid query - %s", err.Error()),
+	)
+}
+
+// reservedAuthKeys are form-only auth keys that cannot be used as field names.
+var reservedAuthKeys = []string{"passwordConfirm", "oldPassword"}
+
+// checkReservedAuthKeys disallows reserved keys as field names in auth collections.
+func (cv *collectionValidator) checkReservedAuthKeys(value any) error {
+	fields, ok := value.(FieldsList)
+	if !ok {
+		return validators.ErrUnsupportedValueType
+	}
+
+	if !cv.new.IsAuth() {
+		return nil // not an auth collection
+	}
+
+	errs := validation.Errors{}
+
+	for i, field := range fields {
+		if !list.ExistInSlice(field.GetName(), reservedAuthKeys) {
+			continue
+		}
+
+		errs[strconv.Itoa(i)] = validation.Errors{
+			"name": validation.NewError(
+				"validation_reserved_field_name",
+				"The field name is reserved and cannot be used.",
+			),
+		}
+	}
+
+	if len(errs) == 0 {
+		return nil
+	}
+
+	return errs
+}
+
+// checkMinFields ensures that the collection has its minimum required fields:
+// an "id" PK text field for every collection and, for auth collections, the
+// system "password", "tokenKey", "email", "emailVisibility" and "verified" fields.
+func (cv *collectionValidator) checkMinFields(value any) error {
+ fields, ok := value.(FieldsList)
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ if len(fields) == 0 {
+ return validation.ErrRequired
+ }
+
+ // all collections must have an "id" PK field
+ idField, _ := fields.GetByName(FieldNameId).(*TextField)
+ if idField == nil || !idField.PrimaryKey {
+ return validation.NewError("validation_missing_primary_key", `Missing or invalid "id" PK field.`)
+ }
+
+ // additional type specific system field requirements
+ switch cv.new.Type {
+ case CollectionTypeAuth:
+ // "password" must exist and be both hidden and system
+ passwordField, _ := fields.GetByName(FieldNamePassword).(*PasswordField)
+ if passwordField == nil {
+ return validation.NewError("validation_missing_password_field", `System "password" field is required.`)
+ }
+ if !passwordField.Hidden || !passwordField.System {
+ return validation.Errors{FieldNamePassword: ErrMustBeSystemAndHidden}
+ }
+
+ // "tokenKey" must exist and be both hidden and system
+ tokenKeyField, _ := fields.GetByName(FieldNameTokenKey).(*TextField)
+ if tokenKeyField == nil {
+ return validation.NewError("validation_missing_tokenKey_field", `System "tokenKey" field is required.`)
+ }
+ if !tokenKeyField.Hidden || !tokenKeyField.System {
+ return validation.Errors{FieldNameTokenKey: ErrMustBeSystemAndHidden}
+ }
+
+ // "email" must exist and be system (visibility is controlled separately)
+ emailField, _ := fields.GetByName(FieldNameEmail).(*EmailField)
+ if emailField == nil {
+ return validation.NewError("validation_missing_email_field", `System "email" field is required.`)
+ }
+ if !emailField.System {
+ return validation.Errors{FieldNameEmail: ErrMustBeSystem}
+ }
+
+ // "emailVisibility" must exist and be system
+ emailVisibilityField, _ := fields.GetByName(FieldNameEmailVisibility).(*BoolField)
+ if emailVisibilityField == nil {
+ return validation.NewError("validation_missing_emailVisibility_field", `System "emailVisibility" field is required.`)
+ }
+ if !emailVisibilityField.System {
+ return validation.Errors{FieldNameEmailVisibility: ErrMustBeSystem}
+ }
+
+ // "verified" must exist and be system
+ verifiedField, _ := fields.GetByName(FieldNameVerified).(*BoolField)
+ if verifiedField == nil {
+ return validation.NewError("validation_missing_verified_field", `System "verified" field is required.`)
+ }
+ if !verifiedField.System {
+ return validation.Errors{FieldNameVerified: ErrMustBeSystem}
+ }
+
+ return nil
+ }
+
+ return nil
+}
+
+// ensureNoSystemFieldsChange disallows deleting or renaming system fields.
+func (cv *collectionValidator) ensureNoSystemFieldsChange(value any) error {
+	fields, ok := value.(FieldsList)
+	if !ok {
+		return validators.ErrUnsupportedValueType
+	}
+
+	for _, old := range cv.original.Fields {
+		if !old.GetSystem() {
+			continue
+		}
+
+		current := fields.GetById(old.GetId())
+		if current == nil || current.GetName() != old.GetName() {
+			return validation.NewError("validation_system_field_change", "System fields cannot be deleted or renamed.")
+		}
+	}
+
+	return nil
+}
+
+// checkFieldsForUniqueIndex verifies that each of the provided field names
+// exists in the collection and is covered by a single column UNIQUE index.
+func (cv *collectionValidator) checkFieldsForUniqueIndex(value any) error {
+	names, ok := value.([]string)
+	if !ok {
+		return validators.ErrUnsupportedValueType
+	}
+
+	for _, name := range names {
+		if cv.new.Fields.GetByName(name) == nil {
+			return validation.NewError("validation_missing_field", fmt.Sprintf("Invalid or missing field %q", name)).
+				SetParams(map[string]any{"fieldName": name})
+		}
+
+		if !dbutils.HasSingleColumnUniqueIndex(name, cv.new.Indexes) {
+			return validation.NewError("validation_missing_unique_constraint", fmt.Sprintf("The field %q doesn't have a UNIQUE constraint.", name)).
+				SetParams(map[string]any{"fieldName": name})
+		}
+	}
+
+	return nil
+}
+
+// checkRule validates that the provided API rule value is a parsable
+// filter expression against the new collection's fields.
+// Nil pointers and empty strings are considered valid (nothing to check).
+//
+// note: value could be either *string or string
+func (validator *collectionValidator) checkRule(value any) error {
+ var vStr string
+
+ // normalize both accepted value types into a plain string
+ v, ok := value.(*string)
+ if ok {
+ if v != nil {
+ vStr = *v
+ }
+ } else {
+ vStr, ok = value.(string)
+ }
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ if vStr == "" {
+ return nil // nothing to check
+ }
+
+ // try to build the rule expression to ensure that it is valid
+ r := NewRecordFieldResolver(validator.app, validator.new, nil, true)
+ _, err := search.FilterData(vStr).BuildExpr(r)
+ if err != nil {
+ return validation.NewError("validation_invalid_rule", "Invalid rule. Raw error: "+err.Error())
+ }
+
+ return nil
+}
+
+// ensureNoSystemRuleChange returns a rule function that ensures that the API
+// rules of an existing system collection cannot be changed
+// (both the new and old rule must be nil, or must point to equal string values).
+// New or non-system collections are not restricted.
+func (validator *collectionValidator) ensureNoSystemRuleChange(oldRule *string) validation.RuleFunc {
+ return func(value any) error {
+ if validator.original.IsNew() || !validator.original.System {
+ return nil // not an update of a system collection
+ }
+
+ rule, ok := value.(*string)
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ // equal when both are nil or both point to the same string value
+ if (rule == nil && oldRule == nil) ||
+ (rule != nil && oldRule != nil && *rule == *oldRule) {
+ return nil
+ }
+
+ return validation.NewError("validation_collection_system_rule_change", "System collection API rule cannot be changed.")
+ }
+}
+
+// checkIndexes validates the submitted collection indexes list:
+//   - view collections don't support indexes;
+//   - every entry must be a valid CREATE INDEX expression;
+//   - index names must be unique (case-insensitively), including across
+//     the other collections in the database;
+//   - indexes covering system fields cannot be deleted or modified;
+//   - auth collections must keep unique indexes on tokenKey and email.
+func (cv *collectionValidator) checkIndexes(value any) error {
+ indexes, _ := value.(types.JSONArray[string])
+
+ if cv.new.IsView() && len(indexes) > 0 {
+ return validation.NewError(
+ "validation_indexes_not_supported",
+ "View collections don't support indexes.",
+ )
+ }
+
+ // lowercased index names seen so far (used for duplicate detection)
+ indexNames := make(map[string]struct{}, len(indexes))
+
+ for i, rawIndex := range indexes {
+ parsed := dbutils.ParseIndex(rawIndex)
+
+ // always set a table name because it is ignored anyway in order to keep it in sync with the collection name
+ parsed.TableName = "validator"
+
+ if !parsed.IsValid() {
+ return validation.Errors{
+ strconv.Itoa(i): validation.NewError(
+ "validation_invalid_index_expression",
+ "Invalid CREATE INDEX expression.",
+ ),
+ }
+ }
+
+ _, isDuplicated := indexNames[strings.ToLower(parsed.IndexName)]
+ if isDuplicated {
+ return validation.Errors{
+ strconv.Itoa(i): validation.NewError(
+ "validation_duplicated_index_name",
+ "The index name must be unique.",
+ ),
+ }
+ }
+
+ // ensure that the index name is not used in another collection
+ // (lookup via the sqlite_master schema table, excluding the current
+ // collection's old and new table names; query errors are ignored and
+ // treated as "not used")
+ var usedTblName string
+ _ = cv.app.DB().Select("tbl_name").
+ From("sqlite_master").
+ AndWhere(dbx.HashExp{"type": "index"}).
+ AndWhere(dbx.NewExp("LOWER([[tbl_name]])!=LOWER({:oldName})", dbx.Params{"oldName": cv.original.Name})).
+ AndWhere(dbx.NewExp("LOWER([[tbl_name]])!=LOWER({:newName})", dbx.Params{"newName": cv.new.Name})).
+ AndWhere(dbx.NewExp("LOWER([[name]])=LOWER({:indexName})", dbx.Params{"indexName": parsed.IndexName})).
+ Limit(1).
+ Row(&usedTblName)
+ if usedTblName != "" {
+ return validation.Errors{
+ strconv.Itoa(i): validation.NewError(
+ "validation_existing_index_name",
+ "The index name is already used in "+usedTblName+" collection.",
+ ),
+ }
+ }
+
+ // note: we don't check the index table name because it is always
+ // overwritten by the SyncRecordTableSchema to allow
+ // easier partial modifications (eg. changing only the collection name).
+ // if !strings.EqualFold(parsed.TableName, form.Name) {
+ // return validation.Errors{
+ // strconv.Itoa(i): validation.NewError(
+ // "validation_invalid_index_table",
+ // fmt.Sprintf("The index table must be the same as the collection name."),
+ // ),
+ // }
+ // }
+
+ indexNames[strings.ToLower(parsed.IndexName)] = struct{}{}
+ }
+
+ // ensure that indexes on system fields are not deleted or changed
+ if !cv.original.IsNew() {
+ OLD_INDEXES_LOOP:
+ for _, oldIndex := range cv.original.Indexes {
+ oldParsed := dbutils.ParseIndex(oldIndex)
+
+ // find whether the old index covers a system field
+ for _, column := range oldParsed.Columns {
+ for _, f := range cv.original.Fields {
+ if !f.GetSystem() || !strings.EqualFold(column.Name, f.GetName()) {
+ continue
+ }
+
+ var exists bool
+
+ // the old system index must be present (by name) in the new
+ // indexes list and its normalized definition must be unchanged
+ for i, newIndex := range cv.new.Indexes {
+ newParsed := dbutils.ParseIndex(newIndex)
+ if !strings.EqualFold(newParsed.IndexName, oldParsed.IndexName) {
+ continue
+ }
+
+ // normalize table names of both indexes
+ oldParsed.TableName = "validator"
+ newParsed.TableName = "validator"
+
+ if oldParsed.Build() != newParsed.Build() {
+ return validation.Errors{
+ strconv.Itoa(i): validation.NewError(
+ "validation_system_index_change",
+ "Indexes on system fields cannot change.",
+ ),
+ }
+ }
+
+ exists = true
+ break
+ }
+
+ if !exists {
+ return validation.NewError(
+ "validation_missing_system_index",
+ fmt.Sprintf("Missing system index %q.", oldParsed.IndexName),
+ ).SetParams(map[string]any{"name": oldParsed.IndexName})
+ }
+
+ continue OLD_INDEXES_LOOP
+ }
+ }
+ }
+ }
+
+ // check for required indexes
+ //
+ // note: this is in case the indexes were removed manually when creating/importing new auth collections
+ // and technically is not necessary since on app.Save the missing index will be reinserted by the system collection hook
+ if cv.new.IsAuth() {
+ requiredNames := []string{FieldNameTokenKey, FieldNameEmail}
+ for _, name := range requiredNames {
+ if !dbutils.HasSingleColumnUniqueIndex(name, indexes) {
+ return validation.NewError(
+ "validation_missing_required_unique_index",
+ `Missing required unique index for field "`+name+`".`,
+ )
+ }
+ }
+ }
+
+ return nil
+}
+
+// validateOptions validates the collection type specific options
+// (auth or view); base collections have no extra options to validate.
+func (validator *collectionValidator) validateOptions() error {
+ switch validator.new.Type {
+ case CollectionTypeAuth:
+ return validator.new.collectionAuthOptions.validate(validator)
+ case CollectionTypeView:
+ return validator.new.collectionViewOptions.validate(validator)
+ }
+
+ return nil
+}
diff --git a/core/collection_validate_test.go b/core/collection_validate_test.go
new file mode 100644
index 00000000..72410376
--- /dev/null
+++ b/core/collection_validate_test.go
@@ -0,0 +1,813 @@
+package core_test
+
+import (
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+// TestCollectionValidate covers the collection validation rules
+// (type, system flag, id, name, API rules, indexes and fields checks)
+// via table-driven scenarios; each scenario builds a collection against
+// a fresh test app and asserts the expected validation error keys.
+func TestCollectionValidate(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ collection func(app core.App) (*core.Collection, error)
+ expectedErrors []string
+ }{
+ {
+ name: "empty collection",
+ collection: func(app core.App) (*core.Collection, error) {
+ return &core.Collection{}, nil
+ },
+ expectedErrors: []string{
+ "id", "name", "type", "fields", // no default fields because the type is unknown
+ },
+ },
+ {
+ name: "unknown type with all invalid fields",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := &core.Collection{}
+ c.Id = "invalid_id ?!@#$"
+ c.Name = "invalid_name ?!@#$"
+ c.Type = "invalid_type"
+ c.ListRule = types.Pointer("missing = '123'")
+ c.ViewRule = types.Pointer("missing = '123'")
+ c.CreateRule = types.Pointer("missing = '123'")
+ c.UpdateRule = types.Pointer("missing = '123'")
+ c.DeleteRule = types.Pointer("missing = '123'")
+ c.Indexes = []string{"create index '' on '' ()"}
+
+ // type specific fields
+ c.ViewQuery = "invalid" // should be ignored
+ c.AuthRule = types.Pointer("missing = '123'") // should be ignored
+
+ return c, nil
+ },
+ expectedErrors: []string{
+ "id", "name", "type", "indexes",
+ "listRule", "viewRule", "createRule", "updateRule", "deleteRule",
+ "fields", // no default fields because the type is unknown
+ },
+ },
+ {
+ name: "base with invalid fields",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("invalid_name ?!@#$")
+ c.Indexes = []string{"create index '' on '' ()"}
+
+ // type specific fields
+ c.ViewQuery = "invalid" // should be ignored
+ c.AuthRule = types.Pointer("missing = '123'") // should be ignored
+
+ return c, nil
+ },
+ expectedErrors: []string{"name", "indexes"},
+ },
+ {
+ name: "view with invalid fields",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewViewCollection("invalid_name ?!@#$")
+ c.Indexes = []string{"create index '' on '' ()"}
+
+ // type specific fields
+ c.ViewQuery = "invalid"
+ c.AuthRule = types.Pointer("missing = '123'") // should be ignored
+
+ return c, nil
+ },
+ expectedErrors: []string{"indexes", "name", "fields", "viewQuery"},
+ },
+ {
+ name: "auth with invalid fields",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("invalid_name ?!@#$")
+ c.Indexes = []string{"create index '' on '' ()"}
+
+ // type specific fields
+ c.ViewQuery = "invalid" // should be ignored
+ c.AuthRule = types.Pointer("missing = '123'")
+
+ return c, nil
+ },
+ expectedErrors: []string{"indexes", "name", "authRule"},
+ },
+
+ // type checks
+ {
+ name: "empty type",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("test")
+ c.Type = ""
+ return c, nil
+ },
+ expectedErrors: []string{"type"},
+ },
+ {
+ name: "unknown type",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("test")
+ c.Type = "unknown"
+ return c, nil
+ },
+ expectedErrors: []string{"type"},
+ },
+ {
+ name: "base type",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("test")
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "view type",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewViewCollection("test")
+ c.ViewQuery = "select 1 as id"
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "auth type",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("test")
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "changing type",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("users")
+ c.Type = core.CollectionTypeBase
+ return c, nil
+ },
+ expectedErrors: []string{"type"},
+ },
+
+ // system checks
+ {
+ name: "change from system to regular",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId(core.CollectionNameSuperusers)
+ c.System = false
+ return c, nil
+ },
+ expectedErrors: []string{"system"},
+ },
+ {
+ name: "change from regular to system",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ c.System = true
+ return c, nil
+ },
+ expectedErrors: []string{"system"},
+ },
+ {
+ name: "create system",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("new_system")
+ c.System = true
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+
+ // id checks
+ {
+ name: "empty id",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("test")
+ c.Id = ""
+ return c, nil
+ },
+ expectedErrors: []string{"id"},
+ },
+ {
+ name: "invalid id",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("test")
+ c.Id = "!invalid"
+ return c, nil
+ },
+ expectedErrors: []string{"id"},
+ },
+ {
+ name: "existing id",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("test")
+ c.Id = "_pb_users_auth_"
+ return c, nil
+ },
+ expectedErrors: []string{"id"},
+ },
+ {
+ name: "changing id",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo3")
+ c.Id = "anything"
+ return c, nil
+ },
+ expectedErrors: []string{"id"},
+ },
+ {
+ name: "valid id",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("test")
+ c.Id = "anything"
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+
+ // name checks
+ {
+ name: "empty name",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("")
+ c.Id = "test"
+ return c, nil
+ },
+ expectedErrors: []string{"name"},
+ },
+ {
+ name: "invalid name",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("!invalid")
+ return c, nil
+ },
+ expectedErrors: []string{"name"},
+ },
+ {
+ name: "name with _via_",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("a_via_b")
+ return c, nil
+ },
+ expectedErrors: []string{"name"},
+ },
+ {
+ name: "create with existing collection name",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("demo1")
+ return c, nil
+ },
+ expectedErrors: []string{"name"},
+ },
+ {
+ name: "create with existing internal table name",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("_collections")
+ return c, nil
+ },
+ expectedErrors: []string{"name"},
+ },
+ {
+ name: "update with existing collection name",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("users")
+ c.Name = "demo1"
+ return c, nil
+ },
+ expectedErrors: []string{"name"},
+ },
+ {
+ name: "update with existing internal table name",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("users")
+ c.Name = "_collections"
+ return c, nil
+ },
+ expectedErrors: []string{"name"},
+ },
+ {
+ name: "system collection name change",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId(core.CollectionNameSuperusers)
+ c.Name = "superusers_new"
+ return c, nil
+ },
+ expectedErrors: []string{"name"},
+ },
+ {
+ name: "create with valid name",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("new_col")
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "update with valid name",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ c.Name = "demo1_new"
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+
+ // rule checks
+ {
+ name: "invalid base rules",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("new")
+ c.ListRule = types.Pointer("!invalid")
+ c.ViewRule = types.Pointer("missing = 123")
+ c.CreateRule = types.Pointer("id = 123 && missing = 456")
+ c.UpdateRule = types.Pointer("(id = 123")
+ c.DeleteRule = types.Pointer("missing = 123")
+ return c, nil
+ },
+ expectedErrors: []string{"listRule", "viewRule", "createRule", "updateRule", "deleteRule"},
+ },
+ {
+ name: "valid base rules",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("new")
+ c.Fields.Add(&core.TextField{Name: "f1"}) // dummy field to ensure that new fields can be referenced
+ c.ListRule = types.Pointer("")
+ c.ViewRule = types.Pointer("f1 = 123")
+ c.CreateRule = types.Pointer("id = 123 && f1 = 456")
+ c.UpdateRule = types.Pointer("(id = 123)")
+ c.DeleteRule = types.Pointer("f1 = 123")
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "view with non-nil create/update/delete rules",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewViewCollection("new")
+ c.ViewQuery = "select 1 as id, 'text' as f1"
+ c.ListRule = types.Pointer("id = 123")
+ c.ViewRule = types.Pointer("f1 = 456")
+ c.CreateRule = types.Pointer("")
+ c.UpdateRule = types.Pointer("")
+ c.DeleteRule = types.Pointer("")
+ return c, nil
+ },
+ expectedErrors: []string{"createRule", "updateRule", "deleteRule"},
+ },
+ {
+ name: "view with nil create/update/delete rules",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewViewCollection("new")
+ c.ViewQuery = "select 1 as id, 'text' as f1"
+ c.ListRule = types.Pointer("id = 1")
+ c.ViewRule = types.Pointer("f1 = 456")
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "changing api rules",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("users")
+ c.Fields.Add(&core.TextField{Name: "f1"}) // dummy field to ensure that new fields can be referenced
+ c.ListRule = types.Pointer("id = 1")
+ c.ViewRule = types.Pointer("f1 = 456")
+ c.CreateRule = types.Pointer("id = 123 && f1 = 456")
+ c.UpdateRule = types.Pointer("(id = 123)")
+ c.DeleteRule = types.Pointer("f1 = 123")
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "changing system collection api rules",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId(core.CollectionNameSuperusers)
+ c.ListRule = types.Pointer("1 = 1")
+ c.ViewRule = types.Pointer("1 = 1")
+ c.CreateRule = types.Pointer("1 = 1")
+ c.UpdateRule = types.Pointer("1 = 1")
+ c.DeleteRule = types.Pointer("1 = 1")
+ c.ManageRule = types.Pointer("1 = 1")
+ c.AuthRule = types.Pointer("1 = 1")
+ return c, nil
+ },
+ expectedErrors: []string{
+ "listRule", "viewRule", "createRule", "updateRule",
+ "deleteRule", "manageRule", "authRule",
+ },
+ },
+
+ // indexes checks
+ {
+ name: "invalid index expression",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ c.Indexes = []string{
+ "create index invalid",
+ "create index idx_test_demo2 on anything (text)", // the name of table shouldn't matter
+ }
+ return c, nil
+ },
+ expectedErrors: []string{"indexes"},
+ },
+ {
+ name: "index name used in other table",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ c.Indexes = []string{
+ "create index `idx_test_demo1` on demo1 (id)",
+ "create index `__pb_USERS_auth__username_idx` on anything (text)", // should be case-insensitive
+ }
+ return c, nil
+ },
+ expectedErrors: []string{"indexes"},
+ },
+ {
+ name: "duplicated index names",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ c.Indexes = []string{
+ "create index idx_test_demo1 on demo1 (id)",
+ "create index idx_test_demo1 on anything (text)",
+ }
+ return c, nil
+ },
+ expectedErrors: []string{"indexes"},
+ },
+ {
+ name: "try to add index to a view collection",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("view1")
+ c.Indexes = []string{"create index idx_test_view1 on view1 (id)"}
+ return c, nil
+ },
+ expectedErrors: []string{"indexes"},
+ },
+ {
+ name: "replace old with new indexes",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ c.Indexes = []string{
+ "create index idx_test_demo1 on demo1 (id)",
+ "create index idx_test_demo2 on anything (text)", // the name of table shouldn't matter
+ }
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "old + new indexes",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ c.Indexes = []string{
+ "CREATE INDEX `_wsmn24bux7wo113_created_idx` ON `demo1` (`created`)",
+ "create index idx_test_demo1 on anything (id)",
+ }
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "index for missing field",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ c.Indexes = []string{
+ "create index idx_test_demo1 on anything (missing)", // still valid because it is checked on db persist
+ }
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "auth collection with missing required unique indexes",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Indexes = []string{}
+ return c, nil
+ },
+ expectedErrors: []string{"indexes", "passwordAuth"},
+ },
+ {
+ name: "auth collection with non-unique required indexes",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Indexes = []string{
+ "create index test_idx1 on new_auth (tokenKey)",
+ "create index test_idx2 on new_auth (email)",
+ }
+ return c, nil
+ },
+ expectedErrors: []string{"indexes", "passwordAuth"},
+ },
+ {
+ name: "auth collection with unique required indexes",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Indexes = []string{
+ "create unique index test_idx1 on new_auth (tokenKey)",
+ "create unique index test_idx2 on new_auth (email)",
+ }
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "removing index on system field",
+ collection: func(app core.App) (*core.Collection, error) {
+ demo2, err := app.FindCollectionByNameOrId("demo2")
+ if err != nil {
+ return nil, err
+ }
+
+ // mark the title field as system
+ demo2.Fields.GetByName("title").SetSystem(true)
+ if err = app.Save(demo2); err != nil {
+ return nil, err
+ }
+
+ // refresh
+ demo2, err = app.FindCollectionByNameOrId("demo2")
+ if err != nil {
+ return nil, err
+ }
+
+ demo2.RemoveIndex("idx_unique_demo2_title")
+
+ return demo2, nil
+ },
+ expectedErrors: []string{"indexes"},
+ },
+ {
+ name: "changing index on system field",
+ collection: func(app core.App) (*core.Collection, error) {
+ demo2, err := app.FindCollectionByNameOrId("demo2")
+ if err != nil {
+ return nil, err
+ }
+
+ // mark the title field as system
+ demo2.Fields.GetByName("title").SetSystem(true)
+ if err = app.Save(demo2); err != nil {
+ return nil, err
+ }
+
+ // refresh
+ demo2, err = app.FindCollectionByNameOrId("demo2")
+ if err != nil {
+ return nil, err
+ }
+
+ // replace the index with a partial one
+ demo2.RemoveIndex("idx_unique_demo2_title")
+ demo2.AddIndex("idx_unique_demo2_title", true, "title", "1 = 1")
+
+ return demo2, nil
+ },
+ expectedErrors: []string{"indexes"},
+ },
+ {
+ name: "changing index on non-system field",
+ collection: func(app core.App) (*core.Collection, error) {
+ demo2, err := app.FindCollectionByNameOrId("demo2")
+ if err != nil {
+ return nil, err
+ }
+
+ // replace the index with a partial one
+ demo2.RemoveIndex("idx_demo2_active")
+ demo2.AddIndex("idx_demo2_active", true, "active", "1 = 1")
+
+ return demo2, nil
+ },
+ expectedErrors: []string{},
+ },
+
+ // fields list checks
+ {
+ name: "empty fields",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("new_auth")
+ c.Fields = nil // the minimum fields should auto added
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ // NOTE(review): "primay" is a typo in the scenario name (cosmetic only)
+ name: "no id primay key field",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("new_auth")
+ c.Fields = core.NewFieldsList(
+ &core.TextField{Name: "id"},
+ )
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ // NOTE(review): "primay" is a typo in the scenario name (cosmetic only)
+ name: "with id primay key field",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("new_auth")
+ c.Fields = core.NewFieldsList(
+ &core.TextField{Name: "id", PrimaryKey: true, Required: true},
+ )
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "duplicated field names",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("new_auth")
+ c.Fields = core.NewFieldsList(
+ &core.TextField{Name: "id", PrimaryKey: true, Required: true},
+ &core.TextField{Id: "f1", Name: "Test"}, // case-insensitive
+ &core.BoolField{Id: "f2", Name: "test"},
+ )
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "changing field type",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("demo1")
+ f := c.Fields.GetByName("text")
+ c.Fields.Add(&core.BoolField{Id: f.GetId(), Name: f.GetName()})
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "renaming system field",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId(core.CollectionNameAuthOrigins)
+ f := c.Fields.GetByName("fingerprint")
+ f.SetName("fingerprint_new")
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "deleting system field",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId(core.CollectionNameAuthOrigins)
+ c.Fields.RemoveByName("fingerprint")
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "invalid field setting",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("test_new")
+ c.Fields.Add(&core.TextField{Name: "f1", Min: -10})
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "valid field setting",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewBaseCollection("test_new")
+ c.Fields.Add(&core.TextField{Name: "f1", Min: 10})
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "fields view changes should be ignored",
+ collection: func(app core.App) (*core.Collection, error) {
+ c, _ := app.FindCollectionByNameOrId("view1")
+ c.Fields = nil
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "with reserved auth only field name (passwordConfirm)",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(
+ &core.TextField{Name: "passwordConfirm"},
+ )
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "with reserved auth only field name (oldPassword)",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(
+ &core.TextField{Name: "oldPassword"},
+ )
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "with invalid password auth field options (1)",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(
+ &core.TextField{Name: "password", System: true, Hidden: true}, // should be PasswordField
+ )
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "with valid password auth field options (2)",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(
+ &core.PasswordField{Name: "password", System: true, Hidden: true},
+ )
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "with invalid tokenKey auth field options (1)",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(
+ &core.TextField{Name: "tokenKey", System: true}, // should be also hidden
+ )
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "with valid tokenKey auth field options (2)",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(
+ &core.TextField{Name: "tokenKey", System: true, Hidden: true},
+ )
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "with invalid email auth field options (1)",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(
+ &core.TextField{Name: "email", System: true}, // should be EmailField
+ )
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "with valid email auth field options (2)",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(
+ &core.EmailField{Name: "email", System: true},
+ )
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "with invalid verified auth field options (1)",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(
+ &core.TextField{Name: "verified", System: true}, // should be BoolField
+ )
+ return c, nil
+ },
+ expectedErrors: []string{"fields"},
+ },
+ {
+ name: "with valid verified auth field options (2)",
+ collection: func(app core.App) (*core.Collection, error) {
+ c := core.NewAuthCollection("new_auth")
+ c.Fields.Add(
+ &core.BoolField{Name: "verified", System: true},
+ )
+ return c, nil
+ },
+ expectedErrors: []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection, err := s.collection(app)
+ if err != nil {
+ t.Fatalf("Failed to retrieve test collection: %v", err)
+ }
+
+ result := app.Validate(collection)
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
diff --git a/core/collections_cache.go b/core/collections_cache.go
deleted file mode 100644
index e4ca3d43..00000000
--- a/core/collections_cache.go
+++ /dev/null
@@ -1,72 +0,0 @@
-package core
-
-// -------------------------------------------------------------------
-// This is a small optimization ported from the [ongoing refactoring branch](https://github.com/pocketbase/pocketbase/discussions/4355).
-//
-// @todo remove after the refactoring is finalized.
-// -------------------------------------------------------------------
-
-import (
- "strings"
-
- "github.com/pocketbase/pocketbase/models"
-)
-
-const storeCachedCollectionsKey = "@cachedCollectionsContext"
-
-func registerCachedCollectionsAppHooks(app App) {
- collectionsChangeFunc := func(e *ModelEvent) error {
- if _, ok := e.Model.(*models.Collection); !ok {
- return nil
- }
-
- _ = ReloadCachedCollections(app)
-
- return nil
- }
- app.OnModelAfterCreate().Add(collectionsChangeFunc)
- app.OnModelAfterUpdate().Add(collectionsChangeFunc)
- app.OnModelAfterDelete().Add(collectionsChangeFunc)
- app.OnBeforeServe().Add(func(e *ServeEvent) error {
- _ = ReloadCachedCollections(e.App)
- return nil
- })
-}
-
-func ReloadCachedCollections(app App) error {
- collections := []*models.Collection{}
-
- err := app.Dao().CollectionQuery().All(&collections)
- if err != nil {
- return err
- }
-
- app.Store().Set(storeCachedCollectionsKey, collections)
-
- return nil
-}
-
-func FindCachedCollectionByNameOrId(app App, nameOrId string) (*models.Collection, error) {
- // retrieve from the app cache
- // ---
- collections, _ := app.Store().Get(storeCachedCollectionsKey).([]*models.Collection)
- for _, c := range collections {
- if strings.EqualFold(c.Name, nameOrId) || c.Id == nameOrId {
- return c, nil
- }
- }
-
- // retrieve from the database
- // ---
- found, err := app.Dao().FindCollectionByNameOrId(nameOrId)
- if err != nil {
- return nil, err
- }
-
- err = ReloadCachedCollections(app)
- if err != nil {
- app.Logger().Warn("Failed to reload collections cache", "error", err)
- }
-
- return found, nil
-}
diff --git a/core/db.go b/core/db.go
new file mode 100644
index 00000000..12df9b5f
--- /dev/null
+++ b/core/db.go
@@ -0,0 +1,503 @@
+package core
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "hash/crc32"
+ "regexp"
+ "slices"
+ "strconv"
+ "strings"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/tools/security"
+ "github.com/spf13/cast"
+)
+
+const (
+ idColumn string = "id"
+
+ // DefaultIdLength is the default length of the generated model id.
+ DefaultIdLength int = 15
+
+ // DefaultIdAlphabet is the default characters set used for generating the model id.
+ DefaultIdAlphabet string = "abcdefghijklmnopqrstuvwxyz0123456789"
+)
+
+// DefaultIdRegex specifies the default regex pattern for an id value.
+var DefaultIdRegex = regexp.MustCompile(`^\w+$`)
+
+// DBExporter defines an interface for custom DB data export.
+// Usually used as part of [App.Save].
+type DBExporter interface {
+ // DBExport returns a key-value map with the data to be used when saving the struct in the database.
+ DBExport(app App) (map[string]any, error)
+}
+
+// PreValidator defines an optional model interface for registering a
+// function that will run BEFORE firing the validation hooks (see [App.ValidateWithContext]).
+type PreValidator interface {
+ // PreValidate defines a function that runs BEFORE the validation hooks.
+ PreValidate(ctx context.Context, app App) error
+}
+
+// PostValidator defines an optional model interface for registering a
+// function that will run AFTER executing the validation hooks (see [App.ValidateWithContext]).
+type PostValidator interface {
+ // PostValidate defines a function that runs AFTER the successful
+ // execution of the validation hooks.
+ PostValidate(ctx context.Context, app App) error
+}
+
+// GenerateDefaultRandomId generates a default random id string
+// (note: the generated random string is not intended for security purposes).
+func GenerateDefaultRandomId() string {
+ return security.PseudorandomStringWithAlphabet(DefaultIdLength, DefaultIdAlphabet)
+}
+
+// crc32Checksum generates a stringified crc32 checksum from the provided plain string.
+func crc32Checksum(str string) string {
+ return strconv.Itoa(int(crc32.ChecksumIEEE([]byte(str))))
+}
+
+// ModelQuery creates a new preconfigured select app.DB() query with preset
+// SELECT, FROM and other common fields based on the provided model.
+func (app *BaseApp) ModelQuery(m Model) *dbx.SelectQuery {
+ return app.modelQuery(app.DB(), m)
+}
+
+// AuxModelQuery creates a new preconfigured select app.AuxDB() query with preset
+// SELECT, FROM and other common fields based on the provided model.
+func (app *BaseApp) AuxModelQuery(m Model) *dbx.SelectQuery {
+ return app.modelQuery(app.AuxDB(), m)
+}
+
+func (app *BaseApp) modelQuery(db dbx.Builder, m Model) *dbx.SelectQuery {
+ tableName := m.TableName()
+
+ return db.
+ Select("{{" + tableName + "}}.*").
+ From(tableName).
+ WithBuildHook(func(query *dbx.Query) {
+ query.WithExecHook(execLockRetry(app.config.QueryTimeout, defaultMaxLockRetries))
+ })
+}
+
+// Delete deletes the specified model from the regular app database.
+func (app *BaseApp) Delete(model Model) error {
+ return app.DeleteWithContext(context.Background(), model)
+}
+
+// DeleteWithContext deletes the specified model from the regular app database
+// (the context could be used to limit the query execution).
+func (app *BaseApp) DeleteWithContext(ctx context.Context, model Model) error {
+ return app.delete(ctx, model, false)
+}
+
+// AuxDelete deletes the specified model from the auxiliary database.
+func (app *BaseApp) AuxDelete(model Model) error {
+ return app.AuxDeleteWithContext(context.Background(), model)
+}
+
+// AuxDeleteWithContext deletes the specified model from the auxiliary database
+// (the context could be used to limit the query execution).
+func (app *BaseApp) AuxDeleteWithContext(ctx context.Context, model Model) error {
+ return app.delete(ctx, model, true)
+}
+
+func (app *BaseApp) delete(ctx context.Context, model Model, isForAuxDB bool) error {
+ event := new(ModelEvent)
+ event.App = app
+ event.Type = ModelEventTypeDelete
+ event.Context = ctx
+ event.Model = model
+
+ deleteErr := app.OnModelDelete().Trigger(event, func(e *ModelEvent) error {
+ pk := cast.ToString(e.Model.LastSavedPK())
+
+ if cast.ToString(pk) == "" {
+ return errors.New("the model can be deleted only if it is existing and has a non-empty primary key")
+ }
+
+ // db write
+ return e.App.OnModelDeleteExecute().Trigger(event, func(e *ModelEvent) error {
+ var db dbx.Builder
+ if isForAuxDB {
+ db = e.App.AuxNonconcurrentDB()
+ } else {
+ db = e.App.NonconcurrentDB()
+ }
+
+ return baseLockRetry(func(attempt int) error {
+ _, err := db.Delete(e.Model.TableName(), dbx.HashExp{
+ idColumn: pk,
+ }).WithContext(e.Context).Execute()
+
+ return err
+ }, defaultMaxLockRetries)
+ })
+ })
+ if deleteErr != nil {
+ hookErr := app.OnModelAfterDeleteError().Trigger(&ModelErrorEvent{
+ ModelEvent: *event,
+ Error: deleteErr,
+ })
+ if hookErr != nil {
+ return errors.Join(deleteErr, hookErr)
+ }
+
+ return deleteErr
+ }
+
+ if app.txInfo != nil {
+ // execute later after the transaction has completed
+ app.txInfo.onAfterFunc(func(txErr error) error {
+ if app.txInfo != nil && app.txInfo.parent != nil {
+ event.App = app.txInfo.parent
+ }
+
+ if txErr != nil {
+ return app.OnModelAfterDeleteError().Trigger(&ModelErrorEvent{
+ ModelEvent: *event,
+ Error: txErr,
+ })
+ }
+
+ return app.OnModelAfterDeleteSuccess().Trigger(event)
+ })
+ } else if err := event.App.OnModelAfterDeleteSuccess().Trigger(event); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// Save validates and saves the specified model into the regular app database.
+//
+// If you don't want to run validations, use [App.SaveNoValidate()].
+func (app *BaseApp) Save(model Model) error {
+ return app.SaveWithContext(context.Background(), model)
+}
+
+// SaveWithContext is the same as [App.Save()] but allows specifying a context to limit the db execution.
+//
+// If you don't want to run validations, use [App.SaveNoValidateWithContext()].
+func (app *BaseApp) SaveWithContext(ctx context.Context, model Model) error {
+ return app.save(ctx, model, true, false)
+}
+
+// SaveNoValidate saves the specified model into the regular app database without performing validations.
+//
+// If you want to also run validations before persisting, use [App.Save()].
+func (app *BaseApp) SaveNoValidate(model Model) error {
+ return app.SaveNoValidateWithContext(context.Background(), model)
+}
+
+// SaveNoValidateWithContext is the same as [App.SaveNoValidate()]
+// but allows specifying a context to limit the db execution.
+//
+// If you want to also run validations before persisting, use [App.SaveWithContext()].
+func (app *BaseApp) SaveNoValidateWithContext(ctx context.Context, model Model) error {
+ return app.save(ctx, model, false, false)
+}
+
+// AuxSave validates and saves the specified model into the auxiliary app database.
+//
+// If you don't want to run validations, use [App.AuxSaveNoValidate()].
+func (app *BaseApp) AuxSave(model Model) error {
+ return app.AuxSaveWithContext(context.Background(), model)
+}
+
+// AuxSaveWithContext is the same as [App.AuxSave()] but allows specifying a context to limit the db execution.
+//
+// If you don't want to run validations, use [App.AuxSaveNoValidateWithContext()].
+func (app *BaseApp) AuxSaveWithContext(ctx context.Context, model Model) error {
+ return app.save(ctx, model, true, true)
+}
+
+// AuxSaveNoValidate saves the specified model into the auxiliary app database without performing validations.
+//
+// If you want to also run validations before persisting, use [App.AuxSave()].
+func (app *BaseApp) AuxSaveNoValidate(model Model) error {
+ return app.AuxSaveNoValidateWithContext(context.Background(), model)
+}
+
+// AuxSaveNoValidateWithContext is the same as [App.AuxSaveNoValidate()]
+// but allows specifying a context to limit the db execution.
+//
+// If you want to also run validations before persisting, use [App.AuxSaveWithContext()].
+func (app *BaseApp) AuxSaveNoValidateWithContext(ctx context.Context, model Model) error {
+ return app.save(ctx, model, false, true)
+}
+
+// Validate triggers the OnModelValidate hook for the specified model.
+func (app *BaseApp) Validate(model Model) error {
+ return app.ValidateWithContext(context.Background(), model)
+}
+
+// ValidateWithContext is the same as Validate but allows specifying the ModelEvent context.
+func (app *BaseApp) ValidateWithContext(ctx context.Context, model Model) error {
+ if m, ok := model.(PreValidator); ok {
+ if err := m.PreValidate(ctx, app); err != nil {
+ return err
+ }
+ }
+
+ event := new(ModelEvent)
+ event.App = app
+ event.Context = ctx
+ event.Type = ModelEventTypeValidate
+ event.Model = model
+
+ return event.App.OnModelValidate().Trigger(event, func(e *ModelEvent) error {
+ if m, ok := e.Model.(PostValidator); ok {
+ if err := m.PostValidate(ctx, e.App); err != nil {
+ return err
+ }
+ }
+
+ return e.Next()
+ })
+}
+
+// -------------------------------------------------------------------
+
+func (app *BaseApp) save(ctx context.Context, model Model, withValidations bool, isForAuxDB bool) error {
+ if model.IsNew() {
+ return app.create(ctx, model, withValidations, isForAuxDB)
+ }
+
+ return app.update(ctx, model, withValidations, isForAuxDB)
+}
+
+func (app *BaseApp) create(ctx context.Context, model Model, withValidations bool, isForAuxDB bool) error {
+ event := new(ModelEvent)
+ event.App = app
+ event.Context = ctx
+ event.Type = ModelEventTypeCreate
+ event.Model = model
+
+ saveErr := app.OnModelCreate().Trigger(event, func(e *ModelEvent) error {
+ // run validations (if any)
+ if withValidations {
+ validateErr := e.App.ValidateWithContext(e.Context, e.Model)
+ if validateErr != nil {
+ return validateErr
+ }
+ }
+
+ // db write
+ return e.App.OnModelCreateExecute().Trigger(event, func(e *ModelEvent) error {
+ var db dbx.Builder
+ if isForAuxDB {
+ db = e.App.AuxNonconcurrentDB()
+ } else {
+ db = e.App.NonconcurrentDB()
+ }
+
+ dbErr := baseLockRetry(func(attempt int) error {
+ if m, ok := e.Model.(DBExporter); ok {
+ data, err := m.DBExport(e.App)
+ if err != nil {
+ return err
+ }
+
+ // manually add the id to the data if missing
+ if _, ok := data[idColumn]; !ok {
+ data[idColumn] = e.Model.PK()
+ }
+
+ if cast.ToString(data[idColumn]) == "" {
+ return errors.New("empty primary key is not allowed when using the DBExporter interface")
+ }
+
+ _, err = db.Insert(e.Model.TableName(), data).WithContext(e.Context).Execute()
+
+ return err
+ }
+
+ return db.Model(e.Model).WithContext(e.Context).Insert()
+ }, defaultMaxLockRetries)
+ if dbErr != nil {
+ return dbErr
+ }
+
+ e.Model.MarkAsNotNew()
+
+ return nil
+ })
+ })
+ if saveErr != nil {
+ event.Model.MarkAsNew() // reset "new" state
+
+ hookErr := app.OnModelAfterCreateError().Trigger(&ModelErrorEvent{
+ ModelEvent: *event,
+ Error: saveErr,
+ })
+ if hookErr != nil {
+ return errors.Join(saveErr, hookErr)
+ }
+
+ return saveErr
+ }
+
+ if app.txInfo != nil {
+ // execute later after the transaction has completed
+ app.txInfo.onAfterFunc(func(txErr error) error {
+ if app.txInfo != nil && app.txInfo.parent != nil {
+ event.App = app.txInfo.parent
+ }
+
+ if txErr != nil {
+ event.Model.MarkAsNew() // reset "new" state
+
+ return app.OnModelAfterCreateError().Trigger(&ModelErrorEvent{
+ ModelEvent: *event,
+ Error: txErr,
+ })
+ }
+
+ return app.OnModelAfterCreateSuccess().Trigger(event)
+ })
+ } else if err := event.App.OnModelAfterCreateSuccess().Trigger(event); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func (app *BaseApp) update(ctx context.Context, model Model, withValidations bool, isForAuxDB bool) error {
+ event := new(ModelEvent)
+ event.App = app
+ event.Context = ctx
+ event.Type = ModelEventTypeUpdate
+ event.Model = model
+
+ saveErr := app.OnModelUpdate().Trigger(event, func(e *ModelEvent) error {
+ // run validations (if any)
+ if withValidations {
+ validateErr := e.App.ValidateWithContext(e.Context, e.Model)
+ if validateErr != nil {
+ return validateErr
+ }
+ }
+
+ // db write
+ return e.App.OnModelUpdateExecute().Trigger(event, func(e *ModelEvent) error {
+ var db dbx.Builder
+ if isForAuxDB {
+ db = e.App.AuxNonconcurrentDB()
+ } else {
+ db = e.App.NonconcurrentDB()
+ }
+
+ return baseLockRetry(func(attempt int) error {
+ if m, ok := e.Model.(DBExporter); ok {
+ data, err := m.DBExport(e.App)
+ if err != nil {
+ return err
+ }
+
+ // note: for now disallow primary key change for consistency with dbx.ModelQuery.Update()
+ if data[idColumn] != e.Model.LastSavedPK() {
+ return errors.New("primary key change is not allowed")
+ }
+
+ _, err = db.Update(e.Model.TableName(), data, dbx.HashExp{
+ idColumn: e.Model.LastSavedPK(),
+ }).WithContext(e.Context).Execute()
+
+ return err
+ }
+
+ return db.Model(e.Model).WithContext(e.Context).Update()
+ }, defaultMaxLockRetries)
+ })
+ })
+ if saveErr != nil {
+ hookErr := app.OnModelAfterUpdateError().Trigger(&ModelErrorEvent{
+ ModelEvent: *event,
+ Error: saveErr,
+ })
+ if hookErr != nil {
+ return errors.Join(saveErr, hookErr)
+ }
+
+ return saveErr
+ }
+
+ if app.txInfo != nil {
+ // execute later after the transaction has completed
+ app.txInfo.onAfterFunc(func(txErr error) error {
+ if app.txInfo != nil && app.txInfo.parent != nil {
+ event.App = app.txInfo.parent
+ }
+
+ if txErr != nil {
+ return app.OnModelAfterUpdateError().Trigger(&ModelErrorEvent{
+ ModelEvent: *event,
+ Error: txErr,
+ })
+ }
+
+ return app.OnModelAfterUpdateSuccess().Trigger(event)
+ })
+ } else if err := event.App.OnModelAfterUpdateSuccess().Trigger(event); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+func validateCollectionId(app App, optTypes ...string) validation.RuleFunc {
+ return func(value any) error {
+ id, _ := value.(string)
+ if id == "" {
+ return nil
+ }
+
+ collection := &Collection{}
+ if err := app.ModelQuery(collection).Model(id, collection); err != nil {
+ return validation.NewError("validation_invalid_collection_id", "Missing or invalid collection.")
+ }
+
+ if len(optTypes) > 0 && !slices.Contains(optTypes, collection.Type) {
+ return validation.NewError(
+ "validation_invalid_collection_type",
+ fmt.Sprintf("Invalid collection type - must be %s.", strings.Join(optTypes, ", ")),
+ ).SetParams(map[string]any{"types": optTypes})
+ }
+
+ return nil
+ }
+}
+
+func validateRecordId(app App, collectionNameOrId string) validation.RuleFunc {
+ return func(value any) error {
+ id, _ := value.(string)
+ if id == "" {
+ return nil
+ }
+
+ collection, err := app.FindCachedCollectionByNameOrId(collectionNameOrId)
+ if err != nil {
+ return validation.NewError("validation_invalid_collection", "Missing or invalid collection.")
+ }
+
+ var exists bool
+
+ rowErr := app.DB().Select("(1)").
+ From(collection.Name).
+ AndWhere(dbx.HashExp{"id": id}).
+ Limit(1).
+ Row(&exists)
+
+ if rowErr != nil || !exists {
+ return validation.NewError("validation_invalid_record", "Missing or invalid record.")
+ }
+
+ return nil
+ }
+}
diff --git a/core/db_cgo.go b/core/db_connect_cgo.go
similarity index 95%
rename from core/db_cgo.go
rename to core/db_connect_cgo.go
index 25d46083..c7a7bd84 100644
--- a/core/db_cgo.go
+++ b/core/db_connect_cgo.go
@@ -40,7 +40,7 @@ func init() {
dbx.BuilderFuncMap["pb_sqlite3"] = dbx.BuilderFuncMap["sqlite3"]
}
-func connectDB(dbPath string) (*dbx.DB, error) {
+func dbConnect(dbPath string) (*dbx.DB, error) {
db, err := dbx.Open("pb_sqlite3", dbPath)
if err != nil {
return nil, err
diff --git a/core/db_nocgo.go b/core/db_connect_nocgo.go
similarity index 92%
rename from core/db_nocgo.go
rename to core/db_connect_nocgo.go
index 98fe8c3a..4e784b03 100644
--- a/core/db_nocgo.go
+++ b/core/db_connect_nocgo.go
@@ -7,7 +7,7 @@ import (
_ "modernc.org/sqlite"
)
-func connectDB(dbPath string) (*dbx.DB, error) {
+func dbConnect(dbPath string) (*dbx.DB, error) {
// Note: the busy_timeout pragma must be first because
// the connection needs to be set to block on busy before WAL mode
// is set in case it hasn't been already set by another connection.
diff --git a/core/db_model.go b/core/db_model.go
new file mode 100644
index 00000000..9649e1ae
--- /dev/null
+++ b/core/db_model.go
@@ -0,0 +1,59 @@
+package core
+
+// Model defines an interface with common methods that all db models should have.
+//
+// Note: for simplicity composite pk are not supported.
+type Model interface {
+ TableName() string
+ PK() any
+ LastSavedPK() any
+ IsNew() bool
+ MarkAsNew()
+ MarkAsNotNew()
+}
+
+// BaseModel defines a base struct that is intended to be embedded into other custom models.
+type BaseModel struct {
+ lastSavedPK string
+
+ // Id is the primary key of the model.
+ // It is usually autogenerated by the parent model implementation.
+ Id string `db:"id" json:"id" form:"id" xml:"id"`
+}
+
+// LastSavedPK returns the last saved primary key of the model.
+//
+// Its value is updated to the latest PK value after MarkAsNotNew() or PostScan() calls.
+func (m *BaseModel) LastSavedPK() any {
+ return m.lastSavedPK
+}
+
+func (m *BaseModel) PK() any {
+ return m.Id
+}
+
+// IsNew indicates what type of db query (insert or update)
+// should be used with the model instance.
+func (m *BaseModel) IsNew() bool {
+ return m.lastSavedPK == ""
+}
+
+// MarkAsNew clears the pk field and marks the current model as "new"
+// (aka. forces m.IsNew() to be true).
+func (m *BaseModel) MarkAsNew() {
+ m.lastSavedPK = ""
+}
+
+// MarkAsNotNew sets the pk field to the Id value and marks the current model
+// as NOT "new" (aka. forces m.IsNew() to be false).
+func (m *BaseModel) MarkAsNotNew() {
+ m.lastSavedPK = m.Id
+}
+
+// PostScan implements the [dbx.PostScanner] interface.
+//
+// It is usually executed right after the model is populated with the db row values.
+func (m *BaseModel) PostScan() error {
+ m.MarkAsNotNew()
+ return nil
+}
diff --git a/core/db_model_test.go b/core/db_model_test.go
new file mode 100644
index 00000000..1771a778
--- /dev/null
+++ b/core/db_model_test.go
@@ -0,0 +1,70 @@
+package core_test
+
+import (
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+)
+
+func TestBaseModel(t *testing.T) {
+ id := "test_id"
+
+ m := core.BaseModel{Id: id}
+
+ if m.PK() != id {
+ t.Fatalf("[before PostScan] Expected PK %q, got %q", id, m.PK())
+ }
+
+ if m.LastSavedPK() != "" {
+ t.Fatalf("[before PostScan] Expected LastSavedPK %q, got %q", "", m.LastSavedPK())
+ }
+
+ if !m.IsNew() {
+ t.Fatalf("[before PostScan] Expected IsNew %v, got %v", true, m.IsNew())
+ }
+
+ if err := m.PostScan(); err != nil {
+ t.Fatal(err)
+ }
+
+ if m.PK() != id {
+ t.Fatalf("[after PostScan] Expected PK %q, got %q", id, m.PK())
+ }
+
+ if m.LastSavedPK() != id {
+ t.Fatalf("[after PostScan] Expected LastSavedPK %q, got %q", id, m.LastSavedPK())
+ }
+
+ if m.IsNew() {
+ t.Fatalf("[after PostScan] Expected IsNew %v, got %v", false, m.IsNew())
+ }
+
+ m.MarkAsNew()
+
+ if m.PK() != id {
+ t.Fatalf("[after MarkAsNew] Expected PK %q, got %q", id, m.PK())
+ }
+
+ if m.LastSavedPK() != "" {
+ t.Fatalf("[after MarkAsNew] Expected LastSavedPK %q, got %q", "", m.LastSavedPK())
+ }
+
+ if !m.IsNew() {
+ t.Fatalf("[after MarkAsNew] Expected IsNew %v, got %v", true, m.IsNew())
+ }
+
+ // mark as not new again (the Id field is still set)
+ m.MarkAsNotNew()
+
+ if m.PK() != id {
+ t.Fatalf("[after MarkAsNotNew] Expected PK %q, got %q", id, m.PK())
+ }
+
+ if m.LastSavedPK() != id {
+ t.Fatalf("[after MarkAsNotNew] Expected LastSavedPK %q, got %q", id, m.LastSavedPK())
+ }
+
+ if m.IsNew() {
+ t.Fatalf("[after MarkAsNotNew] Expected IsNew %v, got %v", false, m.IsNew())
+ }
+}
diff --git a/daos/base_retry.go b/core/db_retry.go
similarity index 85%
rename from daos/base_retry.go
rename to core/db_retry.go
index 8be2409a..161a1e72 100644
--- a/daos/base_retry.go
+++ b/core/db_retry.go
@@ -1,4 +1,4 @@
-package daos
+package core
import (
"context"
@@ -12,7 +12,10 @@ import (
)
// default retries intervals (in ms)
-var defaultRetryIntervals = []int{100, 250, 350, 500, 700, 1000}
+var defaultRetryIntervals = []int{50, 100, 150, 200, 300, 400, 500, 700, 1000}
+
+// default max retry attempts
+const defaultMaxLockRetries = 12
func execLockRetry(timeout time.Duration, maxRetries int) dbx.ExecHookFunc {
return func(q *dbx.Query, op func() error) error {
@@ -45,7 +48,7 @@ Retry:
if err != nil &&
attempt <= maxRetries &&
- // we are checking the err message to handle both the cgo and noncgo errors
+ // we are checking the plain error text to handle both cgo and noncgo errors
strings.Contains(err.Error(), "database is locked") {
// wait and retry
time.Sleep(getDefaultRetryInterval(attempt))
diff --git a/core/db_retry_test.go b/core/db_retry_test.go
new file mode 100644
index 00000000..85fb80b7
--- /dev/null
+++ b/core/db_retry_test.go
@@ -0,0 +1,65 @@
+package core
+
+import (
+ "errors"
+ "fmt"
+ "testing"
+)
+
+func TestGetDefaultRetryInterval(t *testing.T) {
+ t.Parallel()
+
+ if i := getDefaultRetryInterval(-1); i.Milliseconds() != 1000 {
+ t.Fatalf("Expected 1000ms, got %v", i)
+ }
+
+ if i := getDefaultRetryInterval(999); i.Milliseconds() != 1000 {
+ t.Fatalf("Expected 1000ms, got %v", i)
+ }
+
+ if i := getDefaultRetryInterval(3); i.Milliseconds() != 200 {
+ t.Fatalf("Expected 200ms, got %v", i)
+ }
+}
+
+func TestBaseLockRetry(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ err error
+ failUntilAttempt int
+ expectedAttempts int
+ }{
+ {nil, 3, 1},
+ {errors.New("test"), 3, 1},
+ {errors.New("database is locked"), 3, 3},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.err), func(t *testing.T) {
+ lastAttempt := 0
+
+ err := baseLockRetry(func(attempt int) error {
+ lastAttempt = attempt
+
+ if attempt < s.failUntilAttempt {
+ return s.err
+ }
+
+ return nil
+ }, s.failUntilAttempt+2)
+
+ if lastAttempt != s.expectedAttempts {
+ t.Errorf("Expected lastAttempt to be %d, got %d", s.expectedAttempts, lastAttempt)
+ }
+
+ if s.failUntilAttempt == s.expectedAttempts && err != nil {
+ t.Fatalf("Expected nil, got err %v", err)
+ }
+
+ if s.failUntilAttempt != s.expectedAttempts && s.err != nil && err == nil {
+ t.Fatalf("Expected error %q, got nil", s.err)
+ }
+ })
+ }
+}
diff --git a/daos/table.go b/core/db_table.go
similarity index 53%
rename from daos/table.go
rename to core/db_table.go
index 1c2c2ac3..04183b79 100644
--- a/daos/table.go
+++ b/core/db_table.go
@@ -1,17 +1,17 @@
-package daos
+package core
import (
+ "database/sql"
"fmt"
"github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
)
// HasTable checks if a table (or view) with the provided name exists (case insensitive).
-func (dao *Dao) HasTable(tableName string) bool {
+func (app *BaseApp) HasTable(tableName string) bool {
var exists bool
- err := dao.DB().Select("count(*)").
+ err := app.DB().Select("(1)").
From("sqlite_schema").
AndWhere(dbx.HashExp{"type": []any{"table", "view"}}).
AndWhere(dbx.NewExp("LOWER([[name]])=LOWER({:tableName})", dbx.Params{"tableName": tableName})).
@@ -22,21 +22,33 @@ func (dao *Dao) HasTable(tableName string) bool {
}
// TableColumns returns all column names of a single table by its name.
-func (dao *Dao) TableColumns(tableName string) ([]string, error) {
+func (app *BaseApp) TableColumns(tableName string) ([]string, error) {
columns := []string{}
- err := dao.DB().NewQuery("SELECT name FROM PRAGMA_TABLE_INFO({:tableName})").
+ err := app.DB().NewQuery("SELECT name FROM PRAGMA_TABLE_INFO({:tableName})").
Bind(dbx.Params{"tableName": tableName}).
Column(&columns)
return columns, err
}
-// TableInfo returns the `table_info` pragma result for the specified table.
-func (dao *Dao) TableInfo(tableName string) ([]*models.TableInfoRow, error) {
- info := []*models.TableInfoRow{}
+type TableInfoRow struct {
+ // the `db:"pk"` tag has special semantics so we cannot rename
+ // the original field without specifying a custom mapper
+ PK int
- err := dao.DB().NewQuery("SELECT * FROM PRAGMA_TABLE_INFO({:tableName})").
+ Index int `db:"cid"`
+ Name string `db:"name"`
+ Type string `db:"type"`
+ NotNull bool `db:"notnull"`
+ DefaultValue sql.NullString `db:"dflt_value"`
+}
+
+// TableInfo returns the "table_info" pragma result for the specified table.
+func (app *BaseApp) TableInfo(tableName string) ([]*TableInfoRow, error) {
+ info := []*TableInfoRow{}
+
+ err := app.DB().NewQuery("SELECT * FROM PRAGMA_TABLE_INFO({:tableName})").
Bind(dbx.Params{"tableName": tableName}).
All(&info)
if err != nil {
@@ -55,13 +67,13 @@ func (dao *Dao) TableInfo(tableName string) ([]*models.TableInfoRow, error) {
// TableIndexes returns a name grouped map with all non empty index of the specified table.
//
// Note: This method doesn't return an error on nonexisting table.
-func (dao *Dao) TableIndexes(tableName string) (map[string]string, error) {
+func (app *BaseApp) TableIndexes(tableName string) (map[string]string, error) {
indexes := []struct {
Name string
Sql string
}{}
- err := dao.DB().Select("name", "sql").
+ err := app.DB().Select("name", "sql").
From("sqlite_master").
AndWhere(dbx.NewExp("sql is not null")).
AndWhere(dbx.HashExp{
@@ -86,10 +98,10 @@ func (dao *Dao) TableIndexes(tableName string) (map[string]string, error) {
//
// This method is a no-op if a table with the provided name doesn't exist.
//
-// Be aware that this method is vulnerable to SQL injection and the
+// NB! Be aware that this method is vulnerable to SQL injection and the
// "tableName" argument must come only from trusted input!
-func (dao *Dao) DeleteTable(tableName string) error {
- _, err := dao.DB().NewQuery(fmt.Sprintf(
+func (app *BaseApp) DeleteTable(tableName string) error {
+ _, err := app.DB().NewQuery(fmt.Sprintf(
"DROP TABLE IF EXISTS {{%s}}",
tableName,
)).Execute()
@@ -97,10 +109,20 @@ func (dao *Dao) DeleteTable(tableName string) error {
return err
}
-// Vacuum executes VACUUM on the current dao.DB() instance in order to
-// reclaim unused db disk space.
-func (dao *Dao) Vacuum() error {
- _, err := dao.DB().NewQuery("VACUUM").Execute()
+// Vacuum executes VACUUM on the current app.DB() instance
+// in order to reclaim unused data db disk space.
+func (app *BaseApp) Vacuum() error {
+ return app.vacuum(app.DB())
+}
+
+// AuxVacuum executes VACUUM on the current app.AuxDB() instance
+// in order to reclaim unused auxiliary db disk space.
+func (app *BaseApp) AuxVacuum() error {
+ return app.vacuum(app.AuxDB())
+}
+
+func (app *BaseApp) vacuum(db dbx.Builder) error {
+ _, err := db.NewQuery("VACUUM").Execute()
return err
}
diff --git a/core/db_table_test.go b/core/db_table_test.go
new file mode 100644
index 00000000..cec3c0d2
--- /dev/null
+++ b/core/db_table_test.go
@@ -0,0 +1,225 @@
+package core_test
+
+import (
+ "context"
+ "database/sql"
+ "encoding/json"
+ "fmt"
+ "slices"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestHasTable(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ tableName string
+ expected bool
+ }{
+ {"", false},
+ {"test", false},
+ {core.CollectionNameSuperusers, true},
+ {"demo3", true},
+ {"DEMO3", true}, // table names are case insensitive by default
+ {"view1", true}, // view
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.tableName, func(t *testing.T) {
+ result := app.HasTable(s.tableName)
+ if result != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
+
+func TestTableColumns(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ tableName string
+ expected []string
+ }{
+ {"", nil},
+ {"_params", []string{"id", "value", "created", "updated"}},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.tableName), func(t *testing.T) {
+ columns, _ := app.TableColumns(s.tableName)
+
+ if len(columns) != len(s.expected) {
+ t.Fatalf("Expected columns %v, got %v", s.expected, columns)
+ }
+
+ for _, c := range columns {
+ if !slices.Contains(s.expected, c) {
+ t.Errorf("Didn't expect column %s", c)
+ }
+ }
+ })
+ }
+}
+
+func TestTableInfo(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ tableName string
+ expected string
+ }{
+ {"", "null"},
+ {"missing", "null"},
+ {
+ "_params",
+ `[{"PK":0,"Index":0,"Name":"created","Type":"TEXT","NotNull":true,"DefaultValue":{"String":"''","Valid":true}},{"PK":1,"Index":1,"Name":"id","Type":"TEXT","NotNull":true,"DefaultValue":{"String":"'r'||lower(hex(randomblob(7)))","Valid":true}},{"PK":0,"Index":2,"Name":"updated","Type":"TEXT","NotNull":true,"DefaultValue":{"String":"''","Valid":true}},{"PK":0,"Index":3,"Name":"value","Type":"JSON","NotNull":false,"DefaultValue":{"String":"NULL","Valid":true}}]`,
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.tableName), func(t *testing.T) {
+ rows, _ := app.TableInfo(s.tableName)
+
+ raw, err := json.Marshal(rows)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if str := string(raw); str != s.expected {
+ t.Fatalf("Expected\n%s\ngot\n%s", s.expected, str)
+ }
+ })
+ }
+}
+
+func TestTableIndexes(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ tableName string
+ expected []string
+ }{
+ {"", nil},
+ {"missing", nil},
+ {
+ core.CollectionNameSuperusers,
+ []string{"idx_email__pbc_3323866339", "idx_tokenKey__pbc_3323866339"},
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.tableName), func(t *testing.T) {
+ indexes, _ := app.TableIndexes(s.tableName)
+
+ if len(indexes) != len(s.expected) {
+ t.Fatalf("Expected %d indexes, got %d\n%v", len(s.expected), len(indexes), indexes)
+ }
+
+ for _, name := range s.expected {
+ if v, ok := indexes[name]; !ok || v == "" {
+ t.Fatalf("Expected non-empty index %q in \n%v", name, indexes)
+ }
+ }
+ })
+ }
+}
+
+func TestDeleteTable(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ tableName string
+ expectError bool
+ }{
+ {"", true},
+ {"test", false}, // missing tables are ignored
+ {"_admins", false},
+ {"demo3", false},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.tableName), func(t *testing.T) {
+ err := app.DeleteTable(s.tableName)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v", s.expectError, hasErr)
+ }
+ })
+ }
+}
+
+func TestVacuum(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ calledQueries := []string{}
+ app.DB().(*dbx.DB).QueryLogFunc = func(ctx context.Context, t time.Duration, sql string, rows *sql.Rows, err error) {
+ calledQueries = append(calledQueries, sql)
+ }
+ app.DB().(*dbx.DB).ExecLogFunc = func(ctx context.Context, t time.Duration, sql string, result sql.Result, err error) {
+ calledQueries = append(calledQueries, sql)
+ }
+
+ if err := app.Vacuum(); err != nil {
+ t.Fatal(err)
+ }
+
+ if total := len(calledQueries); total != 1 {
+ t.Fatalf("Expected 1 query, got %d", total)
+ }
+
+ if calledQueries[0] != "VACUUM" {
+ t.Fatalf("Expected VACUUM query, got %s", calledQueries[0])
+ }
+}
+
+func TestAuxVacuum(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ calledQueries := []string{}
+ app.AuxDB().(*dbx.DB).QueryLogFunc = func(ctx context.Context, t time.Duration, sql string, rows *sql.Rows, err error) {
+ calledQueries = append(calledQueries, sql)
+ }
+ app.AuxDB().(*dbx.DB).ExecLogFunc = func(ctx context.Context, t time.Duration, sql string, result sql.Result, err error) {
+ calledQueries = append(calledQueries, sql)
+ }
+
+ if err := app.AuxVacuum(); err != nil {
+ t.Fatal(err)
+ }
+
+ if total := len(calledQueries); total != 1 {
+ t.Fatalf("Expected 1 query, got %d", total)
+ }
+
+ if calledQueries[0] != "VACUUM" {
+ t.Fatalf("Expected VACUUM query, got %s", calledQueries[0])
+ }
+}
diff --git a/core/db_test.go b/core/db_test.go
new file mode 100644
index 00000000..2dc30998
--- /dev/null
+++ b/core/db_test.go
@@ -0,0 +1,113 @@
+package core_test
+
+import (
+ "context"
+ "errors"
+ "testing"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestGenerateDefaultRandomId(t *testing.T) {
+ t.Parallel()
+
+ id1 := core.GenerateDefaultRandomId()
+ id2 := core.GenerateDefaultRandomId()
+
+ if id1 == id2 {
+ t.Fatalf("Expected id1 and id2 to differ, got %q", id1)
+ }
+
+ if l := len(id1); l != 15 {
+ t.Fatalf("Expected id1 length %d, got %d", 15, l)
+ }
+
+ if l := len(id2); l != 15 {
+ t.Fatalf("Expected id2 length %d, got %d", 15, l)
+ }
+}
+
+func TestModelQuery(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ modelsQuery := app.ModelQuery(&core.Collection{})
+ logsModelQuery := app.AuxModelQuery(&core.Collection{})
+
+ if app.DB() == modelsQuery.Info().Builder {
+ t.Fatalf("ModelQuery() is not using app.DB()")
+ }
+
+ if app.AuxDB() == logsModelQuery.Info().Builder {
+ t.Fatalf("AuxModelQuery() is not using app.AuxDB()")
+ }
+
+ expectedSQL := "SELECT {{_collections}}.* FROM `_collections`"
+ for i, q := range []*dbx.SelectQuery{modelsQuery, logsModelQuery} {
+ sql := q.Build().SQL()
+ if sql != expectedSQL {
+ t.Fatalf("[%d] Expected select\n%s\ngot\n%s", i, expectedSQL, sql)
+ }
+ }
+}
+
+func TestValidate(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ u := &mockSuperusers{}
+
+ testErr := errors.New("test")
+
+ app.OnModelValidate().BindFunc(func(e *core.ModelEvent) error {
+ return testErr
+ })
+
+ err := app.Validate(u)
+ if err != testErr {
+ t.Fatalf("Expected error %v, got %v", testErr, err)
+ }
+}
+
+func TestValidateWithContext(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ u := &mockSuperusers{}
+
+ testErr := errors.New("test")
+
+ app.OnModelValidate().BindFunc(func(e *core.ModelEvent) error {
+ if v := e.Context.Value("test"); v != 123 {
+ t.Fatalf("Expected 'test' context value %#v, got %#v", 123, v)
+ }
+ return testErr
+ })
+
+ //nolint:staticcheck
+ ctx := context.WithValue(context.Background(), "test", 123)
+
+ err := app.ValidateWithContext(ctx, u)
+ if err != testErr {
+ t.Fatalf("Expected error %v, got %v", testErr, err)
+ }
+}
+
+// -------------------------------------------------------------------
+
+type mockSuperusers struct {
+ core.BaseModel
+ Email string `db:"email"`
+}
+
+func (m *mockSuperusers) TableName() string {
+ return core.CollectionNameSuperusers
+}
diff --git a/core/db_tx.go b/core/db_tx.go
new file mode 100644
index 00000000..53ef4f2b
--- /dev/null
+++ b/core/db_tx.go
@@ -0,0 +1,105 @@
+package core
+
+import (
+ "errors"
+ "fmt"
+ "sync"
+
+ "github.com/pocketbase/dbx"
+)
+
+// RunInTransaction wraps fn into a transaction for the regular app database.
+//
+// It is safe to nest RunInTransaction calls as long as you use the callback's txApp.
+func (app *BaseApp) RunInTransaction(fn func(txApp App) error) error {
+ return app.runInTransaction(app.NonconcurrentDB(), fn, false)
+}
+
+// AuxRunInTransaction wraps fn into a transaction for the auxiliary app database.
+//
+// It is safe to nest AuxRunInTransaction calls as long as you use the callback's txApp.
+func (app *BaseApp) AuxRunInTransaction(fn func(txApp App) error) error {
+ return app.runInTransaction(app.AuxNonconcurrentDB(), fn, true)
+}
+
+func (app *BaseApp) runInTransaction(db dbx.Builder, fn func(txApp App) error, isForAuxDB bool) error {
+ switch txOrDB := db.(type) {
+ case *dbx.Tx:
+ // run as part of the already existing transaction
+ return fn(app)
+ case *dbx.DB:
+ var txApp *BaseApp
+ txErr := txOrDB.Transactional(func(tx *dbx.Tx) error {
+ txApp = app.createTxApp(tx, isForAuxDB)
+ return fn(txApp)
+ })
+
+ // execute all after event calls on transaction complete
+ if txApp != nil && txApp.txInfo != nil {
+ afterFuncErr := txApp.txInfo.runAfterFuncs(txErr)
+ if afterFuncErr != nil {
+ return errors.Join(txErr, afterFuncErr)
+ }
+ }
+
+ return txErr
+ default:
+ return errors.New("failed to start transaction (unknown db type)")
+ }
+}
+
+// createTxApp shallow clones the current app and assigns a new tx state.
+func (app *BaseApp) createTxApp(tx *dbx.Tx, isForAuxDB bool) *BaseApp {
+ clone := *app
+
+ if isForAuxDB {
+ clone.auxConcurrentDB = tx
+ clone.auxNonconcurrentDB = tx
+ } else {
+ clone.concurrentDB = tx
+ clone.nonconcurrentDB = tx
+ }
+
+ clone.txInfo = &txAppInfo{
+ parent: app,
+ isForAuxDB: isForAuxDB,
+ }
+
+ return &clone
+}
+
+type txAppInfo struct {
+ parent *BaseApp
+ afterFuncs []func(txErr error) error
+ mu sync.Mutex
+ isForAuxDB bool
+}
+
+func (a *txAppInfo) onAfterFunc(fn func(txErr error) error) {
+ a.mu.Lock()
+ defer a.mu.Unlock()
+
+ a.afterFuncs = append(a.afterFuncs, fn)
+}
+
+// note: can be called only once because txAppInfo is cleared
+func (a *txAppInfo) runAfterFuncs(txErr error) error {
+ a.mu.Lock()
+ defer a.mu.Unlock()
+
+ var errs []error
+
+ for _, call := range a.afterFuncs {
+ if err := call(txErr); err != nil {
+ errs = append(errs, err)
+ }
+ }
+
+ a.afterFuncs = nil
+
+ if len(errs) > 0 {
+ return fmt.Errorf("transaction afterFunc errors: %w", errors.Join(errs...))
+ }
+
+ return nil
+}
diff --git a/core/db_tx_test.go b/core/db_tx_test.go
new file mode 100644
index 00000000..848a86b3
--- /dev/null
+++ b/core/db_tx_test.go
@@ -0,0 +1,235 @@
+package core_test
+
+import (
+ "errors"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestRunInTransaction(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ t.Run("failed nested transaction", func(t *testing.T) {
+ app.RunInTransaction(func(txApp core.App) error {
+ superuser, _ := txApp.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+
+ return txApp.RunInTransaction(func(tx2Dao core.App) error {
+ if err := tx2Dao.Delete(superuser); err != nil {
+ t.Fatal(err)
+ }
+ return errors.New("test error")
+ })
+ })
+
+ // superuser should still exist
+ superuser, _ := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+ if superuser == nil {
+ t.Fatal("Expected superuser test@example.com to not be deleted")
+ }
+ })
+
+ t.Run("successful nested transaction", func(t *testing.T) {
+ app.RunInTransaction(func(txApp core.App) error {
+ superuser, _ := txApp.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+
+ return txApp.RunInTransaction(func(tx2Dao core.App) error {
+ return tx2Dao.Delete(superuser)
+ })
+ })
+
+ // superuser should have been deleted
+ superuser, _ := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+ if superuser != nil {
+ t.Fatalf("Expected superuser test@example.com to be deleted, found %v", superuser)
+ }
+ })
+}
+
+func TestTransactionHooksCallsOnFailure(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ createHookCalls := 0
+ updateHookCalls := 0
+ deleteHookCalls := 0
+ afterCreateHookCalls := 0
+ afterUpdateHookCalls := 0
+ afterDeleteHookCalls := 0
+
+ app.OnModelCreate().BindFunc(func(e *core.ModelEvent) error {
+ createHookCalls++
+ return e.Next()
+ })
+
+ app.OnModelUpdate().BindFunc(func(e *core.ModelEvent) error {
+ updateHookCalls++
+ return e.Next()
+ })
+
+ app.OnModelDelete().BindFunc(func(e *core.ModelEvent) error {
+ deleteHookCalls++
+ return e.Next()
+ })
+
+ app.OnModelAfterCreateSuccess().BindFunc(func(e *core.ModelEvent) error {
+ afterCreateHookCalls++
+ return e.Next()
+ })
+
+ app.OnModelAfterUpdateSuccess().BindFunc(func(e *core.ModelEvent) error {
+ afterUpdateHookCalls++
+ return e.Next()
+ })
+
+ app.OnModelAfterDeleteSuccess().BindFunc(func(e *core.ModelEvent) error {
+ afterDeleteHookCalls++
+ return e.Next()
+ })
+
+ existingModel, _ := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+
+ app.RunInTransaction(func(txApp1 core.App) error {
+ return txApp1.RunInTransaction(func(txApp2 core.App) error {
+ // test create
+ // ---
+ newModel := core.NewRecord(existingModel.Collection())
+ newModel.SetEmail("test_new1@example.com")
+ newModel.SetPassword("1234567890")
+ if err := txApp2.Save(newModel); err != nil {
+ t.Fatal(err)
+ }
+
+ // test update (twice)
+ // ---
+ if err := txApp2.Save(existingModel); err != nil {
+ t.Fatal(err)
+ }
+ if err := txApp2.Save(existingModel); err != nil {
+ t.Fatal(err)
+ }
+
+ // test delete
+ // ---
+ if err := txApp2.Delete(newModel); err != nil {
+ t.Fatal(err)
+ }
+
+ return errors.New("test_tx_error")
+ })
+ })
+
+ if createHookCalls != 1 {
+ t.Errorf("Expected createHookCalls to be called 1 time, got %d", createHookCalls)
+ }
+ if updateHookCalls != 2 {
+ t.Errorf("Expected updateHookCalls to be called 2 times, got %d", updateHookCalls)
+ }
+ if deleteHookCalls != 1 {
+ t.Errorf("Expected deleteHookCalls to be called 1 time, got %d", deleteHookCalls)
+ }
+ if afterCreateHookCalls != 0 {
+ t.Errorf("Expected afterCreateHookCalls to be called 0 times, got %d", afterCreateHookCalls)
+ }
+ if afterUpdateHookCalls != 0 {
+ t.Errorf("Expected afterUpdateHookCalls to be called 0 times, got %d", afterUpdateHookCalls)
+ }
+ if afterDeleteHookCalls != 0 {
+ t.Errorf("Expected afterDeleteHookCalls to be called 0 times, got %d", afterDeleteHookCalls)
+ }
+}
+
+func TestTransactionHooksCallsOnSuccess(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ createHookCalls := 0
+ updateHookCalls := 0
+ deleteHookCalls := 0
+ afterCreateHookCalls := 0
+ afterUpdateHookCalls := 0
+ afterDeleteHookCalls := 0
+
+ app.OnModelCreate().BindFunc(func(e *core.ModelEvent) error {
+ createHookCalls++
+ return e.Next()
+ })
+
+ app.OnModelUpdate().BindFunc(func(e *core.ModelEvent) error {
+ updateHookCalls++
+ return e.Next()
+ })
+
+ app.OnModelDelete().BindFunc(func(e *core.ModelEvent) error {
+ deleteHookCalls++
+ return e.Next()
+ })
+
+ app.OnModelAfterCreateSuccess().BindFunc(func(e *core.ModelEvent) error {
+ afterCreateHookCalls++
+ return e.Next()
+ })
+
+ app.OnModelAfterUpdateSuccess().BindFunc(func(e *core.ModelEvent) error {
+ afterUpdateHookCalls++
+ return e.Next()
+ })
+
+ app.OnModelAfterDeleteSuccess().BindFunc(func(e *core.ModelEvent) error {
+ afterDeleteHookCalls++
+ return e.Next()
+ })
+
+ existingModel, _ := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+
+ app.RunInTransaction(func(txApp1 core.App) error {
+ return txApp1.RunInTransaction(func(txApp2 core.App) error {
+ // test create
+ // ---
+ newModel := core.NewRecord(existingModel.Collection())
+ newModel.SetEmail("test_new1@example.com")
+ newModel.SetPassword("1234567890")
+ if err := txApp2.Save(newModel); err != nil {
+ t.Fatal(err)
+ }
+
+ // test update (twice)
+ // ---
+ if err := txApp2.Save(existingModel); err != nil {
+ t.Fatal(err)
+ }
+ if err := txApp2.Save(existingModel); err != nil {
+ t.Fatal(err)
+ }
+
+ // test delete
+ // ---
+ if err := txApp2.Delete(newModel); err != nil {
+ t.Fatal(err)
+ }
+
+ return nil
+ })
+ })
+
+ if createHookCalls != 1 {
+ t.Errorf("Expected createHookCalls to be called 1 time, got %d", createHookCalls)
+ }
+ if updateHookCalls != 2 {
+ t.Errorf("Expected updateHookCalls to be called 2 times, got %d", updateHookCalls)
+ }
+ if deleteHookCalls != 1 {
+ t.Errorf("Expected deleteHookCalls to be called 1 time, got %d", deleteHookCalls)
+ }
+ if afterCreateHookCalls != 1 {
+ t.Errorf("Expected afterCreateHookCalls to be called 1 time, got %d", afterCreateHookCalls)
+ }
+ if afterUpdateHookCalls != 2 {
+ t.Errorf("Expected afterUpdateHookCalls to be called 2 times, got %d", afterUpdateHookCalls)
+ }
+ if afterDeleteHookCalls != 1 {
+ t.Errorf("Expected afterDeleteHookCalls to be called 1 time, got %d", afterDeleteHookCalls)
+ }
+}
diff --git a/core/event_request.go b/core/event_request.go
new file mode 100644
index 00000000..caf9ef34
--- /dev/null
+++ b/core/event_request.go
@@ -0,0 +1,195 @@
+package core
+
+import (
+ "maps"
+ "net/netip"
+ "strings"
+ "sync"
+
+ "github.com/pocketbase/pocketbase/tools/inflector"
+ "github.com/pocketbase/pocketbase/tools/router"
+)
+
+// Common request store keys used by the middlewares and api handlers.
+const (
+ RequestEventKeyInfoContext = "infoContext"
+)
+
+// RequestEvent defines the PocketBase router handler event.
+type RequestEvent struct {
+ App App
+
+ cachedRequestInfo *RequestInfo
+
+ Auth *Record
+
+ router.Event
+
+ mu sync.Mutex
+}
+
+// RealIP returns the "real" IP address from the configured trusted proxy headers.
+//
+// If Settings.TrustedProxy is not configured or the found IP is empty,
+// it fallbacks to e.RemoteIP().
+//
+// NB!
+// Be careful when used in a security critical context as it relies on
+// the trusted proxy to be properly configured and your app to be accessible only through it.
+// If you are not sure, use e.RemoteIP().
+func (e *RequestEvent) RealIP() string {
+ settings := e.App.Settings()
+
+ for _, h := range settings.TrustedProxy.Headers {
+ headerValues := e.Request.Header.Values(h)
+ if len(headerValues) == 0 {
+ continue
+ }
+
+ // extract the last header value as it is expected to be the one controlled by the proxy
+ ipsList := headerValues[len(headerValues)-1]
+ if ipsList == "" {
+ continue
+ }
+
+ ips := strings.Split(ipsList, ",")
+
+ if settings.TrustedProxy.UseLeftmostIP {
+ for _, ip := range ips {
+ parsed, err := netip.ParseAddr(strings.TrimSpace(ip))
+ if err == nil {
+ return parsed.StringExpanded()
+ }
+ }
+ } else {
+ for i := len(ips) - 1; i >= 0; i-- {
+ parsed, err := netip.ParseAddr(strings.TrimSpace(ips[i]))
+ if err == nil {
+ return parsed.StringExpanded()
+ }
+ }
+ }
+ }
+
+ return e.RemoteIP()
+}
+
+// HasSuperuserAuth checks whether the current RequestEvent has superuser authentication loaded.
+func (e *RequestEvent) HasSuperuserAuth() bool {
+ return e.Auth != nil && e.Auth.IsSuperuser()
+}
+
+// RequestInfo parses the current request into RequestInfo instance.
+//
+// Note that the returned result is cached to avoid copying the request data multiple times
+// but the auth state and other common store items are always refreshed in case they were changed by another handler.
+func (e *RequestEvent) RequestInfo() (*RequestInfo, error) {
+ e.mu.Lock()
+ defer e.mu.Unlock()
+
+ if e.cachedRequestInfo != nil {
+ e.cachedRequestInfo.Auth = e.Auth
+
+ infoCtx, _ := e.Get(RequestEventKeyInfoContext).(string)
+ if infoCtx != "" {
+ e.cachedRequestInfo.Context = infoCtx
+ } else {
+ e.cachedRequestInfo.Context = RequestInfoContextDefault
+ }
+ } else {
+ // (re)init e.cachedRequestInfo based on the current request event
+ if err := e.initRequestInfo(); err != nil {
+ return nil, err
+ }
+ }
+
+ return e.cachedRequestInfo, nil
+}
+
+func (e *RequestEvent) initRequestInfo() error {
+ infoCtx, _ := e.Get(RequestEventKeyInfoContext).(string)
+ if infoCtx == "" {
+ infoCtx = RequestInfoContextDefault
+ }
+
+ info := &RequestInfo{
+ Context: infoCtx,
+ Method: e.Request.Method,
+ Query: map[string]string{},
+ Headers: map[string]string{},
+ Body: map[string]any{},
+ }
+
+ if err := e.BindBody(&info.Body); err != nil {
+ return err
+ }
+
+ // extract the first value of all query params
+ query := e.Request.URL.Query()
+ for k, v := range query {
+ if len(v) > 0 {
+ info.Query[k] = v[0]
+ }
+ }
+
+ // extract the first value of all headers and normalize the keys
+ // ("X-Token" is converted to "x_token")
+ for k, v := range e.Request.Header {
+ if len(v) > 0 {
+ info.Headers[inflector.Snakecase(k)] = v[0]
+ }
+ }
+
+ info.Auth = e.Auth
+
+ e.cachedRequestInfo = info
+
+ return nil
+}
+
+// -------------------------------------------------------------------
+
+const (
+ RequestInfoContextDefault = "default"
+ RequestInfoContextExpand = "expand"
+ RequestInfoContextRealtime = "realtime"
+ RequestInfoContextProtectedFile = "protectedFile"
+ RequestInfoContextOAuth2 = "oauth2"
+ RequestInfoContextBatch = "batch"
+)
+
+// RequestInfo defines a HTTP request data struct, usually used
+// as part of the `@request.*` filter resolver.
+//
+// The Query and Headers fields contains only the first value for each found entry.
+type RequestInfo struct {
+ Query map[string]string `json:"query"`
+ Headers map[string]string `json:"headers"`
+ Body map[string]any `json:"body"`
+ Auth *Record `json:"auth"`
+ Method string `json:"method"`
+ Context string `json:"context"`
+}
+
+// HasSuperuserAuth checks whether the current RequestInfo instance
+// has superuser authentication loaded.
+func (info *RequestInfo) HasSuperuserAuth() bool {
+ return info.Auth != nil && info.Auth.IsSuperuser()
+}
+
+// Clone creates a new shallow copy of the current RequestInfo and its Auth record (if any).
+func (info *RequestInfo) Clone() *RequestInfo {
+ clone := &RequestInfo{
+ Method: info.Method,
+ Context: info.Context,
+ Query: maps.Clone(info.Query),
+ Body: maps.Clone(info.Body),
+ Headers: maps.Clone(info.Headers),
+ }
+
+ if info.Auth != nil {
+ clone.Auth = info.Auth.Fresh()
+ }
+
+ return clone
+}
diff --git a/core/event_request_batch.go b/core/event_request_batch.go
new file mode 100644
index 00000000..1f24e075
--- /dev/null
+++ b/core/event_request_batch.go
@@ -0,0 +1,31 @@
+package core
+
+import (
+ "net/http"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+)
+
+type BatchRequestEvent struct {
+ *RequestEvent
+
+ Batch []*InternalRequest
+}
+
+type InternalRequest struct {
+ // note: for uploading files the value must be either *filesystem.File or []*filesystem.File
+ Body map[string]any `form:"body" json:"body"`
+
+ Headers map[string]string `form:"headers" json:"headers"`
+
+ Method string `form:"method" json:"method"`
+
+ URL string `form:"url" json:"url"`
+}
+
+func (br InternalRequest) Validate() error {
+ return validation.ValidateStruct(&br,
+ validation.Field(&br.Method, validation.Required, validation.In(http.MethodGet, http.MethodPost, http.MethodPut, http.MethodPatch, http.MethodDelete)),
+ validation.Field(&br.URL, validation.Required, validation.Length(0, 2000)),
+ )
+}
diff --git a/core/event_request_batch_test.go b/core/event_request_batch_test.go
new file mode 100644
index 00000000..6feaf40d
--- /dev/null
+++ b/core/event_request_batch_test.go
@@ -0,0 +1,74 @@
+package core_test
+
+import (
+ "net/http"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestInternalRequestValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ request core.InternalRequest
+ expectedErrors []string
+ }{
+ {
+ "empty struct",
+ core.InternalRequest{},
+ []string{"method", "url"},
+ },
+
+ // method
+ {
+ "GET method",
+ core.InternalRequest{URL: "test", Method: http.MethodGet},
+ []string{},
+ },
+ {
+ "POST method",
+ core.InternalRequest{URL: "test", Method: http.MethodPost},
+ []string{},
+ },
+ {
+ "PUT method",
+ core.InternalRequest{URL: "test", Method: http.MethodPut},
+ []string{},
+ },
+ {
+ "PATCH method",
+ core.InternalRequest{URL: "test", Method: http.MethodPatch},
+ []string{},
+ },
+ {
+ "DELETE method",
+ core.InternalRequest{URL: "test", Method: http.MethodDelete},
+ []string{},
+ },
+ {
+ "unknown method",
+ core.InternalRequest{URL: "test", Method: "unknown"},
+ []string{"method"},
+ },
+
+ // url
+ {
+ "url <= 2000",
+ core.InternalRequest{URL: strings.Repeat("a", 2000), Method: http.MethodGet},
+ []string{},
+ },
+ {
+ "url > 2000",
+ core.InternalRequest{URL: strings.Repeat("a", 2001), Method: http.MethodGet},
+ []string{"url"},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ tests.TestValidationErrors(t, s.request.Validate(), s.expectedErrors)
+ })
+ }
+}
diff --git a/core/event_request_test.go b/core/event_request_test.go
new file mode 100644
index 00000000..41108025
--- /dev/null
+++ b/core/event_request_test.go
@@ -0,0 +1,334 @@
+package core_test
+
+import (
+ "encoding/json"
+ "net/http"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestEventRequestRealIP(t *testing.T) {
+ t.Parallel()
+
+ headers := map[string][]string{
+ "CF-Connecting-IP": {"1.2.3.4", "1.1.1.1"},
+ "Fly-Client-IP": {"1.2.3.4", "1.1.1.2"},
+ "X-Real-IP": {"1.2.3.4", "1.1.1.3,1.1.1.4"},
+ "X-Forward-For": {"1.2.3.4", "invalid,1.1.1.5,1.1.1.6,invalid"},
+ }
+
+ scenarios := []struct {
+ name string
+ headers map[string][]string
+ trustedHeaders []string
+ useLeftmostIP bool
+ expected string
+ }{
+ {
+ "no trusted headers",
+ headers,
+ nil,
+ false,
+ "127.0.0.1",
+ },
+ {
+ "non-matching trusted header",
+ headers,
+ []string{"header1", "header2"},
+ false,
+ "127.0.0.1",
+ },
+ {
+ "trusted X-Real-IP (rightmost)",
+ headers,
+ []string{"header1", "x-real-ip", "x-forward-for"},
+ false,
+ "1.1.1.4",
+ },
+ {
+ "trusted X-Real-IP (leftmost)",
+ headers,
+ []string{"header1", "x-real-ip", "x-forward-for"},
+ true,
+ "1.1.1.3",
+ },
+ {
+ "trusted X-Forward-For (rightmost)",
+ headers,
+ []string{"header1", "x-forward-for"},
+ false,
+ "1.1.1.6",
+ },
+ {
+ "trusted X-Forward-For (leftmost)",
+ headers,
+ []string{"header1", "x-forward-for"},
+ true,
+ "1.1.1.5",
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ app, err := tests.NewTestApp()
+ if err != nil {
+ t.Fatal(err)
+ }
+ defer app.Cleanup()
+
+ app.Settings().TrustedProxy.Headers = s.trustedHeaders
+ app.Settings().TrustedProxy.UseLeftmostIP = s.useLeftmostIP
+
+ event := core.RequestEvent{}
+ event.App = app
+
+ event.Request, err = http.NewRequest(http.MethodGet, "/", nil)
+ if err != nil {
+ t.Fatal(err)
+ }
+ event.Request.RemoteAddr = "127.0.0.1:80" // fallback
+
+ for k, values := range s.headers {
+ for _, v := range values {
+ event.Request.Header.Add(k, v)
+ }
+ }
+
+ result := event.RealIP()
+
+ if result != s.expected {
+ t.Fatalf("Expected ip %q, got %q", s.expected, result)
+ }
+ })
+ }
+}
+
+func TestEventRequestHasSuperUserAuth(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser, err := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ name string
+ record *core.Record
+ expected bool
+ }{
+ {"nil record", nil, false},
+ {"regular user record", user, false},
+ {"superuser record", superuser, true},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ e := core.RequestEvent{}
+ e.Auth = s.record
+
+ result := e.HasSuperuserAuth()
+
+ if result != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
+
+func TestRequestEventRequestInfo(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ userCol, err := app.FindCollectionByNameOrId("users")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user1 := core.NewRecord(userCol)
+ user1.Id = "user1"
+ user1.SetEmail("test1@example.com")
+
+ user2 := core.NewRecord(userCol)
+ user2.Id = "user2"
+ user2.SetEmail("test2@example.com")
+
+ testBody := `{"a":123,"b":"test"}`
+
+ event := core.RequestEvent{}
+ event.Request, err = http.NewRequest("POST", "/test?q1=123&q2=456", strings.NewReader(testBody))
+ if err != nil {
+ t.Fatal(err)
+ }
+ event.Request.Header.Add("content-type", "application/json")
+ event.Request.Header.Add("x-test", "test")
+ event.Set(core.RequestEventKeyInfoContext, "test")
+ event.Auth = user1
+
+ t.Run("init", func(t *testing.T) {
+ info, err := event.RequestInfo()
+ if err != nil {
+ t.Fatalf("Failed to resolve request info: %v", err)
+ }
+
+ raw, err := json.Marshal(info)
+ if err != nil {
+ t.Fatalf("Failed to serialize request info: %v", err)
+ }
+ rawStr := string(raw)
+
+ expected := `{"query":{"q1":"123","q2":"456"},"headers":{"content_type":"application/json","x_test":"test"},"body":{"a":123,"b":"test"},"auth":{"avatar":"","collectionId":"_pb_users_auth_","collectionName":"users","created":"","emailVisibility":false,"file":[],"id":"user1","name":"","rel":"","updated":"","username":"","verified":false},"method":"POST","context":"test"}`
+
+ if expected != rawStr {
+ t.Fatalf("Expected\n%v\ngot\n%v", expected, rawStr)
+ }
+ })
+
+ t.Run("change user and context", func(t *testing.T) {
+ event.Set(core.RequestEventKeyInfoContext, "test2")
+ event.Auth = user2
+
+ info, err := event.RequestInfo()
+ if err != nil {
+ t.Fatalf("Failed to resolve request info: %v", err)
+ }
+
+ raw, err := json.Marshal(info)
+ if err != nil {
+ t.Fatalf("Failed to serialize request info: %v", err)
+ }
+ rawStr := string(raw)
+
+ expected := `{"query":{"q1":"123","q2":"456"},"headers":{"content_type":"application/json","x_test":"test"},"body":{"a":123,"b":"test"},"auth":{"avatar":"","collectionId":"_pb_users_auth_","collectionName":"users","created":"","emailVisibility":false,"file":[],"id":"user2","name":"","rel":"","updated":"","username":"","verified":false},"method":"POST","context":"test2"}`
+
+ if expected != rawStr {
+ t.Fatalf("Expected\n%v\ngot\n%v", expected, rawStr)
+ }
+ })
+}
+
+func TestRequestInfoHasSuperuserAuth(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser, err := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ event := core.RequestEvent{}
+ event.Request, err = http.NewRequest("POST", "/test?q1=123&q2=456", strings.NewReader(`{"a":123,"b":"test"}`))
+ if err != nil {
+ t.Fatal(err)
+ }
+ event.Request.Header.Add("content-type", "application/json")
+
+ scenarios := []struct {
+ name string
+ record *core.Record
+ expected bool
+ }{
+ {"nil record", nil, false},
+ {"regular user record", user, false},
+ {"superuser record", superuser, true},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ event.Auth = s.record
+
+ info, err := event.RequestInfo()
+ if err != nil {
+ t.Fatalf("Failed to resolve request info: %v", err)
+ }
+
+ result := info.HasSuperuserAuth()
+
+ if result != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
+
+func TestRequestInfoClone(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ userCol, err := app.FindCollectionByNameOrId("users")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user := core.NewRecord(userCol)
+ user.Id = "user1"
+ user.SetEmail("test1@example.com")
+
+ event := core.RequestEvent{}
+ event.Request, err = http.NewRequest("POST", "/test?q1=123&q2=456", strings.NewReader(`{"a":123,"b":"test"}`))
+ if err != nil {
+ t.Fatal(err)
+ }
+ event.Request.Header.Add("content-type", "application/json")
+ event.Auth = user
+
+ info, err := event.RequestInfo()
+ if err != nil {
+ t.Fatalf("Failed to resolve request info: %v", err)
+ }
+
+ clone := info.Clone()
+
+ // modify the clone fields to ensure that it is a shallow copy
+ clone.Headers["new_header"] = "test"
+ clone.Query["new_query"] = "test"
+ clone.Body["new_body"] = "test"
+ clone.Auth.Id = "user2" // should be a Fresh copy of the record
+
+ // check the original data
+ // ---
+ originalRaw, err := json.Marshal(info)
+ if err != nil {
+ t.Fatalf("Failed to serialize original request info: %v", err)
+ }
+ originalRawStr := string(originalRaw)
+
+ expectedRawStr := `{"query":{"q1":"123","q2":"456"},"headers":{"content_type":"application/json"},"body":{"a":123,"b":"test"},"auth":{"avatar":"","collectionId":"_pb_users_auth_","collectionName":"users","created":"","emailVisibility":false,"file":[],"id":"user1","name":"","rel":"","updated":"","username":"","verified":false},"method":"POST","context":"default"}`
+ if expectedRawStr != originalRawStr {
+ t.Fatalf("Expected original info\n%v\ngot\n%v", expectedRawStr, originalRawStr)
+ }
+
+ // check the clone data
+ // ---
+ cloneRaw, err := json.Marshal(clone)
+ if err != nil {
+ t.Fatalf("Failed to serialize clone request info: %v", err)
+ }
+ cloneRawStr := string(cloneRaw)
+
+ expectedCloneStr := `{"query":{"new_query":"test","q1":"123","q2":"456"},"headers":{"content_type":"application/json","new_header":"test"},"body":{"a":123,"b":"test","new_body":"test"},"auth":{"avatar":"","collectionId":"_pb_users_auth_","collectionName":"users","created":"","emailVisibility":false,"file":[],"id":"user2","name":"","rel":"","updated":"","username":"","verified":false},"method":"POST","context":"default"}`
+ if expectedCloneStr != cloneRawStr {
+ t.Fatalf("Expected clone info\n%v\ngot\n%v", expectedCloneStr, cloneRawStr)
+ }
+}
diff --git a/core/events.go b/core/events.go
index d12e0e6e..12af18fc 100644
--- a/core/events.go
+++ b/core/events.go
@@ -1,49 +1,62 @@
package core
import (
+ "context"
"net/http"
"time"
- "github.com/labstack/echo/v5"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/models/settings"
"github.com/pocketbase/pocketbase/tools/auth"
- "github.com/pocketbase/pocketbase/tools/filesystem"
"github.com/pocketbase/pocketbase/tools/hook"
"github.com/pocketbase/pocketbase/tools/mailer"
+ "github.com/pocketbase/pocketbase/tools/router"
"github.com/pocketbase/pocketbase/tools/search"
"github.com/pocketbase/pocketbase/tools/subscriptions"
"golang.org/x/crypto/acme/autocert"
)
-var (
- _ hook.Tagger = (*BaseModelEvent)(nil)
- _ hook.Tagger = (*BaseCollectionEvent)(nil)
-)
-
-type BaseModelEvent struct {
- Model models.Model
+type HookTagger interface {
+ HookTags() []string
}
-func (e *BaseModelEvent) Tags() []string {
+// -------------------------------------------------------------------
+
+type baseModelEventData struct {
+ Model Model
+}
+
+func (e *baseModelEventData) Tags() []string {
if e.Model == nil {
return nil
}
- if r, ok := e.Model.(*models.Record); ok && r.Collection() != nil {
- return []string{r.Collection().Id, r.Collection().Name}
+ if ht, ok := e.Model.(HookTagger); ok {
+ return ht.HookTags()
}
return []string{e.Model.TableName()}
}
-type BaseCollectionEvent struct {
- Collection *models.Collection
+// -------------------------------------------------------------------
+
+type baseRecordEventData struct {
+ Record *Record
}
-func (e *BaseCollectionEvent) Tags() []string {
+func (e *baseRecordEventData) Tags() []string {
+ if e.Record == nil {
+ return nil
+ }
+
+ return e.Record.HookTags()
+}
+
+// -------------------------------------------------------------------
+
+type baseCollectionEventData struct {
+ Collection *Collection
+}
+
+func (e *baseCollectionEventData) Tags() []string {
if e.Collection == nil {
return nil
}
@@ -62,356 +75,466 @@ func (e *BaseCollectionEvent) Tags() []string {
}
// -------------------------------------------------------------------
-// Serve events data
+// App events data
// -------------------------------------------------------------------
type BootstrapEvent struct {
+ hook.Event
App App
}
type TerminateEvent struct {
+ hook.Event
App App
IsRestart bool
}
+type BackupEvent struct {
+ hook.Event
+ App App
+ Context context.Context
+ Name string // the name of the backup to create/restore.
+ Exclude []string // list of dir entries to exclude from the backup create/restore.
+}
+
type ServeEvent struct {
+ hook.Event
App App
- Router *echo.Echo
+ Router *router.Router[*RequestEvent]
Server *http.Server
CertManager *autocert.Manager
}
-type ApiErrorEvent struct {
- HttpContext echo.Context
- Error error
+// -------------------------------------------------------------------
+// Settings events data
+// -------------------------------------------------------------------
+
+type SettingsListRequestEvent struct {
+ hook.Event
+ *RequestEvent
+
+ Settings *Settings
}
-// -------------------------------------------------------------------
-// Model DAO events data
-// -------------------------------------------------------------------
+type SettingsUpdateRequestEvent struct {
+ hook.Event
+ *RequestEvent
-type ModelEvent struct {
- BaseModelEvent
+ OldSettings *Settings
+ NewSettings *Settings
+}
- Dao *daos.Dao
+type SettingsReloadEvent struct {
+ hook.Event
+ App App
}
// -------------------------------------------------------------------
// Mailer events data
// -------------------------------------------------------------------
+type MailerEvent struct {
+ hook.Event
+ App App
+
+ Mailer mailer.Mailer
+ Message *mailer.Message
+}
+
type MailerRecordEvent struct {
- BaseCollectionEvent
-
- MailClient mailer.Mailer
- Message *mailer.Message
- Record *models.Record
- Meta map[string]any
-}
-
-type MailerAdminEvent struct {
- MailClient mailer.Mailer
- Message *mailer.Message
- Admin *models.Admin
- Meta map[string]any
+ MailerEvent
+ baseRecordEventData
+ Meta map[string]any
}
// -------------------------------------------------------------------
-// Realtime API events data
+// Model events data
// -------------------------------------------------------------------
-type RealtimeConnectEvent struct {
- HttpContext echo.Context
- Client subscriptions.Client
- IdleTimeout time.Duration
+const (
+ ModelEventTypeCreate = "create"
+ ModelEventTypeUpdate = "update"
+ ModelEventTypeDelete = "delete"
+ ModelEventTypeValidate = "validate"
+)
+
+type ModelEvent struct {
+ hook.Event
+ App App
+ baseModelEventData
+ Context context.Context
+
+ // Could be any of the ModelEventType* constants, like:
+ // - create
+ // - update
+ // - delete
+ // - validate
+ Type string
}
-type RealtimeDisconnectEvent struct {
- HttpContext echo.Context
- Client subscriptions.Client
-}
-
-type RealtimeMessageEvent struct {
- HttpContext echo.Context
- Client subscriptions.Client
- Message *subscriptions.Message
-}
-
-type RealtimeSubscribeEvent struct {
- HttpContext echo.Context
- Client subscriptions.Client
- Subscriptions []string
+type ModelErrorEvent struct {
+ ModelEvent
+ Error error
}
// -------------------------------------------------------------------
-// Settings API events data
+// Record events data
// -------------------------------------------------------------------
-type SettingsListEvent struct {
- HttpContext echo.Context
- RedactedSettings *settings.Settings
+type RecordEvent struct {
+ hook.Event
+ App App
+ baseRecordEventData
+ Context context.Context
+
+ // Could be any of the ModelEventType* constants, like:
+ // - create
+ // - update
+ // - delete
+ // - validate
+ Type string
}
-type SettingsUpdateEvent struct {
- HttpContext echo.Context
- OldSettings *settings.Settings
- NewSettings *settings.Settings
+type RecordErrorEvent struct {
+ RecordEvent
+ Error error
+}
+
+func syncModelEventWithRecordEvent(me *ModelEvent, re *RecordEvent) {
+ me.App = re.App
+ me.Context = re.Context
+ me.Type = re.Type
+
+ // @todo enable if after profiling doesn't have significant impact
+ // skip for now to avoid excessive checks and assume that the
+ // Model and the Record fields still point to the same instance
+ //
+ // if _, ok := me.Model.(*Record); ok {
+ // me.Model = re.Record
+ // } else if proxy, ok := me.Model.(RecordProxy); ok {
+ // proxy.SetProxyRecord(re.Record)
+ // }
+}
+
+func newRecordEventFromModelEvent(me *ModelEvent) (*RecordEvent, bool) {
+ record, ok := me.Model.(*Record)
+ if !ok {
+ proxy, ok := me.Model.(RecordProxy)
+ if !ok {
+ return nil, false
+ }
+ record = proxy.ProxyRecord()
+ }
+
+ re := new(RecordEvent)
+ re.App = me.App
+ re.Context = me.Context
+ re.Type = me.Type
+ re.Record = record
+
+ return re, true
+}
+
+func newRecordErrorEventFromModelErrorEvent(me *ModelErrorEvent) (*RecordErrorEvent, bool) {
+ recordEvent, ok := newRecordEventFromModelEvent(&me.ModelEvent)
+ if !ok {
+ return nil, false
+ }
+
+ re := new(RecordErrorEvent)
+ re.RecordEvent = *recordEvent
+ re.Error = me.Error
+
+ return re, true
+}
+
+func syncModelErrorEventWithRecordErrorEvent(me *ModelErrorEvent, re *RecordErrorEvent) {
+ syncModelEventWithRecordEvent(&me.ModelEvent, &re.RecordEvent)
+ me.Error = re.Error
}
// -------------------------------------------------------------------
-// Record CRUD API events data
+// Collection events data
// -------------------------------------------------------------------
-type RecordsListEvent struct {
- BaseCollectionEvent
+type CollectionEvent struct {
+ hook.Event
+ App App
+ baseCollectionEventData
+ Context context.Context
- HttpContext echo.Context
- Records []*models.Record
- Result *search.Result
+ // Could be any of the ModelEventType* constants, like:
+ // - create
+ // - update
+ // - delete
+ // - validate
+ Type string
}
-type RecordViewEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
+type CollectionErrorEvent struct {
+ CollectionEvent
+ Error error
}
-type RecordCreateEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
- UploadedFiles map[string][]*filesystem.File
+func syncModelEventWithCollectionEvent(me *ModelEvent, ce *CollectionEvent) {
+ me.App = ce.App
+ me.Context = ce.Context
+ me.Type = ce.Type
+ me.Model = ce.Collection
}
-type RecordUpdateEvent struct {
- BaseCollectionEvent
+func newCollectionEventFromModelEvent(me *ModelEvent) (*CollectionEvent, bool) {
+ record, ok := me.Model.(*Collection)
+ if !ok {
+ return nil, false
+ }
- HttpContext echo.Context
- Record *models.Record
- UploadedFiles map[string][]*filesystem.File
+ ce := new(CollectionEvent)
+ ce.App = me.App
+ ce.Context = me.Context
+ ce.Type = me.Type
+ ce.Collection = record
+
+ return ce, true
}
-type RecordDeleteEvent struct {
- BaseCollectionEvent
+func newCollectionErrorEventFromModelErrorEvent(me *ModelErrorEvent) (*CollectionErrorEvent, bool) {
+ collectionevent, ok := newCollectionEventFromModelEvent(&me.ModelEvent)
+ if !ok {
+ return nil, false
+ }
- HttpContext echo.Context
- Record *models.Record
+ ce := new(CollectionErrorEvent)
+ ce.CollectionEvent = *collectionevent
+ ce.Error = me.Error
+
+ return ce, true
}
-// -------------------------------------------------------------------
-// Auth Record API events data
-// -------------------------------------------------------------------
-
-type RecordAuthEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
- Token string
- Meta any
-}
-
-type RecordAuthWithPasswordEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
- Identity string
- Password string
-}
-
-type RecordAuthWithOAuth2Event struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- ProviderName string
- ProviderClient auth.Provider
- Record *models.Record
- OAuth2User *auth.AuthUser
- IsNewRecord bool
-}
-
-type RecordAuthRefreshEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
-}
-
-type RecordRequestPasswordResetEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
-}
-
-type RecordConfirmPasswordResetEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
-}
-
-type RecordRequestVerificationEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
-}
-
-type RecordConfirmVerificationEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
-}
-
-type RecordRequestEmailChangeEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
-}
-
-type RecordConfirmEmailChangeEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
-}
-
-type RecordListExternalAuthsEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
- ExternalAuths []*models.ExternalAuth
-}
-
-type RecordUnlinkExternalAuthEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
- Record *models.Record
- ExternalAuth *models.ExternalAuth
-}
-
-// -------------------------------------------------------------------
-// Admin API events data
-// -------------------------------------------------------------------
-
-type AdminsListEvent struct {
- HttpContext echo.Context
- Admins []*models.Admin
- Result *search.Result
-}
-
-type AdminViewEvent struct {
- HttpContext echo.Context
- Admin *models.Admin
-}
-
-type AdminCreateEvent struct {
- HttpContext echo.Context
- Admin *models.Admin
-}
-
-type AdminUpdateEvent struct {
- HttpContext echo.Context
- Admin *models.Admin
-}
-
-type AdminDeleteEvent struct {
- HttpContext echo.Context
- Admin *models.Admin
-}
-
-type AdminAuthEvent struct {
- HttpContext echo.Context
- Admin *models.Admin
- Token string
-}
-
-type AdminAuthWithPasswordEvent struct {
- HttpContext echo.Context
- Admin *models.Admin
- Identity string
- Password string
-}
-
-type AdminAuthRefreshEvent struct {
- HttpContext echo.Context
- Admin *models.Admin
-}
-
-type AdminRequestPasswordResetEvent struct {
- HttpContext echo.Context
- Admin *models.Admin
-}
-
-type AdminConfirmPasswordResetEvent struct {
- HttpContext echo.Context
- Admin *models.Admin
-}
-
-// -------------------------------------------------------------------
-// Collection API events data
-// -------------------------------------------------------------------
-
-type CollectionsListEvent struct {
- HttpContext echo.Context
- Collections []*models.Collection
- Result *search.Result
-}
-
-type CollectionViewEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
-}
-
-type CollectionCreateEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
-}
-
-type CollectionUpdateEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
-}
-
-type CollectionDeleteEvent struct {
- BaseCollectionEvent
-
- HttpContext echo.Context
-}
-
-type CollectionsImportEvent struct {
- HttpContext echo.Context
- Collections []*models.Collection
+func syncModelErrorEventWithCollectionErrorEvent(me *ModelErrorEvent, ce *CollectionErrorEvent) {
+ syncModelEventWithCollectionEvent(&me.ModelEvent, &ce.CollectionEvent)
+ me.Error = ce.Error
}
// -------------------------------------------------------------------
// File API events data
// -------------------------------------------------------------------
-type FileTokenEvent struct {
- BaseModelEvent
+type FileTokenRequestEvent struct {
+ hook.Event
+ *RequestEvent
- HttpContext echo.Context
- Token string
+ Token string
}
-type FileDownloadEvent struct {
- BaseCollectionEvent
+type FileDownloadRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
- HttpContext echo.Context
- Record *models.Record
- FileField *schema.SchemaField
- ServedPath string
- ServedName string
+ Record *Record
+ FileField *FileField
+ ServedPath string
+ ServedName string
+}
+
+// -------------------------------------------------------------------
+// Collection API events data
+// -------------------------------------------------------------------
+
+type CollectionsListRequestEvent struct {
+ hook.Event
+ *RequestEvent
+
+ Collections []*Collection
+ Result *search.Result
+}
+
+type CollectionsImportRequestEvent struct {
+ hook.Event
+ *RequestEvent
+
+ CollectionsData []map[string]any
+ DeleteMissing bool
+}
+
+type CollectionRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+}
+
+// -------------------------------------------------------------------
+// Realtime API events data
+// -------------------------------------------------------------------
+
+type RealtimeConnectRequestEvent struct {
+ hook.Event
+ *RequestEvent
+
+ Client subscriptions.Client
+
+ // note: modifying it after the connect has no effect
+ IdleTimeout time.Duration
+}
+
+type RealtimeMessageEvent struct {
+ hook.Event
+ *RequestEvent
+
+ Client subscriptions.Client
+ Message *subscriptions.Message
+}
+
+type RealtimeSubscribeRequestEvent struct {
+ hook.Event
+ *RequestEvent
+
+ Client subscriptions.Client
+ Subscriptions []string
+}
+
+// -------------------------------------------------------------------
+// Record CRUD API events data
+// -------------------------------------------------------------------
+
+type RecordsListRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ // @todo consider removing and maybe add as generic to the search.Result?
+ Records []*Record
+ Result *search.Result
+}
+
+type RecordRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+}
+
+type RecordEnrichEvent struct {
+ hook.Event
+ App App
+ baseRecordEventData
+
+ RequestInfo *RequestInfo
+}
+
+// -------------------------------------------------------------------
+// Auth Record API events data
+// -------------------------------------------------------------------
+
+type RecordCreateOTPRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+ Password string
+}
+
+type RecordAuthWithOTPRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+ OTP *OTP
+}
+
+type RecordAuthRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+ Token string
+ Meta any
+ AuthMethod string
+}
+
+type RecordAuthWithPasswordRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+ Identity string
+ IdentityField string
+ Password string
+}
+
+type RecordAuthWithOAuth2RequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ ProviderName string
+ ProviderClient auth.Provider
+ Record *Record
+ OAuth2User *auth.AuthUser
+ CreateData map[string]any
+ IsNewRecord bool
+}
+
+type RecordAuthRefreshRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+}
+
+type RecordRequestPasswordResetRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+}
+
+type RecordConfirmPasswordResetRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+}
+
+type RecordRequestVerificationRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+}
+
+type RecordConfirmVerificationRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+}
+
+type RecordRequestEmailChangeRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+ NewEmail string
+}
+
+type RecordConfirmEmailChangeRequestEvent struct {
+ hook.Event
+ *RequestEvent
+ baseCollectionEventData
+
+ Record *Record
+ NewEmail string
}
diff --git a/core/events_test.go b/core/events_test.go
deleted file mode 100644
index d4ee5ebc..00000000
--- a/core/events_test.go
+++ /dev/null
@@ -1,84 +0,0 @@
-package core_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/list"
-)
-
-func TestBaseCollectionEventTags(t *testing.T) {
- c1 := new(models.Collection)
-
- c2 := new(models.Collection)
- c2.Id = "a"
-
- c3 := new(models.Collection)
- c3.Name = "b"
-
- c4 := new(models.Collection)
- c4.Id = "a"
- c4.Name = "b"
-
- scenarios := []struct {
- collection *models.Collection
- expectedTags []string
- }{
- {c1, []string{}},
- {c2, []string{"a"}},
- {c3, []string{"b"}},
- {c4, []string{"a", "b"}},
- }
-
- for i, s := range scenarios {
- event := new(core.BaseCollectionEvent)
- event.Collection = s.collection
-
- tags := event.Tags()
-
- if len(s.expectedTags) != len(tags) {
- t.Fatalf("[%d] Expected %v tags, got %v", i, s.expectedTags, tags)
- }
-
- for _, tag := range s.expectedTags {
- if !list.ExistInSlice(tag, tags) {
- t.Fatalf("[%d] Expected %v tags, got %v", i, s.expectedTags, tags)
- }
- }
- }
-}
-
-func TestModelEventTags(t *testing.T) {
- m1 := new(models.Admin)
-
- c := new(models.Collection)
- c.Id = "a"
- c.Name = "b"
- m2 := models.NewRecord(c)
-
- scenarios := []struct {
- model models.Model
- expectedTags []string
- }{
- {m1, []string{"_admins"}},
- {m2, []string{"a", "b"}},
- }
-
- for i, s := range scenarios {
- event := new(core.ModelEvent)
- event.Model = s.model
-
- tags := event.Tags()
-
- if len(s.expectedTags) != len(tags) {
- t.Fatalf("[%d] Expected %v tags, got %v", i, s.expectedTags, tags)
- }
-
- for _, tag := range s.expectedTags {
- if !list.ExistInSlice(tag, tags) {
- t.Fatalf("[%d] Expected %v tags, got %v", i, s.expectedTags, tags)
- }
- }
- }
-}
diff --git a/core/external_auth_model.go b/core/external_auth_model.go
new file mode 100644
index 00000000..3623dfa7
--- /dev/null
+++ b/core/external_auth_model.go
@@ -0,0 +1,140 @@
+package core
+
+import (
+ "context"
+ "errors"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/tools/auth"
+ "github.com/pocketbase/pocketbase/tools/hook"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+var (
+ _ Model = (*ExternalAuth)(nil)
+ _ PreValidator = (*ExternalAuth)(nil)
+ _ RecordProxy = (*ExternalAuth)(nil)
+)
+
+const CollectionNameExternalAuths = "_externalAuths"
+
+// ExternalAuth defines a Record proxy for working with the externalAuths collection.
+type ExternalAuth struct {
+ *Record
+}
+
+// NewExternalAuth instantiates and returns a new blank *ExternalAuth model.
+//
+// Example usage:
+//
+// ea := core.NewExternalAuth(app)
+// ea.SetRecordRef(user.Id)
+// ea.SetCollectionRef(user.Collection().Id)
+// ea.SetProvider("google")
+// ea.SetProviderId("...")
+// app.Save(ea)
+func NewExternalAuth(app App) *ExternalAuth {
+ m := &ExternalAuth{}
+
+ c, err := app.FindCachedCollectionByNameOrId(CollectionNameExternalAuths)
+ if err != nil {
+ // this is just to make tests easier since it is a system collection and it is expected to be always accessible
+ // (note: the loaded record is further checked on ExternalAuth.PreValidate())
+ c = NewBaseCollection("@__invalid__")
+ }
+
+ m.Record = NewRecord(c)
+
+ return m
+}
+
+// PreValidate implements the [PreValidator] interface and checks
+// whether the proxy is properly loaded.
+func (m *ExternalAuth) PreValidate(ctx context.Context, app App) error {
+ if m.Record == nil || m.Record.Collection().Name != CollectionNameExternalAuths {
+ return errors.New("missing or invalid ExternalAuth ProxyRecord")
+ }
+
+ return nil
+}
+
+// ProxyRecord returns the proxied Record model.
+func (m *ExternalAuth) ProxyRecord() *Record {
+ return m.Record
+}
+
+// SetProxyRecord loads the specified record model into the current proxy.
+func (m *ExternalAuth) SetProxyRecord(record *Record) {
+ m.Record = record
+}
+
+// CollectionRef returns the "collectionRef" field value.
+func (m *ExternalAuth) CollectionRef() string {
+ return m.GetString("collectionRef")
+}
+
+// SetCollectionRef updates the "collectionRef" record field value.
+func (m *ExternalAuth) SetCollectionRef(collectionId string) {
+ m.Set("collectionRef", collectionId)
+}
+
+// RecordRef returns the "recordRef" record field value.
+func (m *ExternalAuth) RecordRef() string {
+ return m.GetString("recordRef")
+}
+
+// SetRecordRef updates the "recordRef" record field value.
+func (m *ExternalAuth) SetRecordRef(recordId string) {
+ m.Set("recordRef", recordId)
+}
+
+// Provider returns the "provider" record field value.
+func (m *ExternalAuth) Provider() string {
+ return m.GetString("provider")
+}
+
+// SetProvider updates the "provider" record field value.
+func (m *ExternalAuth) SetProvider(provider string) {
+ m.Set("provider", provider)
+}
+
+// ProviderId returns the "providerId" record field value.
+func (m *ExternalAuth) ProviderId() string {
+ return m.GetString("providerId")
+}
+
+// SetProviderId updates the "providerId" record field value.
+func (m *ExternalAuth) SetProviderId(providerId string) {
+ m.Set("providerId", providerId)
+}
+
+// Created returns the "created" record field value.
+func (m *ExternalAuth) Created() types.DateTime {
+ return m.GetDateTime("created")
+}
+
+// Updated returns the "updated" record field value.
+func (m *ExternalAuth) Updated() types.DateTime {
+ return m.GetDateTime("updated")
+}
+
+func (app *BaseApp) registerExternalAuthHooks() {
+ recordRefHooks[*ExternalAuth](app, CollectionNameExternalAuths, CollectionTypeAuth)
+
+ app.OnRecordValidate(CollectionNameExternalAuths).Bind(&hook.Handler[*RecordEvent]{
+ Func: func(e *RecordEvent) error {
+ providerNames := make([]any, 0, len(auth.Providers))
+ for name := range auth.Providers {
+ providerNames = append(providerNames, name)
+ }
+
+ provider := e.Record.GetString("provider")
+ if err := validation.Validate(provider, validation.Required, validation.In(providerNames...)); err != nil {
+ return validation.Errors{"provider": err}
+ }
+
+ return e.Next()
+ },
+ Priority: 99,
+ })
+}
diff --git a/core/external_auth_model_test.go b/core/external_auth_model_test.go
new file mode 100644
index 00000000..512f6771
--- /dev/null
+++ b/core/external_auth_model_test.go
@@ -0,0 +1,310 @@
+package core_test
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func TestNewExternalAuth(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ ea := core.NewExternalAuth(app)
+
+ if ea.Collection().Name != core.CollectionNameExternalAuths {
+ t.Fatalf("Expected record with %q collection, got %q", core.CollectionNameExternalAuths, ea.Collection().Name)
+ }
+}
+
+func TestExternalAuthProxyRecord(t *testing.T) {
+ t.Parallel()
+
+ record := core.NewRecord(core.NewBaseCollection("test"))
+ record.Id = "test_id"
+
+ ea := core.ExternalAuth{}
+ ea.SetProxyRecord(record)
+
+ if ea.ProxyRecord() == nil || ea.ProxyRecord().Id != record.Id {
+ t.Fatalf("Expected proxy record with id %q, got %v", record.Id, ea.ProxyRecord())
+ }
+}
+
+func TestExternalAuthRecordRef(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ ea := core.NewExternalAuth(app)
+
+ testValues := []string{"test_1", "test2", ""}
+ for i, testValue := range testValues {
+ t.Run(fmt.Sprintf("%d_%q", i, testValue), func(t *testing.T) {
+ ea.SetRecordRef(testValue)
+
+ if v := ea.RecordRef(); v != testValue {
+ t.Fatalf("Expected getter %q, got %q", testValue, v)
+ }
+
+ if v := ea.GetString("recordRef"); v != testValue {
+ t.Fatalf("Expected field value %q, got %q", testValue, v)
+ }
+ })
+ }
+}
+
+func TestExternalAuthCollectionRef(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ ea := core.NewExternalAuth(app)
+
+ testValues := []string{"test_1", "test2", ""}
+ for i, testValue := range testValues {
+ t.Run(fmt.Sprintf("%d_%q", i, testValue), func(t *testing.T) {
+ ea.SetCollectionRef(testValue)
+
+ if v := ea.CollectionRef(); v != testValue {
+ t.Fatalf("Expected getter %q, got %q", testValue, v)
+ }
+
+ if v := ea.GetString("collectionRef"); v != testValue {
+ t.Fatalf("Expected field value %q, got %q", testValue, v)
+ }
+ })
+ }
+}
+
+func TestExternalAuthProvider(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ ea := core.NewExternalAuth(app)
+
+ testValues := []string{"test_1", "test2", ""}
+ for i, testValue := range testValues {
+ t.Run(fmt.Sprintf("%d_%q", i, testValue), func(t *testing.T) {
+ ea.SetProvider(testValue)
+
+ if v := ea.Provider(); v != testValue {
+ t.Fatalf("Expected getter %q, got %q", testValue, v)
+ }
+
+ if v := ea.GetString("provider"); v != testValue {
+ t.Fatalf("Expected field value %q, got %q", testValue, v)
+ }
+ })
+ }
+}
+
+func TestExternalAuthProviderId(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ ea := core.NewExternalAuth(app)
+
+ testValues := []string{"test_1", "test2", ""}
+ for i, testValue := range testValues {
+ t.Run(fmt.Sprintf("%d_%q", i, testValue), func(t *testing.T) {
+ ea.SetProviderId(testValue)
+
+ if v := ea.ProviderId(); v != testValue {
+ t.Fatalf("Expected getter %q, got %q", testValue, v)
+ }
+
+ if v := ea.GetString("providerId"); v != testValue {
+ t.Fatalf("Expected field value %q, got %q", testValue, v)
+ }
+ })
+ }
+}
+
+func TestExternalAuthCreated(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ ea := core.NewExternalAuth(app)
+
+ if v := ea.Created().String(); v != "" {
+ t.Fatalf("Expected empty created, got %q", v)
+ }
+
+ now := types.NowDateTime()
+ ea.SetRaw("created", now)
+
+ if v := ea.Created().String(); v != now.String() {
+ t.Fatalf("Expected %q created, got %q", now.String(), v)
+ }
+}
+
+func TestExternalAuthUpdated(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ ea := core.NewExternalAuth(app)
+
+ if v := ea.Updated().String(); v != "" {
+ t.Fatalf("Expected empty updated, got %q", v)
+ }
+
+ now := types.NowDateTime()
+ ea.SetRaw("updated", now)
+
+ if v := ea.Updated().String(); v != now.String() {
+ t.Fatalf("Expected %q updated, got %q", now.String(), v)
+ }
+}
+
+func TestExternalAuthPreValidate(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ externalAuthsCol, err := app.FindCollectionByNameOrId(core.CollectionNameExternalAuths)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ t.Run("no proxy record", func(t *testing.T) {
+ externalAuth := &core.ExternalAuth{}
+
+ if err := app.Validate(externalAuth); err == nil {
+ t.Fatal("Expected collection validation error")
+ }
+ })
+
+ t.Run("non-ExternalAuth collection", func(t *testing.T) {
+ externalAuth := &core.ExternalAuth{}
+ externalAuth.SetProxyRecord(core.NewRecord(core.NewBaseCollection("invalid")))
+ externalAuth.SetRecordRef(user.Id)
+ externalAuth.SetCollectionRef(user.Collection().Id)
+ externalAuth.SetProvider("gitlab")
+ externalAuth.SetProviderId("test123")
+
+ if err := app.Validate(externalAuth); err == nil {
+ t.Fatal("Expected collection validation error")
+ }
+ })
+
+ t.Run("ExternalAuth collection", func(t *testing.T) {
+ externalAuth := &core.ExternalAuth{}
+ externalAuth.SetProxyRecord(core.NewRecord(externalAuthsCol))
+ externalAuth.SetRecordRef(user.Id)
+ externalAuth.SetCollectionRef(user.Collection().Id)
+ externalAuth.SetProvider("gitlab")
+ externalAuth.SetProviderId("test123")
+
+ if err := app.Validate(externalAuth); err != nil {
+ t.Fatalf("Expected nil validation error, got %v", err)
+ }
+ })
+}
+
+func TestExternalAuthValidateHook(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ demo1, err := app.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ name string
+ externalAuth func() *core.ExternalAuth
+ expectErrors []string
+ }{
+ {
+ "empty",
+ func() *core.ExternalAuth {
+ return core.NewExternalAuth(app)
+ },
+ []string{"collectionRef", "recordRef", "provider", "providerId"},
+ },
+ {
+ "non-auth collection",
+ func() *core.ExternalAuth {
+ ea := core.NewExternalAuth(app)
+ ea.SetCollectionRef(demo1.Collection().Id)
+ ea.SetRecordRef(demo1.Id)
+ ea.SetProvider("gitlab")
+ ea.SetProviderId("test123")
+ return ea
+ },
+ []string{"collectionRef"},
+ },
+ {
+ "disabled provider",
+ func() *core.ExternalAuth {
+ ea := core.NewExternalAuth(app)
+ ea.SetCollectionRef(user.Collection().Id)
+ ea.SetRecordRef("missing")
+ ea.SetProvider("apple")
+ ea.SetProviderId("test123")
+ return ea
+ },
+ []string{"recordRef"},
+ },
+ {
+ "missing record id",
+ func() *core.ExternalAuth {
+ ea := core.NewExternalAuth(app)
+ ea.SetCollectionRef(user.Collection().Id)
+ ea.SetRecordRef("missing")
+ ea.SetProvider("gitlab")
+ ea.SetProviderId("test123")
+ return ea
+ },
+ []string{"recordRef"},
+ },
+ {
+ "valid ref",
+ func() *core.ExternalAuth {
+ ea := core.NewExternalAuth(app)
+ ea.SetCollectionRef(user.Collection().Id)
+ ea.SetRecordRef(user.Id)
+ ea.SetProvider("gitlab")
+ ea.SetProviderId("test123")
+ return ea
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ errs := app.Validate(s.externalAuth())
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
diff --git a/core/external_auth_query.go b/core/external_auth_query.go
new file mode 100644
index 00000000..9331cfde
--- /dev/null
+++ b/core/external_auth_query.go
@@ -0,0 +1,61 @@
+package core
+
+import (
+ "github.com/pocketbase/dbx"
+)
+
+// FindAllExternalAuthsByRecord returns all ExternalAuth models
+// linked to the provided auth record.
+func (app *BaseApp) FindAllExternalAuthsByRecord(authRecord *Record) ([]*ExternalAuth, error) {
+ auths := []*ExternalAuth{}
+
+ err := app.RecordQuery(CollectionNameExternalAuths).
+ AndWhere(dbx.HashExp{
+ "collectionRef": authRecord.Collection().Id,
+ "recordRef": authRecord.Id,
+ }).
+ OrderBy("created DESC").
+ All(&auths)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return auths, nil
+}
+
+// FindAllExternalAuthsByCollection returns all ExternalAuth models
+// linked to the provided auth collection.
+func (app *BaseApp) FindAllExternalAuthsByCollection(collection *Collection) ([]*ExternalAuth, error) {
+ auths := []*ExternalAuth{}
+
+ err := app.RecordQuery(CollectionNameExternalAuths).
+ AndWhere(dbx.HashExp{"collectionRef": collection.Id}).
+ OrderBy("created DESC").
+ All(&auths)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return auths, nil
+}
+
+// FindFirstExternalAuthByExpr returns the first available (the most recent created)
+// ExternalAuth model that satisfies the non-nil expression.
+func (app *BaseApp) FindFirstExternalAuthByExpr(expr dbx.Expression) (*ExternalAuth, error) {
+ model := &ExternalAuth{}
+
+ err := app.RecordQuery(CollectionNameExternalAuths).
+ AndWhere(dbx.Not(dbx.HashExp{"providerId": ""})). // exclude empty providerIds
+ AndWhere(expr).
+ OrderBy("created DESC").
+ Limit(1).
+ One(model)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return model, nil
+}
diff --git a/core/external_auth_query_test.go b/core/external_auth_query_test.go
new file mode 100644
index 00000000..eaf51d00
--- /dev/null
+++ b/core/external_auth_query_test.go
@@ -0,0 +1,176 @@
+package core_test
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestFindAllExternalAuthsByRecord(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ demo1, err := app.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser1, err := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user1, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user2, err := app.FindAuthRecordByEmail("users", "test2@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user3, err := app.FindAuthRecordByEmail("users", "test3@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ client1, err := app.FindAuthRecordByEmail("clients", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ record *core.Record
+ expected []string
+ }{
+ {demo1, nil},
+ {superuser1, nil},
+ {client1, []string{"f1z5b3843pzc964"}},
+ {user1, []string{"clmflokuq1xl341", "dlmflokuq1xl342"}},
+ {user2, nil},
+ {user3, []string{"5eto7nmys833164"}},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.record.Collection().Name+"_"+s.record.Id, func(t *testing.T) {
+ result, err := app.FindAllExternalAuthsByRecord(s.record)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(result) != len(s.expected) {
+ t.Fatalf("Expected total models %d, got %d", len(s.expected), len(result))
+ }
+
+ for i, id := range s.expected {
+ if result[i].Id != id {
+ t.Errorf("[%d] Expected id %q, got %q", i, id, result[i].Id)
+ }
+ }
+ })
+ }
+}
+
+func TestFindAllExternalAuthsByCollection(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ demo1, err := app.FindCollectionByNameOrId("demo1")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superusers, err := app.FindCollectionByNameOrId(core.CollectionNameSuperusers)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ clients, err := app.FindCollectionByNameOrId("clients")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ users, err := app.FindCollectionByNameOrId("users")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ collection *core.Collection
+ expected []string
+ }{
+ {demo1, nil},
+ {superusers, nil},
+ {clients, []string{
+ "f1z5b3843pzc964",
+ }},
+ {users, []string{
+ "5eto7nmys833164",
+ "clmflokuq1xl341",
+ "dlmflokuq1xl342",
+ }},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.collection.Name, func(t *testing.T) {
+ result, err := app.FindAllExternalAuthsByCollection(s.collection)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(result) != len(s.expected) {
+ t.Fatalf("Expected total models %d, got %d", len(s.expected), len(result))
+ }
+
+ for i, id := range s.expected {
+ if result[i].Id != id {
+ t.Errorf("[%d] Expected id %q, got %q", i, id, result[i].Id)
+ }
+ }
+ })
+ }
+}
+
+func TestFindFirstExternalAuthByExpr(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ expr dbx.Expression
+ expectedId string
+ }{
+ {dbx.HashExp{"collectionRef": "invalid"}, ""},
+ {dbx.HashExp{"collectionRef": "_pb_users_auth_"}, "5eto7nmys833164"},
+ {dbx.HashExp{"collectionRef": "_pb_users_auth_", "provider": "gitlab"}, "dlmflokuq1xl342"},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%v", i, s.expr.Build(app.DB().(*dbx.DB), dbx.Params{})), func(t *testing.T) {
+ result, err := app.FindFirstExternalAuthByExpr(s.expr)
+
+ hasErr := err != nil
+ expectErr := s.expectedId == ""
+ if hasErr != expectErr {
+ t.Fatalf("Expected hasErr %v, got %v", expectErr, hasErr)
+ }
+
+ if hasErr {
+ return
+ }
+
+ if result.Id != s.expectedId {
+ t.Errorf("Expected id %q, got %q", s.expectedId, result.Id)
+ }
+ })
+ }
+}
diff --git a/core/field.go b/core/field.go
new file mode 100644
index 00000000..e4645d0b
--- /dev/null
+++ b/core/field.go
@@ -0,0 +1,250 @@
+package core
+
+import (
+ "context"
+ "database/sql/driver"
+ "regexp"
+ "strings"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tools/list"
+)
+
+var fieldNameRegex = regexp.MustCompile(`^\w+$`)
+
+// Commonly used field names.
+const (
+ FieldNameId = "id"
+ FieldNameCollectionId = "collectionId"
+ FieldNameCollectionName = "collectionName"
+ FieldNameExpand = "expand"
+ FieldNameEmail = "email"
+ FieldNameEmailVisibility = "emailVisibility"
+ FieldNameVerified = "verified"
+ FieldNameTokenKey = "tokenKey"
+ FieldNamePassword = "password"
+)
+
+// SystemDynamicFieldNames lists special dynamic field names that are usually readonly.
+var SystemDynamicFieldNames = []string{
+ FieldNameCollectionId,
+ FieldNameCollectionName,
+ FieldNameExpand,
+}
+
+// Common RecordInterceptor action names.
+//
+// Each record operation (create/update/delete) has a matching set of
+// "<action>", "<action>Execute", "after<Action>" and "after<Action>Error" names.
+const (
+	InterceptorActionValidate         = "validate"
+	InterceptorActionDelete           = "delete"
+	InterceptorActionDeleteExecute    = "deleteExecute"
+	InterceptorActionAfterDelete      = "afterDelete"
+	InterceptorActionAfterDeleteError = "afterDeleteError"
+	InterceptorActionCreate           = "create"
+	InterceptorActionCreateExecute    = "createExecute"
+	InterceptorActionAfterCreate      = "afterCreate"
+	// note: "afterCreateError" keeps the naming consistent with the
+	// "afterDeleteError" and "afterUpdateError" sibling actions
+	InterceptorActionAfterCreateError = "afterCreateError"
+	InterceptorActionUpdate           = "update"
+	InterceptorActionUpdateExecute    = "updateExecute"
+	InterceptorActionAfterUpdate      = "afterUpdate"
+	InterceptorActionAfterUpdateError = "afterUpdateError"
+)
+
+// Common field errors.
+var (
+ ErrUnknownField = validation.NewError("validation_unknown_field", "Unknown or invalid field.")
+ ErrInvalidFieldValue = validation.NewError("validation_invalid_field_value", "Invalid field value.")
+ ErrMustBeSystemAndHidden = validation.NewError("validation_must_be_system_and_hidden", `The field must be marked as "System" and "Hidden".`)
+ ErrMustBeSystem = validation.NewError("validation_must_be_system", `The field must be marked as "System".`)
+)
+
+// FieldFactoryFunc defines a simple function to construct a specific Field instance.
+type FieldFactoryFunc func() Field
+
+// Fields holds all available collection fields.
+var Fields = map[string]FieldFactoryFunc{}
+
+// Field defines a common interface that all Collection fields should implement.
+type Field interface {
+ // note: the getters have an explicit "Get" prefix to avoid conflicts with their related field members
+
+ // GetId returns the field id.
+ GetId() string
+
+ // SetId changes the field id.
+ SetId(id string)
+
+ // GetName returns the field name.
+ GetName() string
+
+ // SetName changes the field name.
+ SetName(name string)
+
+ // GetSystem returns the field system flag state.
+ GetSystem() bool
+
+ // SetSystem changes the field system flag state.
+ SetSystem(system bool)
+
+ // GetHidden returns the field hidden flag state.
+ GetHidden() bool
+
+ // SetHidden changes the field hidden flag state.
+ SetHidden(hidden bool)
+
+ // Type returns the unique type of the field.
+ Type() string
+
+ // ColumnType returns the DB column definition of the field.
+ ColumnType(app App) string
+
+ // PrepareValue returns a properly formatted field value based on the provided raw one.
+ //
+ // This method is also called on record construction to initialize its default field value.
+ PrepareValue(record *Record, raw any) (any, error)
+
+ // ValidateValue validates the current field value associated with the provided record.
+ ValidateValue(ctx context.Context, app App, record *Record) error
+
+ // ValidateSettings validates the current field settings.
+ ValidateSettings(ctx context.Context, app App, collection *Collection) error
+}
+
+// MaxBodySizeCalculator defines an optional field interface for
+// specifying the max size of a field value.
+type MaxBodySizeCalculator interface {
+ // CalculateMaxBodySize returns the approximate max body size of a field value.
+ CalculateMaxBodySize() int64
+}
+
+type (
+ // SetterFunc sets a (usually modified) raw value to the specified record field.
+ SetterFunc func(record *Record, raw any)
+
+ // SetterFinder defines a field interface for registering custom field value setters.
+ SetterFinder interface {
+  // FindSetter returns a single field value setter function
+  // by performing pattern-like field matching using the specified key.
+  //
+  // The key is usually just the field name but it could also
+  // contain "modifier" characters based on which you can perform custom set operations
+  // (ex. "users+" could be mapped to a function that will append new user to the existing field value).
+  //
+  // Return nil if you want to fallback to the default field value setter.
+  FindSetter(key string) SetterFunc
+ }
+)
+
+type (
+ // GetterFunc extracts a single value from the specified record.
+ GetterFunc func(record *Record) any
+
+ // GetterFinder defines a field interface for registering custom field value getters.
+ GetterFinder interface {
+  // FindGetter returns a single field value getter function
+  // by performing pattern-like field matching using the specified key.
+  //
+  // The key is usually just the field name but it could also
+  // contain "modifier" characters based on which you can perform custom get operations
+  // (ex. "description:excerpt" could be mapped to a function that will return an excerpt of the current field value).
+  //
+  // Return nil if you want to fallback to the default field value getter.
+  FindGetter(key string) GetterFunc
+ }
+)
+
+// DriverValuer defines a Field interface for exporting and formatting
+// a field value for the database.
+type DriverValuer interface {
+ // DriverValue exports a single field value for persistence in the database.
+ DriverValue(record *Record) (driver.Value, error)
+}
+
+// MultiValuer defines a field interface that every multi-valued (eg. with MaxSelect) field has.
+type MultiValuer interface {
+ // IsMultiple checks whether the field is configured to support multiple or single values.
+ IsMultiple() bool
+}
+
+// RecordInterceptor defines a field interface for reacting to various
+// Record related operations (create, delete, validate, etc.).
+type RecordInterceptor interface {
+ // Interceptor is invoked when a specific record action occurs
+ // allowing you to perform extra validations and normalization
+ // (ex. uploading or deleting files).
+ //
+ // Note that users must call actionFunc() manually if they want to
+ // execute the specific record action.
+ Intercept(
+ ctx context.Context,
+ app App,
+ record *Record,
+ actionName string,
+ actionFunc func() error,
+ ) error
+}
+
+// DefaultFieldIdValidationRule performs base validation on a field id value.
+//
+// The id must be a non-empty string with max length of 255 characters.
+func DefaultFieldIdValidationRule(value any) error {
+	id, ok := value.(string)
+	if !ok {
+		return validators.ErrUnsupportedValueType
+	}
+
+	// validators are applied in order and the first failure is returned
+	if err := validation.Required.Validate(id); err != nil {
+		return err
+	}
+
+	return validation.Length(1, 255).Validate(id)
+}
+
+// exclude special filter and system literals
+var excludeNames = append([]any{
+ "null", "true", "false", "_rowid_",
+}, list.ToInterfaceSlice(SystemDynamicFieldNames)...)
+
+// DefaultFieldNameValidationRule performs base validation on a field name value.
+//
+// The name must be a non-empty "\w+" string with max length of 255 characters
+// that is not part of the reserved literals and doesn't contain "_via_".
+func DefaultFieldNameValidationRule(value any) error {
+ v, ok := value.(string)
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ rules := []validation.Rule{
+ validation.Required,
+ validation.Length(1, 255),
+ validation.Match(fieldNameRegex),
+ validation.NotIn(excludeNames...),
+ validation.By(checkForVia),
+ }
+
+ for _, r := range rules {
+ if err := r.Validate(v); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// checkForVia returns a validation error when the string value
+// contains the reserved "_via_" relation-join literal (case-insensitive).
+func checkForVia(value any) error {
+	str, _ := value.(string)
+	if str == "" {
+		return nil
+	}
+
+	lower := strings.ToLower(str)
+	if strings.Contains(lower, "_via_") {
+		return validation.NewError("validation_found_via", `The value cannot contain "_via_".`)
+	}
+
+	return nil
+}
+
+// noopSetter is a [SetterFunc] that intentionally ignores the provided value.
+//
+// It is returned by fields that manage their value internally
+// to disallow direct updates via record.Set() (see e.g. AutodateField.FindSetter).
+func noopSetter(record *Record, raw any) {
+ // do nothing
+}
diff --git a/core/field_autodate.go b/core/field_autodate.go
new file mode 100644
index 00000000..9b54211a
--- /dev/null
+++ b/core/field_autodate.go
@@ -0,0 +1,176 @@
+package core
+
+import (
+ "context"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func init() {
+ Fields[FieldTypeAutodate] = func() Field {
+ return &AutodateField{}
+ }
+}
+
+const FieldTypeAutodate = "autodate"
+
+var (
+ _ Field = (*AutodateField)(nil)
+ _ SetterFinder = (*AutodateField)(nil)
+ _ RecordInterceptor = (*AutodateField)(nil)
+)
+
+// AutodateField defines an "autodate" type field, aka.
+// field which datetime value could be auto set on record create/update.
+//
+// Requires either both or at least one of the OnCreate or OnUpdate options to be set.
+type AutodateField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // OnCreate auto sets the current datetime as field value on record create.
+ OnCreate bool `form:"onCreate" json:"onCreate"`
+
+ // OnUpdate auto sets the current datetime as field value on record update.
+ OnUpdate bool `form:"onUpdate" json:"onUpdate"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *AutodateField) Type() string {
+ return FieldTypeAutodate
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *AutodateField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *AutodateField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *AutodateField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *AutodateField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *AutodateField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *AutodateField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *AutodateField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *AutodateField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *AutodateField) ColumnType(app App) string {
+ return "TEXT DEFAULT '' NOT NULL" // note: sqlite doesn't allow adding new columns with non-constant defaults
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *AutodateField) PrepareValue(record *Record, raw any) (any, error) {
+ val, _ := types.ParseDateTime(raw)
+ return val, nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+func (f *AutodateField) ValidateValue(ctx context.Context, app App, record *Record) error {
+ return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+//
+// At least one of OnCreate or OnUpdate must be enabled and, for system
+// fields, the flags must not change from their previously persisted state.
+func (f *AutodateField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ oldOnCreate := f.OnCreate
+ oldOnUpdate := f.OnUpdate
+
+ // load the last persisted state of the field (if the collection already exists)
+ // so that changes to system fields can be detected below
+ oldCollection, _ := app.FindCollectionByNameOrId(collection.Id)
+ if oldCollection != nil {
+ oldField, ok := oldCollection.Fields.GetById(f.Id).(*AutodateField)
+ if ok && oldField != nil {
+ oldOnCreate = oldField.OnCreate
+ oldOnUpdate = oldField.OnUpdate
+ }
+ }
+
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ validation.Field(
+ &f.OnCreate,
+ // system fields are not allowed to change their autodate behavior
+ validation.When(f.System, validation.By(validators.Equal(oldOnCreate))),
+ validation.Required.Error("either onCreate or onUpdate must be enabled").When(!f.OnUpdate),
+ ),
+ validation.Field(
+ &f.OnUpdate,
+ validation.When(f.System, validation.By(validators.Equal(oldOnUpdate))),
+ validation.Required.Error("either onCreate or onUpdate must be enabled").When(!f.OnCreate),
+ ),
+ )
+}
+
+// FindSetter implements the [SetterFinder] interface.
+//
+// It returns a no-op setter for the field's own name so that the
+// autodate value cannot be overwritten via record.Set().
+func (f *AutodateField) FindSetter(key string) SetterFunc {
+	if key == f.Name {
+		return noopSetter
+	}
+
+	// fallback to the default field value setter
+	return nil
+}
+
+// Intercept implements the [RecordInterceptor] interface.
+//
+// On record create/update it auto populates the field with the current
+// datetime (based on the OnCreate/OnUpdate options), unless a custom
+// value was already set manually with record.SetRaw().
+func (f *AutodateField) Intercept(
+	ctx context.Context,
+	app App,
+	record *Record,
+	actionName string,
+	actionFunc func() error,
+) error {
+	shouldAutoSet := (actionName == InterceptorActionCreate && f.OnCreate) ||
+		(actionName == InterceptorActionUpdate && f.OnUpdate)
+
+	// ignore for custom date manually set with record.SetRaw()
+	if shouldAutoSet && !f.hasBeenManuallyChanged(record) {
+		record.SetRaw(f.Name, types.NowDateTime())
+	}
+
+	return actionFunc()
+}
+
+// hasBeenManuallyChanged reports whether the record's current field value
+// differs from its original (last loaded) value, i.e. it was set manually.
+func (f *AutodateField) hasBeenManuallyChanged(record *Record) bool {
+	current, _ := record.GetRaw(f.Name).(types.DateTime)
+	original, _ := record.Original().GetRaw(f.Name).(types.DateTime)
+
+	return current.String() != original.String()
+}
diff --git a/core/field_autodate_test.go b/core/field_autodate_test.go
new file mode 100644
index 00000000..94fe0ec9
--- /dev/null
+++ b/core/field_autodate_test.go
@@ -0,0 +1,349 @@
+package core_test
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func TestAutodateFieldBaseMethods(t *testing.T) {
+ testFieldBaseMethods(t, core.FieldTypeAutodate)
+}
+
+func TestAutodateFieldColumnType(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.AutodateField{}
+
+ expected := "TEXT DEFAULT '' NOT NULL"
+
+ if v := f.ColumnType(app); v != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, v)
+ }
+}
+
+func TestAutodateFieldPrepareValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.AutodateField{}
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ scenarios := []struct {
+ raw any
+ expected string
+ }{
+ {"", ""},
+ {"invalid", ""},
+ {"2024-01-01 00:11:22.345Z", "2024-01-01 00:11:22.345Z"},
+ {time.Date(2024, 1, 2, 3, 4, 5, 0, time.UTC), "2024-01-02 03:04:05.000Z"},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.raw), func(t *testing.T) {
+ v, err := f.PrepareValue(record, s.raw)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ vDate, ok := v.(types.DateTime)
+ if !ok {
+ t.Fatalf("Expected types.DateTime instance, got %T", v)
+ }
+
+ if vDate.String() != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, v)
+ }
+ })
+ }
+}
+
+func TestAutodateFieldValidateValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field *core.AutodateField
+ record func() *core.Record
+ expectError bool
+ }{
+ {
+ "invalid raw value",
+ &core.AutodateField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123)
+ return record
+ },
+ false,
+ },
+ {
+ "missing field value",
+ &core.AutodateField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("abc", true)
+ return record
+ },
+ false,
+ },
+ {
+ "existing field value",
+ &core.AutodateField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.NowDateTime())
+ return record
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ err := s.field.ValidateValue(context.Background(), app, s.record())
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestAutodateFieldValidateSettings(t *testing.T) {
+ testDefaultFieldIdValidation(t, core.FieldTypeAutodate)
+ testDefaultFieldNameValidation(t, core.FieldTypeAutodate)
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ superusers, err := app.FindCollectionByNameOrId(core.CollectionNameSuperusers)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ name string
+ field func() *core.AutodateField
+ expectErrors []string
+ }{
+ {
+ "empty onCreate and onUpdate",
+ func() *core.AutodateField {
+ return &core.AutodateField{
+ Id: "test",
+ Name: "test",
+ }
+ },
+ []string{"onCreate", "onUpdate"},
+ },
+ {
+ "with onCreate",
+ func() *core.AutodateField {
+ return &core.AutodateField{
+ Id: "test",
+ Name: "test",
+ OnCreate: true,
+ }
+ },
+ []string{},
+ },
+ {
+ "with onUpdate",
+ func() *core.AutodateField {
+ return &core.AutodateField{
+ Id: "test",
+ Name: "test",
+ OnUpdate: true,
+ }
+ },
+ []string{},
+ },
+ {
+ "change of a system autodate field",
+ func() *core.AutodateField {
+ created := superusers.Fields.GetByName("created").(*core.AutodateField)
+ created.OnCreate = !created.OnCreate
+ created.OnUpdate = !created.OnUpdate
+ return created
+ },
+ []string{"onCreate", "onUpdate"},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ errs := s.field().ValidateSettings(context.Background(), app, superusers)
+
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
+
+func TestAutodateFieldFindSetter(t *testing.T) {
+ field := &core.AutodateField{Name: "test"}
+
+ collection := core.NewBaseCollection("test_collection")
+ collection.Fields.Add(field)
+
+ initialDate, err := types.ParseDateTime("2024-01-02 03:04:05.789Z")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ record := core.NewRecord(collection)
+ record.SetRaw("test", initialDate)
+
+ t.Run("no matching setter", func(t *testing.T) {
+ f := field.FindSetter("abc")
+ if f != nil {
+ t.Fatal("Expected nil setter")
+ }
+ })
+
+ t.Run("matching setter", func(t *testing.T) {
+ f := field.FindSetter("test")
+ if f == nil {
+ t.Fatal("Expected non-nil setter")
+ }
+
+ f(record, types.NowDateTime()) // should be ignored
+
+ if v := record.GetString("test"); v != "2024-01-02 03:04:05.789Z" {
+ t.Fatalf("Expected no value change, got %q", v)
+ }
+ })
+}
+
+func TestAutodateFieldIntercept(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ initialDate, err := types.ParseDateTime("2024-01-02 03:04:05.789Z")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ actionName string
+ field *core.AutodateField
+ record func() *core.Record
+ expected string
+ }{
+ {
+ "non-matching action",
+ "test",
+ &core.AutodateField{Name: "test", OnCreate: true, OnUpdate: true},
+ func() *core.Record {
+ return core.NewRecord(collection)
+ },
+ "",
+ },
+ {
+ "create with zero value (disabled onCreate)",
+ core.InterceptorActionCreate,
+ &core.AutodateField{Name: "test", OnCreate: false, OnUpdate: true},
+ func() *core.Record {
+ return core.NewRecord(collection)
+ },
+ "",
+ },
+ {
+ "create with zero value",
+ core.InterceptorActionCreate,
+ &core.AutodateField{Name: "test", OnCreate: true, OnUpdate: true},
+ func() *core.Record {
+ return core.NewRecord(collection)
+ },
+ "{NOW}",
+ },
+ {
+ "create with non-zero value",
+ core.InterceptorActionCreate,
+ &core.AutodateField{Name: "test", OnCreate: true, OnUpdate: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", initialDate)
+ return record
+ },
+ initialDate.String(),
+ },
+ {
+ "update with zero value (disabled onUpdate)",
+ core.InterceptorActionUpdate,
+ &core.AutodateField{Name: "test", OnCreate: true, OnUpdate: false},
+ func() *core.Record {
+ return core.NewRecord(collection)
+ },
+ "",
+ },
+ {
+ "update with zero value",
+ core.InterceptorActionUpdate,
+ &core.AutodateField{Name: "test", OnCreate: true, OnUpdate: true},
+ func() *core.Record {
+ return core.NewRecord(collection)
+ },
+ "{NOW}",
+ },
+ {
+ "update with non-zero value",
+ core.InterceptorActionUpdate,
+ &core.AutodateField{Name: "test", OnCreate: true, OnUpdate: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", initialDate)
+ return record
+ },
+ initialDate.String(),
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ actionCalls := 0
+ record := s.record()
+
+ now := types.NowDateTime().String()
+ err := s.field.Intercept(context.Background(), app, record, s.actionName, func() error {
+ actionCalls++
+ return nil
+ })
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if actionCalls != 1 {
+ t.Fatalf("Expected actionCalls %d, got %d", 1, actionCalls)
+ }
+
+ expected := cutMilliseconds(strings.ReplaceAll(s.expected, "{NOW}", now))
+
+ v := cutMilliseconds(record.GetString(s.field.GetName()))
+ if v != expected {
+ t.Fatalf("Expected value %q, got %q", expected, v)
+ }
+ })
+ }
+}
+
+func cutMilliseconds(datetime string) string {
+ if len(datetime) > 19 {
+ return datetime[:19]
+ }
+ return datetime
+}
diff --git a/core/field_bool.go b/core/field_bool.go
new file mode 100644
index 00000000..cd4e3786
--- /dev/null
+++ b/core/field_bool.go
@@ -0,0 +1,110 @@
+package core
+
+import (
+ "context"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/spf13/cast"
+)
+
+func init() {
+ Fields[FieldTypeBool] = func() Field {
+ return &BoolField{}
+ }
+}
+
+const FieldTypeBool = "bool"
+
+var _ Field = (*BoolField)(nil)
+
+// BoolField defines "bool" type field to store a single true/false value.
+type BoolField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // Required will require the field value to be always "true".
+ Required bool `form:"required" json:"required"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *BoolField) Type() string {
+ return FieldTypeBool
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *BoolField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *BoolField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *BoolField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *BoolField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *BoolField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *BoolField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *BoolField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *BoolField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *BoolField) ColumnType(app App) string {
+ return "BOOLEAN DEFAULT FALSE NOT NULL"
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *BoolField) PrepareValue(record *Record, raw any) (any, error) {
+ return cast.ToBool(raw), nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+//
+// When Required is set, only a "true" value passes validation.
+func (f *BoolField) ValidateValue(ctx context.Context, app App, record *Record) error {
+	raw, ok := record.GetRaw(f.Name).(bool)
+	if !ok {
+		return validators.ErrUnsupportedValueType
+	}
+
+	if !f.Required {
+		return nil
+	}
+
+	return validation.Required.Validate(raw)
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *BoolField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ )
+}
diff --git a/core/field_bool_test.go b/core/field_bool_test.go
new file mode 100644
index 00000000..6706099b
--- /dev/null
+++ b/core/field_bool_test.go
@@ -0,0 +1,150 @@
+package core_test
+
+import (
+ "context"
+ "fmt"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestBoolFieldBaseMethods(t *testing.T) {
+ testFieldBaseMethods(t, core.FieldTypeBool)
+}
+
+func TestBoolFieldColumnType(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.BoolField{}
+
+ expected := "BOOLEAN DEFAULT FALSE NOT NULL"
+
+ if v := f.ColumnType(app); v != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, v)
+ }
+}
+
+func TestBoolFieldPrepareValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.BoolField{}
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ scenarios := []struct {
+ raw any
+ expected bool
+ }{
+ {"", false},
+ {"f", false},
+ {"t", true},
+ {1, true},
+ {0, false},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.raw), func(t *testing.T) {
+ v, err := f.PrepareValue(record, s.raw)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if v != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, v)
+ }
+ })
+ }
+}
+
+func TestBoolFieldValidateValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field *core.BoolField
+ record func() *core.Record
+ expectError bool
+ }{
+ {
+ "invalid raw value",
+ &core.BoolField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123)
+ return record
+ },
+ true,
+ },
+ {
+ "missing field value (non-required)",
+ &core.BoolField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("abc", true)
+ return record
+ },
+ true, // because of failed nil.(bool) cast
+ },
+ {
+ "missing field value (required)",
+ &core.BoolField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("abc", true)
+ return record
+ },
+ true,
+ },
+ {
+ "false field value (non-required)",
+ &core.BoolField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", false)
+ return record
+ },
+ false,
+ },
+ {
+ "false field value (required)",
+ &core.BoolField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", false)
+ return record
+ },
+ true,
+ },
+ {
+ "true field value (required)",
+ &core.BoolField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", true)
+ return record
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ err := s.field.ValidateValue(context.Background(), app, s.record())
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestBoolFieldValidateSettings(t *testing.T) {
+ testDefaultFieldIdValidation(t, core.FieldTypeBool)
+ testDefaultFieldNameValidation(t, core.FieldTypeBool)
+}
diff --git a/core/field_date.go b/core/field_date.go
new file mode 100644
index 00000000..5bb67cf3
--- /dev/null
+++ b/core/field_date.go
@@ -0,0 +1,160 @@
+package core
+
+import (
+ "context"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func init() {
+ Fields[FieldTypeDate] = func() Field {
+ return &DateField{}
+ }
+}
+
+const FieldTypeDate = "date"
+
+var _ Field = (*DateField)(nil)
+
+// DateField defines "date" type field to store a single [types.DateTime] value.
+type DateField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // Min specifies the min allowed field value.
+ //
+ // Leave it empty to skip the validator.
+ Min types.DateTime `form:"min" json:"min"`
+
+ // Max specifies the max allowed field value.
+ //
+ // Leave it empty to skip the validator.
+ Max types.DateTime `form:"max" json:"max"`
+
+ // Required will require the field value to be non-zero [types.DateTime].
+ Required bool `form:"required" json:"required"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *DateField) Type() string {
+ return FieldTypeDate
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *DateField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *DateField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *DateField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *DateField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *DateField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *DateField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *DateField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *DateField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *DateField) ColumnType(app App) string {
+ return "TEXT DEFAULT '' NOT NULL"
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *DateField) PrepareValue(record *Record, raw any) (any, error) {
+ // ignore scan errors since the format may change between versions
+ // and to allow running db adjusting migrations
+ val, _ := types.ParseDateTime(raw)
+ return val, nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+//
+// Zero values are allowed unless the field is required; non-zero values
+// are additionally checked against the optional Min/Max bounds (Min first).
+func (f *DateField) ValidateValue(ctx context.Context, app App, record *Record) error {
+ val, ok := record.GetRaw(f.Name).(types.DateTime)
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ if val.IsZero() {
+ if f.Required {
+ return validation.ErrRequired
+ }
+ return nil // nothing to check
+ }
+
+ if !f.Min.IsZero() {
+ if err := validation.Min(f.Min.Time()).Validate(val.Time()); err != nil {
+ return err
+ }
+ }
+
+ if !f.Max.IsZero() {
+ if err := validation.Max(f.Max.Time()).Validate(val.Time()); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *DateField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ validation.Field(&f.Max, validation.By(f.checkRange(f.Min, f.Max))),
+ )
+}
+
+// checkRange returns a validation rule that ensures the value is a valid
+// datetime string within the provided min/max bounds.
+//
+// Zero bounds are skipped and a zero value always passes.
+func (f *DateField) checkRange(min types.DateTime, max types.DateTime) validation.RuleFunc {
+ return func(value any) error {
+ v, _ := value.(types.DateTime)
+ if v.IsZero() {
+ return nil // nothing to check
+ }
+
+ dr := validation.Date(types.DefaultDateLayout)
+
+ if !min.IsZero() {
+ dr.Min(min.Time())
+ }
+
+ if !max.IsZero() {
+ dr.Max(max.Time())
+ }
+
+ return dr.Validate(v.String())
+ }
+}
diff --git a/core/field_date_test.go b/core/field_date_test.go
new file mode 100644
index 00000000..ea95b191
--- /dev/null
+++ b/core/field_date_test.go
@@ -0,0 +1,229 @@
+package core_test
+
+import (
+ "context"
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func TestDateFieldBaseMethods(t *testing.T) {
+ testFieldBaseMethods(t, core.FieldTypeDate)
+}
+
+func TestDateFieldColumnType(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.DateField{}
+
+ expected := "TEXT DEFAULT '' NOT NULL"
+
+ if v := f.ColumnType(app); v != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, v)
+ }
+}
+
+func TestDateFieldPrepareValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.DateField{}
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ scenarios := []struct {
+ raw any
+ expected string
+ }{
+ {"", ""},
+ {"invalid", ""},
+ {"2024-01-01 00:11:22.345Z", "2024-01-01 00:11:22.345Z"},
+ {time.Date(2024, 1, 2, 3, 4, 5, 0, time.UTC), "2024-01-02 03:04:05.000Z"},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.raw), func(t *testing.T) {
+ v, err := f.PrepareValue(record, s.raw)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ vDate, ok := v.(types.DateTime)
+ if !ok {
+ t.Fatalf("Expected types.DateTime instance, got %T", v)
+ }
+
+ if vDate.String() != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, v)
+ }
+ })
+ }
+}
+
+func TestDateFieldValidateValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field *core.DateField
+ record func() *core.Record
+ expectError bool
+ }{
+ {
+ "invalid raw value",
+ &core.DateField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123)
+ return record
+ },
+ true,
+ },
+ {
+ "zero field value (not required)",
+ &core.DateField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.DateTime{})
+ return record
+ },
+ false,
+ },
+ {
+ "zero field value (required)",
+ &core.DateField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.DateTime{})
+ return record
+ },
+ true,
+ },
+ {
+ "non-zero field value (required)",
+ &core.DateField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.NowDateTime())
+ return record
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ err := s.field.ValidateValue(context.Background(), app, s.record())
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestDateFieldValidateSettings(t *testing.T) {
+ testDefaultFieldIdValidation(t, core.FieldTypeDate)
+ testDefaultFieldNameValidation(t, core.FieldTypeDate)
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field func() *core.DateField
+ expectErrors []string
+ }{
+ {
+ "zero Min/Max",
+ func() *core.DateField {
+ return &core.DateField{
+ Id: "test",
+ Name: "test",
+ }
+ },
+ []string{},
+ },
+ {
+ "non-empty Min with empty Max",
+ func() *core.DateField {
+ return &core.DateField{
+ Id: "test",
+ Name: "test",
+ Min: types.NowDateTime(),
+ }
+ },
+ []string{},
+ },
+ {
+ "empty Min non-empty Max",
+ func() *core.DateField {
+ return &core.DateField{
+ Id: "test",
+ Name: "test",
+ Max: types.NowDateTime(),
+ }
+ },
+ []string{},
+ },
+ {
+ "Min = Max",
+ func() *core.DateField {
+ date := types.NowDateTime()
+ return &core.DateField{
+ Id: "test",
+ Name: "test",
+ Min: date,
+ Max: date,
+ }
+ },
+ []string{},
+ },
+ {
+ "Min > Max",
+ func() *core.DateField {
+ min := types.NowDateTime()
+ max := min.Add(-5 * time.Second)
+ return &core.DateField{
+ Id: "test",
+ Name: "test",
+ Min: min,
+ Max: max,
+ }
+ },
+ []string{"max"},
+ },
+ {
+ "Min < Max",
+ func() *core.DateField {
+ max := types.NowDateTime()
+ min := max.Add(-5 * time.Second)
+ return &core.DateField{
+ Id: "test",
+ Name: "test",
+ Min: min,
+ Max: max,
+ }
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ errs := s.field().ValidateSettings(context.Background(), app, collection)
+
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
diff --git a/core/field_editor.go b/core/field_editor.go
new file mode 100644
index 00000000..be75677f
--- /dev/null
+++ b/core/field_editor.go
@@ -0,0 +1,149 @@
+package core
+
+import (
+ "context"
+ "fmt"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/spf13/cast"
+)
+
+func init() {
+ Fields[FieldTypeEditor] = func() Field {
+ return &EditorField{}
+ }
+}
+
+const FieldTypeEditor = "editor"
+
+const DefaultEditorFieldMaxSize int64 = 5 << 20
+
+var (
+ _ Field = (*EditorField)(nil)
+ _ MaxBodySizeCalculator = (*EditorField)(nil)
+)
+
+// EditorField defines "editor" type field to store HTML formatted text.
+type EditorField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // MaxSize specifies the maximum size of the allowed field value (in bytes).
+ //
+ // If zero, a default limit of ~5MB is applied.
+ MaxSize int64 `form:"maxSize" json:"maxSize"`
+
+ // ConvertURLs is usually used to instruct the editor whether to
+ // apply url conversion (eg. stripping the domain name in case the
+ // urls are using the same domain as the one where the editor is loaded).
+ //
+ // (see also https://www.tiny.cloud/docs/tinymce/6/url-handling/#convert_urls)
+ ConvertURLs bool `form:"convertURLs" json:"convertURLs"`
+
+ // Required will require the field value to be non-empty string.
+ Required bool `form:"required" json:"required"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *EditorField) Type() string {
+ return FieldTypeEditor
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *EditorField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *EditorField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *EditorField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *EditorField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *EditorField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *EditorField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *EditorField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *EditorField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *EditorField) ColumnType(app App) string {
+ return "TEXT DEFAULT '' NOT NULL"
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *EditorField) PrepareValue(record *Record, raw any) (any, error) {
+ return cast.ToString(raw), nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+func (f *EditorField) ValidateValue(ctx context.Context, app App, record *Record) error {
+ val, ok := record.GetRaw(f.Name).(string)
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ if f.Required {
+ if err := validation.Required.Validate(val); err != nil {
+ return err
+ }
+ }
+
+ maxSize := f.CalculateMaxBodySize()
+
+ if int64(len(val)) > maxSize {
+ return validation.NewError(
+ "validation_content_size_limit",
+ fmt.Sprintf("The maximum allowed content size is %v bytes", maxSize),
+ ).SetParams(map[string]any{"maxSize": maxSize})
+ }
+
+ return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *EditorField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ validation.Field(&f.MaxSize, validation.Min(0)),
+ )
+}
+
+// CalculateMaxBodySize implements the [MaxBodySizeCalculator] interface.
+func (f *EditorField) CalculateMaxBodySize() int64 {
+ if f.MaxSize <= 0 {
+ return DefaultEditorFieldMaxSize
+ }
+
+ return f.MaxSize
+}
diff --git a/core/field_editor_test.go b/core/field_editor_test.go
new file mode 100644
index 00000000..3cd1a388
--- /dev/null
+++ b/core/field_editor_test.go
@@ -0,0 +1,241 @@
+package core_test
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestEditorFieldBaseMethods(t *testing.T) {
+ testFieldBaseMethods(t, core.FieldTypeEditor)
+}
+
+func TestEditorFieldColumnType(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.EditorField{}
+
+ expected := "TEXT DEFAULT '' NOT NULL"
+
+ if v := f.ColumnType(app); v != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, v)
+ }
+}
+
+func TestEditorFieldPrepareValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.EditorField{}
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ scenarios := []struct {
+ raw any
+ expected string
+ }{
+ {"", ""},
+ {"test", "test"},
+ {false, "false"},
+ {true, "true"},
+ {123.456, "123.456"},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.raw), func(t *testing.T) {
+ v, err := f.PrepareValue(record, s.raw)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ vStr, ok := v.(string)
+ if !ok {
+ t.Fatalf("Expected string instance, got %T", v)
+ }
+
+ if vStr != s.expected {
+ t.Fatalf("Expected %q, got %q", s.expected, v)
+ }
+ })
+ }
+}
+
+func TestEditorFieldValidateValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field *core.EditorField
+ record func() *core.Record
+ expectError bool
+ }{
+ {
+ "invalid raw value",
+ &core.EditorField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123)
+ return record
+ },
+ true,
+ },
+ {
+ "zero field value (not required)",
+ &core.EditorField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "")
+ return record
+ },
+ false,
+ },
+ {
+ "zero field value (required)",
+ &core.EditorField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "")
+ return record
+ },
+ true,
+ },
+ {
+ "non-zero field value (required)",
+ &core.EditorField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "abc")
+ return record
+ },
+ false,
+ },
+ {
+ "> default MaxSize",
+ &core.EditorField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", strings.Repeat("a", 1+(5<<20)))
+ return record
+ },
+ true,
+ },
+ {
+ "> MaxSize",
+ &core.EditorField{Name: "test", Required: true, MaxSize: 5},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "abcdef")
+ return record
+ },
+ true,
+ },
+ {
+ "<= MaxSize",
+ &core.EditorField{Name: "test", Required: true, MaxSize: 5},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "abcde")
+ return record
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ err := s.field.ValidateValue(context.Background(), app, s.record())
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestEditorFieldValidateSettings(t *testing.T) {
+ testDefaultFieldIdValidation(t, core.FieldTypeEditor)
+ testDefaultFieldNameValidation(t, core.FieldTypeEditor)
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field func() *core.EditorField
+ expectErrors []string
+ }{
+ {
+ "< 0 MaxSize",
+ func() *core.EditorField {
+ return &core.EditorField{
+ Id: "test",
+ Name: "test",
+ MaxSize: -1,
+ }
+ },
+ []string{"maxSize"},
+ },
+ {
+ "= 0 MaxSize",
+ func() *core.EditorField {
+ return &core.EditorField{
+ Id: "test",
+ Name: "test",
+ }
+ },
+ []string{},
+ },
+ {
+ "> 0 MaxSize",
+ func() *core.EditorField {
+ return &core.EditorField{
+ Id: "test",
+ Name: "test",
+ MaxSize: 1,
+ }
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ errs := s.field().ValidateSettings(context.Background(), app, collection)
+
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
+
+func TestEditorFieldCalculateMaxBodySize(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ scenarios := []struct {
+ field *core.EditorField
+ expected int64
+ }{
+ {&core.EditorField{}, core.DefaultEditorFieldMaxSize},
+ {&core.EditorField{MaxSize: 10}, 10},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%d", i, s.field.MaxSize), func(t *testing.T) {
+ result := s.field.CalculateMaxBodySize()
+
+ if result != s.expected {
+ t.Fatalf("Expected %d, got %d", s.expected, result)
+ }
+ })
+ }
+}
diff --git a/core/field_email.go b/core/field_email.go
new file mode 100644
index 00000000..f189aee5
--- /dev/null
+++ b/core/field_email.go
@@ -0,0 +1,153 @@
+package core
+
+import (
+ "context"
+ "slices"
+ "strings"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/go-ozzo/ozzo-validation/v4/is"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/spf13/cast"
+)
+
+func init() {
+ Fields[FieldTypeEmail] = func() Field {
+ return &EmailField{}
+ }
+}
+
+const FieldTypeEmail = "email"
+
+var _ Field = (*EmailField)(nil)
+
+// EmailField defines "email" type field for storing single email string address.
+type EmailField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // ExceptDomains will require the email domain to NOT be included in the listed ones.
+ //
+ // This validator can be set only if OnlyDomains is empty.
+ ExceptDomains []string `form:"exceptDomains" json:"exceptDomains"`
+
+ // OnlyDomains will require the email domain to be included in the listed ones.
+ //
+ // This validator can be set only if ExceptDomains is empty.
+ OnlyDomains []string `form:"onlyDomains" json:"onlyDomains"`
+
+ // Required will require the field value to be non-empty email string.
+ Required bool `form:"required" json:"required"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *EmailField) Type() string {
+ return FieldTypeEmail
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *EmailField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *EmailField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *EmailField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *EmailField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *EmailField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *EmailField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *EmailField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *EmailField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *EmailField) ColumnType(app App) string {
+ return "TEXT DEFAULT '' NOT NULL"
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *EmailField) PrepareValue(record *Record, raw any) (any, error) {
+ return cast.ToString(raw), nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+func (f *EmailField) ValidateValue(ctx context.Context, app App, record *Record) error {
+ val, ok := record.GetRaw(f.Name).(string)
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ if f.Required {
+ if err := validation.Required.Validate(val); err != nil {
+ return err
+ }
+ }
+
+ if val == "" {
+ return nil // nothing to check
+ }
+
+ if err := is.EmailFormat.Validate(val); err != nil {
+ return err
+ }
+
+ domain := val[strings.LastIndex(val, "@")+1:]
+
+ // only domains check
+ if len(f.OnlyDomains) > 0 && !slices.Contains(f.OnlyDomains, domain) {
+ return validation.NewError("validation_email_domain_not_allowed", "Email domain is not allowed")
+ }
+
+ // except domains check
+ if len(f.ExceptDomains) > 0 && slices.Contains(f.ExceptDomains, domain) {
+ return validation.NewError("validation_email_domain_not_allowed", "Email domain is not allowed")
+ }
+
+ return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *EmailField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ validation.Field(
+ &f.ExceptDomains,
+ validation.When(len(f.OnlyDomains) > 0, validation.Empty).Else(validation.Each(is.Domain)),
+ ),
+ validation.Field(
+ &f.OnlyDomains,
+ validation.When(len(f.ExceptDomains) > 0, validation.Empty).Else(validation.Each(is.Domain)),
+ ),
+ )
+}
diff --git a/core/field_email_test.go b/core/field_email_test.go
new file mode 100644
index 00000000..600f2c2c
--- /dev/null
+++ b/core/field_email_test.go
@@ -0,0 +1,271 @@
+package core_test
+
+import (
+ "context"
+ "fmt"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestEmailFieldBaseMethods(t *testing.T) {
+ testFieldBaseMethods(t, core.FieldTypeEmail)
+}
+
+func TestEmailFieldColumnType(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.EmailField{}
+
+ expected := "TEXT DEFAULT '' NOT NULL"
+
+ if v := f.ColumnType(app); v != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, v)
+ }
+}
+
+func TestEmailFieldPrepareValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.EmailField{}
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ scenarios := []struct {
+ raw any
+ expected string
+ }{
+ {"", ""},
+ {"test", "test"},
+ {false, "false"},
+ {true, "true"},
+ {123.456, "123.456"},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.raw), func(t *testing.T) {
+ v, err := f.PrepareValue(record, s.raw)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ vStr, ok := v.(string)
+ if !ok {
+ t.Fatalf("Expected string instance, got %T", v)
+ }
+
+ if vStr != s.expected {
+ t.Fatalf("Expected %q, got %q", s.expected, v)
+ }
+ })
+ }
+}
+
+func TestEmailFieldValidateValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field *core.EmailField
+ record func() *core.Record
+ expectError bool
+ }{
+ {
+ "invalid raw value",
+ &core.EmailField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123)
+ return record
+ },
+ true,
+ },
+ {
+ "zero field value (not required)",
+ &core.EmailField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "")
+ return record
+ },
+ false,
+ },
+ {
+ "zero field value (required)",
+ &core.EmailField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "")
+ return record
+ },
+ true,
+ },
+ {
+ "non-zero field value (required)",
+ &core.EmailField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "test@example.com")
+ return record
+ },
+ false,
+ },
+ {
+ "invalid email",
+ &core.EmailField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "invalid")
+ return record
+ },
+ true,
+ },
+ {
+ "failed onlyDomains",
+ &core.EmailField{Name: "test", OnlyDomains: []string{"example.org", "example.net"}},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "test@example.com")
+ return record
+ },
+ true,
+ },
+ {
+ "success onlyDomains",
+ &core.EmailField{Name: "test", OnlyDomains: []string{"example.org", "example.com"}},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "test@example.com")
+ return record
+ },
+ false,
+ },
+ {
+ "failed exceptDomains",
+ &core.EmailField{Name: "test", ExceptDomains: []string{"example.org", "example.com"}},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "test@example.com")
+ return record
+ },
+ true,
+ },
+ {
+ "success exceptDomains",
+ &core.EmailField{Name: "test", ExceptDomains: []string{"example.org", "example.net"}},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "test@example.com")
+ return record
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ err := s.field.ValidateValue(context.Background(), app, s.record())
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestEmailFieldValidateSettings(t *testing.T) {
+ testDefaultFieldIdValidation(t, core.FieldTypeEmail)
+ testDefaultFieldNameValidation(t, core.FieldTypeEmail)
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field func() *core.EmailField
+ expectErrors []string
+ }{
+ {
+ "zero minimal",
+ func() *core.EmailField {
+ return &core.EmailField{
+ Id: "test",
+ Name: "test",
+ }
+ },
+ []string{},
+ },
+ {
+ "both onlyDomains and exceptDomains",
+ func() *core.EmailField {
+ return &core.EmailField{
+ Id: "test",
+ Name: "test",
+ OnlyDomains: []string{"example.com"},
+ ExceptDomains: []string{"example.org"},
+ }
+ },
+ []string{"onlyDomains", "exceptDomains"},
+ },
+ {
+ "invalid onlyDomains",
+ func() *core.EmailField {
+ return &core.EmailField{
+ Id: "test",
+ Name: "test",
+ OnlyDomains: []string{"example.com", "invalid"},
+ }
+ },
+ []string{"onlyDomains"},
+ },
+ {
+ "valid onlyDomains",
+ func() *core.EmailField {
+ return &core.EmailField{
+ Id: "test",
+ Name: "test",
+ OnlyDomains: []string{"example.com", "example.org"},
+ }
+ },
+ []string{},
+ },
+ {
+ "invalid exceptDomains",
+ func() *core.EmailField {
+ return &core.EmailField{
+ Id: "test",
+ Name: "test",
+ ExceptDomains: []string{"example.com", "invalid"},
+ }
+ },
+ []string{"exceptDomains"},
+ },
+ {
+ "valid exceptDomains",
+ func() *core.EmailField {
+ return &core.EmailField{
+ Id: "test",
+ Name: "test",
+ ExceptDomains: []string{"example.com", "example.org"},
+ }
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ errs := s.field().ValidateSettings(context.Background(), app, collection)
+
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
diff --git a/core/field_file.go b/core/field_file.go
new file mode 100644
index 00000000..55cc6cb8
--- /dev/null
+++ b/core/field_file.go
@@ -0,0 +1,792 @@
+package core
+
+import (
+ "context"
+ "database/sql/driver"
+ "errors"
+ "fmt"
+ "regexp"
+ "strings"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tools/filesystem"
+ "github.com/pocketbase/pocketbase/tools/list"
+ "github.com/pocketbase/pocketbase/tools/types"
+ "github.com/spf13/cast"
+)
+
+func init() {
+ Fields[FieldTypeFile] = func() Field {
+ return &FileField{}
+ }
+}
+
+const FieldTypeFile = "file"
+
+const DefaultFileFieldMaxSize int64 = 5 << 20
+
+var looseFilenameRegex = regexp.MustCompile(`^[^\./\\][^/\\]+$`)
+
+const (
+ deletedFilesPrefix = internalCustomFieldKeyPrefix + "_deletedFilesPrefix_"
+ uploadedFilesPrefix = internalCustomFieldKeyPrefix + "_uploadedFilesPrefix_"
+)
+
+var (
+ _ Field = (*FileField)(nil)
+ _ MultiValuer = (*FileField)(nil)
+ _ DriverValuer = (*FileField)(nil)
+ _ GetterFinder = (*FileField)(nil)
+ _ SetterFinder = (*FileField)(nil)
+ _ RecordInterceptor = (*FileField)(nil)
+ _ MaxBodySizeCalculator = (*FileField)(nil)
+)
+
+// FileField defines "file" type field for managing record file(s).
+//
+// Only the file name is stored as part of the record value.
+// New files (aka. files to upload) are expected to be of *filesystem.File.
+//
+// If MaxSelect is not set or <= 1, then the field value is expected to be a single file name.
+//
+// If MaxSelect is > 1, then the field value is expected to be a slice of file names.
+//
+// ---
+//
+// The following additional setter keys are available:
+//
+// - "fieldName+" - append one or more files to the existing record one. For example:
+//
+// // []string{"old1.txt", "old2.txt", "new1_ajkvass.txt", "new2_klhfnwd.txt"}
+// record.Set("documents+", []*filesystem.File{new1, new2})
+//
+// - "+fieldName" - prepend one or more files to the existing record one. For example:
+//
+// // []string{"new1_ajkvass.txt", "new2_klhfnwd.txt", "old1.txt", "old2.txt",}
+// record.Set("+documents", []*filesystem.File{new1, new2})
+//
+// - "fieldName-" - subtract one or more files from the existing record one. For example:
+//
+// // []string{"old2.txt",}
+// record.Set("documents-", "old1.txt")
+type FileField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // MaxSize specifies the maximum size of a single uploaded file (in bytes).
+ //
+ // If zero, a default limit of 5MB is applied.
+ MaxSize int64 `form:"maxSize" json:"maxSize"`
+
+ // MaxSelect specifies the max allowed files.
+ //
+ // For multiple files the value must be > 1, otherwise fallbacks to single (default).
+ MaxSelect int `form:"maxSelect" json:"maxSelect"`
+
+ // MimeTypes specifies an optional list of the allowed file mime types.
+ //
+ // Leave it empty to disable the validator.
+ MimeTypes []string `form:"mimeTypes" json:"mimeTypes"`
+
+ // Thumbs specifies an optional list of the supported thumbs for image based files.
+ //
+ // Each entry must be in one of the following formats:
+ //
+ // - WxH (eg. 100x300) - crop to WxH viewbox (from center)
+ // - WxHt (eg. 100x300t) - crop to WxH viewbox (from top)
+ // - WxHb (eg. 100x300b) - crop to WxH viewbox (from bottom)
+ // - WxHf (eg. 100x300f) - fit inside a WxH viewbox (without cropping)
+ // - 0xH (eg. 0x300) - resize to H height preserving the aspect ratio
+ // - Wx0 (eg. 100x0) - resize to W width preserving the aspect ratio
+ Thumbs []string `form:"thumbs" json:"thumbs"`
+
+ // Protected will require the users to provide a special file token to access the file.
+ //
+ // Note that by default all files are publicly accessible.
+ //
+ // For the majority of the cases this is fine because by default
+ // all file names have random part appended to their name which
+ // need to be known by the user before accessing the file.
+ Protected bool `form:"protected" json:"protected"`
+
+ // Required will require the field value to have at least one file.
+ Required bool `form:"required" json:"required"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *FileField) Type() string {
+ return FieldTypeFile
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *FileField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *FileField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *FileField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *FileField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *FileField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *FileField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *FileField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *FileField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// IsMultiple implements MultiValuer interface and checks whether the
+// current field options support multiple values.
+func (f *FileField) IsMultiple() bool {
+ return f.MaxSelect > 1
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *FileField) ColumnType(app App) string {
+ if f.IsMultiple() {
+ return "JSON DEFAULT '[]' NOT NULL"
+ }
+
+ return "TEXT DEFAULT '' NOT NULL"
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *FileField) PrepareValue(record *Record, raw any) (any, error) {
+ return f.normalizeValue(raw), nil
+}
+
+// DriverValue implements the [DriverValuer] interface.
+func (f *FileField) DriverValue(record *Record) (driver.Value, error) {
+ files := f.toSliceValue(record.GetRaw(f.Name))
+
+ if f.IsMultiple() {
+ ja := make(types.JSONArray[string], len(files))
+ for i, v := range files {
+ ja[i] = f.getFileName(v)
+ }
+ return ja, nil
+ }
+
+ if len(files) == 0 {
+ return "", nil
+ }
+
+ return f.getFileName(files[len(files)-1]), nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *FileField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ validation.Field(&f.MaxSelect, validation.Min(0)),
+ validation.Field(&f.MaxSize, validation.Min(0)),
+ validation.Field(&f.Thumbs, validation.Each(
+ validation.NotIn("0x0", "0x0t", "0x0b", "0x0f"),
+ validation.Match(filesystem.ThumbSizeRegex),
+ )),
+ )
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+func (f *FileField) ValidateValue(ctx context.Context, app App, record *Record) error {
+ files := f.toSliceValue(record.GetRaw(f.Name))
+ if len(files) == 0 {
+ if f.Required {
+ return validation.ErrRequired
+ }
+ return nil // nothing to check
+ }
+
+ // validate existing and disallow new plain string filenames submission
+ // (new files must be *filesystem.File)
+ // ---
+ oldExistingStrings := f.toSliceValue(f.getLatestOldValue(app, record))
+ existingStrings := list.ToInterfaceSlice(f.extractPlainStrings(files))
+ addedStrings := f.excludeFiles(existingStrings, oldExistingStrings)
+
+ if len(addedStrings) > 0 {
+ return validation.NewError("validation_invalid_file", "Invalid files:"+strings.Join(cast.ToStringSlice(addedStrings), ", ")).
+ SetParams(map[string]any{"invalidFiles": addedStrings})
+ }
+
+ maxSelect := f.maxSelect()
+ if len(files) > maxSelect {
+ return validation.NewError("validation_too_many_files", fmt.Sprintf("The maximum allowed files is %d", maxSelect)).
+ SetParams(map[string]any{"maxSelect": maxSelect})
+ }
+
+ // validate uploaded
+ // ---
+ uploads := f.extractUploadableFiles(files)
+ for _, upload := range uploads {
+ // loosely check the filename just in case it was manually changed after the normalization
+ err := validation.Length(1, 150).Validate(upload.Name)
+ if err != nil {
+ return err
+ }
+ err = validation.Match(looseFilenameRegex).Validate(upload.Name)
+ if err != nil {
+ return err
+ }
+
+ // check size
+ err = validators.UploadedFileSize(f.maxSize())(upload)
+ if err != nil {
+ return err
+ }
+
+ // check type
+ if len(f.MimeTypes) > 0 {
+ err = validators.UploadedFileMimeType(f.MimeTypes)(upload)
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+func (f *FileField) maxSize() int64 {
+ if f.MaxSize <= 0 {
+ return DefaultFileFieldMaxSize
+ }
+
+ return f.MaxSize
+}
+
+func (f *FileField) maxSelect() int {
+ if f.MaxSelect <= 1 {
+ return 1
+ }
+
+ return f.MaxSelect
+}
+
// CalculateMaxBodySize implements the [MaxBodySizeCalculator] interface.
//
// The result is the per-file size limit multiplied by the max allowed
// number of files (both with their defaults applied).
func (f *FileField) CalculateMaxBodySize() int64 {
	return f.maxSize() * int64(f.maxSelect())
}
+
+// Interceptors
+// -------------------------------------------------------------------
+
// Intercept implements the [RecordInterceptor] interface.
//
// It orchestrates the field's files persistence around the record db write:
//   - create/update execute: uploads the new files before the db write and,
//     depending on the write outcome, either remembers the removed old files
//     for deletion or rolls back the just uploaded files
//   - after create/update error: best-effort cleanup of the newly uploaded
//     files (skipped inside a transaction, where the execute branch handled it)
//   - after create/update success: deletes the files marked for deletion
//
// note: files delete after records deletion is handled globally by the app FileManager hook
func (f *FileField) Intercept(
	ctx context.Context,
	app App,
	record *Record,
	actionName string,
	actionFunc func() error,
) error {
	switch actionName {
	case InterceptorActionCreateExecute, InterceptorActionUpdateExecute:
		// capture the latest persisted value before uploading,
		// so that removed old files can be identified afterwards
		oldValue := f.getLatestOldValue(app, record)

		err := f.processFilesToUpload(ctx, app, record)
		if err != nil {
			return err
		}

		err = actionFunc()
		if err != nil {
			// db write failed -> remove the files uploaded above
			return errors.Join(err, f.afterRecordExecuteFailure(newContextIfInvalid(ctx), app, record))
		}

		f.rememberFilesToDelete(app, record, oldValue)

		f.afterRecordExecuteSuccess(newContextIfInvalid(ctx), app, record)

		return nil
	case InterceptorActionAfterCreateError, InterceptorActionAfterUpdateError:
		// when in transaction we assume that the error was handled by afterRecordExecuteFailure
		_, insideTransaction := app.DB().(*dbx.Tx)
		if insideTransaction {
			return actionFunc()
		}

		failedToDelete, deleteErr := f.deleteNewlyUploadedFiles(newContextIfInvalid(ctx), app, record)
		if deleteErr != nil {
			app.Logger().Warn(
				"Failed to cleanup all new files after record commit failure",
				"error", deleteErr,
				"failedToDelete", failedToDelete,
			)
		}

		record.SetRaw(deletedFilesPrefix+f.Name, nil)

		if record.IsNew() {
			// try to delete the record directory if there are no other files
			//
			// note: executed only on create failure to avoid accidentally
			// deleting a concurrently updating directory due to the
			// eventual consistent nature of some storage providers
			err := f.deleteEmptyRecordDir(newContextIfInvalid(ctx), app, record)
			if err != nil {
				app.Logger().Warn("Failed to delete empty dir after new record commit failure", "error", err)
			}
		}

		return actionFunc()
	case InterceptorActionAfterCreate, InterceptorActionAfterUpdate:
		// the uploads are now committed -> stop tracking them
		record.SetRaw(uploadedFilesPrefix+f.Name, nil)

		err := f.processFilesToDelete(ctx, app, record)
		if err != nil {
			return err
		}

		return actionFunc()
	default:
		return actionFunc()
	}
}
// getLatestOldValue returns the latest persisted value of the field -
// refetched from the db for existing records (to account for changes made
// after the record was loaded), with the loaded original value as fallback.
func (f *FileField) getLatestOldValue(app App, record *Record) any {
	if !record.IsNew() {
		latestOriginal, err := app.FindRecordById(record.Collection(), record.Id)
		if err == nil {
			return latestOriginal.GetRaw(f.Name)
		}
	}

	// fallback to the original load-time value (also the only option for new records)
	return record.Original().GetRaw(f.Name)
}
+
// afterRecordExecuteSuccess replaces the uploaded *filesystem.File values in
// the field with their plain string names and tracks the uploaded files under
// the uploadedFilesPrefix record raw key (used for cleanup on a later failure).
func (f *FileField) afterRecordExecuteSuccess(ctx context.Context, app App, record *Record) {
	uploaded, _ := record.GetRaw(uploadedFilesPrefix + f.Name).([]*filesystem.File)

	// replace the uploaded file objects with their plain string names
	newValue := f.toSliceValue(record.GetRaw(f.Name))
	for i, v := range newValue {
		if file, ok := v.(*filesystem.File); ok {
			uploaded = append(uploaded, file)
			newValue[i] = file.Name
		}
	}
	f.setValue(record, newValue)

	record.SetRaw(uploadedFilesPrefix+f.Name, uploaded)
}
+
// afterRecordExecuteFailure deletes the new uploads still present in the
// field value after a failed record db write (best-effort rollback;
// undeleted leftovers are only logged).
func (f *FileField) afterRecordExecuteFailure(ctx context.Context, app App, record *Record) error {
	uploaded := f.extractUploadableFiles(f.toSliceValue(record.GetRaw(f.Name)))

	toDelete := make([]string, len(uploaded))
	for i, file := range uploaded {
		toDelete[i] = file.Name
	}

	// delete previously uploaded files
	failedToDelete, deleteErr := f.deleteFilesByNamesList(ctx, app, record, list.ToUniqueStringSlice(toDelete))

	if len(failedToDelete) > 0 {
		app.Logger().Warn(
			"Failed to cleanup the new uploaded file after record db write failure",
			"error", deleteErr,
			"failedToDelete", failedToDelete,
		)
	}

	return deleteErr
}
+
+func (f *FileField) deleteEmptyRecordDir(ctx context.Context, app App, record *Record) error {
+ fsys, err := app.NewFilesystem()
+ if err != nil {
+ return err
+ }
+ defer fsys.Close()
+ fsys.SetContext(newContextIfInvalid(ctx))
+
+ dir := record.BaseFilesPath()
+
+ if !fsys.IsEmptyDir(dir) {
+ return nil // no-op
+ }
+
+ err = fsys.Delete(dir)
+ if err != nil && !errors.Is(err, filesystem.ErrNotFound) {
+ return err
+ }
+
+ return nil
+}
+
+func (f *FileField) processFilesToDelete(ctx context.Context, app App, record *Record) error {
+ markedForDelete, _ := record.GetRaw(deletedFilesPrefix + f.Name).([]string)
+ if len(markedForDelete) == 0 {
+ return nil
+ }
+
+ old := list.ToInterfaceSlice(markedForDelete)
+ new := list.ToInterfaceSlice(f.extractPlainStrings(f.toSliceValue(record.GetRaw(f.Name))))
+ diff := f.excludeFiles(old, new)
+
+ toDelete := make([]string, len(diff))
+ for i, del := range diff {
+ toDelete[i] = f.getFileName(del)
+ }
+
+ failedToDelete, err := f.deleteFilesByNamesList(ctx, app, record, list.ToUniqueStringSlice(toDelete))
+
+ record.SetRaw(deletedFilesPrefix+f.Name, failedToDelete)
+
+ return err
+}
+
+func (f *FileField) rememberFilesToDelete(app App, record *Record, oldValue any) {
+ old := list.ToInterfaceSlice(f.extractPlainStrings(f.toSliceValue(oldValue)))
+ new := list.ToInterfaceSlice(f.extractPlainStrings(f.toSliceValue(record.GetRaw(f.Name))))
+ diff := f.excludeFiles(old, new)
+
+ toDelete, _ := record.GetRaw(deletedFilesPrefix + f.Name).([]string)
+
+ for _, del := range diff {
+ toDelete = append(toDelete, f.getFileName(del))
+ }
+
+ record.SetRaw(deletedFilesPrefix+f.Name, toDelete)
+}
+
// processFilesToUpload uploads all new *filesystem.File values of the field
// into the record's files directory.
//
// On the first upload failure it stops, best-effort deletes the already
// uploaded files and returns a combined error.
// Requires the record to have a nonempty id since the files path is derived from it.
func (f *FileField) processFilesToUpload(ctx context.Context, app App, record *Record) error {
	uploads := f.extractUploadableFiles(f.toSliceValue(record.GetRaw(f.Name)))
	if len(uploads) == 0 {
		return nil
	}

	if record.Id == "" {
		return errors.New("uploading files requires the record to have a valid nonempty id")
	}

	fsys, err := app.NewFilesystem()
	if err != nil {
		return err
	}
	defer fsys.Close()
	fsys.SetContext(ctx)

	var failed []error // list of upload errors
	var succeeded []string // list of uploaded file names

	for _, upload := range uploads {
		path := record.BaseFilesPath() + "/" + upload.Name
		if err := fsys.UploadFile(upload, path); err == nil {
			succeeded = append(succeeded, upload.Name)
		} else {
			failed = append(failed, fmt.Errorf("%q: %w", upload.Name, err))
			break // for now stop on the first error since we currently don't allow partial uploads
		}
	}

	if len(failed) > 0 {
		// cleanup - try to delete the successfully uploaded files (if any)
		_, cleanupErr := f.deleteFilesByNamesList(newContextIfInvalid(ctx), app, record, succeeded)

		// a nil cleanupErr is discarded by errors.Join below
		failed = append(failed, cleanupErr)

		return fmt.Errorf("failed to upload all files: %w", errors.Join(failed...))
	}

	return nil
}
+
// deleteNewlyUploadedFiles deletes the files uploaded as part of the current
// record save (tracked under the uploadedFilesPrefix raw key) and clears the
// tracking key on full success.
// Returns the names of the files that failed to delete (if any).
func (f *FileField) deleteNewlyUploadedFiles(ctx context.Context, app App, record *Record) ([]string, error) {
	uploaded, _ := record.GetRaw(uploadedFilesPrefix + f.Name).([]*filesystem.File)
	if len(uploaded) == 0 {
		return nil, nil
	}

	names := make([]string, len(uploaded))
	for i, file := range uploaded {
		names[i] = file.Name
	}

	failed, err := f.deleteFilesByNamesList(ctx, app, record, list.ToUniqueStringSlice(names))
	if err != nil {
		return failed, err
	}

	record.SetRaw(uploadedFilesPrefix+f.Name, nil)

	return nil, nil
}
+
// deleteFilesByNamesList deletes a list of record files by their names.
// Returns the failed/remaining files.
//
// The iteration is in reverse so that successfully deleted entries can be
// removed from filenames in place (note: the caller's slice backing array is
// mutated). Related thumbnails are also removed; thumb delete failures are
// only logged.
func (f *FileField) deleteFilesByNamesList(ctx context.Context, app App, record *Record, filenames []string) ([]string, error) {
	if len(filenames) == 0 {
		return nil, nil // nothing to delete
	}

	if record.Id == "" {
		return filenames, errors.New("the record doesn't have an id")
	}

	fsys, err := app.NewFilesystem()
	if err != nil {
		return filenames, err
	}
	defer fsys.Close()
	fsys.SetContext(ctx)

	var failures []error

	for i := len(filenames) - 1; i >= 0; i-- {
		filename := filenames[i]
		if filename == "" || strings.ContainsAny(filename, "/\\") {
			continue // empty or not a plain filename
		}

		path := record.BaseFilesPath() + "/" + filename

		err := fsys.Delete(path)
		if err != nil && !errors.Is(err, filesystem.ErrNotFound) {
			// store the delete error
			failures = append(failures, fmt.Errorf("file %d (%q): %w", i, filename, err))
		} else {
			// remove the deleted file from the list
			filenames = append(filenames[:i], filenames[i+1:]...)

			// try to delete the related file thumbs (if any)
			thumbsErr := fsys.DeletePrefix(record.BaseFilesPath() + "/thumbs_" + filename + "/")
			if len(thumbsErr) > 0 {
				app.Logger().Warn("Failed to delete file thumbs", "error", errors.Join(thumbsErr...))
			}
		}
	}

	if len(failures) > 0 {
		return filenames, fmt.Errorf("failed to delete all files: %w", errors.Join(failures...))
	}

	return nil, nil
}
+
+// newContextIfInvalid returns a new Background context if the provided one was cancelled.
+func newContextIfInvalid(ctx context.Context) context.Context {
+ if ctx.Err() == nil {
+ return ctx
+ }
+
+ return context.Background()
+}
+
+// -------------------------------------------------------------------
+
// FindGetter implements the [GetterFinder] interface.
//
// Supported getter keys:
//   - "fieldName"          - returns the current raw field value
//   - "fieldName:uploaded" - returns only the new (not yet persisted) *filesystem.File values
func (f *FileField) FindGetter(key string) GetterFunc {
	switch key {
	case f.Name:
		return func(record *Record) any {
			return record.GetRaw(f.Name)
		}
	case f.Name + ":uploaded":
		return func(record *Record) any {
			return f.extractUploadableFiles(f.toSliceValue(record.GetRaw(f.Name)))
		}
	default:
		return nil
	}
}
+
+// -------------------------------------------------------------------
+
+// FindSetter implements the [SetterFinder] interface.
+func (f *FileField) FindSetter(key string) SetterFunc {
+ switch key {
+ case f.Name:
+ return f.setValue
+ case "+" + f.Name:
+ return f.prependValue
+ case f.Name + "+":
+ return f.appendValue
+ case f.Name + "-":
+ return f.subtractValue
+ default:
+ return nil
+ }
+}
+
// setValue normalizes raw (see normalizeValue) and stores it as the record's field value.
func (f *FileField) setValue(record *Record, raw any) {
	val := f.normalizeValue(raw)

	record.SetRaw(f.Name, val)
}
+
+func (f *FileField) prependValue(record *Record, toPrepend any) {
+ files := f.toSliceValue(record.GetRaw(f.Name))
+ prepends := f.toSliceValue(toPrepend)
+
+ if len(prepends) > 0 {
+ files = append(prepends, files...)
+ }
+
+ f.setValue(record, files)
+}
+
+func (f *FileField) appendValue(record *Record, toAppend any) {
+ files := f.toSliceValue(record.GetRaw(f.Name))
+ appends := f.toSliceValue(toAppend)
+
+ if len(appends) > 0 {
+ files = append(files, appends...)
+ }
+
+ f.setValue(record, files)
+}
+
// subtractValue removes from the current field value the file(s)
// matching toRemove (compared by file name).
func (f *FileField) subtractValue(record *Record, toRemove any) {
	files := f.excludeFiles(
		f.toSliceValue(record.GetRaw(f.Name)),
		f.toSliceValue(toRemove),
	)

	f.setValue(record, files)
}
+
+func (f *FileField) normalizeValue(raw any) any {
+ files := f.toSliceValue(raw)
+
+ if f.IsMultiple() {
+ return files
+ }
+
+ if len(files) > 0 {
+ return files[len(files)-1] // the last selected
+ }
+
+ return ""
+}
+
+func (f *FileField) toSliceValue(raw any) []any {
+ var result []any
+
+ switch value := raw.(type) {
+ case nil:
+ // nothing to cast
+ case *filesystem.File:
+ result = append(result, value)
+ case filesystem.File:
+ result = append(result, &value)
+ case []*filesystem.File:
+ for _, v := range value {
+ result = append(result, v)
+ }
+ case []filesystem.File:
+ for _, v := range value {
+ result = append(result, &v)
+ }
+ case []any:
+ for _, v := range value {
+ casted := f.toSliceValue(v)
+ if len(casted) == 1 {
+ result = append(result, casted[0])
+ }
+ }
+ default:
+ result = list.ToInterfaceSlice(list.ToUniqueStringSlice(value))
+ }
+
+ return f.uniqueFiles(result)
+}
+
+func (f *FileField) uniqueFiles(files []any) []any {
+ existing := make(map[string]struct{}, len(files))
+ result := make([]any, 0, len(files))
+
+ for _, fv := range files {
+ name := f.getFileName(fv)
+ if _, ok := existing[name]; !ok {
+ result = append(result, fv)
+ existing[name] = struct{}{}
+ }
+ }
+
+ return result
+}
+
+func (f *FileField) extractPlainStrings(files []any) []string {
+ result := []string{}
+
+ for _, raw := range files {
+ if f, ok := raw.(string); ok {
+ result = append(result, f)
+ }
+ }
+
+ return result
+}
+
+func (f *FileField) extractUploadableFiles(files []any) []*filesystem.File {
+ result := []*filesystem.File{}
+
+ for _, raw := range files {
+ if upload, ok := raw.(*filesystem.File); ok {
+ result = append(result, upload)
+ }
+ }
+
+ return result
+}
+
+func (f *FileField) excludeFiles(base []any, toExclude []any) []any {
+ result := make([]any, 0, len(base))
+
+SUBTRACT_LOOP:
+ for _, fv := range base {
+ for _, exclude := range toExclude {
+ if f.getFileName(exclude) == f.getFileName(fv) {
+ continue SUBTRACT_LOOP // skip
+ }
+ }
+
+ result = append(result, fv)
+ }
+
+ return result
+}
+
+func (f *FileField) getFileName(file any) string {
+ switch v := file.(type) {
+ case string:
+ return v
+ case *filesystem.File:
+ return v.Name
+ default:
+ return ""
+ }
+}
diff --git a/core/field_file_test.go b/core/field_file_test.go
new file mode 100644
index 00000000..dc94dcb3
--- /dev/null
+++ b/core/field_file_test.go
@@ -0,0 +1,1108 @@
+package core_test
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "slices"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/filesystem"
+ "github.com/pocketbase/pocketbase/tools/list"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
// TestFileFieldBaseMethods runs the shared base Field interface tests
// for the file field type.
func TestFileFieldBaseMethods(t *testing.T) {
	testFieldBaseMethods(t, core.FieldTypeFile)
}
+
// TestFileFieldColumnType verifies the generated db column type:
// TEXT for single-file fields (MaxSelect <= 1) and JSON for multi-file ones.
func TestFileFieldColumnType(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	scenarios := []struct {
		name     string
		field    *core.FileField
		expected string
	}{
		{
			"single (zero)",
			&core.FileField{},
			"TEXT DEFAULT '' NOT NULL",
		},
		{
			"single",
			&core.FileField{MaxSelect: 1},
			"TEXT DEFAULT '' NOT NULL",
		},
		{
			"multiple",
			&core.FileField{MaxSelect: 2},
			"JSON DEFAULT '[]' NOT NULL",
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			if v := s.field.ColumnType(app); v != s.expected {
				t.Fatalf("Expected\n%q\ngot\n%q", s.expected, v)
			}
		})
	}
}
+
+func TestFileFieldIsMultiple(t *testing.T) {
+ scenarios := []struct {
+ name string
+ field *core.FileField
+ expected bool
+ }{
+ {
+ "zero",
+ &core.FileField{},
+ false,
+ },
+ {
+ "single",
+ &core.FileField{MaxSelect: 1},
+ false,
+ },
+ {
+ "multiple",
+ &core.FileField{MaxSelect: 2},
+ true,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ if v := s.field.IsMultiple(); v != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, v)
+ }
+ })
+ }
+}
+
// TestFileFieldPrepareValue checks the load-time value normalization:
// single-file fields resolve to the last item (or ""), multi-file fields
// to a slice; string, number, JSON string and filesystem.File inputs are supported.
func TestFileFieldPrepareValue(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	record := core.NewRecord(core.NewBaseCollection("test"))

	f1, err := filesystem.NewFileFromBytes([]byte("test"), "test1.txt")
	if err != nil {
		t.Fatal(err)
	}
	f1Raw, err := json.Marshal(f1)
	if err != nil {
		t.Fatal(err)
	}

	scenarios := []struct {
		raw      any
		field    *core.FileField
		expected string
	}{
		// single
		{nil, &core.FileField{MaxSelect: 1}, `""`},
		{"", &core.FileField{MaxSelect: 1}, `""`},
		{123, &core.FileField{MaxSelect: 1}, `"123"`},
		{"a", &core.FileField{MaxSelect: 1}, `"a"`},
		{`["a"]`, &core.FileField{MaxSelect: 1}, `"a"`},
		{*f1, &core.FileField{MaxSelect: 1}, string(f1Raw)},
		{f1, &core.FileField{MaxSelect: 1}, string(f1Raw)},
		{[]string{}, &core.FileField{MaxSelect: 1}, `""`},
		{[]string{"a", "b"}, &core.FileField{MaxSelect: 1}, `"b"`},

		// multiple
		{nil, &core.FileField{MaxSelect: 2}, `[]`},
		{"", &core.FileField{MaxSelect: 2}, `[]`},
		{123, &core.FileField{MaxSelect: 2}, `["123"]`},
		{"a", &core.FileField{MaxSelect: 2}, `["a"]`},
		{`["a"]`, &core.FileField{MaxSelect: 2}, `["a"]`},
		{[]any{f1}, &core.FileField{MaxSelect: 2}, `[` + string(f1Raw) + `]`},
		{[]*filesystem.File{f1}, &core.FileField{MaxSelect: 2}, `[` + string(f1Raw) + `]`},
		{[]filesystem.File{*f1}, &core.FileField{MaxSelect: 2}, `[` + string(f1Raw) + `]`},
		{[]string{}, &core.FileField{MaxSelect: 2}, `[]`},
		{[]string{"a", "b", "c"}, &core.FileField{MaxSelect: 2}, `["a","b","c"]`},
	}

	for i, s := range scenarios {
		t.Run(fmt.Sprintf("%d_%#v_%v", i, s.raw, s.field.IsMultiple()), func(t *testing.T) {
			v, err := s.field.PrepareValue(record, s.raw)
			if err != nil {
				t.Fatal(err)
			}

			// compare via the JSON serialization to cover both plain and file values
			vRaw, err := json.Marshal(v)
			if err != nil {
				t.Fatal(err)
			}

			if string(vRaw) != s.expected {
				t.Fatalf("Expected %q, got %q", s.expected, vRaw)
			}
		})
	}
}
+
// TestFileFieldDriverValue checks the db serialization of the field value:
// a plain string (last file name) for single-file fields and a
// types.JSONArray[string] of names for multi-file fields.
func TestFileFieldDriverValue(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	f1, err := filesystem.NewFileFromBytes([]byte("test"), "test.txt")
	if err != nil {
		t.Fatal(err)
	}

	scenarios := []struct {
		raw      any
		field    *core.FileField
		expected string
	}{
		// single
		{nil, &core.FileField{MaxSelect: 1}, `""`},
		{"", &core.FileField{MaxSelect: 1}, `""`},
		{123, &core.FileField{MaxSelect: 1}, `"123"`},
		{"a", &core.FileField{MaxSelect: 1}, `"a"`},
		{`["a"]`, &core.FileField{MaxSelect: 1}, `"a"`},
		{f1, &core.FileField{MaxSelect: 1}, `"` + f1.Name + `"`},
		{[]string{}, &core.FileField{MaxSelect: 1}, `""`},
		{[]string{"a", "b"}, &core.FileField{MaxSelect: 1}, `"b"`},

		// multiple
		{nil, &core.FileField{MaxSelect: 2}, `[]`},
		{"", &core.FileField{MaxSelect: 2}, `[]`},
		{123, &core.FileField{MaxSelect: 2}, `["123"]`},
		{"a", &core.FileField{MaxSelect: 2}, `["a"]`},
		{`["a"]`, &core.FileField{MaxSelect: 2}, `["a"]`},
		{[]any{"a", f1}, &core.FileField{MaxSelect: 2}, `["a","` + f1.Name + `"]`},
		{[]string{}, &core.FileField{MaxSelect: 2}, `[]`},
		{[]string{"a", "b", "c"}, &core.FileField{MaxSelect: 2}, `["a","b","c"]`},
	}

	for i, s := range scenarios {
		t.Run(fmt.Sprintf("%d_%#v_%v", i, s.raw, s.field.IsMultiple()), func(t *testing.T) {
			record := core.NewRecord(core.NewBaseCollection("test"))
			record.SetRaw(s.field.GetName(), s.raw)

			v, err := s.field.DriverValue(record)
			if err != nil {
				t.Fatal(err)
			}

			// assert the concrete driver value type before comparing the serialization
			if s.field.IsMultiple() {
				_, ok := v.(types.JSONArray[string])
				if !ok {
					t.Fatalf("Expected types.JSONArray value, got %T", v)
				}
			} else {
				_, ok := v.(string)
				if !ok {
					t.Fatalf("Expected string value, got %T", v)
				}
			}

			vRaw, err := json.Marshal(v)
			if err != nil {
				t.Fatal(err)
			}

			if string(vRaw) != s.expected {
				t.Fatalf("Expected %q, got %q", s.expected, vRaw)
			}
		})
	}
}
+
// TestFileFieldValidateValue covers the field value validation rules:
// required/empty handling, rejection of new plain string file names,
// MaxSelect/MaxSize limits, mime type matching, and the relaxed checks
// applied to already persisted files (uses the "demo1" seed record).
func TestFileFieldValidateValue(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	collection := core.NewBaseCollection("test_collection")

	f1, err := filesystem.NewFileFromBytes([]byte("test"), "test1.txt")
	if err != nil {
		t.Fatal(err)
	}

	f2, err := filesystem.NewFileFromBytes([]byte("test"), "test2.txt")
	if err != nil {
		t.Fatal(err)
	}

	// f3 is deliberately larger (8 bytes) to exercise the MaxSize checks
	f3, err := filesystem.NewFileFromBytes([]byte("test_abc"), "test3.txt")
	if err != nil {
		t.Fatal(err)
	}

	// f4/f5 straddle the default max size boundary
	f4, err := filesystem.NewFileFromBytes(make([]byte, core.DefaultFileFieldMaxSize+1), "test4.txt")
	if err != nil {
		t.Fatal(err)
	}

	f5, err := filesystem.NewFileFromBytes(make([]byte, core.DefaultFileFieldMaxSize), "test5.txt")
	if err != nil {
		t.Fatal(err)
	}

	scenarios := []struct {
		name        string
		field       *core.FileField
		record      func() *core.Record
		expectError bool
	}{
		// single
		{
			"zero field value (not required)",
			&core.FileField{Name: "test", MaxSize: 9999, MaxSelect: 1},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", "")
				return record
			},
			false,
		},
		{
			"zero field value (required)",
			&core.FileField{Name: "test", MaxSize: 9999, MaxSelect: 1, Required: true},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", "")
				return record
			},
			true,
		},
		{
			"new plain filename", // new files must be *filesystem.File
			&core.FileField{Name: "test", MaxSize: 9999, MaxSelect: 1},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", "a")
				return record
			},
			true,
		},
		{
			"new file",
			&core.FileField{Name: "test", MaxSize: 9999, MaxSelect: 1},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", f1)
				return record
			},
			false,
		},
		{
			"new files > MaxSelect",
			&core.FileField{Name: "test", MaxSize: 9999, MaxSelect: 1},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []any{f1, f2})
				return record
			},
			true,
		},
		{
			"new files <= MaxSelect",
			&core.FileField{Name: "test", MaxSize: 9999, MaxSelect: 2},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []any{f1, f2})
				return record
			},
			false,
		},
		{
			"> default MaxSize",
			&core.FileField{Name: "test"},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", f4)
				return record
			},
			true,
		},
		{
			"<= default MaxSize",
			&core.FileField{Name: "test"},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", f5)
				return record
			},
			false,
		},
		{
			"> MaxSize",
			&core.FileField{Name: "test", MaxSize: 4, MaxSelect: 3},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []any{f1, f2, f3}) // f3=8
				return record
			},
			true,
		},
		{
			"<= MaxSize",
			&core.FileField{Name: "test", MaxSize: 8, MaxSelect: 3},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []any{f1, f2, f3})
				return record
			},
			false,
		},
		{
			"non-matching MimeType",
			&core.FileField{Name: "test", MaxSize: 999, MaxSelect: 3, MimeTypes: []string{"a", "b"}},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []any{f1, f2})
				return record
			},
			true,
		},
		{
			"matching MimeType",
			&core.FileField{Name: "test", MaxSize: 999, MaxSelect: 3, MimeTypes: []string{"text/plain", "b"}},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []any{f1, f2})
				return record
			},
			false,
		},
		{
			"existing files > MaxSelect",
			&core.FileField{Name: "file_many", MaxSize: 999, MaxSelect: 2},
			func() *core.Record {
				record, _ := app.FindRecordById("demo1", "84nmscqy84lsi1t") // 5 files
				return record
			},
			true,
		},
		{
			"existing files should ignore the MaxSize and Mimetypes checks",
			&core.FileField{Name: "file_many", MaxSize: 1, MaxSelect: 5, MimeTypes: []string{"a", "b"}},
			func() *core.Record {
				record, _ := app.FindRecordById("demo1", "84nmscqy84lsi1t")
				return record
			},
			false,
		},
		{
			"existing + new file > MaxSelect (5+2)",
			&core.FileField{Name: "file_many", MaxSize: 999, MaxSelect: 6},
			func() *core.Record {
				record, _ := app.FindRecordById("demo1", "84nmscqy84lsi1t")
				record.Set("file_many+", []any{f1, f2})
				return record
			},
			true,
		},
		{
			"existing + new file <= MaxSelect (5+2)",
			&core.FileField{Name: "file_many", MaxSize: 999, MaxSelect: 7},
			func() *core.Record {
				record, _ := app.FindRecordById("demo1", "84nmscqy84lsi1t")
				record.Set("file_many+", []any{f1, f2})
				return record
			},
			false,
		},
		{
			"existing + new filename",
			&core.FileField{Name: "file_many", MaxSize: 999, MaxSelect: 99},
			func() *core.Record {
				record, _ := app.FindRecordById("demo1", "84nmscqy84lsi1t")
				record.Set("file_many+", "test123.png")
				return record
			},
			true,
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			err := s.field.ValidateValue(context.Background(), app, s.record())

			hasErr := err != nil
			if hasErr != s.expectError {
				t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
			}
		})
	}
}
+
// TestFileFieldValidateSettings covers the field settings validation:
// the shared id/name rules plus the Thumbs format checks
// (zero-size "0x0*" variants and malformed sizes are rejected).
func TestFileFieldValidateSettings(t *testing.T) {
	testDefaultFieldIdValidation(t, core.FieldTypeFile)
	testDefaultFieldNameValidation(t, core.FieldTypeFile)

	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	scenarios := []struct {
		name         string
		field        func() *core.FileField
		expectErrors []string
	}{
		{
			"zero minimal",
			func() *core.FileField {
				return &core.FileField{
					Id:   "test",
					Name: "test",
				}
			},
			[]string{},
		},
		{
			"0x0 thumb",
			func() *core.FileField {
				return &core.FileField{
					Id:        "test",
					Name:      "test",
					MaxSelect: 1,
					Thumbs:    []string{"100x200", "0x0"},
				}
			},
			[]string{"thumbs"},
		},
		{
			"0x0t thumb",
			func() *core.FileField {
				return &core.FileField{
					Id:        "test",
					Name:      "test",
					MaxSize:   1,
					MaxSelect: 1,
					Thumbs:    []string{"100x200", "0x0t"},
				}
			},
			[]string{"thumbs"},
		},
		{
			"0x0b thumb",
			func() *core.FileField {
				return &core.FileField{
					Id:        "test",
					Name:      "test",
					MaxSize:   1,
					MaxSelect: 1,
					Thumbs:    []string{"100x200", "0x0b"},
				}
			},
			[]string{"thumbs"},
		},
		{
			"0x0f thumb",
			func() *core.FileField {
				return &core.FileField{
					Id:        "test",
					Name:      "test",
					MaxSize:   1,
					MaxSelect: 1,
					Thumbs:    []string{"100x200", "0x0f"},
				}
			},
			[]string{"thumbs"},
		},
		{
			"invalid format",
			func() *core.FileField {
				return &core.FileField{
					Id:        "test",
					Name:      "test",
					MaxSize:   1,
					MaxSelect: 1,
					Thumbs:    []string{"100x200", "100x"},
				}
			},
			[]string{"thumbs"},
		},
		{
			"valid thumbs",
			func() *core.FileField {
				return &core.FileField{
					Id:        "test",
					Name:      "test",
					MaxSize:   1,
					MaxSelect: 1,
					Thumbs:    []string{"100x200", "100x40", "100x200"},
				}
			},
			[]string{},
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			field := s.field()

			collection := core.NewBaseCollection("test_collection")
			collection.Fields.Add(field)

			errs := field.ValidateSettings(context.Background(), app, collection)

			tests.TestValidationErrors(t, errs, s.expectErrors)
		})
	}
}
+
+func TestFileFieldCalculateMaxBodySize(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ scenarios := []struct {
+ field *core.FileField
+ expected int64
+ }{
+ {&core.FileField{}, core.DefaultFileFieldMaxSize},
+ {&core.FileField{MaxSelect: 2}, 2 * core.DefaultFileFieldMaxSize},
+ {&core.FileField{MaxSize: 10}, 10},
+ {&core.FileField{MaxSize: 10, MaxSelect: 1}, 10},
+ {&core.FileField{MaxSize: 10, MaxSelect: 2}, 20},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%d_%d", i, s.field.MaxSelect, s.field.MaxSize), func(t *testing.T) {
+ result := s.field.CalculateMaxBodySize()
+
+ if result != s.expected {
+ t.Fatalf("Expected %d, got %d", s.expected, result)
+ }
+ })
+ }
+}
+
// TestFileFieldFindGetter checks the supported getter keys:
// the exact field name (raw value, mixing persisted names and new uploads)
// and "fieldName:uploaded" (only the new uploads).
// Uses the "demo3" seed record as the persisted baseline.
func TestFileFieldFindGetter(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	f1, err := filesystem.NewFileFromBytes([]byte("test"), "f1")
	if err != nil {
		t.Fatal(err)
	}
	// normalize the generated name for a stable JSON expectation
	f1.Name = "f1"

	f2, err := filesystem.NewFileFromBytes([]byte("test"), "f2")
	if err != nil {
		t.Fatal(err)
	}
	f2.Name = "f2"

	record, err := app.FindRecordById("demo3", "lcl9d87w22ml6jy")
	if err != nil {
		t.Fatal(err)
	}
	record.Set("files+", []any{f1, f2})
	record.Set("files-", "test_FLurQTgrY8.txt")

	field, ok := record.Collection().Fields.GetByName("files").(*core.FileField)
	if !ok {
		t.Fatalf("Expected *core.FileField, got %T", record.Collection().Fields.GetByName("files"))
	}

	scenarios := []struct {
		name      string
		key       string
		hasGetter bool
		expected  string
	}{
		{
			"no match",
			"example",
			false,
			"",
		},
		{
			"exact match",
			field.GetName(),
			true,
			`["300_UhLKX91HVb.png",{"name":"f1","originalName":"f1","size":4},{"name":"f2","originalName":"f2","size":4}]`,
		},
		{
			"uploaded",
			field.GetName() + ":uploaded",
			true,
			`[{"name":"f1","originalName":"f1","size":4},{"name":"f2","originalName":"f2","size":4}]`,
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			getter := field.FindGetter(s.key)

			hasGetter := getter != nil
			if hasGetter != s.hasGetter {
				t.Fatalf("Expected hasGetter %v, got %v", s.hasGetter, hasGetter)
			}

			if !hasGetter {
				return
			}

			v := getter(record)

			raw, err := json.Marshal(v)
			if err != nil {
				t.Fatal(err)
			}
			rawStr := string(raw)

			if rawStr != s.expected {
				t.Fatalf("Expected\n%v\ngot\n%v", s.expected, rawStr)
			}
		})
	}
}
+
// TestFileFieldFindSetter checks the supported setter keys
// ("name", "+name", "name+", "name-") against a record preloaded with
// the raw value ["c", "d"], for both single and multi-file fields.
func TestFileFieldFindSetter(t *testing.T) {
	scenarios := []struct {
		name      string
		key       string
		value     any
		field     *core.FileField
		hasSetter bool
		expected  string
	}{
		{
			"no match",
			"example",
			"b",
			&core.FileField{Name: "test", MaxSelect: 1},
			false,
			"",
		},
		{
			"exact match (single)",
			"test",
			"b",
			&core.FileField{Name: "test", MaxSelect: 1},
			true,
			`"b"`,
		},
		{
			"exact match (multiple)",
			"test",
			[]string{"a", "b", "b"},
			&core.FileField{Name: "test", MaxSelect: 2},
			true,
			`["a","b"]`,
		},
		{
			"append (single)",
			"test+",
			"b",
			&core.FileField{Name: "test", MaxSelect: 1},
			true,
			`"b"`,
		},
		{
			"append (multiple)",
			"test+",
			[]string{"a"},
			&core.FileField{Name: "test", MaxSelect: 2},
			true,
			`["c","d","a"]`,
		},
		{
			"prepend (single)",
			"+test",
			"b",
			&core.FileField{Name: "test", MaxSelect: 1},
			true,
			`"d"`, // the last of the existing values
		},
		{
			"prepend (multiple)",
			"+test",
			[]string{"a"},
			&core.FileField{Name: "test", MaxSelect: 2},
			true,
			`["a","c","d"]`,
		},
		{
			"subtract (single)",
			"test-",
			"d",
			&core.FileField{Name: "test", MaxSelect: 1},
			true,
			`"c"`,
		},
		{
			"subtract (multiple)",
			"test-",
			[]string{"unknown", "c"},
			&core.FileField{Name: "test", MaxSelect: 2},
			true,
			`["d"]`,
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			collection := core.NewBaseCollection("test_collection")
			collection.Fields.Add(s.field)

			setter := s.field.FindSetter(s.key)

			hasSetter := setter != nil
			if hasSetter != s.hasSetter {
				t.Fatalf("Expected hasSetter %v, got %v", s.hasSetter, hasSetter)
			}

			if !hasSetter {
				return
			}

			record := core.NewRecord(collection)
			record.SetRaw(s.field.GetName(), []string{"c", "d"})

			setter(record, s.value)

			raw, err := json.Marshal(record.Get(s.field.GetName()))
			if err != nil {
				t.Fatal(err)
			}
			rawStr := string(raw)

			if rawStr != s.expected {
				t.Fatalf("Expected %q, got %q", s.expected, rawStr)
			}
		})
	}
}
+
+func TestFileFieldIntercept(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ demo1, err := testApp.FindCollectionByNameOrId("demo1")
+ if err != nil {
+ t.Fatal(err)
+ }
+ demo1.Fields.GetByName("text").(*core.TextField).Required = true // trigger validation error
+
+ f1, err := filesystem.NewFileFromBytes([]byte("test"), "new1.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ f2, err := filesystem.NewFileFromBytes([]byte("test"), "new2.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ f3, err := filesystem.NewFileFromBytes([]byte("test"), "new3.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ f4, err := filesystem.NewFileFromBytes([]byte("test"), "new4.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ record := core.NewRecord(demo1)
+
+ ok := t.Run("1. create - with validation error", func(t *testing.T) {
+ record.Set("file_many", []any{f1, f2})
+
+ err := testApp.Save(record)
+
+ tests.TestValidationErrors(t, err, []string{"text"})
+
+ value, _ := record.GetRaw("file_many").([]any)
+ if len(value) != 2 {
+ t.Fatalf("Expected the file field value to be unchanged, got %v", value)
+ }
+ })
+ if !ok {
+ return
+ }
+
+ ok = t.Run("2. create - fixing the validation error", func(t *testing.T) {
+ record.Set("text", "abc")
+
+ err := testApp.Save(record)
+ if err != nil {
+ t.Fatalf("Expected save to succeed, got %v", err)
+ }
+
+ expectedKeys := []string{f1.Name, f2.Name}
+
+ raw := record.GetRaw("file_many")
+
+ // ensure that the value was replaced with the file names
+ value := list.ToUniqueStringSlice(raw)
+ if len(value) != len(expectedKeys) {
+ t.Fatalf("Expected the file field to be updated with the %d file names, got\n%v", len(expectedKeys), raw)
+ }
+ for _, name := range expectedKeys {
+ if !slices.Contains(value, name) {
+ t.Fatalf("Missing file %q in %v", name, value)
+ }
+ }
+
+ checkRecordFiles(t, testApp, record, expectedKeys)
+ })
+ if !ok {
+ return
+ }
+
+ ok = t.Run("3. update - validation error", func(t *testing.T) {
+ record.Set("text", "")
+ record.Set("file_many+", f3)
+ record.Set("file_many-", f2.Name)
+
+ err := testApp.Save(record)
+
+ tests.TestValidationErrors(t, err, []string{"text"})
+
+ raw, _ := json.Marshal(record.GetRaw("file_many"))
+ expectedRaw, _ := json.Marshal([]any{f1.Name, f3})
+ if !bytes.Equal(expectedRaw, raw) {
+ t.Fatalf("Expected file field value\n%s\ngot\n%s", expectedRaw, raw)
+ }
+
+ checkRecordFiles(t, testApp, record, []string{f1.Name, f2.Name})
+ })
+ if !ok {
+ return
+ }
+
+ ok = t.Run("4. update - fixing the validation error", func(t *testing.T) {
+ record.Set("text", "abc2")
+
+ err := testApp.Save(record)
+ if err != nil {
+ t.Fatalf("Expected save to succeed, got %v", err)
+ }
+
+ raw, _ := json.Marshal(record.GetRaw("file_many"))
+ expectedRaw, _ := json.Marshal([]any{f1.Name, f3.Name})
+ if !bytes.Equal(expectedRaw, raw) {
+ t.Fatalf("Expected file field value\n%s\ngot\n%s", expectedRaw, raw)
+ }
+
+ checkRecordFiles(t, testApp, record, []string{f1.Name, f3.Name})
+ })
+ if !ok {
+ return
+ }
+
+ t.Run("5. update - second time update", func(t *testing.T) {
+ record.Set("file_many-", f1.Name)
+ record.Set("file_many+", f4)
+
+ err := testApp.Save(record)
+ if err != nil {
+ t.Fatalf("Expected save to succeed, got %v", err)
+ }
+
+ raw, _ := json.Marshal(record.GetRaw("file_many"))
+ expectedRaw, _ := json.Marshal([]any{f3.Name, f4.Name})
+ if !bytes.Equal(expectedRaw, raw) {
+ t.Fatalf("Expected file field value\n%s\ngot\n%s", expectedRaw, raw)
+ }
+
+ checkRecordFiles(t, testApp, record, []string{f3.Name, f4.Name})
+ })
+}
+
+func TestFileFieldInterceptTx(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ demo1, err := testApp.FindCollectionByNameOrId("demo1")
+ if err != nil {
+ t.Fatal(err)
+ }
+ demo1.Fields.GetByName("text").(*core.TextField).Required = true // trigger validation error
+
+ f1, err := filesystem.NewFileFromBytes([]byte("test"), "new1.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ f2, err := filesystem.NewFileFromBytes([]byte("test"), "new2.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ f3, err := filesystem.NewFileFromBytes([]byte("test"), "new3.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ f4, err := filesystem.NewFileFromBytes([]byte("test"), "new4.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ var record *core.Record
+
+ tx := func(succeed bool) func(txApp core.App) error {
+ var txErr error
+ if !succeed {
+ txErr = errors.New("tx error")
+ }
+
+ return func(txApp core.App) error {
+ record = core.NewRecord(demo1)
+ ok := t.Run(fmt.Sprintf("[tx_%v] create with validation error", succeed), func(t *testing.T) {
+ record.Set("text", "")
+ record.Set("file_many", []any{f1, f2})
+
+ err := txApp.Save(record)
+ tests.TestValidationErrors(t, err, []string{"text"})
+
+ checkRecordFiles(t, txApp, record, []string{}) // no uploaded files
+ })
+ if !ok {
+ return txErr
+ }
+
+ // ---
+
+ ok = t.Run(fmt.Sprintf("[tx_%v] create with fixed validation error", succeed), func(t *testing.T) {
+ record.Set("text", "abc")
+
+ err = txApp.Save(record)
+ if err != nil {
+ t.Fatalf("Expected save to succeed, got %v", err)
+ }
+
+ checkRecordFiles(t, txApp, record, []string{f1.Name, f2.Name})
+ })
+ if !ok {
+ return txErr
+ }
+
+ // ---
+
+ ok = t.Run(fmt.Sprintf("[tx_%v] update with validation error", succeed), func(t *testing.T) {
+ record.Set("text", "")
+ record.Set("file_many+", f3)
+ record.Set("file_many-", f2.Name)
+
+ err = txApp.Save(record)
+ tests.TestValidationErrors(t, err, []string{"text"})
+
+ raw, _ := json.Marshal(record.GetRaw("file_many"))
+ expectedRaw, _ := json.Marshal([]any{f1.Name, f3})
+ if !bytes.Equal(expectedRaw, raw) {
+ t.Fatalf("Expected file field value\n%s\ngot\n%s", expectedRaw, raw)
+ }
+
+ checkRecordFiles(t, txApp, record, []string{f1.Name, f2.Name}) // no file changes
+ })
+ if !ok {
+ return txErr
+ }
+
+ // ---
+
+ ok = t.Run(fmt.Sprintf("[tx_%v] update with fixed validation error", succeed), func(t *testing.T) {
+ record.Set("text", "abc2")
+
+ err = txApp.Save(record)
+ if err != nil {
+ t.Fatalf("Expected save to succeed, got %v", err)
+ }
+
+ raw, _ := json.Marshal(record.GetRaw("file_many"))
+ expectedRaw, _ := json.Marshal([]any{f1.Name, f3.Name})
+ if !bytes.Equal(expectedRaw, raw) {
+ t.Fatalf("Expected file field value\n%s\ngot\n%s", expectedRaw, raw)
+ }
+
+ checkRecordFiles(t, txApp, record, []string{f1.Name, f3.Name, f2.Name}) // f2 shouldn't be deleted yet
+ })
+ if !ok {
+ return txErr
+ }
+
+ // ---
+
+ ok = t.Run(fmt.Sprintf("[tx_%v] second time update", succeed), func(t *testing.T) {
+ record.Set("file_many-", f1.Name)
+ record.Set("file_many+", f4)
+
+ err := txApp.Save(record)
+ if err != nil {
+ t.Fatalf("Expected save to succeed, got %v", err)
+ }
+
+ raw, _ := json.Marshal(record.GetRaw("file_many"))
+ expectedRaw, _ := json.Marshal([]any{f3.Name, f4.Name})
+ if !bytes.Equal(expectedRaw, raw) {
+ t.Fatalf("Expected file field value\n%s\ngot\n%s", expectedRaw, raw)
+ }
+
+ checkRecordFiles(t, txApp, record, []string{f3.Name, f4.Name, f1.Name, f2.Name}) // f1 and f2 shouldn't be deleted yet
+ })
+ if !ok {
+ return txErr
+ }
+
+ // ---
+
+ return txErr
+ }
+ }
+
+ // failed transaction
+ txErr := testApp.RunInTransaction(tx(false))
+ if txErr == nil {
+ t.Fatal("Expected transaction error")
+ }
+	// there shouldn't be any files associated with the record id
+ checkRecordFiles(t, testApp, record, []string{})
+
+ txErr = testApp.RunInTransaction(tx(true))
+ if txErr != nil {
+ t.Fatalf("Expected transaction to succeed, got %v", txErr)
+ }
+ // only the last updated files should remain
+ checkRecordFiles(t, testApp, record, []string{f3.Name, f4.Name})
+}
+
+// -------------------------------------------------------------------
+
+// checkRecordFiles fails the test unless the storage objects under the
+// record's base files path match exactly the expected file names
+// (generated thumbnails are excluded from the comparison).
+func checkRecordFiles(t *testing.T, testApp core.App, record *core.Record, expectedKeys []string) {
+	t.Helper() // report failures at the caller's line, not inside this helper
+
+	fsys, err := testApp.NewFilesystem()
+	if err != nil {
+		t.Fatal(err)
+	}
+	defer fsys.Close()
+
+	objects, err := fsys.List(record.BaseFilesPath() + "/")
+	if err != nil {
+		t.Fatal(err)
+	}
+	objectKeys := make([]string, 0, len(objects))
+	for _, obj := range objects {
+		// exclude thumbs
+		if !strings.Contains(obj.Key, "/thumbs_") {
+			objectKeys = append(objectKeys, obj.Key)
+		}
+	}
+
+	if len(objectKeys) != len(expectedKeys) {
+		t.Fatalf("Expected files:\n%v\ngot\n%v", expectedKeys, objectKeys)
+	}
+	for _, key := range expectedKeys {
+		fullKey := record.BaseFilesPath() + "/" + key
+		if !slices.Contains(objectKeys, fullKey) {
+			t.Fatalf("Missing expected file key\n%q\nin\n%v", fullKey, objectKeys)
+		}
+	}
+}
diff --git a/core/field_json.go b/core/field_json.go
new file mode 100644
index 00000000..45adc473
--- /dev/null
+++ b/core/field_json.go
@@ -0,0 +1,182 @@
+package core
+
+import (
+ "context"
+ "fmt"
+ "slices"
+ "strconv"
+ "strings"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/go-ozzo/ozzo-validation/v4/is"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func init() {
+ Fields[FieldTypeJSON] = func() Field {
+ return &JSONField{}
+ }
+}
+
+const FieldTypeJSON = "json"
+
+const DefaultJSONFieldMaxSize int64 = 5 << 20
+
+var (
+ _ Field = (*JSONField)(nil)
+ _ MaxBodySizeCalculator = (*JSONField)(nil)
+)
+
+// JSONField defines "json" type field for storing any serialized JSON value.
+type JSONField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // MaxSize specifies the maximum size of the allowed field value (in bytes).
+ //
+ // If zero, a default limit of 5MB is applied.
+ MaxSize int64 `form:"maxSize" json:"maxSize"`
+
+ // Required will require the field value to be non-empty JSON value
+ // (aka. not "null", `""`, "[]", "{}").
+ Required bool `form:"required" json:"required"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *JSONField) Type() string {
+ return FieldTypeJSON
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *JSONField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *JSONField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *JSONField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *JSONField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *JSONField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *JSONField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *JSONField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *JSONField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *JSONField) ColumnType(app App) string {
+ return "JSON DEFAULT NULL"
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *JSONField) PrepareValue(record *Record, raw any) (any, error) {
+	if str, ok := raw.(string); ok {
+		// in order to support seamlessly both json and multipart/form-data requests,
+		// the following normalization rules are applied for plain string values:
+		// - "true" is converted to the json `true`
+		// - "false" is converted to the json `false`
+		// - "null" is converted to the json `null`
+		// - "[1,2,3]" is converted to the json `[1,2,3]`
+		// - "{\"a\":1,\"b\":2}" is converted to the json `{"a":1,"b":2}`
+		// - numeric strings are converted to json number
+		// - double quoted strings are left as they are (aka. without normalizations)
+		// - any other string (empty string too) is double quoted
+		if str == "" {
+			raw = strconv.Quote(str)
+		} else if str == "null" || str == "true" || str == "false" {
+			raw = str
+		} else if ((str[0] >= '0' && str[0] <= '9') ||
+			str[0] == '-' ||
+			str[0] == '"' ||
+			str[0] == '[' ||
+			str[0] == '{') &&
+			is.JSON.Validate(str) == nil {
+			// the first-character check acts as a cheap pre-filter before
+			// running the full JSON validation on the string
+			raw = str
+		} else {
+			// everything else (including strings that merely look like JSON
+			// but fail validation) is treated as a plain text value
+			raw = strconv.Quote(str)
+		}
+	}
+
+	return types.ParseJSONRaw(raw)
+}
+
+var emptyJSONValues = []string{
+ "null", `""`, "[]", "{}", "",
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+//
+// It verifies that the raw value is a [types.JSONRaw], that it doesn't
+// exceed the configured max size, that it is valid JSON and - if the
+// field is required - that it is not one of the "empty" JSON values.
+func (f *JSONField) ValidateValue(ctx context.Context, app App, record *Record) error {
+	raw, ok := record.GetRaw(f.Name).(types.JSONRaw)
+	if !ok {
+		return validators.ErrUnsupportedValueType
+	}
+
+	maxSize := f.CalculateMaxBodySize()
+
+	// the cheap size check runs before the more expensive JSON validation
+	if int64(len(raw)) > maxSize {
+		return validation.NewError(
+			"validation_json_size_limit",
+			fmt.Sprintf("The maximum allowed JSON size is %v bytes", maxSize),
+		).SetParams(map[string]any{"maxSize": maxSize})
+	}
+
+	if is.JSON.Validate(raw) != nil {
+		return validation.NewError("validation_invalid_json", "Must be a valid json value")
+	}
+
+	// trim so that values like " null " still count as empty
+	rawStr := strings.TrimSpace(raw.String())
+
+	if f.Required && slices.Contains(emptyJSONValues, rawStr) {
+		return validation.ErrRequired
+	}
+
+	return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *JSONField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ validation.Field(&f.MaxSize, validation.Min(0)),
+ )
+}
+
+// CalculateMaxBodySize implements the [MaxBodySizeCalculator] interface.
+//
+// It returns the configured MaxSize, falling back to
+// [DefaultJSONFieldMaxSize] when no explicit positive limit is set.
+func (f *JSONField) CalculateMaxBodySize() int64 {
+	if f.MaxSize > 0 {
+		return f.MaxSize
+	}
+
+	return DefaultJSONFieldMaxSize
+}
diff --git a/core/field_json_test.go b/core/field_json_test.go
new file mode 100644
index 00000000..1904e59c
--- /dev/null
+++ b/core/field_json_test.go
@@ -0,0 +1,266 @@
+package core_test
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func TestJSONFieldBaseMethods(t *testing.T) {
+ testFieldBaseMethods(t, core.FieldTypeJSON)
+}
+
+func TestJSONFieldColumnType(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.JSONField{}
+
+ expected := "JSON DEFAULT NULL"
+
+ if v := f.ColumnType(app); v != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, v)
+ }
+}
+
+func TestJSONFieldPrepareValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.JSONField{}
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ scenarios := []struct {
+ raw any
+ expected string
+ }{
+ {"null", `null`},
+ {"", `""`},
+ {"true", `true`},
+ {"false", `false`},
+ {"test", `"test"`},
+ {"123", `123`},
+ {"-456", `-456`},
+ {"[1,2,3]", `[1,2,3]`},
+ {"[1,2,3", `"[1,2,3"`},
+ {`{"a":1,"b":2}`, `{"a":1,"b":2}`},
+ {`{"a":1,"b":2`, `"{\"a\":1,\"b\":2"`},
+ {[]int{1, 2, 3}, `[1,2,3]`},
+ {map[string]int{"a": 1, "b": 2}, `{"a":1,"b":2}`},
+ {nil, `null`},
+ {false, `false`},
+ {true, `true`},
+ {-78, `-78`},
+ {123.456, `123.456`},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.raw), func(t *testing.T) {
+ v, err := f.PrepareValue(record, s.raw)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ raw, ok := v.(types.JSONRaw)
+ if !ok {
+ t.Fatalf("Expected string instance, got %T", v)
+ }
+ rawStr := raw.String()
+
+ if rawStr != s.expected {
+ t.Fatalf("Expected\n%#v\ngot\n%#v", s.expected, rawStr)
+ }
+ })
+ }
+}
+
+func TestJSONFieldValidateValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field *core.JSONField
+ record func() *core.Record
+ expectError bool
+ }{
+ {
+ "invalid raw value",
+ &core.JSONField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123)
+ return record
+ },
+ true,
+ },
+ {
+ "zero field value (not required)",
+ &core.JSONField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.JSONRaw{})
+ return record
+ },
+ false,
+ },
+ {
+ "zero field value (required)",
+ &core.JSONField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.JSONRaw{})
+ return record
+ },
+ true,
+ },
+ {
+ "non-zero field value (required)",
+ &core.JSONField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.JSONRaw("[1,2,3]"))
+ return record
+ },
+ false,
+ },
+ {
+ "non-zero field value (required)",
+ &core.JSONField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.JSONRaw(`"aaa"`))
+ return record
+ },
+ false,
+ },
+ {
+ "> default MaxSize",
+ &core.JSONField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.JSONRaw(`"`+strings.Repeat("a", (5<<20))+`"`))
+ return record
+ },
+ true,
+ },
+ {
+ "> MaxSize",
+ &core.JSONField{Name: "test", MaxSize: 5},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.JSONRaw(`"aaaa"`))
+ return record
+ },
+ true,
+ },
+ {
+ "<= MaxSize",
+ &core.JSONField{Name: "test", MaxSize: 5},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", types.JSONRaw(`"aaa"`))
+ return record
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ err := s.field.ValidateValue(context.Background(), app, s.record())
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestJSONFieldValidateSettings(t *testing.T) {
+ testDefaultFieldIdValidation(t, core.FieldTypeJSON)
+ testDefaultFieldNameValidation(t, core.FieldTypeJSON)
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field func() *core.JSONField
+ expectErrors []string
+ }{
+ {
+ "< 0 MaxSize",
+ func() *core.JSONField {
+ return &core.JSONField{
+ Id: "test",
+ Name: "test",
+ MaxSize: -1,
+ }
+ },
+ []string{"maxSize"},
+ },
+ {
+ "= 0 MaxSize",
+ func() *core.JSONField {
+ return &core.JSONField{
+ Id: "test",
+ Name: "test",
+ }
+ },
+ []string{},
+ },
+ {
+ "> 0 MaxSize",
+ func() *core.JSONField {
+ return &core.JSONField{
+ Id: "test",
+ Name: "test",
+ MaxSize: 1,
+ }
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ errs := s.field().ValidateSettings(context.Background(), app, collection)
+
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
+
+func TestJSONFieldCalculateMaxBodySize(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ scenarios := []struct {
+ field *core.JSONField
+ expected int64
+ }{
+ {&core.JSONField{}, core.DefaultJSONFieldMaxSize},
+ {&core.JSONField{MaxSize: 10}, 10},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%d", i, s.field.MaxSize), func(t *testing.T) {
+ result := s.field.CalculateMaxBodySize()
+
+ if result != s.expected {
+ t.Fatalf("Expected %d, got %d", s.expected, result)
+ }
+ })
+ }
+}
diff --git a/core/field_number.go b/core/field_number.go
new file mode 100644
index 00000000..0704d40f
--- /dev/null
+++ b/core/field_number.go
@@ -0,0 +1,203 @@
+package core
+
+import (
+ "context"
+ "fmt"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/spf13/cast"
+)
+
+func init() {
+ Fields[FieldTypeNumber] = func() Field {
+ return &NumberField{}
+ }
+}
+
+const FieldTypeNumber = "number"
+
+var (
+ _ Field = (*NumberField)(nil)
+ _ SetterFinder = (*NumberField)(nil)
+)
+
+// NumberField defines "number" type field for storing numeric (float64) value.
+//
+// The following additional setter keys are available:
+//
+// - "fieldName+" - appends to the existing record value. For example:
+// record.Set("total+", 5)
+// - "fieldName-" - subtracts from the existing record value. For example:
+// record.Set("total-", 5)
+type NumberField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // Min specifies the min allowed field value.
+ //
+ // Leave it nil to skip the validator.
+ Min *float64 `form:"min" json:"min"`
+
+ // Max specifies the max allowed field value.
+ //
+ // Leave it nil to skip the validator.
+ Max *float64 `form:"max" json:"max"`
+
+ // OnlyInt will require the field value to be integer.
+ OnlyInt bool `form:"onlyInt" json:"onlyInt"`
+
+ // Required will require the field value to be non-zero.
+ Required bool `form:"required" json:"required"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *NumberField) Type() string {
+ return FieldTypeNumber
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *NumberField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *NumberField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *NumberField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *NumberField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *NumberField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *NumberField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *NumberField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *NumberField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *NumberField) ColumnType(app App) string {
+ return "NUMERIC DEFAULT 0 NOT NULL"
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *NumberField) PrepareValue(record *Record, raw any) (any, error) {
+ return cast.ToFloat64(raw), nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+//
+// It checks the Required, OnlyInt, Min and Max constraints
+// (Min and Max are inclusive bounds).
+func (f *NumberField) ValidateValue(ctx context.Context, app App, record *Record) error {
+	val, ok := record.GetRaw(f.Name).(float64)
+	if !ok {
+		return validators.ErrUnsupportedValueType
+	}
+
+	if val == 0 {
+		// zero is the field's "empty" state - the remaining constraints
+		// are skipped and the value is valid unless the field is required
+		if f.Required {
+			if err := validation.Required.Validate(val); err != nil {
+				return err
+			}
+		}
+		return nil
+	}
+
+	if f.OnlyInt && val != float64(int64(val)) {
+		return validation.NewError("validation_only_int_constraint", "Decimal numbers are not allowed")
+	}
+
+	// the bounds are inclusive (equality passes), hence "no less/more than";
+	// %v avoids the noisy "%f" zero-padding (e.g. "2.000000")
+	if f.Min != nil && val < *f.Min {
+		return validation.NewError("validation_min_number_constraint", fmt.Sprintf("Must be no less than %v", *f.Min))
+	}
+
+	if f.Max != nil && val > *f.Max {
+		return validation.NewError("validation_max_number_constraint", fmt.Sprintf("Must be no more than %v", *f.Max))
+	}
+
+	return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *NumberField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+	maxRules := []validation.Rule{
+		validation.By(f.checkOnlyInt),
+	}
+	if f.Min != nil && f.Max != nil {
+		// when both bounds are set, require Max to be >= Min
+		maxRules = append(maxRules, validation.Min(*f.Min))
+	}
+
+	return validation.ValidateStruct(f,
+		validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+		validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+		// both bounds must be whole numbers when OnlyInt is enabled
+		validation.Field(&f.Min, validation.By(f.checkOnlyInt)),
+		validation.Field(&f.Max, maxRules...),
+	)
+}
+
+// checkOnlyInt is a validation rule that returns an error when the field
+// has OnlyInt enabled and the checked *float64 value has a decimal part.
+//
+// Nil values, and any value when OnlyInt is disabled, are considered valid.
+func (f *NumberField) checkOnlyInt(value any) error {
+	v, _ := value.(*float64)
+	if v == nil || !f.OnlyInt {
+		return nil // nothing to check
+	}
+
+	if *v != float64(int64(*v)) {
+		return validation.NewError("validation_only_int_constraint", "Decimal numbers are not allowed.")
+	}
+
+	return nil
+}
+
+// FindSetter implements the [SetterFinder] interface.
+//
+// Supported setter keys: "fieldName" (direct assignment),
+// "fieldName+" (addition) and "fieldName-" (subtraction).
+func (f *NumberField) FindSetter(key string) SetterFunc {
+	if key == f.Name {
+		return f.setValue
+	}
+	if key == f.Name+"+" {
+		return f.addValue
+	}
+	if key == f.Name+"-" {
+		return f.subtractValue
+	}
+	return nil
+}
+
+func (f *NumberField) setValue(record *Record, raw any) {
+ record.SetRaw(f.Name, cast.ToFloat64(raw))
+}
+
+func (f *NumberField) addValue(record *Record, raw any) {
+ val := cast.ToFloat64(record.GetRaw(f.Name))
+
+ record.SetRaw(f.Name, val+cast.ToFloat64(raw))
+}
+
+func (f *NumberField) subtractValue(record *Record, raw any) {
+ val := cast.ToFloat64(record.GetRaw(f.Name))
+
+ record.SetRaw(f.Name, val-cast.ToFloat64(raw))
+}
diff --git a/core/field_number_test.go b/core/field_number_test.go
new file mode 100644
index 00000000..d1e06567
--- /dev/null
+++ b/core/field_number_test.go
@@ -0,0 +1,383 @@
+package core_test
+
+import (
+ "context"
+ "fmt"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func TestNumberFieldBaseMethods(t *testing.T) {
+ testFieldBaseMethods(t, core.FieldTypeNumber)
+}
+
+func TestNumberFieldColumnType(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.NumberField{}
+
+ expected := "NUMERIC DEFAULT 0 NOT NULL"
+
+ if v := f.ColumnType(app); v != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, v)
+ }
+}
+
+func TestNumberFieldPrepareValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.NumberField{}
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ scenarios := []struct {
+ raw any
+ expected float64
+ }{
+ {"", 0},
+ {"test", 0},
+ {false, 0},
+ {true, 1},
+ {-2, -2},
+ {123.456, 123.456},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.raw), func(t *testing.T) {
+ vRaw, err := f.PrepareValue(record, s.raw)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ v, ok := vRaw.(float64)
+ if !ok {
+ t.Fatalf("Expected float64 instance, got %T", v)
+ }
+
+ if v != s.expected {
+ t.Fatalf("Expected %f, got %f", s.expected, v)
+ }
+ })
+ }
+}
+
+func TestNumberFieldValidateValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field *core.NumberField
+ record func() *core.Record
+ expectError bool
+ }{
+ {
+ "invalid raw value",
+ &core.NumberField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "123")
+ return record
+ },
+ true,
+ },
+ {
+ "zero field value (not required)",
+ &core.NumberField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 0.0)
+ return record
+ },
+ false,
+ },
+ {
+ "zero field value (required)",
+ &core.NumberField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 0.0)
+ return record
+ },
+ true,
+ },
+ {
+ "non-zero field value (required)",
+ &core.NumberField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123.0)
+ return record
+ },
+ false,
+ },
+ {
+ "decimal with onlyInt",
+ &core.NumberField{Name: "test", OnlyInt: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123.456)
+ return record
+ },
+ true,
+ },
+ {
+ "int with onlyInt",
+ &core.NumberField{Name: "test", OnlyInt: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123.0)
+ return record
+ },
+ false,
+ },
+ {
+ "< min",
+ &core.NumberField{Name: "test", Min: types.Pointer(2.0)},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 1.0)
+ return record
+ },
+ true,
+ },
+ {
+ ">= min",
+ &core.NumberField{Name: "test", Min: types.Pointer(2.0)},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 2.0)
+ return record
+ },
+ false,
+ },
+ {
+ "> max",
+ &core.NumberField{Name: "test", Max: types.Pointer(2.0)},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 3.0)
+ return record
+ },
+ true,
+ },
+ {
+ "<= max",
+ &core.NumberField{Name: "test", Max: types.Pointer(2.0)},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 2.0)
+ return record
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ err := s.field.ValidateValue(context.Background(), app, s.record())
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestNumberFieldValidateSettings(t *testing.T) {
+ testDefaultFieldIdValidation(t, core.FieldTypeNumber)
+ testDefaultFieldNameValidation(t, core.FieldTypeNumber)
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field func() *core.NumberField
+ expectErrors []string
+ }{
+ {
+ "zero",
+ func() *core.NumberField {
+ return &core.NumberField{
+ Id: "test",
+ Name: "test",
+ }
+ },
+ []string{},
+ },
+ {
+ "decumal min",
+ func() *core.NumberField {
+ return &core.NumberField{
+ Id: "test",
+ Name: "test",
+ Min: types.Pointer(1.2),
+ }
+ },
+ []string{},
+ },
+ {
+ "decumal min (onlyInt)",
+ func() *core.NumberField {
+ return &core.NumberField{
+ Id: "test",
+ Name: "test",
+ OnlyInt: true,
+ Min: types.Pointer(1.2),
+ }
+ },
+ []string{"min"},
+ },
+ {
+ "int min (onlyInt)",
+ func() *core.NumberField {
+ return &core.NumberField{
+ Id: "test",
+ Name: "test",
+ OnlyInt: true,
+ Min: types.Pointer(1.0),
+ }
+ },
+ []string{},
+ },
+ {
+ "decumal max",
+ func() *core.NumberField {
+ return &core.NumberField{
+ Id: "test",
+ Name: "test",
+ Max: types.Pointer(1.2),
+ }
+ },
+ []string{},
+ },
+ {
+ "decumal max (onlyInt)",
+ func() *core.NumberField {
+ return &core.NumberField{
+ Id: "test",
+ Name: "test",
+ OnlyInt: true,
+ Max: types.Pointer(1.2),
+ }
+ },
+ []string{"max"},
+ },
+ {
+ "int max (onlyInt)",
+ func() *core.NumberField {
+ return &core.NumberField{
+ Id: "test",
+ Name: "test",
+ OnlyInt: true,
+ Max: types.Pointer(1.0),
+ }
+ },
+ []string{},
+ },
+ {
+ "min > max",
+ func() *core.NumberField {
+ return &core.NumberField{
+ Id: "test",
+ Name: "test",
+ Min: types.Pointer(2.0),
+ Max: types.Pointer(1.0),
+ }
+ },
+ []string{"max"},
+ },
+ {
+ "min <= max",
+ func() *core.NumberField {
+ return &core.NumberField{
+ Id: "test",
+ Name: "test",
+ Min: types.Pointer(2.0),
+ Max: types.Pointer(2.0),
+ }
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ errs := s.field().ValidateSettings(context.Background(), app, collection)
+
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
+
+func TestNumberFieldFindSetter(t *testing.T) {
+ field := &core.NumberField{Name: "test"}
+
+ collection := core.NewBaseCollection("test_collection")
+ collection.Fields.Add(field)
+
+ t.Run("no match", func(t *testing.T) {
+ f := field.FindSetter("abc")
+ if f != nil {
+ t.Fatal("Expected nil setter")
+ }
+ })
+
+ t.Run("direct name match", func(t *testing.T) {
+ f := field.FindSetter("test")
+ if f == nil {
+ t.Fatal("Expected non-nil setter")
+ }
+
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 2.0)
+
+ f(record, "123.456") // should be casted
+
+ if v := record.Get("test"); v != 123.456 {
+ t.Fatalf("Expected %f, got %f", 123.456, v)
+ }
+ })
+
+ t.Run("name+ match", func(t *testing.T) {
+ f := field.FindSetter("test+")
+ if f == nil {
+ t.Fatal("Expected non-nil setter")
+ }
+
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 2.0)
+
+ f(record, "1.5") // should be casted and appended to the existing value
+
+ if v := record.Get("test"); v != 3.5 {
+ t.Fatalf("Expected %f, got %f", 3.5, v)
+ }
+ })
+
+ t.Run("name- match", func(t *testing.T) {
+ f := field.FindSetter("test-")
+ if f == nil {
+ t.Fatal("Expected non-nil setter")
+ }
+
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 2.0)
+
+ f(record, "1.5") // should be casted and subtracted from the existing value
+
+ if v := record.Get("test"); v != 0.5 {
+ t.Fatalf("Expected %f, got %f", 0.5, v)
+ }
+ })
+}
diff --git a/core/field_password.go b/core/field_password.go
new file mode 100644
index 00000000..147e279c
--- /dev/null
+++ b/core/field_password.go
@@ -0,0 +1,306 @@
+package core
+
+import (
+ "context"
+ "database/sql/driver"
+ "fmt"
+ "regexp"
+ "strings"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/spf13/cast"
+ "golang.org/x/crypto/bcrypt"
+)
+
+func init() {
+ Fields[FieldTypePassword] = func() Field {
+ return &PasswordField{}
+ }
+}
+
+const FieldTypePassword = "password"
+
+var (
+ _ Field = (*PasswordField)(nil)
+ _ GetterFinder = (*PasswordField)(nil)
+ _ SetterFinder = (*PasswordField)(nil)
+ _ DriverValuer = (*PasswordField)(nil)
+ _ RecordInterceptor = (*PasswordField)(nil)
+)
+
+// PasswordField defines "password" type field for storing bcrypt hashed strings
+// (usually used only internally for the "password" auth collection system field).
+//
+// If you want to set a direct bcrypt hash as record field value you can use the SetRaw method, for example:
+//
+// // generates a bcrypt hash of "123456" and set it as field value
+// // (record.GetString("password") returns the plain password until persisted, otherwise empty string)
+// record.Set("password", "123456")
+//
+// // set directly a bcrypt hash of "123456" as field value
+// // (record.GetString("password") returns empty string)
+// record.SetRaw("password", "$2a$10$.5Elh8fgxypNUWhpUUr/xOa2sZm0VIaE0qWuGGl9otUfobb46T1Pq")
+//
+// The following additional getter keys are available:
+//
+// - "fieldName:hash" - returns the bcrypt hash string of the record field value (if any). For example:
+// record.GetString("password:hash")
+type PasswordField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // Pattern specifies an optional regex pattern to match against the field value.
+ //
+ // Leave it empty to skip the pattern check.
+ Pattern string `form:"pattern" json:"pattern"`
+
+ // Min specifies an optional required field string length.
+ Min int `form:"min" json:"min"`
+
+ // Max specifies an optional required field string length.
+ //
+ // If zero, fallback to max 71 bytes.
+ Max int `form:"max" json:"max"`
+
+ // Cost specifies the cost/weight/iteration/etc. bcrypt factor.
+ //
+ // If zero, fallback to [bcrypt.DefaultCost].
+ //
+ // If explicitly set, must be between [bcrypt.MinCost] and [bcrypt.MaxCost].
+ Cost int `form:"cost" json:"cost"`
+
+ // Required will require the field value to be non-empty string.
+ Required bool `form:"required" json:"required"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *PasswordField) Type() string {
+ return FieldTypePassword
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *PasswordField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *PasswordField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *PasswordField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *PasswordField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *PasswordField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *PasswordField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *PasswordField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *PasswordField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *PasswordField) ColumnType(app App) string {
+ return "TEXT DEFAULT '' NOT NULL"
+}
+
+// DriverValue implements the [DriverValuer] interface.
+func (f *PasswordField) DriverValue(record *Record) (driver.Value, error) {
+ fp := f.getPasswordValue(record)
+ return fp.Hash, fp.LastError
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *PasswordField) PrepareValue(record *Record, raw any) (any, error) {
+ return &PasswordFieldValue{
+ Hash: cast.ToString(raw),
+ }, nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+func (f *PasswordField) ValidateValue(ctx context.Context, app App, record *Record) error {
+ fp, ok := record.GetRaw(f.Name).(*PasswordFieldValue)
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ if fp.LastError != nil {
+ return fp.LastError
+ }
+
+ if f.Required {
+ if err := validation.Required.Validate(fp.Hash); err != nil {
+ return err
+ }
+ }
+
+ if fp.Plain == "" {
+ return nil // nothing to check
+ }
+
+ // note: cast to []rune to count multi-byte chars as one for the
+ // sake of more intuitive UX and clearer user error messages
+ //
+ // note2: technically multi-byte strings could produce a bigger byte length than the bcrypt limit
+ // but it should be fine as it will be just truncated (even if it cuts a byte sequence in the middle)
+ length := len([]rune(fp.Plain))
+
+ if length < f.Min {
+ return validation.NewError("validation_min_text_constraint", fmt.Sprintf("Must be at least %d character(s)", f.Min))
+ }
+
+ maxLength := f.Max
+ if maxLength <= 0 {
+ maxLength = 71
+ }
+ if length > maxLength {
+ return validation.NewError("validation_max_text_constraint", fmt.Sprintf("Must be less than %d character(s)", maxLength))
+ }
+
+ if f.Pattern != "" {
+ match, _ := regexp.MatchString(f.Pattern, fp.Plain)
+ if !match {
+ return validation.NewError("validation_invalid_format", "Invalid value format")
+ }
+ }
+
+ return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *PasswordField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ validation.Field(&f.Min, validation.Min(1), validation.Max(71)),
+ validation.Field(&f.Max, validation.Min(f.Min), validation.Max(71)),
+ validation.Field(&f.Cost, validation.Min(bcrypt.MinCost), validation.Max(bcrypt.MaxCost)),
+ validation.Field(&f.Pattern, validation.By(validators.IsRegex)),
+ )
+}
+
+func (f *PasswordField) getPasswordValue(record *Record) *PasswordFieldValue {
+ raw := record.GetRaw(f.Name)
+
+ switch v := raw.(type) {
+ case *PasswordFieldValue:
+ return v
+ case string:
+ // we assume that any raw string starting with $2 is a bcrypt hash
+ if strings.HasPrefix(v, "$2") {
+ return &PasswordFieldValue{Hash: v}
+ }
+ }
+
+ return &PasswordFieldValue{}
+}
+
+// Intercept implements the [RecordInterceptor] interface.
+func (f *PasswordField) Intercept(
+ ctx context.Context,
+ app App,
+ record *Record,
+ actionName string,
+ actionFunc func() error,
+) error {
+ switch actionName {
+ case InterceptorActionAfterCreate, InterceptorActionAfterUpdate:
+ // unset the plain field value after successful create/update
+ fp := f.getPasswordValue(record)
+ fp.Plain = ""
+ }
+
+ return actionFunc()
+}
+
+// FindGetter implements the [GetterFinder] interface.
+func (f *PasswordField) FindGetter(key string) GetterFunc {
+ switch key {
+ case f.Name:
+ return func(record *Record) any {
+ return f.getPasswordValue(record).Plain
+ }
+ case f.Name + ":hash":
+ return func(record *Record) any {
+ return f.getPasswordValue(record).Hash
+ }
+ default:
+ return nil
+ }
+}
+
+// FindSetter implements the [SetterFinder] interface.
+func (f *PasswordField) FindSetter(key string) SetterFunc {
+ switch key {
+ case f.Name:
+ return f.setValue
+ default:
+ return nil
+ }
+}
+
+func (f *PasswordField) setValue(record *Record, raw any) {
+ fv := &PasswordFieldValue{
+ Plain: cast.ToString(raw),
+ }
+
+ // hash the password
+ if fv.Plain != "" {
+ cost := f.Cost
+ if cost <= 0 {
+ cost = bcrypt.DefaultCost
+ }
+
+ hash, err := bcrypt.GenerateFromPassword([]byte(fv.Plain), cost)
+ if err != nil {
+ fv.LastError = err
+ }
+
+ fv.Hash = string(hash)
+ }
+
+ record.SetRaw(f.Name, fv)
+}
+
+// -------------------------------------------------------------------
+
+type PasswordFieldValue struct {
+ LastError error
+ Hash string
+ Plain string
+}
+
+func (pv PasswordFieldValue) Validate(pass string) bool {
+ if pv.Hash == "" || pv.LastError != nil {
+ return false
+ }
+
+ err := bcrypt.CompareHashAndPassword([]byte(pv.Hash), []byte(pass))
+
+ return err == nil
+}
diff --git a/core/field_password_test.go b/core/field_password_test.go
new file mode 100644
index 00000000..31a1113d
--- /dev/null
+++ b/core/field_password_test.go
@@ -0,0 +1,568 @@
+package core_test
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "golang.org/x/crypto/bcrypt"
+)
+
+func TestPasswordFieldBaseMethods(t *testing.T) {
+ testFieldBaseMethods(t, core.FieldTypePassword)
+}
+
+func TestPasswordFieldColumnType(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.PasswordField{}
+
+ expected := "TEXT DEFAULT '' NOT NULL"
+
+ if v := f.ColumnType(app); v != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, v)
+ }
+}
+
+func TestPasswordFieldPrepareValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.PasswordField{}
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ scenarios := []struct {
+ raw any
+ expected string
+ }{
+ {"", ""},
+ {"test", "test"},
+ {false, "false"},
+ {true, "true"},
+ {123.456, "123.456"},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.raw), func(t *testing.T) {
+ v, err := f.PrepareValue(record, s.raw)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ pv, ok := v.(*core.PasswordFieldValue)
+ if !ok {
+ t.Fatalf("Expected PasswordFieldValue instance, got %T", v)
+ }
+
+ if pv.Hash != s.expected {
+ t.Fatalf("Expected %q, got %q", s.expected, v)
+ }
+ })
+ }
+}
+
+func TestPasswordFieldDriverValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.PasswordField{Name: "test"}
+
+ err := errors.New("example_err")
+
+ scenarios := []struct {
+ raw any
+ expected *core.PasswordFieldValue
+ }{
+ {123, &core.PasswordFieldValue{}},
+ {"abc", &core.PasswordFieldValue{}},
+ {"$2abc", &core.PasswordFieldValue{Hash: "$2abc"}},
+ {&core.PasswordFieldValue{Hash: "test", LastError: err}, &core.PasswordFieldValue{Hash: "test", LastError: err}},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%v", i, s.raw), func(t *testing.T) {
+ record := core.NewRecord(core.NewBaseCollection("test"))
+ record.SetRaw(f.GetName(), s.raw)
+
+ v, err := f.DriverValue(record)
+
+ vStr, ok := v.(string)
+ if !ok {
+ t.Fatalf("Expected string instance, got %T", v)
+ }
+
+ var errStr string
+ if err != nil {
+ errStr = err.Error()
+ }
+
+ var expectedErrStr string
+ if s.expected.LastError != nil {
+ expectedErrStr = s.expected.LastError.Error()
+ }
+
+ if errStr != expectedErrStr {
+ t.Fatalf("Expected error %q, got %q", expectedErrStr, errStr)
+ }
+
+ if vStr != s.expected.Hash {
+ t.Fatalf("Expected hash %q, got %q", s.expected.Hash, vStr)
+ }
+ })
+ }
+}
+
+func TestPasswordFieldValidateValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field *core.PasswordField
+ record func() *core.Record
+ expectError bool
+ }{
+ {
+ "invalid raw value",
+ &core.PasswordField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "123")
+ return record
+ },
+ true,
+ },
+ {
+ "zero field value (not required)",
+ &core.PasswordField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{})
+ return record
+ },
+ false,
+ },
+ {
+ "zero field value (required)",
+ &core.PasswordField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{})
+ return record
+ },
+ true,
+ },
+ {
+ "empty hash but non-empty plain password (required)",
+ &core.PasswordField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{Plain: "test"})
+ return record
+ },
+ true,
+ },
+ {
+ "non-empty hash (required)",
+ &core.PasswordField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{Hash: "test"})
+ return record
+ },
+ false,
+ },
+ {
+ "with LastError",
+ &core.PasswordField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{LastError: errors.New("test")})
+ return record
+ },
+ true,
+ },
+ {
+ "< Min",
+ &core.PasswordField{Name: "test", Min: 3},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{Plain: "аб"}) // multi-byte chars test
+ return record
+ },
+ true,
+ },
+ {
+ ">= Min",
+ &core.PasswordField{Name: "test", Min: 3},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{Plain: "абв"}) // multi-byte chars test
+ return record
+ },
+ false,
+ },
+ {
+ "> default Max",
+ &core.PasswordField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{Plain: strings.Repeat("a", 72)})
+ return record
+ },
+ true,
+ },
+ {
+ "<= default Max",
+ &core.PasswordField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{Plain: strings.Repeat("a", 71)})
+ return record
+ },
+ false,
+ },
+ {
+ "> Max",
+ &core.PasswordField{Name: "test", Max: 2},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{Plain: "абв"}) // multi-byte chars test
+ return record
+ },
+ true,
+ },
+ {
+ "<= Max",
+ &core.PasswordField{Name: "test", Max: 2},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{Plain: "аб"}) // multi-byte chars test
+ return record
+ },
+ false,
+ },
+ {
+ "non-matching pattern",
+ &core.PasswordField{Name: "test", Pattern: `\d+`},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{Plain: "abc"})
+ return record
+ },
+ true,
+ },
+ {
+ "matching pattern",
+ &core.PasswordField{Name: "test", Pattern: `\d+`},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", &core.PasswordFieldValue{Plain: "123"})
+ return record
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ err := s.field.ValidateValue(context.Background(), app, s.record())
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestPasswordFieldValidateSettings(t *testing.T) {
+ testDefaultFieldIdValidation(t, core.FieldTypePassword)
+ testDefaultFieldNameValidation(t, core.FieldTypePassword)
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ name string
+ field func(col *core.Collection) *core.PasswordField
+ expectErrors []string
+ }{
+ {
+ "zero minimal",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ }
+ },
+ []string{},
+ },
+ {
+ "invalid pattern",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Pattern: "(invalid",
+ }
+ },
+ []string{"pattern"},
+ },
+ {
+ "valid pattern",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Pattern: `\d+`,
+ }
+ },
+ []string{},
+ },
+ {
+ "Min < 0",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Min: -1,
+ }
+ },
+ []string{"min"},
+ },
+ {
+ "Min > 71",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Min: 72,
+ }
+ },
+ []string{"min"},
+ },
+ {
+ "valid Min",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Min: 5,
+ }
+ },
+ []string{},
+ },
+ {
+ "Max < Min",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Min: 2,
+ Max: 1,
+ }
+ },
+ []string{"max"},
+ },
+ {
+ "Min > Min",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Min: 2,
+ Max: 3,
+ }
+ },
+ []string{},
+ },
+ {
+ "Max > 71",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Max: 72,
+ }
+ },
+ []string{"max"},
+ },
+ {
+ "cost < bcrypt.MinCost",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Cost: bcrypt.MinCost - 1,
+ }
+ },
+ []string{"cost"},
+ },
+ {
+ "cost > bcrypt.MaxCost",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Cost: bcrypt.MaxCost + 1,
+ }
+ },
+ []string{"cost"},
+ },
+ {
+ "valid cost",
+ func(col *core.Collection) *core.PasswordField {
+ return &core.PasswordField{
+ Id: "test",
+ Name: "test",
+ Cost: 12,
+ }
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ collection := core.NewBaseCollection("test_collection")
+ collection.Fields.GetByName("id").SetId("test") // set a dummy known id so that it can be replaced
+
+ field := s.field(collection)
+
+ collection.Fields.Add(field)
+
+ errs := field.ValidateSettings(context.Background(), app, collection)
+
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
+
+func TestPasswordFieldFindSetter(t *testing.T) {
+ scenarios := []struct {
+ name string
+ key string
+ value any
+ field *core.PasswordField
+ hasSetter bool
+ expected string
+ }{
+ {
+ "no match",
+ "example",
+ "abc",
+ &core.PasswordField{Name: "test"},
+ false,
+ "",
+ },
+ {
+ "exact match",
+ "test",
+ "abc",
+ &core.PasswordField{Name: "test"},
+ true,
+ `"abc"`,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ collection := core.NewBaseCollection("test_collection")
+ collection.Fields.Add(s.field)
+
+ setter := s.field.FindSetter(s.key)
+
+ hasSetter := setter != nil
+ if hasSetter != s.hasSetter {
+ t.Fatalf("Expected hasSetter %v, got %v", s.hasSetter, hasSetter)
+ }
+
+ if !hasSetter {
+ return
+ }
+
+ record := core.NewRecord(collection)
+ record.SetRaw(s.field.GetName(), []string{"c", "d"})
+
+ setter(record, s.value)
+
+ raw, err := json.Marshal(record.Get(s.field.GetName()))
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+
+ if rawStr != s.expected {
+ t.Fatalf("Expected %q, got %q", s.expected, rawStr)
+ }
+ })
+ }
+}
+
+func TestPasswordFieldFindGetter(t *testing.T) {
+ scenarios := []struct {
+ name string
+ key string
+ field *core.PasswordField
+ hasGetter bool
+ expected string
+ }{
+ {
+ "no match",
+ "example",
+ &core.PasswordField{Name: "test"},
+ false,
+ "",
+ },
+ {
+ "field name match",
+ "test",
+ &core.PasswordField{Name: "test"},
+ true,
+ "test_plain",
+ },
+ {
+ "field name hash modifier",
+ "test:hash",
+ &core.PasswordField{Name: "test"},
+ true,
+ "test_hash",
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ collection := core.NewBaseCollection("test_collection")
+ collection.Fields.Add(s.field)
+
+ getter := s.field.FindGetter(s.key)
+
+ hasGetter := getter != nil
+ if hasGetter != s.hasGetter {
+ t.Fatalf("Expected hasGetter %v, got %v", s.hasGetter, hasGetter)
+ }
+
+ if !hasGetter {
+ return
+ }
+
+ record := core.NewRecord(collection)
+ record.SetRaw(s.field.GetName(), &core.PasswordFieldValue{Hash: "test_hash", Plain: "test_plain"})
+
+ result := getter(record)
+
+ if result != s.expected {
+ t.Fatalf("Expected %q, got %#v", s.expected, result)
+ }
+ })
+ }
+}
diff --git a/core/field_relation.go b/core/field_relation.go
new file mode 100644
index 00000000..e41a440b
--- /dev/null
+++ b/core/field_relation.go
@@ -0,0 +1,337 @@
+package core
+
+import (
+ "context"
+ "database/sql/driver"
+ "fmt"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/tools/list"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func init() {
+ Fields[FieldTypeRelation] = func() Field {
+ return &RelationField{}
+ }
+}
+
+const FieldTypeRelation = "relation"
+
+var (
+ _ Field = (*RelationField)(nil)
+ _ MultiValuer = (*RelationField)(nil)
+ _ DriverValuer = (*RelationField)(nil)
+ _ SetterFinder = (*RelationField)(nil)
+)
+
+// RelationField defines "relation" type field for storing single or
+// multiple collection record references.
+//
+// Requires the CollectionId option to be set.
+//
+// If MaxSelect is not set or <= 1, then the field value is expected to be a single record id.
+//
+// If MaxSelect is > 1, then the field value is expected to be a slice of record ids.
+//
+// ---
+//
+// The following additional setter keys are available:
+//
+// - "fieldName+" - append one or more values to the existing record one. For example:
+//
+// record.Set("categories+", []string{"new1", "new2"}) // []string{"old1", "old2", "new1", "new2"}
+//
+// - "+fieldName" - prepend one or more values to the existing record one. For example:
+//
+// record.Set("+categories", []string{"new1", "new2"}) // []string{"new1", "new2", "old1", "old2"}
+//
+// - "fieldName-" - subtract one or more values from the existing record one. For example:
+//
+// record.Set("categories-", "old1") // []string{"old2"}
+type RelationField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // CollectionId is the id of the related collection.
+ CollectionId string `form:"collectionId" json:"collectionId"`
+
+ // CascadeDelete indicates whether the root model should be deleted
+ // in case of delete of all linked relations.
+ CascadeDelete bool `form:"cascadeDelete" json:"cascadeDelete"`
+
+ // MinSelect indicates the min number of allowed relation records
+ // that could be linked to the main model.
+ //
+ // No min limit is applied if it is zero or negative value.
+ MinSelect int `form:"minSelect" json:"minSelect"`
+
+ // MaxSelect indicates the max number of allowed relation records
+ // that could be linked to the main model.
+ //
+ // For multiple select the value must be > 1, otherwise it falls back to single (default).
+ //
+ // If MinSelect is set, MaxSelect must be at least >= MinSelect.
+ MaxSelect int `form:"maxSelect" json:"maxSelect"`
+
+ // Required will require the field value to be non-empty.
+ Required bool `form:"required" json:"required"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *RelationField) Type() string {
+ return FieldTypeRelation
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *RelationField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *RelationField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *RelationField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *RelationField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *RelationField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *RelationField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *RelationField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *RelationField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// IsMultiple implements [MultiValuer] interface and checks whether the
+// current field options support multiple values.
+func (f *RelationField) IsMultiple() bool {
+ return f.MaxSelect > 1
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *RelationField) ColumnType(app App) string {
+ if f.IsMultiple() {
+ return "JSON DEFAULT '[]' NOT NULL"
+ }
+
+ return "TEXT DEFAULT '' NOT NULL"
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *RelationField) PrepareValue(record *Record, raw any) (any, error) {
+ return f.normalizeValue(raw), nil
+}
+
+func (f *RelationField) normalizeValue(raw any) any {
+ val := list.ToUniqueStringSlice(raw)
+
+ if !f.IsMultiple() {
+ if len(val) > 0 {
+ return val[len(val)-1] // the last selected
+ }
+ return ""
+ }
+
+ return val
+}
+
+// DriverValue implements the [DriverValuer] interface.
+func (f *RelationField) DriverValue(record *Record) (driver.Value, error) {
+ val := list.ToUniqueStringSlice(record.GetRaw(f.Name))
+
+ if !f.IsMultiple() {
+ if len(val) > 0 {
+ return val[len(val)-1], nil // the last selected
+ }
+ return "", nil
+ }
+
+ // serialize as json string array
+ return append(types.JSONArray[string]{}, val...), nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+func (f *RelationField) ValidateValue(ctx context.Context, app App, record *Record) error {
+ ids := list.ToUniqueStringSlice(record.GetRaw(f.Name))
+ if len(ids) == 0 {
+ if f.Required {
+ return validation.ErrRequired
+ }
+ return nil // nothing to check
+ }
+
+ if f.MinSelect > 0 && len(ids) < f.MinSelect {
+ return validation.NewError("validation_not_enough_values", fmt.Sprintf("Select at least %d", f.MinSelect)).
+ SetParams(map[string]any{"minSelect": f.MinSelect})
+ }
+
+ maxSelect := max(f.MaxSelect, 1)
+ if len(ids) > maxSelect {
+ return validation.NewError("validation_too_many_values", fmt.Sprintf("Select no more than %d", maxSelect)).
+ SetParams(map[string]any{"maxSelect": maxSelect})
+ }
+
+ // check if the related records exist
+ // ---
+ relCollection, err := app.FindCachedCollectionByNameOrId(f.CollectionId)
+ if err != nil {
+ return validation.NewError("validation_missing_rel_collection", "Relation connection is missing or cannot be accessed")
+ }
+
+ var total int
+ _ = app.DB().
+ Select("count(*)").
+ From(relCollection.Name).
+ AndWhere(dbx.In("id", list.ToInterfaceSlice(ids)...)).
+ Row(&total)
+ if total != len(ids) {
+ return validation.NewError("validation_missing_rel_records", "Failed to find all relation records with the provided ids")
+ }
+ // ---
+
+ return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *RelationField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ validation.Field(&f.CollectionId, validation.Required, validation.By(f.checkCollectionId(app, collection))),
+ validation.Field(&f.MinSelect, validation.Min(0)),
+ validation.Field(&f.MaxSelect, validation.When(f.MinSelect > 0, validation.Required), validation.Min(f.MinSelect)),
+ )
+}
+
+func (f *RelationField) checkCollectionId(app App, collection *Collection) validation.RuleFunc {
+ return func(value any) error {
+ v, _ := value.(string)
+ if v == "" {
+ return nil // nothing to check
+ }
+
+ var oldCollection *Collection
+
+ if !collection.IsNew() {
+ var err error
+ oldCollection, err = app.FindCachedCollectionByNameOrId(collection.Id)
+ if err != nil {
+ return err
+ }
+ }
+
+ // prevent collectionId change
+ if oldCollection != nil {
+ oldField, _ := oldCollection.Fields.GetById(f.Id).(*RelationField)
+ if oldField != nil && oldField.CollectionId != v {
+ return validation.NewError(
+ "validation_field_relation_change",
+ "The relation collection cannot be changed.",
+ )
+ }
+ }
+
+ relCollection, _ := app.FindCachedCollectionByNameOrId(v)
+
+ // validate collectionId
+ if relCollection == nil || relCollection.Id != v {
+ return validation.NewError(
+ "validation_field_relation_missing_collection",
+ "The relation collection doesn't exist.",
+ )
+ }
+
+ // allow only views to have relations to other views
+ // (see https://github.com/pocketbase/pocketbase/issues/3000)
+ if !collection.IsView() && relCollection.IsView() {
+ return validation.NewError(
+ "validation_relation_field_non_view_base_collection",
+ "Only view collections are allowed to have relations to other views.",
+ )
+ }
+
+ return nil
+ }
+}
+
+// ---
+
+// FindSetter implements [SetterFinder] interface method.
+func (f *RelationField) FindSetter(key string) SetterFunc {
+ switch key {
+ case f.Name:
+ return f.setValue
+ case "+" + f.Name:
+ return f.prependValue
+ case f.Name + "+":
+ return f.appendValue
+ case f.Name + "-":
+ return f.subtractValue
+ default:
+ return nil
+ }
+}
+
+func (f *RelationField) setValue(record *Record, raw any) {
+ record.SetRaw(f.Name, f.normalizeValue(raw))
+}
+
+func (f *RelationField) appendValue(record *Record, modifierValue any) {
+ val := record.GetRaw(f.Name)
+
+ val = append(
+ list.ToUniqueStringSlice(val),
+ list.ToUniqueStringSlice(modifierValue)...,
+ )
+
+ f.setValue(record, val)
+}
+
+func (f *RelationField) prependValue(record *Record, modifierValue any) {
+ val := record.GetRaw(f.Name)
+
+ val = append(
+ list.ToUniqueStringSlice(modifierValue),
+ list.ToUniqueStringSlice(val)...,
+ )
+
+ f.setValue(record, val)
+}
+
+func (f *RelationField) subtractValue(record *Record, modifierValue any) {
+ val := record.GetRaw(f.Name)
+
+ val = list.SubtractSlice(
+ list.ToUniqueStringSlice(val),
+ list.ToUniqueStringSlice(modifierValue),
+ )
+
+ f.setValue(record, val)
+}
diff --git a/core/field_relation_test.go b/core/field_relation_test.go
new file mode 100644
index 00000000..598e361b
--- /dev/null
+++ b/core/field_relation_test.go
@@ -0,0 +1,603 @@
+package core_test
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
// TestRelationFieldBaseMethods runs the shared id/name/system/hidden
// getter-setter checks against the relation field factory.
func TestRelationFieldBaseMethods(t *testing.T) {
	testFieldBaseMethods(t, core.FieldTypeRelation)
}

// TestRelationFieldColumnType verifies the generated SQL column definition:
// TEXT for single relations and a JSON array for multiple relations.
func TestRelationFieldColumnType(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	scenarios := []struct {
		name     string
		field    *core.RelationField
		expected string
	}{
		{
			"single (zero)",
			&core.RelationField{},
			"TEXT DEFAULT '' NOT NULL",
		},
		{
			"single",
			&core.RelationField{MaxSelect: 1},
			"TEXT DEFAULT '' NOT NULL",
		},
		{
			"multiple",
			&core.RelationField{MaxSelect: 2},
			"JSON DEFAULT '[]' NOT NULL",
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			if v := s.field.ColumnType(app); v != s.expected {
				t.Fatalf("Expected\n%q\ngot\n%q", s.expected, v)
			}
		})
	}
}

// TestRelationFieldIsMultiple checks that only MaxSelect > 1 is
// reported as a multiple relation.
func TestRelationFieldIsMultiple(t *testing.T) {
	scenarios := []struct {
		name     string
		field    *core.RelationField
		expected bool
	}{
		{
			"zero",
			&core.RelationField{},
			false,
		},
		{
			"single",
			&core.RelationField{MaxSelect: 1},
			false,
		},
		{
			"multiple",
			&core.RelationField{MaxSelect: 2},
			true,
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			if v := s.field.IsMultiple(); v != s.expected {
				t.Fatalf("Expected %v, got %v", s.expected, v)
			}
		})
	}
}
+
// TestRelationFieldPrepareValue checks the value normalization:
// single relations collapse to the last selected id (as a string),
// multiple relations normalize to a unique string slice.
func TestRelationFieldPrepareValue(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	record := core.NewRecord(core.NewBaseCollection("test"))

	scenarios := []struct {
		raw      any
		field    *core.RelationField
		expected string // JSON serialization of the prepared value
	}{
		// single
		{nil, &core.RelationField{MaxSelect: 1}, `""`},
		{"", &core.RelationField{MaxSelect: 1}, `""`},
		{123, &core.RelationField{MaxSelect: 1}, `"123"`},
		{"a", &core.RelationField{MaxSelect: 1}, `"a"`},
		{`["a"]`, &core.RelationField{MaxSelect: 1}, `"a"`},
		{[]string{}, &core.RelationField{MaxSelect: 1}, `""`},
		{[]string{"a", "b"}, &core.RelationField{MaxSelect: 1}, `"b"`},

		// multiple
		{nil, &core.RelationField{MaxSelect: 2}, `[]`},
		{"", &core.RelationField{MaxSelect: 2}, `[]`},
		{123, &core.RelationField{MaxSelect: 2}, `["123"]`},
		{"a", &core.RelationField{MaxSelect: 2}, `["a"]`},
		{`["a"]`, &core.RelationField{MaxSelect: 2}, `["a"]`},
		{[]string{}, &core.RelationField{MaxSelect: 2}, `[]`},
		{[]string{"a", "b", "c"}, &core.RelationField{MaxSelect: 2}, `["a","b","c"]`},
	}

	for i, s := range scenarios {
		t.Run(fmt.Sprintf("%d_%#v_%v", i, s.raw, s.field.IsMultiple()), func(t *testing.T) {
			v, err := s.field.PrepareValue(record, s.raw)
			if err != nil {
				t.Fatal(err)
			}

			vRaw, err := json.Marshal(v)
			if err != nil {
				t.Fatal(err)
			}

			if string(vRaw) != s.expected {
				t.Fatalf("Expected %q, got %q", s.expected, vRaw)
			}
		})
	}
}

// TestRelationFieldDriverValue checks both the concrete driver value type
// (string vs types.JSONArray[string]) and its JSON serialization.
func TestRelationFieldDriverValue(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	scenarios := []struct {
		raw      any
		field    *core.RelationField
		expected string // JSON serialization of the driver value
	}{
		// single
		{nil, &core.RelationField{MaxSelect: 1}, `""`},
		{"", &core.RelationField{MaxSelect: 1}, `""`},
		{123, &core.RelationField{MaxSelect: 1}, `"123"`},
		{"a", &core.RelationField{MaxSelect: 1}, `"a"`},
		{`["a"]`, &core.RelationField{MaxSelect: 1}, `"a"`},
		{[]string{}, &core.RelationField{MaxSelect: 1}, `""`},
		{[]string{"a", "b"}, &core.RelationField{MaxSelect: 1}, `"b"`},

		// multiple
		{nil, &core.RelationField{MaxSelect: 2}, `[]`},
		{"", &core.RelationField{MaxSelect: 2}, `[]`},
		{123, &core.RelationField{MaxSelect: 2}, `["123"]`},
		{"a", &core.RelationField{MaxSelect: 2}, `["a"]`},
		{`["a"]`, &core.RelationField{MaxSelect: 2}, `["a"]`},
		{[]string{}, &core.RelationField{MaxSelect: 2}, `[]`},
		{[]string{"a", "b", "c"}, &core.RelationField{MaxSelect: 2}, `["a","b","c"]`},
	}

	for i, s := range scenarios {
		t.Run(fmt.Sprintf("%d_%#v_%v", i, s.raw, s.field.IsMultiple()), func(t *testing.T) {
			record := core.NewRecord(core.NewBaseCollection("test"))
			record.SetRaw(s.field.GetName(), s.raw)

			v, err := s.field.DriverValue(record)
			if err != nil {
				t.Fatal(err)
			}

			if s.field.IsMultiple() {
				_, ok := v.(types.JSONArray[string])
				if !ok {
					t.Fatalf("Expected types.JSONArray value, got %T", v)
				}
			} else {
				_, ok := v.(string)
				if !ok {
					t.Fatalf("Expected string value, got %T", v)
				}
			}

			vRaw, err := json.Marshal(v)
			if err != nil {
				t.Fatal(err)
			}

			if string(vRaw) != s.expected {
				t.Fatalf("Expected %q, got %q", s.expected, vRaw)
			}
		})
	}
}
+
// TestRelationFieldValidateValue checks the record value validation:
// required/empty handling, that every id must exist in the configured
// relation collection, and the MinSelect/MaxSelect bounds.
// The record ids used below come from the shared test fixtures.
func TestRelationFieldValidateValue(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	demo1, err := app.FindCollectionByNameOrId("demo1")
	if err != nil {
		t.Fatal(err)
	}

	scenarios := []struct {
		name        string
		field       *core.RelationField
		record      func() *core.Record
		expectError bool
	}{
		// single
		{
			"[single] zero field value (not required)",
			&core.RelationField{Name: "test", MaxSelect: 1, CollectionId: demo1.Id},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", "")
				return record
			},
			false,
		},
		{
			"[single] zero field value (required)",
			&core.RelationField{Name: "test", MaxSelect: 1, CollectionId: demo1.Id, Required: true},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", "")
				return record
			},
			true,
		},
		{
			"[single] id from other collection",
			&core.RelationField{Name: "test", MaxSelect: 1, CollectionId: demo1.Id},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", "achvryl401bhse3")
				return record
			},
			true,
		},
		{
			"[single] valid id",
			&core.RelationField{Name: "test", MaxSelect: 1, CollectionId: demo1.Id},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", "84nmscqy84lsi1t")
				return record
			},
			false,
		},
		{
			"[single] > MaxSelect",
			&core.RelationField{Name: "test", MaxSelect: 1, CollectionId: demo1.Id},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", []string{"84nmscqy84lsi1t", "al1h9ijdeojtsjy"})
				return record
			},
			true,
		},

		// multiple
		{
			"[multiple] zero field value (not required)",
			&core.RelationField{Name: "test", MaxSelect: 2, CollectionId: demo1.Id},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", []string{})
				return record
			},
			false,
		},
		{
			"[multiple] zero field value (required)",
			&core.RelationField{Name: "test", MaxSelect: 2, CollectionId: demo1.Id, Required: true},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", []string{})
				return record
			},
			true,
		},
		{
			"[multiple] id from other collection",
			&core.RelationField{Name: "test", MaxSelect: 2, CollectionId: demo1.Id},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", []string{"84nmscqy84lsi1t", "achvryl401bhse3"})
				return record
			},
			true,
		},
		{
			"[multiple] valid id",
			&core.RelationField{Name: "test", MaxSelect: 2, CollectionId: demo1.Id},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", []string{"84nmscqy84lsi1t", "al1h9ijdeojtsjy"})
				return record
			},
			false,
		},
		{
			"[multiple] > MaxSelect",
			&core.RelationField{Name: "test", MaxSelect: 2, CollectionId: demo1.Id},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", []string{"84nmscqy84lsi1t", "al1h9ijdeojtsjy", "imy661ixudk5izi"})
				return record
			},
			true,
		},
		{
			"[multiple] < MinSelect",
			&core.RelationField{Name: "test", MinSelect: 2, MaxSelect: 99, CollectionId: demo1.Id},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", []string{"84nmscqy84lsi1t"})
				return record
			},
			true,
		},
		{
			"[multiple] >= MinSelect",
			&core.RelationField{Name: "test", MinSelect: 2, MaxSelect: 99, CollectionId: demo1.Id},
			func() *core.Record {
				record := core.NewRecord(core.NewBaseCollection("test_collection"))
				record.SetRaw("test", []string{"84nmscqy84lsi1t", "al1h9ijdeojtsjy", "imy661ixudk5izi"})
				return record
			},
			false,
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			err := s.field.ValidateValue(context.Background(), app, s.record())

			hasErr := err != nil
			if hasErr != s.expectError {
				t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
			}
		})
	}
}
+
// TestRelationFieldValidateSettings checks the field settings validation:
// CollectionId must reference an existing collection by its id (not name),
// only view collections may relate to other views, and the
// MinSelect/MaxSelect combination must be consistent.
func TestRelationFieldValidateSettings(t *testing.T) {
	testDefaultFieldIdValidation(t, core.FieldTypeRelation)
	testDefaultFieldNameValidation(t, core.FieldTypeRelation)

	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	demo1, err := app.FindCollectionByNameOrId("demo1")
	if err != nil {
		t.Fatal(err)
	}

	scenarios := []struct {
		name         string
		field        func(col *core.Collection) *core.RelationField
		expectErrors []string
	}{
		{
			"zero minimal",
			func(col *core.Collection) *core.RelationField {
				return &core.RelationField{
					Id:   "test",
					Name: "test",
				}
			},
			[]string{"collectionId"},
		},
		{
			// referencing by name must fail - only ids are accepted
			"invalid collectionId",
			func(col *core.Collection) *core.RelationField {
				return &core.RelationField{
					Id:           "test",
					Name:         "test",
					CollectionId: demo1.Name,
				}
			},
			[]string{"collectionId"},
		},
		{
			"valid collectionId",
			func(col *core.Collection) *core.RelationField {
				return &core.RelationField{
					Id:           "test",
					Name:         "test",
					CollectionId: demo1.Id,
				}
			},
			[]string{},
		},
		{
			// a base collection must not relate to a view collection
			"base->view",
			func(col *core.Collection) *core.RelationField {
				return &core.RelationField{
					Id:           "test",
					Name:         "test",
					CollectionId: "v9gwnfh02gjq1q0",
				}
			},
			[]string{"collectionId"},
		},
		{
			"view->view",
			func(col *core.Collection) *core.RelationField {
				col.Type = core.CollectionTypeView
				return &core.RelationField{
					Id:           "test",
					Name:         "test",
					CollectionId: "v9gwnfh02gjq1q0",
				}
			},
			[]string{},
		},
		{
			"MinSelect < 0",
			func(col *core.Collection) *core.RelationField {
				return &core.RelationField{
					Id:           "test",
					Name:         "test",
					CollectionId: demo1.Id,
					MinSelect:    -1,
				}
			},
			[]string{"minSelect"},
		},
		{
			// a positive MinSelect requires MaxSelect to be set too
			"MinSelect > 0",
			func(col *core.Collection) *core.RelationField {
				return &core.RelationField{
					Id:           "test",
					Name:         "test",
					CollectionId: demo1.Id,
					MinSelect:    1,
				}
			},
			[]string{"maxSelect"},
		},
		{
			"MaxSelect < MinSelect",
			func(col *core.Collection) *core.RelationField {
				return &core.RelationField{
					Id:           "test",
					Name:         "test",
					CollectionId: demo1.Id,
					MinSelect:    2,
					MaxSelect:    1,
				}
			},
			[]string{"maxSelect"},
		},
		{
			"MaxSelect >= MinSelect",
			func(col *core.Collection) *core.RelationField {
				return &core.RelationField{
					Id:           "test",
					Name:         "test",
					CollectionId: demo1.Id,
					MinSelect:    2,
					MaxSelect:    2,
				}
			},
			[]string{},
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			collection := core.NewBaseCollection("test_collection")
			collection.Fields.GetByName("id").SetId("test") // set a dummy known id so that it can be replaced

			field := s.field(collection)

			collection.Fields.Add(field)

			errs := field.ValidateSettings(context.Background(), app, collection)

			tests.TestValidationErrors(t, errs, s.expectErrors)
		})
	}
}
+
// TestRelationFieldFindSetter checks the setter resolution for the plain
// field name and the "+name"/"name+"/"name-" modifier keys.
// Each resolved setter is exercised against a record preloaded with
// the ids []string{"c", "d"}.
func TestRelationFieldFindSetter(t *testing.T) {
	scenarios := []struct {
		name      string
		key       string
		value     any
		field     *core.RelationField
		hasSetter bool
		expected  string // JSON serialization of the resulting field value
	}{
		{
			"no match",
			"example",
			"b",
			&core.RelationField{Name: "test", MaxSelect: 1},
			false,
			"",
		},
		{
			"exact match (single)",
			"test",
			"b",
			&core.RelationField{Name: "test", MaxSelect: 1},
			true,
			`"b"`,
		},
		{
			"exact match (multiple)",
			"test",
			[]string{"a", "b"},
			&core.RelationField{Name: "test", MaxSelect: 2},
			true,
			`["a","b"]`,
		},
		{
			"append (single)",
			"test+",
			"b",
			&core.RelationField{Name: "test", MaxSelect: 1},
			true,
			`"b"`,
		},
		{
			"append (multiple)",
			"test+",
			[]string{"a"},
			&core.RelationField{Name: "test", MaxSelect: 2},
			true,
			`["c","d","a"]`,
		},
		{
			"prepend (single)",
			"+test",
			"b",
			&core.RelationField{Name: "test", MaxSelect: 1},
			true,
			`"d"`, // the last of the existing values
		},
		{
			"prepend (multiple)",
			"+test",
			[]string{"a"},
			&core.RelationField{Name: "test", MaxSelect: 2},
			true,
			`["a","c","d"]`,
		},
		{
			"subtract (single)",
			"test-",
			"d",
			&core.RelationField{Name: "test", MaxSelect: 1},
			true,
			`"c"`,
		},
		{
			"subtract (multiple)",
			"test-",
			[]string{"unknown", "c"},
			&core.RelationField{Name: "test", MaxSelect: 2},
			true,
			`["d"]`,
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			collection := core.NewBaseCollection("test_collection")
			collection.Fields.Add(s.field)

			setter := s.field.FindSetter(s.key)

			hasSetter := setter != nil
			if hasSetter != s.hasSetter {
				t.Fatalf("Expected hasSetter %v, got %v", s.hasSetter, hasSetter)
			}

			if !hasSetter {
				return
			}

			record := core.NewRecord(collection)
			record.SetRaw(s.field.GetName(), []string{"c", "d"})

			setter(record, s.value)

			raw, err := json.Marshal(record.Get(s.field.GetName()))
			if err != nil {
				t.Fatal(err)
			}
			rawStr := string(raw)

			if rawStr != s.expected {
				t.Fatalf("Expected %q, got %q", s.expected, rawStr)
			}
		})
	}
}
diff --git a/core/field_select.go b/core/field_select.go
new file mode 100644
index 00000000..18344095
--- /dev/null
+++ b/core/field_select.go
@@ -0,0 +1,262 @@
+package core
+
+import (
+ "context"
+ "database/sql/driver"
+ "fmt"
+ "slices"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/tools/list"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
// register the select field factory so collections can
// instantiate it by its type name.
func init() {
	Fields[FieldTypeSelect] = func() Field {
		return &SelectField{}
	}
}

// FieldTypeSelect is the type identifier of the select field.
const FieldTypeSelect = "select"

// compile-time interface implementation checks
var (
	_ Field        = (*SelectField)(nil)
	_ MultiValuer  = (*SelectField)(nil)
	_ DriverValuer = (*SelectField)(nil)
	_ SetterFinder = (*SelectField)(nil)
)
+
// SelectField defines "select" type field for storing single or
// multiple string values from a predefined list.
//
// Requires the Values option to be set.
//
// If MaxSelect is not set or <= 1, then the field value is expected to be a single Values element.
//
// If MaxSelect is > 1, then the field value is expected to be a subset of Values slice.
//
// ---
//
// The following additional setter keys are available:
//
//   - "fieldName+" - append one or more values to the existing record one. For example:
//
//     record.Set("roles+", []string{"new1", "new2"}) // []string{"old1", "old2", "new1", "new2"}
//
//   - "+fieldName" - prepend one or more values to the existing record one. For example:
//
//     record.Set("+roles", []string{"new1", "new2"}) // []string{"new1", "new2", "old1", "old2"}
//
//   - "fieldName-" - subtract one or more values from the existing record one. For example:
//
//     record.Set("roles-", "old1") // []string{"old2"}
type SelectField struct {
	// common field options shared by all field types
	Id          string `form:"id" json:"id"`
	Name        string `form:"name" json:"name"`
	System      bool   `form:"system" json:"system"`
	Hidden      bool   `form:"hidden" json:"hidden"`
	Presentable bool   `form:"presentable" json:"presentable"`

	// ---

	// Values specifies the list of accepted values.
	Values []string `form:"values" json:"values"`

	// MaxSelect specifies the max allowed selected values.
	//
	// For multiple select the value must be > 1, otherwise fallbacks to single (default).
	MaxSelect int `form:"maxSelect" json:"maxSelect"`

	// Required will require the field value to be non-empty.
	Required bool `form:"required" json:"required"`
}
+
// Type implements [Field.Type] interface method.
func (f *SelectField) Type() string {
	return FieldTypeSelect
}

// GetId implements [Field.GetId] interface method.
func (f *SelectField) GetId() string {
	return f.Id
}

// SetId implements [Field.SetId] interface method.
func (f *SelectField) SetId(id string) {
	f.Id = id
}

// GetName implements [Field.GetName] interface method.
func (f *SelectField) GetName() string {
	return f.Name
}

// SetName implements [Field.SetName] interface method.
func (f *SelectField) SetName(name string) {
	f.Name = name
}

// GetSystem implements [Field.GetSystem] interface method.
func (f *SelectField) GetSystem() bool {
	return f.System
}

// SetSystem implements [Field.SetSystem] interface method.
func (f *SelectField) SetSystem(system bool) {
	f.System = system
}

// GetHidden implements [Field.GetHidden] interface method.
func (f *SelectField) GetHidden() bool {
	return f.Hidden
}

// SetHidden implements [Field.SetHidden] interface method.
func (f *SelectField) SetHidden(hidden bool) {
	f.Hidden = hidden
}

// IsMultiple implements [MultiValuer] interface and checks whether the
// current field options support multiple values.
func (f *SelectField) IsMultiple() bool {
	return f.MaxSelect > 1
}

// ColumnType implements [Field.ColumnType] interface method.
//
// Single selects are stored as plain TEXT, multiple selects as a
// serialized JSON string array.
func (f *SelectField) ColumnType(app App) string {
	if f.IsMultiple() {
		return "JSON DEFAULT '[]' NOT NULL"
	}

	return "TEXT DEFAULT '' NOT NULL"
}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *SelectField) PrepareValue(record *Record, raw any) (any, error) {
+ return f.normalizeValue(raw), nil
+}
+
+func (f *SelectField) normalizeValue(raw any) any {
+ val := list.ToUniqueStringSlice(raw)
+
+ if !f.IsMultiple() {
+ if len(val) > 0 {
+ return val[len(val)-1] // the last selected
+ }
+ return ""
+ }
+
+ return val
+}
+
+// DriverValue implements the [DriverValuer] interface.
+func (f *SelectField) DriverValue(record *Record) (driver.Value, error) {
+ val := list.ToUniqueStringSlice(record.GetRaw(f.Name))
+
+ if !f.IsMultiple() {
+ if len(val) > 0 {
+ return val[len(val)-1], nil // the last selected
+ }
+ return "", nil
+ }
+
+ // serialize as json string array
+ return append(types.JSONArray[string]{}, val...), nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+func (f *SelectField) ValidateValue(ctx context.Context, app App, record *Record) error {
+ normalizedVal := list.ToUniqueStringSlice(record.GetRaw(f.Name))
+ if len(normalizedVal) == 0 {
+ if f.Required {
+ return validation.ErrRequired
+ }
+ return nil // nothing to check
+ }
+
+ maxSelect := max(f.MaxSelect, 1)
+
+ // check max selected items
+ if len(normalizedVal) > maxSelect {
+ return validation.NewError("validation_too_many_values", fmt.Sprintf("Select no more than %d", maxSelect)).
+ SetParams(map[string]any{"maxSelect": maxSelect})
+ }
+
+ // check against the allowed values
+ for _, val := range normalizedVal {
+ if !slices.Contains(f.Values, val) {
+ return validation.NewError("validation_invalid_value", "Invalid value "+val).
+ SetParams(map[string]any{"value": val})
+ }
+ }
+
+ return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *SelectField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ max := len(f.Values)
+ if max == 0 {
+ max = 1
+ }
+
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ validation.Field(&f.Values, validation.Required),
+ validation.Field(&f.MaxSelect, validation.Min(0), validation.Max(max)),
+ )
+}
+
+// FindSetter implements the [SetterFinder] interface.
+func (f *SelectField) FindSetter(key string) SetterFunc {
+ switch key {
+ case f.Name:
+ return f.setValue
+ case "+" + f.Name:
+ return f.prependValue
+ case f.Name + "+":
+ return f.appendValue
+ case f.Name + "-":
+ return f.subtractValue
+ default:
+ return nil
+ }
+}
+
+func (f *SelectField) setValue(record *Record, raw any) {
+ record.SetRaw(f.Name, f.normalizeValue(raw))
+}
+
+func (f *SelectField) appendValue(record *Record, modifierValue any) {
+ val := record.GetRaw(f.Name)
+
+ val = append(
+ list.ToUniqueStringSlice(val),
+ list.ToUniqueStringSlice(modifierValue)...,
+ )
+
+ f.setValue(record, val)
+}
+
+func (f *SelectField) prependValue(record *Record, modifierValue any) {
+ val := record.GetRaw(f.Name)
+
+ val = append(
+ list.ToUniqueStringSlice(modifierValue),
+ list.ToUniqueStringSlice(val)...,
+ )
+
+ f.setValue(record, val)
+}
+
+func (f *SelectField) subtractValue(record *Record, modifierValue any) {
+ val := record.GetRaw(f.Name)
+
+ val = list.SubtractSlice(
+ list.ToUniqueStringSlice(val),
+ list.ToUniqueStringSlice(modifierValue),
+ )
+
+ f.setValue(record, val)
+}
diff --git a/core/field_select_test.go b/core/field_select_test.go
new file mode 100644
index 00000000..808c1096
--- /dev/null
+++ b/core/field_select_test.go
@@ -0,0 +1,516 @@
+package core_test
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
// TestSelectFieldBaseMethods runs the shared id/name/system/hidden
// getter-setter checks against the select field factory.
func TestSelectFieldBaseMethods(t *testing.T) {
	testFieldBaseMethods(t, core.FieldTypeSelect)
}

// TestSelectFieldColumnType verifies the generated SQL column definition:
// TEXT for single selects and a JSON array for multiple selects.
func TestSelectFieldColumnType(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	scenarios := []struct {
		name     string
		field    *core.SelectField
		expected string
	}{
		{
			"single (zero)",
			&core.SelectField{},
			"TEXT DEFAULT '' NOT NULL",
		},
		{
			"single",
			&core.SelectField{MaxSelect: 1},
			"TEXT DEFAULT '' NOT NULL",
		},
		{
			"multiple",
			&core.SelectField{MaxSelect: 2},
			"JSON DEFAULT '[]' NOT NULL",
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			if v := s.field.ColumnType(app); v != s.expected {
				t.Fatalf("Expected\n%q\ngot\n%q", s.expected, v)
			}
		})
	}
}

// TestSelectFieldIsMultiple checks that only MaxSelect > 1 is
// reported as a multiple select.
func TestSelectFieldIsMultiple(t *testing.T) {
	scenarios := []struct {
		name     string
		field    *core.SelectField
		expected bool
	}{
		{
			"single (zero)",
			&core.SelectField{},
			false,
		},
		{
			"single",
			&core.SelectField{MaxSelect: 1},
			false,
		},
		{
			"multiple (>1)",
			&core.SelectField{MaxSelect: 2},
			true,
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			if v := s.field.IsMultiple(); v != s.expected {
				t.Fatalf("Expected %v, got %v", s.expected, v)
			}
		})
	}
}
+
// TestSelectFieldPrepareValue checks the value normalization:
// single selects collapse to the last selected option (as a string),
// multiple selects normalize to a unique string slice.
func TestSelectFieldPrepareValue(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	record := core.NewRecord(core.NewBaseCollection("test"))

	scenarios := []struct {
		raw      any
		field    *core.SelectField
		expected string // JSON serialization of the prepared value
	}{
		// single
		{nil, &core.SelectField{}, `""`},
		{"", &core.SelectField{}, `""`},
		{123, &core.SelectField{}, `"123"`},
		{"a", &core.SelectField{}, `"a"`},
		{`["a"]`, &core.SelectField{}, `"a"`},
		{[]string{}, &core.SelectField{}, `""`},
		{[]string{"a", "b"}, &core.SelectField{}, `"b"`},

		// multiple
		{nil, &core.SelectField{MaxSelect: 2}, `[]`},
		{"", &core.SelectField{MaxSelect: 2}, `[]`},
		{123, &core.SelectField{MaxSelect: 2}, `["123"]`},
		{"a", &core.SelectField{MaxSelect: 2}, `["a"]`},
		{`["a"]`, &core.SelectField{MaxSelect: 2}, `["a"]`},
		{[]string{}, &core.SelectField{MaxSelect: 2}, `[]`},
		{[]string{"a", "b", "c"}, &core.SelectField{MaxSelect: 2}, `["a","b","c"]`},
	}

	for i, s := range scenarios {
		t.Run(fmt.Sprintf("%d_%#v_%v", i, s.raw, s.field.IsMultiple()), func(t *testing.T) {
			v, err := s.field.PrepareValue(record, s.raw)
			if err != nil {
				t.Fatal(err)
			}

			vRaw, err := json.Marshal(v)
			if err != nil {
				t.Fatal(err)
			}

			if string(vRaw) != s.expected {
				t.Fatalf("Expected %q, got %q", s.expected, vRaw)
			}
		})
	}
}

// TestSelectFieldDriverValue checks both the concrete driver value type
// (string vs types.JSONArray[string]) and its JSON serialization.
func TestSelectFieldDriverValue(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	scenarios := []struct {
		raw      any
		field    *core.SelectField
		expected string // JSON serialization of the driver value
	}{
		// single
		{nil, &core.SelectField{}, `""`},
		{"", &core.SelectField{}, `""`},
		{123, &core.SelectField{}, `"123"`},
		{"a", &core.SelectField{}, `"a"`},
		{`["a"]`, &core.SelectField{}, `"a"`},
		{[]string{}, &core.SelectField{}, `""`},
		{[]string{"a", "b"}, &core.SelectField{}, `"b"`},

		// multiple
		{nil, &core.SelectField{MaxSelect: 2}, `[]`},
		{"", &core.SelectField{MaxSelect: 2}, `[]`},
		{123, &core.SelectField{MaxSelect: 2}, `["123"]`},
		{"a", &core.SelectField{MaxSelect: 2}, `["a"]`},
		{`["a"]`, &core.SelectField{MaxSelect: 2}, `["a"]`},
		{[]string{}, &core.SelectField{MaxSelect: 2}, `[]`},
		{[]string{"a", "b", "c"}, &core.SelectField{MaxSelect: 2}, `["a","b","c"]`},
	}

	for i, s := range scenarios {
		t.Run(fmt.Sprintf("%d_%#v_%v", i, s.raw, s.field.IsMultiple()), func(t *testing.T) {
			record := core.NewRecord(core.NewBaseCollection("test"))
			record.SetRaw(s.field.GetName(), s.raw)

			v, err := s.field.DriverValue(record)
			if err != nil {
				t.Fatal(err)
			}

			if s.field.IsMultiple() {
				_, ok := v.(types.JSONArray[string])
				if !ok {
					t.Fatalf("Expected types.JSONArray value, got %T", v)
				}
			} else {
				_, ok := v.(string)
				if !ok {
					t.Fatalf("Expected string value, got %T", v)
				}
			}

			vRaw, err := json.Marshal(v)
			if err != nil {
				t.Fatal(err)
			}

			if string(vRaw) != s.expected {
				t.Fatalf("Expected %q, got %q", s.expected, vRaw)
			}
		})
	}
}
+
// TestSelectFieldValidateValue checks the record value validation:
// required/empty handling, membership in the predefined Values list
// and the MaxSelect bound (after deduplication).
func TestSelectFieldValidateValue(t *testing.T) {
	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	collection := core.NewBaseCollection("test_collection")

	values := []string{"a", "b", "c"}

	scenarios := []struct {
		name        string
		field       *core.SelectField
		record      func() *core.Record
		expectError bool
	}{
		// single
		{
			"[single] zero field value (not required)",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 1},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", "")
				return record
			},
			false,
		},
		{
			"[single] zero field value (required)",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 1, Required: true},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", "")
				return record
			},
			true,
		},
		{
			"[single] unknown value",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 1},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", "unknown")
				return record
			},
			true,
		},
		{
			"[single] known value",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 1},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", "a")
				return record
			},
			false,
		},
		{
			"[single] > MaxSelect",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 1},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []string{"a", "b"})
				return record
			},
			true,
		},

		// multiple
		{
			"[multiple] zero field value (not required)",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 2},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []string{})
				return record
			},
			false,
		},
		{
			"[multiple] zero field value (required)",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 2, Required: true},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []string{})
				return record
			},
			true,
		},
		{
			"[multiple] unknown value",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 2},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []string{"a", "unknown"})
				return record
			},
			true,
		},
		{
			"[multiple] known value",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 2},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []string{"a", "b"})
				return record
			},
			false,
		},
		{
			"[multiple] > MaxSelect",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 2},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []string{"a", "b", "c"})
				return record
			},
			true,
		},
		{
			// duplicates are deduplicated before the MaxSelect check,
			// so 4 raw values collapse to 2 unique ones and pass
			"[multiple] > MaxSelect (duplicated values)",
			&core.SelectField{Name: "test", Values: values, MaxSelect: 2},
			func() *core.Record {
				record := core.NewRecord(collection)
				record.SetRaw("test", []string{"a", "b", "b", "a"})
				return record
			},
			false,
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			err := s.field.ValidateValue(context.Background(), app, s.record())

			hasErr := err != nil
			if hasErr != s.expectError {
				t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
			}
		})
	}
}
+
// TestSelectFieldValidateSettings checks the field settings validation:
// Values is required and MaxSelect must not exceed the Values length.
func TestSelectFieldValidateSettings(t *testing.T) {
	testDefaultFieldIdValidation(t, core.FieldTypeSelect)
	testDefaultFieldNameValidation(t, core.FieldTypeSelect)

	app, _ := tests.NewTestApp()
	defer app.Cleanup()

	scenarios := []struct {
		name         string
		field        func() *core.SelectField
		expectErrors []string
	}{
		{
			"zero minimal",
			func() *core.SelectField {
				return &core.SelectField{
					Id:   "test",
					Name: "test",
				}
			},
			[]string{"values"},
		},
		{
			"MaxSelect > Values length",
			func() *core.SelectField {
				return &core.SelectField{
					Id:        "test",
					Name:      "test",
					Values:    []string{"a", "b"},
					MaxSelect: 3,
				}
			},
			[]string{"maxSelect"},
		},
		{
			"MaxSelect <= Values length",
			func() *core.SelectField {
				return &core.SelectField{
					Id:        "test",
					Name:      "test",
					Values:    []string{"a", "b"},
					MaxSelect: 2,
				}
			},
			[]string{},
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			field := s.field()

			collection := core.NewBaseCollection("test_collection")
			collection.Fields.Add(field)

			errs := field.ValidateSettings(context.Background(), app, collection)

			tests.TestValidationErrors(t, errs, s.expectErrors)
		})
	}
}
+
// TestSelectFieldFindSetter checks the setter resolution for the plain
// field name and the "+name"/"name+"/"name-" modifier keys.
// Each resolved setter is exercised against a record preloaded with
// the options []string{"c", "d"}.
func TestSelectFieldFindSetter(t *testing.T) {
	values := []string{"a", "b", "c", "d"}

	scenarios := []struct {
		name      string
		key       string
		value     any
		field     *core.SelectField
		hasSetter bool
		expected  string // JSON serialization of the resulting field value
	}{
		{
			"no match",
			"example",
			"b",
			&core.SelectField{Name: "test", MaxSelect: 1, Values: values},
			false,
			"",
		},
		{
			"exact match (single)",
			"test",
			"b",
			&core.SelectField{Name: "test", MaxSelect: 1, Values: values},
			true,
			`"b"`,
		},
		{
			"exact match (multiple)",
			"test",
			[]string{"a", "b"},
			&core.SelectField{Name: "test", MaxSelect: 2, Values: values},
			true,
			`["a","b"]`,
		},
		{
			"append (single)",
			"test+",
			"b",
			&core.SelectField{Name: "test", MaxSelect: 1, Values: values},
			true,
			`"b"`,
		},
		{
			"append (multiple)",
			"test+",
			[]string{"a"},
			&core.SelectField{Name: "test", MaxSelect: 2, Values: values},
			true,
			`["c","d","a"]`,
		},
		{
			"prepend (single)",
			"+test",
			"b",
			&core.SelectField{Name: "test", MaxSelect: 1, Values: values},
			true,
			`"d"`, // the last of the existing values
		},
		{
			"prepend (multiple)",
			"+test",
			[]string{"a"},
			&core.SelectField{Name: "test", MaxSelect: 2, Values: values},
			true,
			`["a","c","d"]`,
		},
		{
			"subtract (single)",
			"test-",
			"d",
			&core.SelectField{Name: "test", MaxSelect: 1, Values: values},
			true,
			`"c"`,
		},
		{
			"subtract (multiple)",
			"test-",
			[]string{"unknown", "c"},
			&core.SelectField{Name: "test", MaxSelect: 2, Values: values},
			true,
			`["d"]`,
		},
	}

	for _, s := range scenarios {
		t.Run(s.name, func(t *testing.T) {
			collection := core.NewBaseCollection("test_collection")
			collection.Fields.Add(s.field)

			setter := s.field.FindSetter(s.key)

			hasSetter := setter != nil
			if hasSetter != s.hasSetter {
				t.Fatalf("Expected hasSetter %v, got %v", s.hasSetter, hasSetter)
			}

			if !hasSetter {
				return
			}

			record := core.NewRecord(collection)
			record.SetRaw(s.field.GetName(), []string{"c", "d"})

			setter(record, s.value)

			raw, err := json.Marshal(record.Get(s.field.GetName()))
			if err != nil {
				t.Fatal(err)
			}
			rawStr := string(raw)

			if rawStr != s.expected {
				t.Fatalf("Expected %q, got %q", s.expected, rawStr)
			}
		})
	}
}
diff --git a/core/field_test.go b/core/field_test.go
new file mode 100644
index 00000000..28bc46ec
--- /dev/null
+++ b/core/field_test.go
@@ -0,0 +1,261 @@
+package core_test
+
+import (
+ "context"
+ "strings"
+ "testing"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func testFieldBaseMethods(t *testing.T, fieldType string) {
+ factory, ok := core.Fields[fieldType]
+ if !ok {
+ t.Fatalf("Missing %q field factory", fieldType)
+ }
+
+ f := factory()
+ if f == nil {
+ t.Fatal("Expected non-nil Field instance")
+ }
+
+ t.Run("type", func(t *testing.T) {
+ if v := f.Type(); v != fieldType {
+ t.Fatalf("Expected type %q, got %q", fieldType, v)
+ }
+ })
+
+ t.Run("id", func(t *testing.T) {
+ testValues := []string{"new_id", ""}
+ for _, expected := range testValues {
+ f.SetId(expected)
+ if v := f.GetId(); v != expected {
+ t.Fatalf("Expected id %q, got %q", expected, v)
+ }
+ }
+ })
+
+ t.Run("name", func(t *testing.T) {
+ testValues := []string{"new_name", ""}
+ for _, expected := range testValues {
+ f.SetName(expected)
+ if v := f.GetName(); v != expected {
+ t.Fatalf("Expected name %q, got %q", expected, v)
+ }
+ }
+ })
+
+ t.Run("system", func(t *testing.T) {
+ testValues := []bool{false, true}
+ for _, expected := range testValues {
+ f.SetSystem(expected)
+ if v := f.GetSystem(); v != expected {
+ t.Fatalf("Expected system %v, got %v", expected, v)
+ }
+ }
+ })
+
+ t.Run("hidden", func(t *testing.T) {
+ testValues := []bool{false, true}
+ for _, expected := range testValues {
+ f.SetHidden(expected)
+ if v := f.GetHidden(); v != expected {
+ t.Fatalf("Expected hidden %v, got %v", expected, v)
+ }
+ }
+ })
+}
+
+func testDefaultFieldIdValidation(t *testing.T, fieldType string) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field func() core.Field
+ expectError bool
+ }{
+ {
+ "empty value",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ return f
+ },
+ true,
+ },
+ {
+ "invalid length",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetId(strings.Repeat("a", 256))
+ return f
+ },
+ true,
+ },
+ {
+ "valid length",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetId(strings.Repeat("a", 255))
+ return f
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run("[id] "+s.name, func(t *testing.T) {
+ errs, _ := s.field().ValidateSettings(context.Background(), app, collection).(validation.Errors)
+
+ hasErr := errs["id"] != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v", s.expectError, hasErr)
+ }
+ })
+ }
+}
+
+func testDefaultFieldNameValidation(t *testing.T, fieldType string) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field func() core.Field
+ expectError bool
+ }{
+ {
+ "empty value",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ return f
+ },
+ true,
+ },
+ {
+ "invalid length",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName(strings.Repeat("a", 256))
+ return f
+ },
+ true,
+ },
+ {
+ "valid length",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName(strings.Repeat("a", 255))
+ return f
+ },
+ false,
+ },
+ {
+ "invalid regex",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName("test(")
+ return f
+ },
+ true,
+ },
+ {
+ "valid regex",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName("test_123")
+ return f
+ },
+ false,
+ },
+ {
+ "_via_",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName("a_via_b")
+ return f
+ },
+ true,
+ },
+ {
+ "system reserved - null",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName("null")
+ return f
+ },
+ true,
+ },
+ {
+ "system reserved - false",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName("false")
+ return f
+ },
+ true,
+ },
+ {
+ "system reserved - true",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName("true")
+ return f
+ },
+ true,
+ },
+ {
+ "system reserved - _rowid_",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName("_rowid_")
+ return f
+ },
+ true,
+ },
+ {
+ "system reserved - expand",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName("expand")
+ return f
+ },
+ true,
+ },
+ {
+ "system reserved - collectionId",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName("collectionId")
+ return f
+ },
+ true,
+ },
+ {
+ "system reserved - collectionName",
+ func() core.Field {
+ f := core.Fields[fieldType]()
+ f.SetName("collectionName")
+ return f
+ },
+ true,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run("[name] "+s.name, func(t *testing.T) {
+ errs, _ := s.field().ValidateSettings(context.Background(), app, collection).(validation.Errors)
+
+ hasErr := errs["name"] != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v", s.expectError, hasErr)
+ }
+ })
+ }
+}
diff --git a/core/field_text.go b/core/field_text.go
new file mode 100644
index 00000000..c5aeb80c
--- /dev/null
+++ b/core/field_text.go
@@ -0,0 +1,315 @@
+package core
+
+import (
+ "context"
+ "fmt"
+ "regexp"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tools/security"
+ "github.com/spf13/cast"
+)
+
+func init() {
+ Fields[FieldTypeText] = func() Field {
+ return &TextField{}
+ }
+}
+
+const FieldTypeText = "text"
+
+const autogenerateModifier = ":autogenerate"
+
+var (
+ _ Field = (*TextField)(nil)
+ _ SetterFinder = (*TextField)(nil)
+ _ RecordInterceptor = (*TextField)(nil)
+)
+
+// TextField defines "text" type field for storing any string value.
+//
+// The following additional setter keys are available:
+//
+// - "fieldName:autogenerate" - autogenerate field value if AutogeneratePattern is set. For example:
+//
+// record.Set("slug:autogenerate", "") // [random value]
+// record.Set("slug:autogenerate", "abc-") // abc-[random value]
+type TextField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // Min specifies the minimum required string characters.
+ //
+ // if zero value, no min limit is applied.
+ Min int `form:"min" json:"min"`
+
+ // Max specifies the maximum allowed string characters.
+ //
+ // If zero, a default limit of 5000 is applied.
+ Max int `form:"max" json:"max"`
+
+ // Pattern specifies an optional regex pattern to match against the field value.
+ //
+ // Leave it empty to skip the pattern check.
+ Pattern string `form:"pattern" json:"pattern"`
+
+ // AutogeneratePattern specifies an optional regex pattern that could
+ // be used to generate random string from it and set it automatically
+ // on record create if no explicit value is set or when the `:autogenerate` modifier is used.
+ //
+ // Note: the generated value still needs to satisfy min, max, pattern (if set)
+ AutogeneratePattern string `form:"autogeneratePattern" json:"autogeneratePattern"`
+
+ // Required will require the field value to be non-empty string.
+ Required bool `form:"required" json:"required"`
+
+ // PrimaryKey will mark the field as primary key.
+ //
+ // A single collection can have only 1 field marked as primary key.
+ PrimaryKey bool `form:"primaryKey" json:"primaryKey"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *TextField) Type() string {
+ return FieldTypeText
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *TextField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *TextField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *TextField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *TextField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *TextField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *TextField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *TextField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *TextField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *TextField) ColumnType(app App) string {
+ if f.PrimaryKey {
+ // note: the default is just a last resort fallback to avoid empty
+ // string values in case the record was inserted with raw sql and
+ // it is not actually used when operating with the db abstraction
+ return "TEXT PRIMARY KEY DEFAULT ('r'||lower(hex(randomblob(7)))) NOT NULL"
+ }
+
+ return "TEXT DEFAULT '' NOT NULL"
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *TextField) PrepareValue(record *Record, raw any) (any, error) {
+ return cast.ToString(raw), nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+func (f *TextField) ValidateValue(ctx context.Context, app App, record *Record) error {
+ newVal, ok := record.GetRaw(f.Name).(string)
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ // disallow PK change
+ if f.PrimaryKey && !record.IsNew() {
+ oldVal := record.LastSavedPK()
+ if oldVal != newVal {
+ return validation.NewError("validation_pk_change", "The record primary key cannot be changed.")
+ }
+ if oldVal != "" {
+ return nil // no need to further validate since the id can't be updated anyway
+ }
+ }
+
+ return f.ValidatePlainValue(newVal)
+}
+
+// ValidatePlainValue validates the provided string against the field options.
+func (f *TextField) ValidatePlainValue(value string) error {
+ if f.Required || f.PrimaryKey {
+ if err := validation.Required.Validate(value); err != nil {
+ return err
+ }
+ }
+
+ if value == "" {
+ return nil // nothing to check
+ }
+
+ // note: cast to []rune to count multi-byte chars as one
+ length := len([]rune(value))
+
+ if f.Min > 0 && length < f.Min {
+ return validation.NewError("validation_min_text_constraint", fmt.Sprintf("Must be at least %d character(s)", f.Min)).
+ SetParams(map[string]any{"min": f.Min})
+ }
+
+ max := f.Max
+ if max == 0 {
+ max = 5000 // default max limit when none is explicitly set
+ }
+
+ if max > 0 && length > max {
+ return validation.NewError("validation_max_text_constraint", fmt.Sprintf("Must be less than %d character(s)", max)).
+ SetParams(map[string]any{"max": max})
+ }
+
+ if f.Pattern != "" {
+ match, _ := regexp.MatchString(f.Pattern, value)
+ if !match {
+ return validation.NewError("validation_invalid_format", "Invalid value format")
+ }
+ }
+
+ return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *TextField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name,
+ validation.By(DefaultFieldNameValidationRule),
+ validation.When(f.PrimaryKey, validation.In(idColumn).Error(`The primary key must be named "id".`)),
+ ),
+ validation.Field(&f.PrimaryKey, validation.By(f.checkOtherFieldsForPK(collection))),
+ validation.Field(&f.Min, validation.Min(0)),
+ validation.Field(&f.Max, validation.Min(f.Min)),
+ validation.Field(&f.Pattern, validation.By(validators.IsRegex)),
+ validation.Field(&f.Hidden, validation.When(f.PrimaryKey, validation.Empty)),
+ validation.Field(&f.Required, validation.When(f.PrimaryKey, validation.Required)),
+ validation.Field(&f.AutogeneratePattern, validation.By(validators.IsRegex), validation.By(f.checkAutogeneratePattern)),
+ )
+}
+
+func (f *TextField) checkOtherFieldsForPK(collection *Collection) validation.RuleFunc {
+ return func(value any) error {
+ isPK, _ := value.(bool)
+ if !isPK {
+ return nil // not a pk
+ }
+
+ pkCount := 0 // number of text fields flagged as primary key
+ for _, other := range collection.Fields {
+ text, ok := other.(*TextField)
+ if ok && text.PrimaryKey {
+ pkCount++
+ }
+ if pkCount > 1 {
+ return validation.NewError("validation_unsupported_composite_pk", "Composite PKs are not supported and the collection must have only 1 PK.")
+ }
+ }
+
+ return nil
+ }
+}
+
+func (f *TextField) checkAutogeneratePattern(value any) error {
+ v, _ := value.(string)
+ if v == "" {
+ return nil // nothing to check
+ }
+
+ // run 10 tests to check for conflicts with the other field validators
+ for i := 0; i < 10; i++ {
+ generated, err := security.RandomStringByRegex(v)
+ if err != nil {
+ return validation.NewError("validation_invalid_autogenerate_pattern", err.Error())
+ }
+
+ // (loosely) check whether the generated pattern satisfies the current field settings
+ if err := f.ValidatePlainValue(generated); err != nil {
+ return validation.NewError(
+ "validation_invalid_autogenerate_pattern_value",
+ fmt.Sprintf("The provided autogenerate pattern could produce invalid field values, ex.: %q", generated),
+ )
+ }
+ }
+
+ return nil
+}
+
+// Intercept implements the [RecordInterceptor] interface.
+func (f *TextField) Intercept(
+ ctx context.Context,
+ app App,
+ record *Record,
+ actionName string,
+ actionFunc func() error,
+) error {
+ // autogenerate the field value for new records that don't have an explicit one set
+ switch actionName {
+ case InterceptorActionValidate, InterceptorActionCreate: // both standalone validation and create trigger autogeneration
+ if f.AutogeneratePattern != "" && f.hasZeroValue(record) && record.IsNew() {
+ v, err := security.RandomStringByRegex(f.AutogeneratePattern)
+ if err != nil {
+ return fmt.Errorf("failed to autogenerate %q value: %w", f.Name, err)
+ }
+ record.SetRaw(f.Name, v)
+ }
+ }
+
+ return actionFunc()
+}
+
+func (f *TextField) hasZeroValue(record *Record) bool {
+ raw, _ := record.GetRaw(f.Name).(string) // non-string raw values are treated as zero/empty
+ return raw == ""
+}
+
+// FindSetter implements the [SetterFinder] interface.
+func (f *TextField) FindSetter(key string) SetterFunc {
+ if key == f.Name {
+ return func(record *Record, raw any) {
+ record.SetRaw(f.Name, cast.ToString(raw))
+ }
+ }
+
+ if key == f.Name+autogenerateModifier {
+ return func(record *Record, raw any) {
+ // the submitted raw value acts as a prefix of the generated string
+ prefix := cast.ToString(raw)
+ if f.AutogeneratePattern != "" {
+ suffix, _ := security.RandomStringByRegex(f.AutogeneratePattern)
+ prefix += suffix
+ }
+ record.SetRaw(f.Name, prefix)
+ }
+ }
+
+ return nil
+}
diff --git a/core/field_text_test.go b/core/field_text_test.go
new file mode 100644
index 00000000..c05d26b2
--- /dev/null
+++ b/core/field_text_test.go
@@ -0,0 +1,536 @@
+package core_test
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestTextFieldBaseMethods(t *testing.T) {
+ testFieldBaseMethods(t, core.FieldTypeText)
+}
+
+func TestTextFieldColumnType(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.TextField{}
+
+ expected := "TEXT DEFAULT '' NOT NULL"
+
+ if v := f.ColumnType(app); v != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, v)
+ }
+}
+
+func TestTextFieldPrepareValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.TextField{}
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ scenarios := []struct {
+ raw any
+ expected string
+ }{
+ {"", ""},
+ {"test", "test"},
+ {false, "false"},
+ {true, "true"},
+ {123.456, "123.456"},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.raw), func(t *testing.T) {
+ v, err := f.PrepareValue(record, s.raw)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ vStr, ok := v.(string)
+ if !ok {
+ t.Fatalf("Expected string instance, got %T", v)
+ }
+
+ if vStr != s.expected {
+ t.Fatalf("Expected %q, got %q", s.expected, v)
+ }
+ })
+ }
+}
+
+func TestTextFieldValidateValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field *core.TextField
+ record func() *core.Record
+ expectError bool
+ }{
+ {
+ "invalid raw value",
+ &core.TextField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123)
+ return record
+ },
+ true,
+ },
+ {
+ "zero field value (not required)",
+ &core.TextField{Name: "test", Pattern: `\d+`, Min: 10, Max: 100}, // other fields validators should be ignored
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "")
+ return record
+ },
+ false,
+ },
+ {
+ "zero field value (required)",
+ &core.TextField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "")
+ return record
+ },
+ true,
+ },
+ {
+ "non-zero field value (required)",
+ &core.TextField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "abc")
+ return record
+ },
+ false,
+ },
+ {
+ "zero field value (primaryKey)",
+ &core.TextField{Name: "test", PrimaryKey: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "")
+ return record
+ },
+ true,
+ },
+ {
+ "non-zero field value (primaryKey)",
+ &core.TextField{Name: "test", PrimaryKey: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "abc")
+ return record
+ },
+ false,
+ },
+ {
+ "< min",
+ &core.TextField{Name: "test", Min: 4},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "абв") // multi-byte
+ return record
+ },
+ true,
+ },
+ {
+ ">= min",
+ &core.TextField{Name: "test", Min: 3},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "абв") // multi-byte
+ return record
+ },
+ false,
+ },
+ {
+ "> default max",
+ &core.TextField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", strings.Repeat("a", 5001))
+ return record
+ },
+ true,
+ },
+ {
+ "<= default max",
+ &core.TextField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", strings.Repeat("a", 500))
+ return record
+ },
+ false,
+ },
+ {
+ "> max",
+ &core.TextField{Name: "test", Max: 2},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "абв") // multi-byte
+ return record
+ },
+ true,
+ },
+ {
+ "<= max",
+ &core.TextField{Name: "test", Min: 3},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "абв") // multi-byte
+ return record
+ },
+ false,
+ },
+ {
+ "mismatched pattern",
+ &core.TextField{Name: "test", Pattern: `\d+`},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "abc")
+ return record
+ },
+ true,
+ },
+ {
+ "matched pattern",
+ &core.TextField{Name: "test", Pattern: `\d+`},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "123")
+ return record
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ err := s.field.ValidateValue(context.Background(), app, s.record())
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestTextFieldValidateSettings(t *testing.T) {
+ testDefaultFieldIdValidation(t, core.FieldTypeText)
+ testDefaultFieldNameValidation(t, core.FieldTypeText)
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ name string
+ field func() *core.TextField
+ expectErrors []string
+ }{
+ {
+ "zero minimal",
+ func() *core.TextField {
+ return &core.TextField{
+ Id: "test",
+ Name: "test",
+ }
+ },
+ []string{},
+ },
+ {
+ "primaryKey without required",
+ func() *core.TextField {
+ return &core.TextField{
+ Id: "test",
+ Name: "id",
+ PrimaryKey: true,
+ }
+ },
+ []string{"required"},
+ },
+ {
+ "primaryKey with hidden",
+ func() *core.TextField {
+ return &core.TextField{
+ Id: "test",
+ Name: "id",
+ Required: true,
+ PrimaryKey: true,
+ Hidden: true,
+ }
+ },
+ []string{"hidden"},
+ },
+ {
+ "primaryKey with name != id",
+ func() *core.TextField {
+ return &core.TextField{
+ Id: "test",
+ Name: "test",
+ PrimaryKey: true,
+ Required: true,
+ }
+ },
+ []string{"name"},
+ },
+ {
+ "multiple primaryKey fields",
+ func() *core.TextField {
+ return &core.TextField{
+ Id: "test2",
+ Name: "id",
+ PrimaryKey: true,
+ Required: true,
+ }
+ },
+ []string{"primaryKey"},
+ },
+ {
+ "invalid pattern",
+ func() *core.TextField {
+ return &core.TextField{
+ Id: "test2",
+ Name: "id",
+ Pattern: `(invalid`,
+ }
+ },
+ []string{"pattern"},
+ },
+ {
+ "valid pattern",
+ func() *core.TextField {
+ return &core.TextField{
+ Id: "test2",
+ Name: "id",
+ Pattern: `\d+`,
+ }
+ },
+ []string{},
+ },
+ {
+ "invalid autogeneratePattern",
+ func() *core.TextField {
+ return &core.TextField{
+ Id: "test2",
+ Name: "id",
+ AutogeneratePattern: `(invalid`,
+ }
+ },
+ []string{"autogeneratePattern"},
+ },
+ {
+ "valid autogeneratePattern",
+ func() *core.TextField {
+ return &core.TextField{
+ Id: "test2",
+ Name: "id",
+ AutogeneratePattern: `[a-z]+`,
+ }
+ },
+ []string{},
+ },
+ {
+ "conflicting pattern and autogeneratePattern",
+ func() *core.TextField {
+ return &core.TextField{
+ Id: "test2",
+ Name: "id",
+ Pattern: `\d+`,
+ AutogeneratePattern: `[a-z]+`,
+ }
+ },
+ []string{"autogeneratePattern"},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ field := s.field()
+
+ collection := core.NewBaseCollection("test_collection")
+ collection.Fields.GetByName("id").SetId("test") // set a dummy known id so that it can be replaced
+ collection.Fields.Add(field)
+
+ errs := field.ValidateSettings(context.Background(), app, collection)
+
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
+
+func TestTextFieldAutogenerate(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ actionName string
+ field *core.TextField
+ record func() *core.Record
+ expected string
+ }{
+ {
+ "non-matching action",
+ core.InterceptorActionUpdate,
+ &core.TextField{Name: "test", AutogeneratePattern: "abc"},
+ func() *core.Record {
+ return core.NewRecord(collection)
+ },
+ "",
+ },
+ {
+ "matching action (create)",
+ core.InterceptorActionCreate,
+ &core.TextField{Name: "test", AutogeneratePattern: "abc"},
+ func() *core.Record {
+ return core.NewRecord(collection)
+ },
+ "abc",
+ },
+ {
+ "matching action (validate)",
+ core.InterceptorActionValidate,
+ &core.TextField{Name: "test", AutogeneratePattern: "abc"},
+ func() *core.Record {
+ return core.NewRecord(collection)
+ },
+ "abc",
+ },
+ {
+ "existing non-zero value",
+ core.InterceptorActionCreate,
+ &core.TextField{Name: "test", AutogeneratePattern: "abc"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "123")
+ return record
+ },
+ "123",
+ },
+ {
+ "non-new record",
+ core.InterceptorActionValidate,
+ &core.TextField{Name: "test", AutogeneratePattern: "abc"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.Id = "test"
+ record.PostScan()
+ return record
+ },
+ "",
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ actionCalls := 0
+ record := s.record()
+
+ err := s.field.Intercept(context.Background(), app, record, s.actionName, func() error {
+ actionCalls++
+ return nil
+ })
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if actionCalls != 1 {
+ t.Fatalf("Expected actionCalls %d, got %d", 1, actionCalls)
+ }
+
+ v := record.GetString(s.field.GetName())
+ if v != s.expected {
+ t.Fatalf("Expected value %q, got %q", s.expected, v)
+ }
+ })
+ }
+}
+
+func TestTextFieldFindSetter(t *testing.T) {
+ scenarios := []struct {
+ name string
+ key string
+ value any
+ field *core.TextField
+ hasSetter bool
+ expected string
+ }{
+ {
+ "no match",
+ "example",
+ "abc",
+ &core.TextField{Name: "test", AutogeneratePattern: "test"},
+ false,
+ "",
+ },
+ {
+ "exact match",
+ "test",
+ "abc",
+ &core.TextField{Name: "test", AutogeneratePattern: "test"},
+ true,
+ "abc",
+ },
+ {
+ "autogenerate modifier",
+ "test:autogenerate",
+ "abc",
+ &core.TextField{Name: "test", AutogeneratePattern: "test"},
+ true,
+ "abctest",
+ },
+ {
+ "autogenerate modifier without AutogeneratePattern option",
+ "test:autogenerate",
+ "abc",
+ &core.TextField{Name: "test"},
+ true,
+ "abc",
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ collection := core.NewBaseCollection("test_collection")
+ collection.Fields.Add(s.field)
+
+ setter := s.field.FindSetter(s.key)
+
+ hasSetter := setter != nil
+ if hasSetter != s.hasSetter {
+ t.Fatalf("Expected hasSetter %v, got %v", s.hasSetter, hasSetter)
+ }
+
+ if !hasSetter {
+ return
+ }
+
+ record := core.NewRecord(collection)
+
+ setter(record, s.value)
+
+ result := record.GetString(s.field.Name)
+
+ if result != s.expected {
+ t.Fatalf("Expected %q, got %q", s.expected, result)
+ }
+ })
+ }
+}
diff --git a/core/field_url.go b/core/field_url.go
new file mode 100644
index 00000000..7066f78e
--- /dev/null
+++ b/core/field_url.go
@@ -0,0 +1,154 @@
+package core
+
+import (
+ "context"
+ "net/url"
+ "slices"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/go-ozzo/ozzo-validation/v4/is"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/spf13/cast"
+)
+
+func init() {
+ Fields[FieldTypeURL] = func() Field {
+ return &URLField{}
+ }
+}
+
+const FieldTypeURL = "url"
+
+var _ Field = (*URLField)(nil)
+
+// URLField defines "url" type field for storing URL string value.
+type URLField struct {
+ Id string `form:"id" json:"id"`
+ Name string `form:"name" json:"name"`
+ System bool `form:"system" json:"system"`
+ Hidden bool `form:"hidden" json:"hidden"`
+ Presentable bool `form:"presentable" json:"presentable"`
+
+ // ---
+
+ // ExceptDomains will require the URL domain to NOT be included in the listed ones.
+ //
+ // This validator can be set only if OnlyDomains is empty.
+ ExceptDomains []string `form:"exceptDomains" json:"exceptDomains"`
+
+ // OnlyDomains will require the URL domain to be included in the listed ones.
+ //
+ // This validator can be set only if ExceptDomains is empty.
+ OnlyDomains []string `form:"onlyDomains" json:"onlyDomains"`
+
+ // Required will require the field value to be non-empty URL string.
+ Required bool `form:"required" json:"required"`
+}
+
+// Type implements [Field.Type] interface method.
+func (f *URLField) Type() string {
+ return FieldTypeURL
+}
+
+// GetId implements [Field.GetId] interface method.
+func (f *URLField) GetId() string {
+ return f.Id
+}
+
+// SetId implements [Field.SetId] interface method.
+func (f *URLField) SetId(id string) {
+ f.Id = id
+}
+
+// GetName implements [Field.GetName] interface method.
+func (f *URLField) GetName() string {
+ return f.Name
+}
+
+// SetName implements [Field.SetName] interface method.
+func (f *URLField) SetName(name string) {
+ f.Name = name
+}
+
+// GetSystem implements [Field.GetSystem] interface method.
+func (f *URLField) GetSystem() bool {
+ return f.System
+}
+
+// SetSystem implements [Field.SetSystem] interface method.
+func (f *URLField) SetSystem(system bool) {
+ f.System = system
+}
+
+// GetHidden implements [Field.GetHidden] interface method.
+func (f *URLField) GetHidden() bool {
+ return f.Hidden
+}
+
+// SetHidden implements [Field.SetHidden] interface method.
+func (f *URLField) SetHidden(hidden bool) {
+ f.Hidden = hidden
+}
+
+// ColumnType implements [Field.ColumnType] interface method.
+func (f *URLField) ColumnType(app App) string {
+ return "TEXT DEFAULT '' NOT NULL"
+}
+
+// PrepareValue implements [Field.PrepareValue] interface method.
+func (f *URLField) PrepareValue(record *Record, raw any) (any, error) {
+ return cast.ToString(raw), nil
+}
+
+// ValidateValue implements [Field.ValidateValue] interface method.
+func (f *URLField) ValidateValue(ctx context.Context, app App, record *Record) error {
+ val, ok := record.GetRaw(f.Name).(string)
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ if f.Required {
+ if err := validation.Required.Validate(val); err != nil {
+ return err
+ }
+ }
+
+ if val == "" {
+ return nil // nothing to check
+ }
+
+ if is.URL.Validate(val) != nil {
+ return validation.NewError("validation_invalid_url", "Must be a valid url")
+ }
+ // extract host/domain (guard the parse error to avoid a nil deref if is.URL and url.Parse disagree)
+ u, err := url.Parse(val)
+ if err != nil {
+ return validation.NewError("validation_invalid_url", "Must be a valid url")
+ }
+ // only domains check (note: u.Host includes the port, if present)
+ if len(f.OnlyDomains) > 0 && !slices.Contains(f.OnlyDomains, u.Host) {
+ return validation.NewError("validation_url_domain_not_allowed", "Url domain is not allowed")
+ }
+ // except domains check
+ if len(f.ExceptDomains) > 0 && slices.Contains(f.ExceptDomains, u.Host) {
+ return validation.NewError("validation_url_domain_not_allowed", "Url domain is not allowed")
+ }
+
+ return nil
+}
+
+// ValidateSettings implements [Field.ValidateSettings] interface method.
+func (f *URLField) ValidateSettings(ctx context.Context, app App, collection *Collection) error {
+ return validation.ValidateStruct(f,
+ validation.Field(&f.Id, validation.By(DefaultFieldIdValidationRule)),
+ validation.Field(&f.Name, validation.By(DefaultFieldNameValidationRule)),
+ validation.Field(
+ &f.ExceptDomains,
+ validation.When(len(f.OnlyDomains) > 0, validation.Empty).Else(validation.Each(is.Domain)),
+ ),
+ validation.Field(
+ &f.OnlyDomains,
+ validation.When(len(f.ExceptDomains) > 0, validation.Empty).Else(validation.Each(is.Domain)),
+ ),
+ )
+}
diff --git a/core/field_url_test.go b/core/field_url_test.go
new file mode 100644
index 00000000..0b2db39d
--- /dev/null
+++ b/core/field_url_test.go
@@ -0,0 +1,271 @@
+package core_test
+
+import (
+ "context"
+ "fmt"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestURLFieldBaseMethods(t *testing.T) {
+ testFieldBaseMethods(t, core.FieldTypeURL)
+}
+
+func TestURLFieldColumnType(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.URLField{}
+
+ expected := "TEXT DEFAULT '' NOT NULL"
+
+ if v := f.ColumnType(app); v != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, v)
+ }
+}
+
+func TestURLFieldPrepareValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f := &core.URLField{}
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ scenarios := []struct {
+ raw any
+ expected string
+ }{
+ {"", ""},
+ {"test", "test"},
+ {false, "false"},
+ {true, "true"},
+ {123.456, "123.456"},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.raw), func(t *testing.T) {
+ v, err := f.PrepareValue(record, s.raw)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ vStr, ok := v.(string)
+ if !ok {
+ t.Fatalf("Expected string instance, got %T", v)
+ }
+
+ if vStr != s.expected {
+ t.Fatalf("Expected %q, got %q", s.expected, v)
+ }
+ })
+ }
+}
+
+func TestURLFieldValidateValue(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field *core.URLField
+ record func() *core.Record
+ expectError bool
+ }{
+ {
+ "invalid raw value",
+ &core.URLField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", 123)
+ return record
+ },
+ true,
+ },
+ {
+ "zero field value (not required)",
+ &core.URLField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "")
+ return record
+ },
+ false,
+ },
+ {
+ "zero field value (required)",
+ &core.URLField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "")
+ return record
+ },
+ true,
+ },
+ {
+ "non-zero field value (required)",
+ &core.URLField{Name: "test", Required: true},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "https://example.com")
+ return record
+ },
+ false,
+ },
+ {
+ "invalid url",
+ &core.URLField{Name: "test"},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "invalid")
+ return record
+ },
+ true,
+ },
+ {
+ "failed onlyDomains",
+ &core.URLField{Name: "test", OnlyDomains: []string{"example.org", "example.net"}},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "https://example.com")
+ return record
+ },
+ true,
+ },
+ {
+ "success onlyDomains",
+ &core.URLField{Name: "test", OnlyDomains: []string{"example.org", "example.com"}},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "https://example.com")
+ return record
+ },
+ false,
+ },
+ {
+ "failed exceptDomains",
+ &core.URLField{Name: "test", ExceptDomains: []string{"example.org", "example.com"}},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "https://example.com")
+ return record
+ },
+ true,
+ },
+ {
+ "success exceptDomains",
+ &core.URLField{Name: "test", ExceptDomains: []string{"example.org", "example.net"}},
+ func() *core.Record {
+ record := core.NewRecord(collection)
+ record.SetRaw("test", "https://example.com")
+ return record
+ },
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ err := s.field.ValidateValue(context.Background(), app, s.record())
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestURLFieldValidateSettings(t *testing.T) {
+ testDefaultFieldIdValidation(t, core.FieldTypeURL)
+ testDefaultFieldNameValidation(t, core.FieldTypeURL)
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test_collection")
+
+ scenarios := []struct {
+ name string
+ field func() *core.URLField
+ expectErrors []string
+ }{
+ {
+ "zero minimal",
+ func() *core.URLField {
+ return &core.URLField{
+ Id: "test",
+ Name: "test",
+ }
+ },
+ []string{},
+ },
+ {
+ "both onlyDomains and exceptDomains",
+ func() *core.URLField {
+ return &core.URLField{
+ Id: "test",
+ Name: "test",
+ OnlyDomains: []string{"example.com"},
+ ExceptDomains: []string{"example.org"},
+ }
+ },
+ []string{"onlyDomains", "exceptDomains"},
+ },
+ {
+ "invalid onlyDomains",
+ func() *core.URLField {
+ return &core.URLField{
+ Id: "test",
+ Name: "test",
+ OnlyDomains: []string{"example.com", "invalid"},
+ }
+ },
+ []string{"onlyDomains"},
+ },
+ {
+ "valid onlyDomains",
+ func() *core.URLField {
+ return &core.URLField{
+ Id: "test",
+ Name: "test",
+ OnlyDomains: []string{"example.com", "example.org"},
+ }
+ },
+ []string{},
+ },
+ {
+ "invalid exceptDomains",
+ func() *core.URLField {
+ return &core.URLField{
+ Id: "test",
+ Name: "test",
+ ExceptDomains: []string{"example.com", "invalid"},
+ }
+ },
+ []string{"exceptDomains"},
+ },
+ {
+ "valid exceptDomains",
+ func() *core.URLField {
+ return &core.URLField{
+ Id: "test",
+ Name: "test",
+ ExceptDomains: []string{"example.com", "example.org"},
+ }
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ errs := s.field().ValidateSettings(context.Background(), app, collection)
+
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
diff --git a/core/fields_list.go b/core/fields_list.go
new file mode 100644
index 00000000..e5f09b86
--- /dev/null
+++ b/core/fields_list.go
@@ -0,0 +1,261 @@
+package core
+
+import (
+ "database/sql/driver"
+ "encoding/json"
+ "fmt"
+
+ "github.com/pocketbase/pocketbase/tools/security"
+)
+
+// NewFieldsList creates a new FieldsList instance with the provided fields.
+func NewFieldsList(fields ...Field) FieldsList {
+ s := make(FieldsList, 0, len(fields))
+
+ for _, f := range fields {
+ s.Add(f)
+ }
+
+ return s
+}
+
+// FieldsList defines a Collection slice of fields.
+type FieldsList []Field
+
+// Clone creates a deep clone of the current list.
+func (s FieldsList) Clone() (FieldsList, error) {
+ copyRaw, err := json.Marshal(s)
+ if err != nil {
+ return nil, err
+ }
+
+ result := FieldsList{}
+ if err := json.Unmarshal(copyRaw, &result); err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// FieldNames returns a slice with the name of all list fields.
+func (s FieldsList) FieldNames() []string {
+ result := make([]string, len(s))
+
+ for i, field := range s {
+ result[i] = field.GetName()
+ }
+
+ return result
+}
+
+// AsMap returns a map with all registered list field.
+// The returned map is indexed with each field name.
+func (s FieldsList) AsMap() map[string]Field {
+ result := make(map[string]Field, len(s))
+
+ for _, field := range s {
+ result[field.GetName()] = field
+ }
+
+ return result
+}
+
+// GetById returns a single field by its id.
+func (s FieldsList) GetById(fieldId string) Field {
+ for _, field := range s {
+ if field.GetId() == fieldId {
+ return field
+ }
+ }
+ return nil
+}
+
+// GetByName returns a single field by its name.
+func (s FieldsList) GetByName(fieldName string) Field {
+ for _, field := range s {
+ if field.GetName() == fieldName {
+ return field
+ }
+ }
+ return nil
+}
+
+// RemoveById removes a single field by its id.
+//
+// This method does nothing if field with the specified id doesn't exist.
+func (s *FieldsList) RemoveById(fieldId string) {
+ fields := *s
+ for i, field := range fields {
+ if field.GetId() == fieldId {
+ *s = append(fields[:i], fields[i+1:]...)
+ return
+ }
+ }
+}
+
+// RemoveByName removes a single field by its name.
+//
+// This method does nothing if field with the specified name doesn't exist.
+func (s *FieldsList) RemoveByName(fieldName string) {
+ fields := *s
+ for i, field := range fields {
+ if field.GetName() == fieldName {
+ *s = append(fields[:i], fields[i+1:]...)
+ return
+ }
+ }
+}
+
+// Add adds one or more fields to the current list.
+//
+// If any of the new fields doesn't have an id it will try to set a
+// default one based on its type and name.
+//
+// If the list already has a field with the same id,
+// then the existing field is replaced with the new one.
+//
+// Otherwise the new field is appended after the other list fields.
+func (s *FieldsList) Add(fields ...Field) {
+ for _, f := range fields {
+ s.add(f)
+ }
+}
+
+func (s *FieldsList) add(newField Field) {
+ newFieldId := newField.GetId()
+
+ // set default id
+ if newFieldId == "" {
+ if newField.GetName() != "" {
+ newFieldId = newField.Type() + crc32Checksum(newField.GetName())
+ } else {
+ newFieldId = newField.Type() + security.RandomString(5)
+ }
+ newField.SetId(newFieldId)
+ }
+
+ fields := *s
+
+ for i, field := range fields {
+ // replace existing
+ if newFieldId != "" && field.GetId() == newFieldId {
+ (*s)[i] = newField
+ return
+ }
+ }
+
+ // add new field
+ *s = append(fields, newField)
+}
+
+// String returns the string representation of the current list.
+func (s FieldsList) String() string {
+ v, _ := json.Marshal(s)
+ return string(v)
+}
+
+type onlyFieldType struct {
+ Type string `json:"type"`
+}
+
+type fieldWithType struct {
+ Field
+ Type string `json:"type"`
+}
+
+func (fwt *fieldWithType) UnmarshalJSON(data []byte) error {
+ // extract the field type to init a blank factory
+ t := &onlyFieldType{}
+ if err := json.Unmarshal(data, t); err != nil {
+ return fmt.Errorf("failed to unmarshal field type: %w", err)
+ }
+
+ factory, ok := Fields[t.Type]
+ if !ok {
+ return fmt.Errorf("missing or unknown field type in %s", data)
+ }
+
+ fwt.Type = t.Type
+ fwt.Field = factory()
+
+ // unmarshal the rest of the data into the created field
+ if err := json.Unmarshal(data, fwt.Field); err != nil {
+ return fmt.Errorf("failed to unmarshal field: %w", err)
+ }
+
+ return nil
+}
+
+// UnmarshalJSON implements [json.Unmarshaler] and
+// loads the provided json data into the current FieldsList.
+func (s *FieldsList) UnmarshalJSON(data []byte) error {
+ fwts := []fieldWithType{}
+
+ if err := json.Unmarshal(data, &fwts); err != nil {
+ return err
+ }
+
+ *s = []Field{} // reset
+
+ for _, fwt := range fwts {
+ s.Add(fwt.Field)
+ }
+
+ return nil
+}
+
+// MarshalJSON implements the [json.Marshaler] interface.
+func (s FieldsList) MarshalJSON() ([]byte, error) {
+ if s == nil {
+ s = []Field{} // always init to ensure that it is serialized as empty array
+ }
+
+ wrapper := make([]map[string]any, 0, len(s))
+
+ for _, f := range s {
+ // precompute the json into a map so that we can append the type to a flatten object
+ raw, err := json.Marshal(f)
+ if err != nil {
+ return nil, err
+ }
+
+ data := map[string]any{}
+ if err := json.Unmarshal(raw, &data); err != nil {
+ return nil, err
+ }
+ data["type"] = f.Type()
+
+ wrapper = append(wrapper, data)
+ }
+
+ return json.Marshal(wrapper)
+}
+
+// Value implements the [driver.Valuer] interface.
+func (s FieldsList) Value() (driver.Value, error) {
+ data, err := json.Marshal(s)
+
+ return string(data), err
+}
+
+// Scan implements [sql.Scanner] interface to scan the provided value
+// into the current FieldsList instance.
+func (s *FieldsList) Scan(value any) error {
+ var data []byte
+ switch v := value.(type) {
+ case nil:
+ // no cast needed
+ case []byte:
+ data = v
+ case string:
+ data = []byte(v)
+ default:
+ return fmt.Errorf("failed to unmarshal FieldsList value %q", value)
+ }
+
+ if len(data) == 0 {
+ data = []byte("[]")
+ }
+
+ return s.UnmarshalJSON(data)
+}
diff --git a/core/fields_list_test.go b/core/fields_list_test.go
new file mode 100644
index 00000000..08defbdd
--- /dev/null
+++ b/core/fields_list_test.go
@@ -0,0 +1,365 @@
+package core_test
+
+import (
+ "slices"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+)
+
+func TestNewFieldsList(t *testing.T) {
+ fields := core.NewFieldsList(
+ &core.TextField{Id: "id1", Name: "test1"},
+ &core.TextField{Name: "test2"},
+ &core.TextField{Id: "id1", Name: "test1_new"}, // should replace the original id1 field
+ )
+
+ if len(fields) != 2 {
+ t.Fatalf("Expected 2 fields, got %d (%v)", len(fields), fields)
+ }
+
+ for _, f := range fields {
+ if f.GetId() == "" {
+ t.Fatalf("Expected field id to be set, found empty id for field %v", f)
+ }
+ }
+
+ if fields[0].GetName() != "test1_new" {
+ t.Fatalf("Expected field with name test1_new, got %s", fields[0].GetName())
+ }
+
+ if fields[1].GetName() != "test2" {
+ t.Fatalf("Expected field with name test2, got %s", fields[1].GetName())
+ }
+}
+
+func TestFieldsListClone(t *testing.T) {
+ f1 := &core.TextField{Name: "test1"}
+ f2 := &core.EmailField{Name: "test2"}
+ s1 := core.NewFieldsList(f1, f2)
+
+ s2, err := s1.Clone()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ s1Str := s1.String()
+ s2Str := s2.String()
+
+ if s1Str != s2Str {
+ t.Fatalf("Expected the cloned list to be equal, got \n%v\nVS\n%v", s1, s2)
+ }
+
+ // change in one list shouldn't result to change in the other
+ // (aka. check if it is a deep clone)
+ s1[0].SetName("test1_update")
+ if s2[0].GetName() != "test1" {
+ t.Fatalf("Expected s2 field name to not change, got %q", s2[0].GetName())
+ }
+}
+
+func TestFieldsListFieldNames(t *testing.T) {
+ f1 := &core.TextField{Name: "test1"}
+ f2 := &core.EmailField{Name: "test2"}
+ testFieldsList := core.NewFieldsList(f1, f2)
+
+ result := testFieldsList.FieldNames()
+
+ expected := []string{f1.Name, f2.Name}
+
+ if len(result) != len(expected) {
+ t.Fatalf("Expected %d slice elements, got %d\n%v", len(expected), len(result), result)
+ }
+
+ for _, name := range expected {
+ if !slices.Contains(result, name) {
+ t.Fatalf("Missing name %q in %v", name, result)
+ }
+ }
+}
+
+func TestFieldsListAsMap(t *testing.T) {
+ f1 := &core.TextField{Name: "test1"}
+ f2 := &core.EmailField{Name: "test2"}
+ testFieldsList := core.NewFieldsList(f1, f2)
+
+ result := testFieldsList.AsMap()
+
+ expectedIndexes := []string{f1.Name, f2.Name}
+
+ if len(result) != len(expectedIndexes) {
+ t.Fatalf("Expected %d map elements, got %d\n%v", len(expectedIndexes), len(result), result)
+ }
+
+ for _, index := range expectedIndexes {
+ if _, ok := result[index]; !ok {
+ t.Fatalf("Missing index %q", index)
+ }
+ }
+}
+
+func TestFieldsListGetById(t *testing.T) {
+ f1 := &core.TextField{Id: "id1", Name: "test1"}
+ f2 := &core.EmailField{Id: "id2", Name: "test2"}
+ testFieldsList := core.NewFieldsList(f1, f2)
+
+ // missing field id
+ result1 := testFieldsList.GetById("test1")
+ if result1 != nil {
+ t.Fatalf("Found unexpected field %v", result1)
+ }
+
+ // existing field id
+ result2 := testFieldsList.GetById("id2")
+ if result2 == nil || result2.GetId() != "id2" {
+ t.Fatalf("Cannot find field with id %q, got %v ", "id2", result2)
+ }
+}
+
+func TestFieldsListGetByName(t *testing.T) {
+ f1 := &core.TextField{Id: "id1", Name: "test1"}
+ f2 := &core.EmailField{Id: "id2", Name: "test2"}
+ testFieldsList := core.NewFieldsList(f1, f2)
+
+ // missing field name
+ result1 := testFieldsList.GetByName("id1")
+ if result1 != nil {
+ t.Fatalf("Found unexpected field %v", result1)
+ }
+
+ // existing field name
+ result2 := testFieldsList.GetByName("test2")
+ if result2 == nil || result2.GetName() != "test2" {
+ t.Fatalf("Cannot find field with name %q, got %v ", "test2", result2)
+ }
+}
+
+func TestFieldsListRemove(t *testing.T) {
+ testFieldsList := core.NewFieldsList(
+ &core.TextField{Id: "id1", Name: "test1"},
+ &core.TextField{Id: "id2", Name: "test2"},
+ &core.TextField{Id: "id3", Name: "test3"},
+ &core.TextField{Id: "id4", Name: "test4"},
+ &core.TextField{Id: "id5", Name: "test5"},
+ &core.TextField{Id: "id6", Name: "test6"},
+ )
+
+ // remove by id
+ testFieldsList.RemoveById("id2")
+ testFieldsList.RemoveById("test3") // should do nothing
+
+ // remove by name
+ testFieldsList.RemoveByName("test5")
+ testFieldsList.RemoveByName("id6") // should do nothing
+
+ expected := []string{"test1", "test3", "test4", "test6"}
+
+ if len(testFieldsList) != len(expected) {
+ t.Fatalf("Expected %d, got %d\n%v", len(expected), len(testFieldsList), testFieldsList)
+ }
+
+ for _, name := range expected {
+ if f := testFieldsList.GetByName(name); f == nil {
+ t.Fatalf("Missing field %q", name)
+ }
+ }
+}
+
+func TestFieldsListAdd(t *testing.T) {
+ f0 := &core.TextField{}
+ f1 := &core.TextField{Name: "test1"}
+ f2 := &core.TextField{Id: "f2Id", Name: "test2"}
+ f3 := &core.TextField{Id: "f3Id", Name: "test3"}
+ testFieldsList := core.NewFieldsList(f0, f1, f2, f3)
+
+ f2New := &core.EmailField{Id: "f2Id", Name: "test2_new"}
+ f4 := &core.URLField{Name: "test4"}
+
+ testFieldsList.Add(f2New)
+ testFieldsList.Add(f4)
+
+ if len(testFieldsList) != 5 {
+ t.Fatalf("Expected %d, got %d\n%v", 5, len(testFieldsList), testFieldsList)
+ }
+
+ // check if each field has id
+ for _, f := range testFieldsList {
+ if f.GetId() == "" {
+ t.Fatalf("Expected field id to be set, found empty id for field %v", f)
+ }
+ }
+
+ // check if f2 field was replaced
+ if f := testFieldsList.GetById("f2Id"); f == nil || f.Type() != core.FieldTypeEmail {
+ t.Fatalf("Expected f2 field to be replaced, found %v", f)
+ }
+
+ // check if f4 was added
+ if f := testFieldsList.GetByName("test4"); f == nil || f.GetName() != "test4" {
+ t.Fatalf("Expected f4 field to be added, found %v", f)
+ }
+}
+
+func TestFieldsListStringAndValue(t *testing.T) {
+ t.Run("empty list", func(t *testing.T) {
+ testFieldsList := core.NewFieldsList()
+
+ str := testFieldsList.String()
+ if str != "[]" {
+ t.Fatalf("Expected empty slice, got\n%q", str)
+ }
+
+ v, err := testFieldsList.Value()
+ if err != nil {
+ t.Fatal(err)
+ }
+ if v != str {
+ t.Fatalf("Expected String and Value to match")
+ }
+ })
+
+ t.Run("list with fields", func(t *testing.T) {
+ testFieldsList := core.NewFieldsList(
+ &core.TextField{Id: "f1id", Name: "test1"},
+ &core.BoolField{Id: "f2id", Name: "test2"},
+ &core.URLField{Id: "f3id", Name: "test3"},
+ )
+
+ str := testFieldsList.String()
+
+ v, err := testFieldsList.Value()
+ if err != nil {
+ t.Fatal(err)
+ }
+ if v != str {
+ t.Fatalf("Expected String and Value to match")
+ }
+
+ expectedParts := []string{
+ `"type":"bool"`,
+ `"type":"url"`,
+ `"type":"text"`,
+ `"id":"f1id"`,
+ `"id":"f2id"`,
+ `"id":"f3id"`,
+ `"name":"test1"`,
+ `"name":"test2"`,
+ `"name":"test3"`,
+ }
+
+ for _, part := range expectedParts {
+ if !strings.Contains(str, part) {
+ t.Fatalf("Missing %q in\nn%v", part, str)
+ }
+ }
+ })
+}
+
+func TestFieldsListScan(t *testing.T) {
+ scenarios := []struct {
+ name string
+ data any
+ expectError bool
+ expectJSON string
+ }{
+ {"nil", nil, false, "[]"},
+ {"empty string", "", false, "[]"},
+ {"empty byte", []byte{}, false, "[]"},
+ {"empty string array", "[]", false, "[]"},
+ {"invalid string", "invalid", true, "[]"},
+ {"non-string", 123, true, "[]"},
+ {"item with no field type", `[{}]`, true, "[]"},
+ {
+ "unknown field type",
+ `[{"id":"123","name":"test1","type":"unknown"},{"id":"456","name":"test2","type":"bool"}]`,
+ true,
+ `[]`,
+ },
+ {
+ "only the minimum field options",
+ `[{"id":"123","name":"test1","type":"text","required":true},{"id":"456","name":"test2","type":"bool"}]`,
+ false,
+ `[{"autogeneratePattern":"","hidden":false,"id":"123","max":0,"min":0,"name":"test1","pattern":"","presentable":false,"primaryKey":false,"required":true,"system":false,"type":"text"},{"hidden":false,"id":"456","name":"test2","presentable":false,"required":false,"system":false,"type":"bool"}]`,
+ },
+ {
+ "all field options",
+ `[{"autogeneratePattern":"","hidden":true,"id":"123","max":12,"min":0,"name":"test1","pattern":"","presentable":true,"primaryKey":false,"required":true,"system":false,"type":"text"},{"hidden":false,"id":"456","name":"test2","presentable":false,"required":false,"system":true,"type":"bool"}]`,
+ false,
+ `[{"autogeneratePattern":"","hidden":true,"id":"123","max":12,"min":0,"name":"test1","pattern":"","presentable":true,"primaryKey":false,"required":true,"system":false,"type":"text"},{"hidden":false,"id":"456","name":"test2","presentable":false,"required":false,"system":true,"type":"bool"}]`,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ testFieldsList := core.FieldsList{}
+
+ err := testFieldsList.Scan(s.data)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ str := testFieldsList.String()
+ if str != s.expectJSON {
+ t.Fatalf("Expected\n%v\ngot\n%v", s.expectJSON, str)
+ }
+ })
+ }
+}
+
+func TestFieldsListJSON(t *testing.T) {
+ scenarios := []struct {
+ name string
+ data string
+ expectError bool
+ expectJSON string
+ }{
+ {"empty string", "", true, "[]"},
+ {"invalid string", "invalid", true, "[]"},
+ {"empty string array", "[]", false, "[]"},
+ {"item with no field type", `[{}]`, true, "[]"},
+ {
+ "unknown field type",
+ `[{"id":"123","name":"test1","type":"unknown"},{"id":"456","name":"test2","type":"bool"}]`,
+ true,
+ `[]`,
+ },
+ {
+ "only the minimum field options",
+ `[{"id":"123","name":"test1","type":"text","required":true},{"id":"456","name":"test2","type":"bool"}]`,
+ false,
+ `[{"autogeneratePattern":"","hidden":false,"id":"123","max":0,"min":0,"name":"test1","pattern":"","presentable":false,"primaryKey":false,"required":true,"system":false,"type":"text"},{"hidden":false,"id":"456","name":"test2","presentable":false,"required":false,"system":false,"type":"bool"}]`,
+ },
+ {
+ "all field options",
+ `[{"autogeneratePattern":"","hidden":true,"id":"123","max":12,"min":0,"name":"test1","pattern":"","presentable":true,"primaryKey":false,"required":true,"system":false,"type":"text"},{"hidden":false,"id":"456","name":"test2","presentable":false,"required":false,"system":true,"type":"bool"}]`,
+ false,
+ `[{"autogeneratePattern":"","hidden":true,"id":"123","max":12,"min":0,"name":"test1","pattern":"","presentable":true,"primaryKey":false,"required":true,"system":false,"type":"text"},{"hidden":false,"id":"456","name":"test2","presentable":false,"required":false,"system":true,"type":"bool"}]`,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ testFieldsList := core.FieldsList{}
+
+ err := testFieldsList.UnmarshalJSON([]byte(s.data))
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ raw, err := testFieldsList.MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ str := string(raw)
+ if str != s.expectJSON {
+ t.Fatalf("Expected\n%v\ngot\n%v", s.expectJSON, str)
+ }
+ })
+ }
+}
diff --git a/core/log_model.go b/core/log_model.go
new file mode 100644
index 00000000..c2c3e7aa
--- /dev/null
+++ b/core/log_model.go
@@ -0,0 +1,22 @@
+package core
+
+import "github.com/pocketbase/pocketbase/tools/types"
+
+var (
+ _ Model = (*Log)(nil)
+)
+
+const LogsTableName = "_logs"
+
+type Log struct {
+ BaseModel
+
+ Created types.DateTime `db:"created" json:"created"`
+ Data types.JSONMap[any] `db:"data" json:"data"`
+ Message string `db:"message" json:"message"`
+ Level int `db:"level" json:"level"`
+}
+
+func (m *Log) TableName() string {
+ return LogsTableName
+}
diff --git a/core/log_printer_test.go b/core/log_printer_test.go
new file mode 100644
index 00000000..26d26c2c
--- /dev/null
+++ b/core/log_printer_test.go
@@ -0,0 +1,115 @@
+package core
+
+import (
+ "context"
+ "log/slog"
+ "os"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/tools/list"
+ "github.com/pocketbase/pocketbase/tools/logger"
+)
+
+func TestBaseAppLoggerLevelDevPrint(t *testing.T) {
+ testLogLevel := 4
+
+ scenarios := []struct {
+ name string
+ isDev bool
+ levels []int
+ printedLevels []int
+ persistedLevels []int
+ }{
+ {
+ "dev mode",
+ true,
+ []int{testLogLevel - 1, testLogLevel, testLogLevel + 1},
+ []int{testLogLevel - 1, testLogLevel, testLogLevel + 1},
+ []int{testLogLevel, testLogLevel + 1},
+ },
+ {
+ "nondev mode",
+ false,
+ []int{testLogLevel - 1, testLogLevel, testLogLevel + 1},
+ []int{},
+ []int{testLogLevel, testLogLevel + 1},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ const testDataDir = "./pb_base_app_test_data_dir/"
+ defer os.RemoveAll(testDataDir)
+
+ app := NewBaseApp(BaseAppConfig{
+ DataDir: testDataDir,
+ IsDev: s.isDev,
+ })
+ defer app.ResetBootstrapState()
+
+ if err := app.Bootstrap(); err != nil {
+ t.Fatal(err)
+ }
+
+ app.Settings().Logs.MinLevel = testLogLevel
+ if err := app.Save(app.Settings()); err != nil {
+ t.Fatal(err)
+ }
+
+ var printedLevels []int
+ var persistedLevels []int
+
+ ctx := context.Background()
+
+ // track printed logs
+ originalPrintLog := printLog
+ defer func() {
+ printLog = originalPrintLog
+ }()
+ printLog = func(log *logger.Log) {
+ printedLevels = append(printedLevels, int(log.Level))
+ }
+
+ // track persisted logs
+ app.OnModelAfterCreateSuccess("_logs").BindFunc(func(e *ModelEvent) error {
+ l, ok := e.Model.(*Log)
+ if ok {
+ persistedLevels = append(persistedLevels, l.Level)
+ }
+ return e.Next()
+ })
+
+ // write and persist logs
+ for _, l := range s.levels {
+ app.Logger().Log(ctx, slog.Level(l), "test")
+ }
+ handler, ok := app.Logger().Handler().(*logger.BatchHandler)
+ if !ok {
+ t.Fatalf("Expected BatchHandler, got %v", app.Logger().Handler())
+ }
+ if err := handler.WriteAll(ctx); err != nil {
+ t.Fatalf("Failed to write all logs: %v", err)
+ }
+
+ // check persisted log levels
+ if len(s.persistedLevels) != len(persistedLevels) {
+ t.Fatalf("Expected persisted levels \n%v\ngot\n%v", s.persistedLevels, persistedLevels)
+ }
+ for _, l := range persistedLevels {
+ if !list.ExistInSlice(l, s.persistedLevels) {
+ t.Fatalf("Missing expected persisted level %v in %v", l, persistedLevels)
+ }
+ }
+
+ // check printed log levels
+ if len(s.printedLevels) != len(printedLevels) {
+ t.Fatalf("Expected printed levels \n%v\ngot\n%v", s.printedLevels, printedLevels)
+ }
+ for _, l := range printedLevels {
+ if !list.ExistInSlice(l, s.printedLevels) {
+ t.Fatalf("Missing expected printed level %v in %v", l, printedLevels)
+ }
+ }
+ })
+ }
+}
diff --git a/daos/log.go b/core/log_query.go
similarity index 61%
rename from daos/log.go
rename to core/log_query.go
index eaa11a07..701bfad8 100644
--- a/daos/log.go
+++ b/core/log_query.go
@@ -1,23 +1,22 @@
-package daos
+package core
import (
"time"
"github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
"github.com/pocketbase/pocketbase/tools/types"
)
// LogQuery returns a new Log select query.
-func (dao *Dao) LogQuery() *dbx.SelectQuery {
- return dao.ModelQuery(&models.Log{})
+func (app *BaseApp) LogQuery() *dbx.SelectQuery {
+ return app.AuxModelQuery(&Log{})
}
// FindLogById finds a single Log entry by its id.
-func (dao *Dao) FindLogById(id string) (*models.Log, error) {
- model := &models.Log{}
+func (app *BaseApp) FindLogById(id string) (*Log, error) {
+ model := &Log{}
- err := dao.LogQuery().
+ err := app.LogQuery().
AndWhere(dbx.HashExp{"id": id}).
Limit(1).
One(model)
@@ -29,16 +28,17 @@ func (dao *Dao) FindLogById(id string) (*models.Log, error) {
return model, nil
}
+// LogsStatsItem defines the total number of logs for a specific time period.
type LogsStatsItem struct {
- Total int `db:"total" json:"total"`
Date types.DateTime `db:"date" json:"date"`
+ Total int `db:"total" json:"total"`
}
// LogsStats returns hourly grouped requests logs statistics.
-func (dao *Dao) LogsStats(expr dbx.Expression) ([]*LogsStatsItem, error) {
+func (app *BaseApp) LogsStats(expr dbx.Expression) ([]*LogsStatsItem, error) {
result := []*LogsStatsItem{}
- query := dao.LogQuery().
+ query := app.LogQuery().
Select("count(id) as total", "strftime('%Y-%m-%d %H:00:00', created) as date").
GroupBy("date")
@@ -52,16 +52,11 @@ func (dao *Dao) LogsStats(expr dbx.Expression) ([]*LogsStatsItem, error) {
}
// DeleteOldLogs delete all requests that are created before createdBefore.
-func (dao *Dao) DeleteOldLogs(createdBefore time.Time) error {
+func (app *BaseApp) DeleteOldLogs(createdBefore time.Time) error {
formattedDate := createdBefore.UTC().Format(types.DefaultDateLayout)
expr := dbx.NewExp("[[created]] <= {:date}", dbx.Params{"date": formattedDate})
- _, err := dao.NonconcurrentDB().Delete((&models.Log{}).TableName(), expr).Execute()
+ _, err := app.auxNonconcurrentDB.Delete((&Log{}).TableName(), expr).Execute()
return err
}
-
-// SaveLog upserts the provided Log model.
-func (dao *Dao) SaveLog(log *models.Log) error {
- return dao.Save(log)
-}
diff --git a/core/log_query_test.go b/core/log_query_test.go
new file mode 100644
index 00000000..df361987
--- /dev/null
+++ b/core/log_query_test.go
@@ -0,0 +1,114 @@
+package core_test
+
+import (
+ "encoding/json"
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func TestFindLogById(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ tests.StubLogsData(app)
+
+ scenarios := []struct {
+ id string
+ expectError bool
+ }{
+ {"", true},
+ {"invalid", true},
+ {"00000000-9f38-44fb-bf82-c8f53b310d91", true},
+ {"873f2133-9f38-44fb-bf82-c8f53b310d91", false},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.id), func(t *testing.T) {
+ log, err := app.FindLogById(s.id)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if log != nil && log.Id != s.id {
+ t.Fatalf("Expected log with id %q, got %q", s.id, log.Id)
+ }
+ })
+ }
+}
+
+func TestLogsStats(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ tests.StubLogsData(app)
+
+ expected := `[{"date":"2022-05-01 10:00:00.000Z","total":1},{"date":"2022-05-02 10:00:00.000Z","total":1}]`
+
+ now := time.Now().UTC().Format(types.DefaultDateLayout)
+ exp := dbx.NewExp("[[created]] <= {:date}", dbx.Params{"date": now})
+ result, err := app.LogsStats(exp)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ encoded, _ := json.Marshal(result)
+ if string(encoded) != expected {
+ t.Fatalf("Expected\n%q\ngot\n%q", expected, string(encoded))
+ }
+}
+
+func TestDeleteOldLogs(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ tests.StubLogsData(app)
+
+ scenarios := []struct {
+ date string
+ expectedTotal int
+ }{
+ {"2022-01-01 10:00:00.000Z", 2}, // no logs to delete before that time
+ {"2022-05-01 11:00:00.000Z", 1}, // only 1 log should have left
+ {"2022-05-03 11:00:00.000Z", 0}, // no more logs should have left
+ {"2022-05-04 11:00:00.000Z", 0}, // no more logs should have left
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.date, func(t *testing.T) {
+ date, dateErr := time.Parse(types.DefaultDateLayout, s.date)
+ if dateErr != nil {
+ t.Fatalf("Date error %v", dateErr)
+ }
+
+ deleteErr := app.DeleteOldLogs(date)
+ if deleteErr != nil {
+ t.Fatalf("Delete error %v", deleteErr)
+ }
+
+ // check total remaining logs
+ var total int
+ countErr := app.AuxModelQuery(&core.Log{}).Select("count(*)").Row(&total)
+ if countErr != nil {
+ t.Errorf("Count error %v", countErr)
+ }
+
+ if total != s.expectedTotal {
+ t.Errorf("Expected %d remaining logs, got %d", s.expectedTotal, total)
+ }
+ })
+ }
+}
diff --git a/core/mfa_model.go b/core/mfa_model.go
new file mode 100644
index 00000000..be9f36f5
--- /dev/null
+++ b/core/mfa_model.go
@@ -0,0 +1,157 @@
+package core
+
+import (
+ "context"
+ "errors"
+ "time"
+
+ "github.com/pocketbase/pocketbase/tools/hook"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+const (
+ MFAMethodPassword = "password"
+ MFAMethodOAuth2 = "oauth2"
+ MFAMethodOTP = "otp"
+)
+
+const CollectionNameMFAs = "_mfas"
+
+var (
+ _ Model = (*MFA)(nil)
+ _ PreValidator = (*MFA)(nil)
+ _ RecordProxy = (*MFA)(nil)
+)
+
+// MFA defines a Record proxy for working with the mfas collection.
+type MFA struct {
+ *Record
+}
+
+// NewMFA instantiates and returns a new blank *MFA model.
+//
+// Example usage:
+//
+// mfa := core.NewMFA(app)
+// mfa.SetRecordRef(user.Id)
+// mfa.SetCollectionRef(user.Collection().Id)
+// mfa.SetMethod(core.MFAMethodPassword)
+// app.Save(mfa)
+func NewMFA(app App) *MFA {
+ m := &MFA{}
+
+ c, err := app.FindCachedCollectionByNameOrId(CollectionNameMFAs)
+ if err != nil {
+ // this is just to make tests easier since mfa is a system collection and it is expected to be always accessible
+ // (note: the loaded record is further checked on MFA.PreValidate())
+ c = NewBaseCollection("@__invalid__")
+ }
+
+ m.Record = NewRecord(c)
+
+ return m
+}
+
+// PreValidate implements the [PreValidator] interface and checks
+// whether the proxy is properly loaded.
+func (m *MFA) PreValidate(ctx context.Context, app App) error {
+ if m.Record == nil || m.Record.Collection().Name != CollectionNameMFAs {
+ return errors.New("missing or invalid mfa ProxyRecord")
+ }
+
+ return nil
+}
+
+// ProxyRecord returns the proxied Record model.
+func (m *MFA) ProxyRecord() *Record {
+ return m.Record
+}
+
+// SetProxyRecord loads the specified record model into the current proxy.
+func (m *MFA) SetProxyRecord(record *Record) {
+ m.Record = record
+}
+
+// CollectionRef returns the "collectionRef" field value.
+func (m *MFA) CollectionRef() string {
+ return m.GetString("collectionRef")
+}
+
+// SetCollectionRef updates the "collectionRef" record field value.
+func (m *MFA) SetCollectionRef(collectionId string) {
+ m.Set("collectionRef", collectionId)
+}
+
+// RecordRef returns the "recordRef" record field value.
+func (m *MFA) RecordRef() string {
+ return m.GetString("recordRef")
+}
+
+// SetRecordRef updates the "recordRef" record field value.
+func (m *MFA) SetRecordRef(recordId string) {
+ m.Set("recordRef", recordId)
+}
+
+// Method returns the "method" record field value.
+func (m *MFA) Method() string {
+ return m.GetString("method")
+}
+
+// SetMethod updates the "method" record field value.
+func (m *MFA) SetMethod(method string) {
+ m.Set("method", method)
+}
+
+// Created returns the "created" record field value.
+func (m *MFA) Created() types.DateTime {
+ return m.GetDateTime("created")
+}
+
+// Updated returns the "updated" record field value.
+func (m *MFA) Updated() types.DateTime {
+ return m.GetDateTime("updated")
+}
+
+// HasExpired checks if the mfa is expired, i.e. whether more than
+// maxElapsed time has passed since its creation.
+func (m *MFA) HasExpired(maxElapsed time.Duration) bool {
+ return time.Since(m.Created().Time()) > maxElapsed
+}
+
+func (app *BaseApp) registerMFAHooks() {
+ recordRefHooks[*MFA](app, CollectionNameMFAs, CollectionTypeAuth)
+
+ // run every hour to clean up expired mfa sessions
+ app.Cron().Add("__mfasCleanup__", "0 * * * *", func() {
+ if err := app.DeleteExpiredMFAs(); err != nil {
+ app.Logger().Warn("Failed to delete expired MFA sessions", "error", err)
+ }
+ })
+
+ // delete existing mfas on password change
+ app.OnRecordUpdate().Bind(&hook.Handler[*RecordEvent]{
+ Func: func(e *RecordEvent) error {
+ err := e.Next()
+ if err != nil || !e.Record.Collection().IsAuth() {
+ return err
+ }
+
+ old := e.Record.Original().GetString(FieldNamePassword + ":hash")
+ new := e.Record.GetString(FieldNamePassword + ":hash")
+ if old != new {
+ err = e.App.DeleteAllMFAsByRecord(e.Record)
+ if err != nil {
+ e.App.Logger().Warn(
+ "Failed to delete all previous mfas",
+ "error", err,
+ "recordId", e.Record.Id,
+ "collectionId", e.Record.Collection().Id,
+ )
+ }
+ }
+
+ return nil
+ },
+ Priority: 99,
+ })
+}
diff --git a/core/mfa_model_test.go b/core/mfa_model_test.go
new file mode 100644
index 00000000..97669749
--- /dev/null
+++ b/core/mfa_model_test.go
@@ -0,0 +1,302 @@
+package core_test
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func TestNewMFA(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ mfa := core.NewMFA(app)
+
+ if mfa.Collection().Name != core.CollectionNameMFAs {
+ t.Fatalf("Expected record with %q collection, got %q", core.CollectionNameMFAs, mfa.Collection().Name)
+ }
+}
+
+func TestMFAProxyRecord(t *testing.T) {
+ t.Parallel()
+
+ record := core.NewRecord(core.NewBaseCollection("test"))
+ record.Id = "test_id"
+
+ mfa := core.MFA{}
+ mfa.SetProxyRecord(record)
+
+ if mfa.ProxyRecord() == nil || mfa.ProxyRecord().Id != record.Id {
+ t.Fatalf("Expected proxy record with id %q, got %v", record.Id, mfa.ProxyRecord())
+ }
+}
+
+func TestMFARecordRef(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ mfa := core.NewMFA(app)
+
+ testValues := []string{"test_1", "test2", ""}
+ for i, testValue := range testValues {
+ t.Run(fmt.Sprintf("%d_%q", i, testValue), func(t *testing.T) {
+ mfa.SetRecordRef(testValue)
+
+ if v := mfa.RecordRef(); v != testValue {
+ t.Fatalf("Expected getter %q, got %q", testValue, v)
+ }
+
+ if v := mfa.GetString("recordRef"); v != testValue {
+ t.Fatalf("Expected field value %q, got %q", testValue, v)
+ }
+ })
+ }
+}
+
+func TestMFACollectionRef(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ mfa := core.NewMFA(app)
+
+ testValues := []string{"test_1", "test2", ""}
+ for i, testValue := range testValues {
+ t.Run(fmt.Sprintf("%d_%q", i, testValue), func(t *testing.T) {
+ mfa.SetCollectionRef(testValue)
+
+ if v := mfa.CollectionRef(); v != testValue {
+ t.Fatalf("Expected getter %q, got %q", testValue, v)
+ }
+
+ if v := mfa.GetString("collectionRef"); v != testValue {
+ t.Fatalf("Expected field value %q, got %q", testValue, v)
+ }
+ })
+ }
+}
+
+func TestMFAMethod(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ mfa := core.NewMFA(app)
+
+ testValues := []string{"test_1", "test2", ""}
+ for i, testValue := range testValues {
+ t.Run(fmt.Sprintf("%d_%q", i, testValue), func(t *testing.T) {
+ mfa.SetMethod(testValue)
+
+ if v := mfa.Method(); v != testValue {
+ t.Fatalf("Expected getter %q, got %q", testValue, v)
+ }
+
+ if v := mfa.GetString("method"); v != testValue {
+ t.Fatalf("Expected field value %q, got %q", testValue, v)
+ }
+ })
+ }
+}
+
+func TestMFACreated(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ mfa := core.NewMFA(app)
+
+ if v := mfa.Created().String(); v != "" {
+ t.Fatalf("Expected empty created, got %q", v)
+ }
+
+ now := types.NowDateTime()
+ mfa.SetRaw("created", now)
+
+ if v := mfa.Created().String(); v != now.String() {
+ t.Fatalf("Expected %q created, got %q", now.String(), v)
+ }
+}
+
+func TestMFAUpdated(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ mfa := core.NewMFA(app)
+
+ if v := mfa.Updated().String(); v != "" {
+ t.Fatalf("Expected empty updated, got %q", v)
+ }
+
+ now := types.NowDateTime()
+ mfa.SetRaw("updated", now)
+
+ if v := mfa.Updated().String(); v != now.String() {
+ t.Fatalf("Expected %q updated, got %q", now.String(), v)
+ }
+}
+
+func TestMFAHasExpired(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ now := types.NowDateTime()
+
+ mfa := core.NewMFA(app)
+ mfa.SetRaw("created", now.Add(-5*time.Minute))
+
+ scenarios := []struct {
+ maxElapsed time.Duration
+ expected bool
+ }{
+ {0 * time.Minute, true},
+ {3 * time.Minute, true},
+ {5 * time.Minute, true},
+ {6 * time.Minute, false},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.maxElapsed.String()), func(t *testing.T) {
+ result := mfa.HasExpired(s.maxElapsed)
+
+ if result != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
+
+func TestMFAPreValidate(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ mfasCol, err := app.FindCollectionByNameOrId(core.CollectionNameMFAs)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ t.Run("no proxy record", func(t *testing.T) {
+ mfa := &core.MFA{}
+
+ if err := app.Validate(mfa); err == nil {
+ t.Fatal("Expected collection validation error")
+ }
+ })
+
+ t.Run("non-MFA collection", func(t *testing.T) {
+ mfa := &core.MFA{}
+ mfa.SetProxyRecord(core.NewRecord(core.NewBaseCollection("invalid")))
+ mfa.SetRecordRef(user.Id)
+ mfa.SetCollectionRef(user.Collection().Id)
+ mfa.SetMethod("test123")
+
+ if err := app.Validate(mfa); err == nil {
+ t.Fatal("Expected collection validation error")
+ }
+ })
+
+ t.Run("MFA collection", func(t *testing.T) {
+ mfa := &core.MFA{}
+ mfa.SetProxyRecord(core.NewRecord(mfasCol))
+ mfa.SetRecordRef(user.Id)
+ mfa.SetCollectionRef(user.Collection().Id)
+ mfa.SetMethod("test123")
+
+ if err := app.Validate(mfa); err != nil {
+ t.Fatalf("Expected nil validation error, got %v", err)
+ }
+ })
+}
+
+func TestMFAValidateHook(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ demo1, err := app.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ name string
+ mfa func() *core.MFA
+ expectErrors []string
+ }{
+ {
+ "empty",
+ func() *core.MFA {
+ return core.NewMFA(app)
+ },
+ []string{"collectionRef", "recordRef", "method"},
+ },
+ {
+ "non-auth collection",
+ func() *core.MFA {
+ mfa := core.NewMFA(app)
+ mfa.SetCollectionRef(demo1.Collection().Id)
+ mfa.SetRecordRef(demo1.Id)
+ mfa.SetMethod("test123")
+ return mfa
+ },
+ []string{"collectionRef"},
+ },
+ {
+ "missing record id",
+ func() *core.MFA {
+ mfa := core.NewMFA(app)
+ mfa.SetCollectionRef(user.Collection().Id)
+ mfa.SetRecordRef("missing")
+ mfa.SetMethod("test123")
+ return mfa
+ },
+ []string{"recordRef"},
+ },
+ {
+ "valid ref",
+ func() *core.MFA {
+ mfa := core.NewMFA(app)
+ mfa.SetCollectionRef(user.Collection().Id)
+ mfa.SetRecordRef(user.Id)
+ mfa.SetMethod("test123")
+ return mfa
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ errs := app.Validate(s.mfa())
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
diff --git a/core/mfa_query.go b/core/mfa_query.go
new file mode 100644
index 00000000..1ccfd457
--- /dev/null
+++ b/core/mfa_query.go
@@ -0,0 +1,117 @@
+package core
+
+import (
+ "errors"
+ "time"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+// FindAllMFAsByRecord returns all MFA models linked to the provided auth record.
+func (app *BaseApp) FindAllMFAsByRecord(authRecord *Record) ([]*MFA, error) {
+ result := []*MFA{}
+
+ err := app.RecordQuery(CollectionNameMFAs).
+ AndWhere(dbx.HashExp{
+ "collectionRef": authRecord.Collection().Id,
+ "recordRef": authRecord.Id,
+ }).
+ OrderBy("created DESC").
+ All(&result)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// FindAllMFAsByCollection returns all MFA models linked to the provided collection.
+func (app *BaseApp) FindAllMFAsByCollection(collection *Collection) ([]*MFA, error) {
+ result := []*MFA{}
+
+ err := app.RecordQuery(CollectionNameMFAs).
+ AndWhere(dbx.HashExp{"collectionRef": collection.Id}).
+ OrderBy("created DESC").
+ All(&result)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// FindMFAById returns a single MFA model by its id.
+func (app *BaseApp) FindMFAById(id string) (*MFA, error) {
+ result := &MFA{}
+
+ err := app.RecordQuery(CollectionNameMFAs).
+ AndWhere(dbx.HashExp{"id": id}).
+ Limit(1).
+ One(result)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// DeleteAllMFAsByRecord deletes all MFA models associated with the provided record.
+//
+// Returns a combined error with the failed deletes.
+func (app *BaseApp) DeleteAllMFAsByRecord(authRecord *Record) error {
+ models, err := app.FindAllMFAsByRecord(authRecord)
+ if err != nil {
+ return err
+ }
+
+ var errs []error
+ for _, m := range models {
+ if err := app.Delete(m); err != nil {
+ errs = append(errs, err)
+ }
+ }
+ if len(errs) > 0 {
+ return errors.Join(errs...)
+ }
+
+ return nil
+}
+
+// DeleteExpiredMFAs deletes the expired MFAs for all auth collections.
+func (app *BaseApp) DeleteExpiredMFAs() error {
+ authCollections, err := app.FindAllCollections(CollectionTypeAuth)
+ if err != nil {
+ return err
+ }
+
+ // note: perform even if MFA is disabled to ensure that there are no dangling old records
+ for _, collection := range authCollections {
+ minValidDate, err := types.ParseDateTime(time.Now().Add(-1 * collection.MFA.DurationTime()))
+ if err != nil {
+ return err
+ }
+
+ items := []*Record{}
+
+ err = app.RecordQuery(CollectionNameMFAs).
+ AndWhere(dbx.HashExp{"collectionRef": collection.Id}).
+ AndWhere(dbx.NewExp("[[created]] < {:date}", dbx.Params{"date": minValidDate})).
+ All(&items)
+ if err != nil {
+ return err
+ }
+
+ for _, item := range items {
+ err = app.Delete(item)
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
diff --git a/core/mfa_query_test.go b/core/mfa_query_test.go
new file mode 100644
index 00000000..e26e2b3c
--- /dev/null
+++ b/core/mfa_query_test.go
@@ -0,0 +1,311 @@
+package core_test
+
+import (
+ "fmt"
+ "slices"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestFindAllMFAsByRecord(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ if err := tests.StubMFARecords(app); err != nil {
+ t.Fatal(err)
+ }
+
+ demo1, err := app.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser2, err := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test2@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser4, err := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test4@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user1, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ record *core.Record
+ expected []string
+ }{
+ {demo1, nil},
+ {superuser2, []string{"superuser2_0", "superuser2_3", "superuser2_2", "superuser2_1", "superuser2_4"}},
+ {superuser4, nil},
+ {user1, []string{"user1_0"}},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.record.Collection().Name+"_"+s.record.Id, func(t *testing.T) {
+ result, err := app.FindAllMFAsByRecord(s.record)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(result) != len(s.expected) {
+ t.Fatalf("Expected total mfas %d, got %d", len(s.expected), len(result))
+ }
+
+ for i, id := range s.expected {
+ if result[i].Id != id {
+ t.Errorf("[%d] Expected id %q, got %q", i, id, result[i].Id)
+ }
+ }
+ })
+ }
+}
+
+func TestFindAllMFAsByCollection(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ if err := tests.StubMFARecords(app); err != nil {
+ t.Fatal(err)
+ }
+
+ demo1, err := app.FindCollectionByNameOrId("demo1")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superusers, err := app.FindCollectionByNameOrId(core.CollectionNameSuperusers)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ clients, err := app.FindCollectionByNameOrId("clients")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ users, err := app.FindCollectionByNameOrId("users")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ collection *core.Collection
+ expected []string
+ }{
+ {demo1, nil},
+ {superusers, []string{
+ "superuser2_0",
+ "superuser2_3",
+ "superuser3_0",
+ "superuser2_2",
+ "superuser3_1",
+ "superuser2_1",
+ "superuser2_4",
+ }},
+ {clients, nil},
+ {users, []string{"user1_0"}},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.collection.Name, func(t *testing.T) {
+ result, err := app.FindAllMFAsByCollection(s.collection)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(result) != len(s.expected) {
+ t.Fatalf("Expected total mfas %d, got %d", len(s.expected), len(result))
+ }
+
+ for i, id := range s.expected {
+ if result[i].Id != id {
+ t.Errorf("[%d] Expected id %q, got %q", i, id, result[i].Id)
+ }
+ }
+ })
+ }
+}
+
+func TestFindMFAById(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ if err := tests.StubMFARecords(app); err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ id string
+ expectError bool
+ }{
+ {"", true},
+ {"84nmscqy84lsi1t", true}, // non-mfa id
+ {"superuser2_0", false},
+ {"superuser2_4", false}, // expired
+ {"user1_0", false},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.id, func(t *testing.T) {
+ result, err := app.FindMFAById(s.id)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if hasErr {
+ return
+ }
+
+ if result.Id != s.id {
+ t.Fatalf("Expected record with id %q, got %q", s.id, result.Id)
+ }
+ })
+ }
+}
+
+func TestDeleteAllMFAsByRecord(t *testing.T) {
+ t.Parallel()
+
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ demo1, err := testApp.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser2, err := testApp.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test2@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser4, err := testApp.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test4@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user1, err := testApp.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ record *core.Record
+ deletedIds []string
+ }{
+ {demo1, nil}, // non-auth record
+ {superuser2, []string{"superuser2_0", "superuser2_1", "superuser2_3", "superuser2_2", "superuser2_4"}},
+ {superuser4, nil},
+ {user1, []string{"user1_0"}},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s_%s", i, s.record.Collection().Name, s.record.Id), func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ if err := tests.StubMFARecords(app); err != nil {
+ t.Fatal(err)
+ }
+
+ deletedIds := []string{}
+ app.OnRecordAfterDeleteSuccess().BindFunc(func(e *core.RecordEvent) error {
+ deletedIds = append(deletedIds, e.Record.Id)
+ return e.Next()
+ })
+
+ err := app.DeleteAllMFAsByRecord(s.record)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(deletedIds) != len(s.deletedIds) {
+ t.Fatalf("Expected deleted ids\n%v\ngot\n%v", s.deletedIds, deletedIds)
+ }
+
+ for _, id := range s.deletedIds {
+ if !slices.Contains(deletedIds, id) {
+ t.Errorf("Expected to find deleted id %q in %v", id, deletedIds)
+ }
+ }
+ })
+ }
+}
+
+func TestDeleteExpiredMFAs(t *testing.T) {
+ t.Parallel()
+
+ checkDeletedIds := func(app core.App, t *testing.T, expectedDeletedIds []string) {
+ if err := tests.StubMFARecords(app); err != nil {
+ t.Fatal(err)
+ }
+
+ deletedIds := []string{}
+ app.OnRecordDelete().BindFunc(func(e *core.RecordEvent) error {
+ deletedIds = append(deletedIds, e.Record.Id)
+ return e.Next()
+ })
+
+ if err := app.DeleteExpiredMFAs(); err != nil {
+ t.Fatal(err)
+ }
+
+ if len(deletedIds) != len(expectedDeletedIds) {
+ t.Fatalf("Expected deleted ids\n%v\ngot\n%v", expectedDeletedIds, deletedIds)
+ }
+
+ for _, id := range expectedDeletedIds {
+ if !slices.Contains(deletedIds, id) {
+ t.Errorf("Expected to find deleted id %q in %v", id, deletedIds)
+ }
+ }
+ }
+
+ t.Run("default test collections", func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ checkDeletedIds(app, t, []string{
+ "user1_0",
+ "superuser2_1",
+ "superuser2_4",
+ })
+ })
+
+ t.Run("mfa collection duration mock", func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ superusers, err := app.FindCollectionByNameOrId(core.CollectionNameSuperusers)
+ if err != nil {
+ t.Fatal(err)
+ }
+ superusers.MFA.Duration = 60
+ if err := app.Save(superusers); err != nil {
+ t.Fatalf("Failed to mock superusers mfa duration: %v", err)
+ }
+
+ checkDeletedIds(app, t, []string{
+ "user1_0",
+ "superuser2_1",
+ "superuser2_2",
+ "superuser2_4",
+ "superuser3_1",
+ })
+ })
+}
diff --git a/tools/migrate/list.go b/core/migrations_list.go
similarity index 72%
rename from tools/migrate/list.go
rename to core/migrations_list.go
index 65d24992..87ef0b39 100644
--- a/tools/migrate/list.go
+++ b/core/migrations_list.go
@@ -1,17 +1,15 @@
-package migrate
+package core
import (
"path/filepath"
"runtime"
"sort"
-
- "github.com/pocketbase/dbx"
)
type Migration struct {
+ Up func(txApp App) error
+ Down func(txApp App) error
File string
- Up func(db dbx.Builder) error
- Down func(db dbx.Builder) error
}
// MigrationsList defines a list with migration definitions
@@ -29,14 +27,21 @@ func (l *MigrationsList) Items() []*Migration {
return l.list
}
+// Copy copies all provided list migrations into the current one.
+func (l *MigrationsList) Copy(list MigrationsList) {
+ for _, item := range list.Items() {
+ l.Register(item.Up, item.Down, item.File)
+ }
+}
+
// Register adds new migration definition to the list.
//
// If `optFilename` is not provided, it will try to get the name from its .go file.
//
// The list will be sorted automatically based on the migrations file name.
func (l *MigrationsList) Register(
- up func(db dbx.Builder) error,
- down func(db dbx.Builder) error,
+ up func(txApp App) error,
+ down func(txApp App) error,
optFilename ...string,
) {
var file string
@@ -53,7 +58,7 @@ func (l *MigrationsList) Register(
Down: down,
})
- sort.Slice(l.list, func(i int, j int) bool {
+ sort.SliceStable(l.list, func(i int, j int) bool {
return l.list[i].File < l.list[j].File
})
}
diff --git a/core/migrations_list_test.go b/core/migrations_list_test.go
new file mode 100644
index 00000000..13875a6d
--- /dev/null
+++ b/core/migrations_list_test.go
@@ -0,0 +1,39 @@
+package core_test
+
+import (
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+)
+
+func TestMigrationsList(t *testing.T) {
+ l1 := core.MigrationsList{}
+ l1.Register(nil, nil, "3_test.go")
+ l1.Register(nil, nil, "1_test.go")
+ l1.Register(nil, nil, "2_test.go")
+ l1.Register(nil, nil /* auto detect file name */)
+
+ l2 := core.MigrationsList{}
+ l2.Register(nil, nil, "4_test.go")
+ l2.Copy(l1)
+
+ expected := []string{
+ "1_test.go",
+ "2_test.go",
+ "3_test.go",
+ "4_test.go",
+ "migrations_list_test.go",
+ }
+
+ items := l2.Items()
+ if len(items) != len(expected) {
+ t.Fatalf("Expected %d items, got %d: \n%#v", len(expected), len(items), items)
+ }
+
+ for i, name := range expected {
+ item := l2.Item(i)
+ if item.File != name {
+ t.Fatalf("Expected name %s for index %d, got %s", name, i, item.File)
+ }
+ }
+}
diff --git a/core/migrations_runner.go b/core/migrations_runner.go
new file mode 100644
index 00000000..43519850
--- /dev/null
+++ b/core/migrations_runner.go
@@ -0,0 +1,308 @@
+package core
+
+import (
+ "fmt"
+ "strings"
+ "time"
+
+ "github.com/AlecAivazis/survey/v2"
+ "github.com/fatih/color"
+ "github.com/pocketbase/dbx"
+ "github.com/spf13/cast"
+)
+
+var AppMigrations MigrationsList
+
+var SystemMigrations MigrationsList
+
+const DefaultMigrationsTable = "_migrations"
+
+// MigrationsRunner defines a simple struct for managing the execution of db migrations.
+type MigrationsRunner struct {
+ app App
+ tableName string
+ migrationsList MigrationsList
+ inited bool
+}
+
+// NewMigrationsRunner creates and initializes a new db migrations MigrationsRunner instance.
+func NewMigrationsRunner(app App, migrationsList MigrationsList) *MigrationsRunner {
+ return &MigrationsRunner{
+ app: app,
+ migrationsList: migrationsList,
+ tableName: DefaultMigrationsTable,
+ }
+}
+
+// Run interactively executes the current runner with the provided args.
+//
+// The following commands are supported:
+// - up - applies all migrations
+// - down [n] - reverts the last n (default 1) applied migrations
+// - history-sync - syncs the migrations table with the runner's migrations list
+func (r *MigrationsRunner) Run(args ...string) error {
+ if err := r.initMigrationsTable(); err != nil {
+ return err
+ }
+
+ cmd := "up"
+ if len(args) > 0 {
+ cmd = args[0]
+ }
+
+ switch cmd {
+ case "up":
+ applied, err := r.Up()
+ if err != nil {
+ return err
+ }
+
+ if len(applied) == 0 {
+ color.Green("No new migrations to apply.")
+ } else {
+ for _, file := range applied {
+ color.Green("Applied %s", file)
+ }
+ }
+
+ return nil
+ case "down":
+ toRevertCount := 1
+ if len(args) > 1 {
+ toRevertCount = cast.ToInt(args[1])
+ if toRevertCount < 0 {
+ // revert all applied migrations
+ toRevertCount = len(r.migrationsList.Items())
+ }
+ }
+
+ names, err := r.lastAppliedMigrations(toRevertCount)
+ if err != nil {
+ return err
+ }
+
+ confirm := false
+ prompt := &survey.Confirm{
+ Message: fmt.Sprintf(
+ "\n%v\nDo you really want to revert the last %d applied migration(s)?",
+ strings.Join(names, "\n"),
+ toRevertCount,
+ ),
+ }
+ survey.AskOne(prompt, &confirm)
+ if !confirm {
+ fmt.Println("The command has been cancelled")
+ return nil
+ }
+
+ reverted, err := r.Down(toRevertCount)
+ if err != nil {
+ return err
+ }
+
+ if len(reverted) == 0 {
+ color.Green("No migrations to revert.")
+ } else {
+ for _, file := range reverted {
+ color.Green("Reverted %s", file)
+ }
+ }
+
+ return nil
+ case "history-sync":
+ if err := r.RemoveMissingAppliedMigrations(); err != nil {
+ return err
+ }
+
+ color.Green("The %s table was synced with the available migrations.", r.tableName)
+ return nil
+ default:
+ return fmt.Errorf("Unsupported command: %q\n", cmd)
+ }
+}
+
+// Up executes all unapplied migrations for the provided runner.
+//
+// On success returns list with the applied migrations file names.
+func (r *MigrationsRunner) Up() ([]string, error) {
+ if err := r.initMigrationsTable(); err != nil {
+ return nil, err
+ }
+
+ applied := []string{}
+
+ err := r.app.AuxRunInTransaction(func(txApp App) error {
+ return txApp.RunInTransaction(func(txApp App) error {
+ for _, m := range r.migrationsList.Items() {
+ // skip applied
+ if r.isMigrationApplied(txApp, m.File) {
+ continue
+ }
+
+ // ignore empty Up action
+ if m.Up != nil {
+ if err := m.Up(txApp); err != nil {
+ return fmt.Errorf("Failed to apply migration %s: %w", m.File, err)
+ }
+ }
+
+ if err := r.saveAppliedMigration(txApp, m.File); err != nil {
+ return fmt.Errorf("Failed to save applied migration info for %s: %w", m.File, err)
+ }
+
+ applied = append(applied, m.File)
+ }
+
+ return nil
+ })
+ })
+
+ if err != nil {
+ return nil, err
+ }
+ return applied, nil
+}
+
+// Down reverts the last `toRevertCount` applied migrations
+// (in the order they were applied).
+//
+// On success returns list with the reverted migrations file names.
+func (r *MigrationsRunner) Down(toRevertCount int) ([]string, error) {
+ if err := r.initMigrationsTable(); err != nil {
+ return nil, err
+ }
+
+ reverted := make([]string, 0, toRevertCount)
+
+ names, appliedErr := r.lastAppliedMigrations(toRevertCount)
+ if appliedErr != nil {
+ return nil, appliedErr
+ }
+
+ err := r.app.AuxRunInTransaction(func(txApp App) error {
+ return txApp.RunInTransaction(func(txApp App) error {
+ for _, name := range names {
+ for _, m := range r.migrationsList.Items() {
+ if m.File != name {
+ continue
+ }
+
+ // revert limit reached
+ if toRevertCount-len(reverted) <= 0 {
+ return nil
+ }
+
+ // ignore empty Down action
+ if m.Down != nil {
+ if err := m.Down(txApp); err != nil {
+ return fmt.Errorf("Failed to revert migration %s: %w", m.File, err)
+ }
+ }
+
+ if err := r.saveRevertedMigration(txApp, m.File); err != nil {
+ return fmt.Errorf("Failed to save reverted migration info for %s: %w", m.File, err)
+ }
+
+ reverted = append(reverted, m.File)
+ }
+ }
+ return nil
+ })
+ })
+
+ if err != nil {
+ return nil, err
+ }
+
+ return reverted, nil
+}
+
+// RemoveMissingAppliedMigrations removes the db entries of all applied migrations
+// that are not listed in the runner's migrations list.
+func (r *MigrationsRunner) RemoveMissingAppliedMigrations() error {
+ loadedMigrations := r.migrationsList.Items()
+
+ names := make([]any, len(loadedMigrations))
+ for i, migration := range loadedMigrations {
+ names[i] = migration.File
+ }
+
+ _, err := r.app.DB().Delete(r.tableName, dbx.Not(dbx.HashExp{
+ "file": names,
+ })).Execute()
+
+ return err
+}
+
+func (r *MigrationsRunner) initMigrationsTable() error {
+ if r.inited {
+ return nil // already inited
+ }
+
+ rawQuery := fmt.Sprintf(
+ "CREATE TABLE IF NOT EXISTS {{%s}} (file VARCHAR(255) PRIMARY KEY NOT NULL, applied INTEGER NOT NULL)",
+ r.tableName,
+ )
+
+ _, err := r.app.DB().NewQuery(rawQuery).Execute()
+
+ if err == nil {
+ r.inited = true
+ }
+
+ return err
+}
+
+func (r *MigrationsRunner) isMigrationApplied(txApp App, file string) bool {
+ var exists bool
+
+ err := txApp.DB().Select("count(*)").
+ From(r.tableName).
+ Where(dbx.HashExp{"file": file}).
+ Limit(1).
+ Row(&exists)
+
+ return err == nil && exists
+}
+
+func (r *MigrationsRunner) saveAppliedMigration(txApp App, file string) error {
+ _, err := txApp.DB().Insert(r.tableName, dbx.Params{
+ "file": file,
+ "applied": time.Now().UnixMicro(),
+ }).Execute()
+
+ return err
+}
+
+func (r *MigrationsRunner) saveRevertedMigration(txApp App, file string) error {
+ _, err := txApp.DB().Delete(r.tableName, dbx.HashExp{"file": file}).Execute()
+
+ return err
+}
+
+func (r *MigrationsRunner) lastAppliedMigrations(limit int) ([]string, error) {
+ var files = make([]string, 0, limit)
+
+ loadedMigrations := r.migrationsList.Items()
+
+ names := make([]any, len(loadedMigrations))
+ for i, migration := range loadedMigrations {
+ names[i] = migration.File
+ }
+
+ err := r.app.DB().Select("file").
+ From(r.tableName).
+ Where(dbx.Not(dbx.HashExp{"applied": nil})).
+ AndWhere(dbx.HashExp{"file": names}).
+ // unify microseconds and seconds applied time for backward compatibility
+ OrderBy("substr(applied||'0000000000000000', 0, 17) DESC").
+ AndOrderBy("file DESC").
+ Limit(int64(limit)).
+ Column(&files)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return files, nil
+}
diff --git a/core/migrations_runner_test.go b/core/migrations_runner_test.go
new file mode 100644
index 00000000..247324c9
--- /dev/null
+++ b/core/migrations_runner_test.go
@@ -0,0 +1,197 @@
+package core_test
+
+import (
+ "encoding/json"
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestMigrationsRunnerUpAndDown(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ callsOrder := []string{}
+
+ l := core.MigrationsList{}
+ l.Register(func(app core.App) error {
+ callsOrder = append(callsOrder, "up2")
+ return nil
+ }, func(app core.App) error {
+ callsOrder = append(callsOrder, "down2")
+ return nil
+ }, "2_test")
+ l.Register(func(app core.App) error {
+ callsOrder = append(callsOrder, "up3")
+ return nil
+ }, func(app core.App) error {
+ callsOrder = append(callsOrder, "down3")
+ return nil
+ }, "3_test")
+ l.Register(func(app core.App) error {
+ callsOrder = append(callsOrder, "up1")
+ return nil
+ }, func(app core.App) error {
+ callsOrder = append(callsOrder, "down1")
+ return nil
+ }, "1_test")
+
+ runner := core.NewMigrationsRunner(app, l)
+
+ // simulate a migration that was already applied out of order
+ _, err := app.DB().Insert(core.DefaultMigrationsTable, dbx.Params{
+ "file": "2_test",
+ "applied": time.Now().UnixMicro(),
+ }).Execute()
+ if err != nil {
+ t.Fatalf("Failed to insert 2_test migration: %v", err)
+ }
+
+ // ---------------------------------------------------------------
+ // Up()
+ // ---------------------------------------------------------------
+
+ if _, err := runner.Up(); err != nil {
+ t.Fatal(err)
+ }
+
+ expectedUpCallsOrder := `["up1","up3"]` // skip up2 since it was applied previously
+
+ upCallsOrder, err := json.Marshal(callsOrder)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if v := string(upCallsOrder); v != expectedUpCallsOrder {
+ t.Fatalf("Expected Up() calls order %s, got %s", expectedUpCallsOrder, upCallsOrder)
+ }
+
+ // ---------------------------------------------------------------
+
+ // reset callsOrder
+ callsOrder = []string{}
+
+ // simulate a registered but not yet applied migration
+ l.Register(nil, func(app core.App) error {
+ callsOrder = append(callsOrder, "down4")
+ return nil
+ }, "4_test")
+
+ // simulate an applied migration coming from a different migrations list
+ _, err = app.DB().Insert(core.DefaultMigrationsTable, dbx.Params{
+ "file": "from_different_list",
+ "applied": time.Now().UnixMicro(),
+ }).Execute()
+ if err != nil {
+ t.Fatalf("Failed to insert from_different_list migration: %v", err)
+ }
+
+ // ---------------------------------------------------------------
+
+ // ---------------------------------------------------------------
+ // Down()
+ // ---------------------------------------------------------------
+
+ if _, err := runner.Down(2); err != nil {
+ t.Fatal(err)
+ }
+
+ expectedDownCallsOrder := `["down3","down1"]` // revert in the applied order
+
+ downCallsOrder, err := json.Marshal(callsOrder)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if v := string(downCallsOrder); v != expectedDownCallsOrder {
+ t.Fatalf("Expected Down() calls order %s, got %s", expectedDownCallsOrder, downCallsOrder)
+ }
+}
+
+func TestMigrationsRunnerRemoveMissingAppliedMigrations(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ // mock migrations history
+ for i := 1; i <= 3; i++ {
+ _, err := app.DB().Insert(core.DefaultMigrationsTable, dbx.Params{
+ "file": fmt.Sprintf("%d_test", i),
+ "applied": time.Now().UnixMicro(),
+ }).Execute()
+ if err != nil {
+ t.Fatal(err)
+ }
+ }
+
+ if !isMigrationApplied(app, "2_test") {
+ t.Fatalf("Expected 2_test migration to be applied")
+ }
+
+ // create a runner without 2_test to mock deleted migration
+ l := core.MigrationsList{}
+ l.Register(func(app core.App) error {
+ return nil
+ }, func(app core.App) error {
+ return nil
+ }, "1_test")
+ l.Register(func(app core.App) error {
+ return nil
+ }, func(app core.App) error {
+ return nil
+ }, "3_test")
+
+ r := core.NewMigrationsRunner(app, l)
+
+ if err := r.RemoveMissingAppliedMigrations(); err != nil {
+ t.Fatalf("Failed to remove missing applied migrations: %v", err)
+ }
+
+ if isMigrationApplied(app, "2_test") {
+ t.Fatalf("Expected 2_test migration to NOT be applied")
+ }
+}
+
+func isMigrationApplied(app core.App, file string) bool {
+ var exists bool
+
+ err := app.DB().Select("count(*)").
+ From(core.DefaultMigrationsTable).
+ Where(dbx.HashExp{"file": file}).
+ Limit(1).
+ Row(&exists)
+
+ return err == nil && exists
+}
+
+// // -------------------------------------------------------------------
+
+// type testDB struct {
+// *dbx.DB
+// CalledQueries []string
+// }
+
+// // NB! Don't forget to call `db.Close()` at the end of the test.
+// func createTestDB() (*testDB, error) {
+// sqlDB, err := sql.Open("sqlite", ":memory:")
+// if err != nil {
+// return nil, err
+// }
+
+// db := testDB{DB: dbx.NewFromDB(sqlDB, "sqlite")}
+// db.QueryLogFunc = func(ctx context.Context, t time.Duration, sql string, rows *sql.Rows, err error) {
+// db.CalledQueries = append(db.CalledQueries, sql)
+// }
+// db.ExecLogFunc = func(ctx context.Context, t time.Duration, sql string, result sql.Result, err error) {
+// db.CalledQueries = append(db.CalledQueries, sql)
+// }
+
+// return &db, nil
+// }
diff --git a/core/otp_model.go b/core/otp_model.go
new file mode 100644
index 00000000..dd4fc52e
--- /dev/null
+++ b/core/otp_model.go
@@ -0,0 +1,113 @@
+package core
+
+import (
+ "context"
+ "errors"
+ "time"
+
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+const CollectionNameOTPs = "_otps"
+
+var (
+ _ Model = (*OTP)(nil)
+ _ PreValidator = (*OTP)(nil)
+ _ RecordProxy = (*OTP)(nil)
+)
+
+// OTP defines a Record proxy for working with the otps collection.
+type OTP struct {
+ *Record
+}
+
+// NewOTP instantiates and returns a new blank *OTP model.
+//
+// Example usage:
+//
+// otp := core.NewOTP(app)
+// otp.SetRecordRef(user.Id)
+// otp.SetCollectionRef(user.Collection().Id)
+// otp.SetPassword(security.RandomStringWithAlphabet(6, "1234567890"))
+// app.Save(otp)
+func NewOTP(app App) *OTP {
+ m := &OTP{}
+
+ c, err := app.FindCachedCollectionByNameOrId(CollectionNameOTPs)
+ if err != nil {
+ // this is just to make tests easier since otp is a system collection and it is expected to always be accessible
+ // (note: the loaded record is further checked in OTP.PreValidate())
+ c = NewBaseCollection("__invalid__")
+ }
+
+ m.Record = NewRecord(c)
+
+ return m
+}
+
+// PreValidate implements the [PreValidator] interface and checks
+// whether the proxy is properly loaded.
+func (m *OTP) PreValidate(ctx context.Context, app App) error {
+ if m.Record == nil || m.Record.Collection().Name != CollectionNameOTPs {
+ return errors.New("missing or invalid otp ProxyRecord")
+ }
+
+ return nil
+}
+
+// ProxyRecord returns the proxied Record model.
+func (m *OTP) ProxyRecord() *Record {
+ return m.Record
+}
+
+// SetProxyRecord loads the specified record model into the current proxy.
+func (m *OTP) SetProxyRecord(record *Record) {
+ m.Record = record
+}
+
+// CollectionRef returns the "collectionRef" field value.
+func (m *OTP) CollectionRef() string {
+ return m.GetString("collectionRef")
+}
+
+// SetCollectionRef updates the "collectionRef" record field value.
+func (m *OTP) SetCollectionRef(collectionId string) {
+ m.Set("collectionRef", collectionId)
+}
+
+// RecordRef returns the "recordRef" record field value.
+func (m *OTP) RecordRef() string {
+ return m.GetString("recordRef")
+}
+
+// SetRecordRef updates the "recordRef" record field value.
+func (m *OTP) SetRecordRef(recordId string) {
+ m.Set("recordRef", recordId)
+}
+
+// Created returns the "created" record field value.
+func (m *OTP) Created() types.DateTime {
+ return m.GetDateTime("created")
+}
+
+// Updated returns the "updated" record field value.
+func (m *OTP) Updated() types.DateTime {
+ return m.GetDateTime("updated")
+}
+
+// HasExpired checks whether the OTP is expired, i.e. whether more
+// than maxElapsed time has passed since its creation.
+func (m *OTP) HasExpired(maxElapsed time.Duration) bool {
+ return time.Since(m.Created().Time()) > maxElapsed
+}
+
+func (app *BaseApp) registerOTPHooks() {
+ recordRefHooks[*OTP](app, CollectionNameOTPs, CollectionTypeAuth)
+
+ // run every hour to clean up expired OTP sessions
+ app.Cron().Add("__otpsCleanup__", "0 * * * *", func() {
+ if err := app.DeleteExpiredOTPs(); err != nil {
+ app.Logger().Warn("Failed to delete expired OTP sessions", "error", err)
+ }
+ })
+}
diff --git a/core/otp_model_test.go b/core/otp_model_test.go
new file mode 100644
index 00000000..fd670c5e
--- /dev/null
+++ b/core/otp_model_test.go
@@ -0,0 +1,278 @@
+package core_test
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func TestNewOTP(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ otp := core.NewOTP(app)
+
+ if otp.Collection().Name != core.CollectionNameOTPs {
+ t.Fatalf("Expected record with %q collection, got %q", core.CollectionNameOTPs, otp.Collection().Name)
+ }
+}
+
+func TestOTPProxyRecord(t *testing.T) {
+ t.Parallel()
+
+ record := core.NewRecord(core.NewBaseCollection("test"))
+ record.Id = "test_id"
+
+ otp := core.OTP{}
+ otp.SetProxyRecord(record)
+
+ if otp.ProxyRecord() == nil || otp.ProxyRecord().Id != record.Id {
+ t.Fatalf("Expected proxy record with id %q, got %v", record.Id, otp.ProxyRecord())
+ }
+}
+
+func TestOTPRecordRef(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ otp := core.NewOTP(app)
+
+ testValues := []string{"test_1", "test2", ""}
+ for i, testValue := range testValues {
+ t.Run(fmt.Sprintf("%d_%q", i, testValue), func(t *testing.T) {
+ otp.SetRecordRef(testValue)
+
+ if v := otp.RecordRef(); v != testValue {
+ t.Fatalf("Expected getter %q, got %q", testValue, v)
+ }
+
+ if v := otp.GetString("recordRef"); v != testValue {
+ t.Fatalf("Expected field value %q, got %q", testValue, v)
+ }
+ })
+ }
+}
+
+func TestOTPCollectionRef(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ otp := core.NewOTP(app)
+
+ testValues := []string{"test_1", "test2", ""}
+ for i, testValue := range testValues {
+ t.Run(fmt.Sprintf("%d_%q", i, testValue), func(t *testing.T) {
+ otp.SetCollectionRef(testValue)
+
+ if v := otp.CollectionRef(); v != testValue {
+ t.Fatalf("Expected getter %q, got %q", testValue, v)
+ }
+
+ if v := otp.GetString("collectionRef"); v != testValue {
+ t.Fatalf("Expected field value %q, got %q", testValue, v)
+ }
+ })
+ }
+}
+
+func TestOTPCreated(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ otp := core.NewOTP(app)
+
+ if v := otp.Created().String(); v != "" {
+ t.Fatalf("Expected empty created, got %q", v)
+ }
+
+ now := types.NowDateTime()
+ otp.SetRaw("created", now)
+
+ if v := otp.Created().String(); v != now.String() {
+ t.Fatalf("Expected %q created, got %q", now.String(), v)
+ }
+}
+
+func TestOTPUpdated(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ otp := core.NewOTP(app)
+
+ if v := otp.Updated().String(); v != "" {
+ t.Fatalf("Expected empty updated, got %q", v)
+ }
+
+ now := types.NowDateTime()
+ otp.SetRaw("updated", now)
+
+ if v := otp.Updated().String(); v != now.String() {
+ t.Fatalf("Expected %q updated, got %q", now.String(), v)
+ }
+}
+
+func TestOTPHasExpired(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ now := types.NowDateTime()
+
+ otp := core.NewOTP(app)
+ otp.SetRaw("created", now.Add(-5*time.Minute))
+
+ scenarios := []struct {
+ maxElapsed time.Duration
+ expected bool
+ }{
+ {0 * time.Minute, true},
+ {3 * time.Minute, true},
+ {5 * time.Minute, true},
+ {6 * time.Minute, false},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.maxElapsed.String()), func(t *testing.T) {
+ result := otp.HasExpired(s.maxElapsed)
+
+ if result != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
+
+func TestOTPPreValidate(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ otpsCol, err := app.FindCollectionByNameOrId(core.CollectionNameOTPs)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ t.Run("no proxy record", func(t *testing.T) {
+ otp := &core.OTP{}
+
+ if err := app.Validate(otp); err == nil {
+ t.Fatal("Expected collection validation error")
+ }
+ })
+
+ t.Run("non-OTP collection", func(t *testing.T) {
+ otp := &core.OTP{}
+ otp.SetProxyRecord(core.NewRecord(core.NewBaseCollection("invalid")))
+ otp.SetRecordRef(user.Id)
+ otp.SetCollectionRef(user.Collection().Id)
+ otp.SetPassword("test123")
+
+ if err := app.Validate(otp); err == nil {
+ t.Fatal("Expected collection validation error")
+ }
+ })
+
+ t.Run("OTP collection", func(t *testing.T) {
+ otp := &core.OTP{}
+ otp.SetProxyRecord(core.NewRecord(otpsCol))
+ otp.SetRecordRef(user.Id)
+ otp.SetCollectionRef(user.Collection().Id)
+ otp.SetPassword("test123")
+
+ if err := app.Validate(otp); err != nil {
+ t.Fatalf("Expected nil validation error, got %v", err)
+ }
+ })
+}
+
+func TestOTPValidateHook(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ demo1, err := app.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ name string
+ otp func() *core.OTP
+ expectErrors []string
+ }{
+ {
+ "empty",
+ func() *core.OTP {
+ return core.NewOTP(app)
+ },
+ []string{"collectionRef", "recordRef", "password"},
+ },
+ {
+ "non-auth collection",
+ func() *core.OTP {
+ otp := core.NewOTP(app)
+ otp.SetCollectionRef(demo1.Collection().Id)
+ otp.SetRecordRef(demo1.Id)
+ otp.SetPassword("test123")
+ return otp
+ },
+ []string{"collectionRef"},
+ },
+ {
+ "missing record id",
+ func() *core.OTP {
+ otp := core.NewOTP(app)
+ otp.SetCollectionRef(user.Collection().Id)
+ otp.SetRecordRef("missing")
+ otp.SetPassword("test123")
+ return otp
+ },
+ []string{"recordRef"},
+ },
+ {
+ "valid ref",
+ func() *core.OTP {
+ otp := core.NewOTP(app)
+ otp.SetCollectionRef(user.Collection().Id)
+ otp.SetRecordRef(user.Id)
+ otp.SetPassword("test123")
+ return otp
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ errs := app.Validate(s.otp())
+ tests.TestValidationErrors(t, errs, s.expectErrors)
+ })
+ }
+}
diff --git a/core/otp_query.go b/core/otp_query.go
new file mode 100644
index 00000000..8b2b4a94
--- /dev/null
+++ b/core/otp_query.go
@@ -0,0 +1,117 @@
+package core
+
+import (
+ "errors"
+ "time"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+// FindAllOTPsByRecord returns all OTP models linked to the provided auth record.
+func (app *BaseApp) FindAllOTPsByRecord(authRecord *Record) ([]*OTP, error) {
+ result := []*OTP{}
+
+ err := app.RecordQuery(CollectionNameOTPs).
+ AndWhere(dbx.HashExp{
+ "collectionRef": authRecord.Collection().Id,
+ "recordRef": authRecord.Id,
+ }).
+ OrderBy("created DESC").
+ All(&result)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// FindAllOTPsByCollection returns all OTP models linked to the provided collection.
+func (app *BaseApp) FindAllOTPsByCollection(collection *Collection) ([]*OTP, error) {
+ result := []*OTP{}
+
+ err := app.RecordQuery(CollectionNameOTPs).
+ AndWhere(dbx.HashExp{"collectionRef": collection.Id}).
+ OrderBy("created DESC").
+ All(&result)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// FindOTPById returns a single OTP model by its id.
+func (app *BaseApp) FindOTPById(id string) (*OTP, error) {
+ result := &OTP{}
+
+ err := app.RecordQuery(CollectionNameOTPs).
+ AndWhere(dbx.HashExp{"id": id}).
+ Limit(1).
+ One(result)
+
+ if err != nil {
+ return nil, err
+ }
+
+ return result, nil
+}
+
+// DeleteAllOTPsByRecord deletes all OTP models associated with the provided record.
+//
+// Returns a combined error with the failed deletes.
+func (app *BaseApp) DeleteAllOTPsByRecord(authRecord *Record) error {
+ models, err := app.FindAllOTPsByRecord(authRecord)
+ if err != nil {
+ return err
+ }
+
+ var errs []error
+ for _, m := range models {
+ if err := app.Delete(m); err != nil {
+ errs = append(errs, err)
+ }
+ }
+ if len(errs) > 0 {
+ return errors.Join(errs...)
+ }
+
+ return nil
+}
+
+// DeleteExpiredOTPs deletes the expired OTPs for all auth collections.
+func (app *BaseApp) DeleteExpiredOTPs() error {
+ authCollections, err := app.FindAllCollections(CollectionTypeAuth)
+ if err != nil {
+ return err
+ }
+
+ // note: perform even if OTP is disabled to ensure that there are no dangling old records
+ for _, collection := range authCollections {
+ minValidDate, err := types.ParseDateTime(time.Now().Add(-1 * collection.OTP.DurationTime()))
+ if err != nil {
+ return err
+ }
+
+ items := []*Record{}
+
+ err = app.RecordQuery(CollectionNameOTPs).
+ AndWhere(dbx.HashExp{"collectionRef": collection.Id}).
+ AndWhere(dbx.NewExp("[[created]] < {:date}", dbx.Params{"date": minValidDate})).
+ All(&items)
+ if err != nil {
+ return err
+ }
+
+ for _, item := range items {
+ err = app.Delete(item)
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
diff --git a/core/otp_query_test.go b/core/otp_query_test.go
new file mode 100644
index 00000000..440ffb96
--- /dev/null
+++ b/core/otp_query_test.go
@@ -0,0 +1,310 @@
+package core_test
+
+import (
+ "fmt"
+ "slices"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestFindAllOTPsByRecord(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ if err := tests.StubOTPRecords(app); err != nil {
+ t.Fatal(err)
+ }
+
+ demo1, err := app.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser2, err := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test2@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser4, err := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test4@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user1, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ record *core.Record
+ expected []string
+ }{
+ {demo1, nil},
+ {superuser2, []string{"superuser2_0", "superuser2_1", "superuser2_3", "superuser2_2", "superuser2_4"}},
+ {superuser4, nil},
+ {user1, []string{"user1_0"}},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.record.Collection().Name+"_"+s.record.Id, func(t *testing.T) {
+ result, err := app.FindAllOTPsByRecord(s.record)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(result) != len(s.expected) {
+ t.Fatalf("Expected total otps %d, got %d", len(s.expected), len(result))
+ }
+
+ for i, id := range s.expected {
+ if result[i].Id != id {
+ t.Errorf("[%d] Expected id %q, got %q", i, id, result[i].Id)
+ }
+ }
+ })
+ }
+}
+
+func TestFindAllOTPsByCollection(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ if err := tests.StubOTPRecords(app); err != nil {
+ t.Fatal(err)
+ }
+
+ demo1, err := app.FindCollectionByNameOrId("demo1")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superusers, err := app.FindCollectionByNameOrId(core.CollectionNameSuperusers)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ clients, err := app.FindCollectionByNameOrId("clients")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ users, err := app.FindCollectionByNameOrId("users")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ collection *core.Collection
+ expected []string
+ }{
+ {demo1, nil},
+ {superusers, []string{
+ "superuser2_0",
+ "superuser2_1",
+ "superuser2_3",
+ "superuser3_0",
+ "superuser3_1",
+ "superuser2_2",
+ "superuser2_4",
+ }},
+ {clients, nil},
+ {users, []string{"user1_0"}},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.collection.Name, func(t *testing.T) {
+ result, err := app.FindAllOTPsByCollection(s.collection)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(result) != len(s.expected) {
+ t.Fatalf("Expected total otps %d, got %d", len(s.expected), len(result))
+ }
+
+ for i, id := range s.expected {
+ if result[i].Id != id {
+ t.Errorf("[%d] Expected id %q, got %q", i, id, result[i].Id)
+ }
+ }
+ })
+ }
+}
+
+func TestFindOTPById(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ if err := tests.StubOTPRecords(app); err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ id string
+ expectError bool
+ }{
+ {"", true},
+ {"84nmscqy84lsi1t", true}, // non-otp id
+ {"superuser2_0", false},
+ {"superuser2_4", false}, // expired
+ {"user1_0", false},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.id, func(t *testing.T) {
+ result, err := app.FindOTPById(s.id)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if hasErr {
+ return
+ }
+
+ if result.Id != s.id {
+ t.Fatalf("Expected record with id %q, got %q", s.id, result.Id)
+ }
+ })
+ }
+}
+
+func TestDeleteAllOTPsByRecord(t *testing.T) {
+ t.Parallel()
+
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ demo1, err := testApp.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser2, err := testApp.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test2@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser4, err := testApp.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test4@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user1, err := testApp.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ record *core.Record
+ deletedIds []string
+ }{
+ {demo1, nil}, // non-auth record
+ {superuser2, []string{"superuser2_0", "superuser2_1", "superuser2_3", "superuser2_2", "superuser2_4"}},
+ {superuser4, nil},
+ {user1, []string{"user1_0"}},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s_%s", i, s.record.Collection().Name, s.record.Id), func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ if err := tests.StubOTPRecords(app); err != nil {
+ t.Fatal(err)
+ }
+
+ deletedIds := []string{}
+ app.OnRecordAfterDeleteSuccess().BindFunc(func(e *core.RecordEvent) error {
+ deletedIds = append(deletedIds, e.Record.Id)
+ return e.Next()
+ })
+
+ err := app.DeleteAllOTPsByRecord(s.record)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(deletedIds) != len(s.deletedIds) {
+ t.Fatalf("Expected deleted ids\n%v\ngot\n%v", s.deletedIds, deletedIds)
+ }
+
+ for _, id := range s.deletedIds {
+ if !slices.Contains(deletedIds, id) {
+ t.Errorf("Expected to find deleted id %q in %v", id, deletedIds)
+ }
+ }
+ })
+ }
+}
+
+func TestDeleteExpiredOTPs(t *testing.T) {
+ t.Parallel()
+
+ checkDeletedIds := func(app core.App, t *testing.T, expectedDeletedIds []string) {
+ if err := tests.StubOTPRecords(app); err != nil {
+ t.Fatal(err)
+ }
+
+ deletedIds := []string{}
+ app.OnRecordAfterDeleteSuccess().BindFunc(func(e *core.RecordEvent) error {
+ deletedIds = append(deletedIds, e.Record.Id)
+ return e.Next()
+ })
+
+ if err := app.DeleteExpiredOTPs(); err != nil {
+ t.Fatal(err)
+ }
+
+ if len(deletedIds) != len(expectedDeletedIds) {
+ t.Fatalf("Expected deleted ids\n%v\ngot\n%v", expectedDeletedIds, deletedIds)
+ }
+
+ for _, id := range expectedDeletedIds {
+ if !slices.Contains(deletedIds, id) {
+ t.Errorf("Expected to find deleted id %q in %v", id, deletedIds)
+ }
+ }
+ }
+
+ t.Run("default test collections", func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ checkDeletedIds(app, t, []string{
+ "user1_0",
+ "superuser2_2",
+ "superuser2_4",
+ })
+ })
+
+ t.Run("otp collection duration mock", func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ superusers, err := app.FindCollectionByNameOrId(core.CollectionNameSuperusers)
+ if err != nil {
+ t.Fatal(err)
+ }
+ superusers.OTP.Duration = 60
+ if err := app.Save(superusers); err != nil {
+ t.Fatalf("Failed to mock superusers otp duration: %v", err)
+ }
+
+ checkDeletedIds(app, t, []string{
+ "user1_0",
+ "superuser2_2",
+ "superuser2_4",
+ "superuser3_1",
+ })
+ })
+}
diff --git a/resolvers/record_field_resolver.go b/core/record_field_resolver.go
similarity index 58%
rename from resolvers/record_field_resolver.go
rename to core/record_field_resolver.go
index e8e5a037..9caf9dbd 100644
--- a/resolvers/record_field_resolver.go
+++ b/core/record_field_resolver.go
@@ -1,14 +1,13 @@
-package resolvers
+package core
import (
"encoding/json"
+ "errors"
"fmt"
"strconv"
"strings"
"github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
"github.com/pocketbase/pocketbase/tools/search"
"github.com/pocketbase/pocketbase/tools/security"
"github.com/spf13/cast"
@@ -21,32 +20,9 @@ const (
lengthModifier string = "length"
)
-// list of auth filter fields that don't require join with the auth
-// collection or any other extra checks to be resolved.
-var plainRequestAuthFields = []string{
- "@request.auth." + schema.FieldNameId,
- "@request.auth." + schema.FieldNameCollectionId,
- "@request.auth." + schema.FieldNameCollectionName,
- "@request.auth." + schema.FieldNameUsername,
- "@request.auth." + schema.FieldNameEmail,
- "@request.auth." + schema.FieldNameEmailVisibility,
- "@request.auth." + schema.FieldNameVerified,
- "@request.auth." + schema.FieldNameCreated,
- "@request.auth." + schema.FieldNameUpdated,
-}
-
// ensure that `search.FieldResolver` interface is implemented
var _ search.FieldResolver = (*RecordFieldResolver)(nil)
-// CollectionsFinder defines a common interface for retrieving
-// collections and other related models.
-//
-// The interface at the moment is primarily used to avoid circular
-// dependency with the daos.Dao package.
-type CollectionsFinder interface {
- FindCollectionByNameOrId(collectionNameOrId string) (*models.Collection, error)
-}
-
// RecordFieldResolver defines a custom search resolver struct for
// managing Record model search fields.
//
@@ -54,7 +30,7 @@ type CollectionsFinder interface {
// Example:
//
// resolver := resolvers.NewRecordFieldResolver(
-// app.Dao(),
+// app,
// myCollection,
// &models.RequestInfo{...},
// true,
@@ -62,39 +38,36 @@ type CollectionsFinder interface {
// provider := search.NewProvider(resolver)
// ...
type RecordFieldResolver struct {
- dao CollectionsFinder
- baseCollection *models.Collection
- requestInfo *models.RequestInfo
+ app App
+ baseCollection *Collection
+ requestInfo *RequestInfo
staticRequestInfo map[string]any
allowedFields []string
- loadedCollections []*models.Collection
joins []*join
allowHiddenFields bool
}
// NewRecordFieldResolver creates and initializes a new `RecordFieldResolver`.
func NewRecordFieldResolver(
- dao CollectionsFinder,
- baseCollection *models.Collection,
- requestInfo *models.RequestInfo,
- // @todo consider moving per filter basis
+ app App,
+ baseCollection *Collection,
+ requestInfo *RequestInfo,
allowHiddenFields bool,
) *RecordFieldResolver {
r := &RecordFieldResolver{
- dao: dao,
+ app: app,
baseCollection: baseCollection,
requestInfo: requestInfo,
- allowHiddenFields: allowHiddenFields,
+ allowHiddenFields: allowHiddenFields, // note: it is not based only on the requestInfo.auth since it could be used by a non-request internal method
joins: []*join{},
- loadedCollections: []*models.Collection{baseCollection},
allowedFields: []string{
`^\w+[\w\.\:]*$`,
`^\@request\.context$`,
`^\@request\.method$`,
`^\@request\.auth\.[\w\.\:]*\w+$`,
- `^\@request\.data\.[\w\.\:]*\w+$`,
+ `^\@request\.body\.[\w\.\:]*\w+$`,
`^\@request\.query\.[\w\.\:]*\w+$`,
- `^\@request\.headers\.\w+$`,
+ `^\@request\.headers\.[\w\.\:]*\w+$`,
`^\@collection\.\w+(\:\w+)?\.[\w\.\:]*\w+$`,
},
}
@@ -105,13 +78,14 @@ func NewRecordFieldResolver(
r.staticRequestInfo["method"] = r.requestInfo.Method
r.staticRequestInfo["query"] = r.requestInfo.Query
r.staticRequestInfo["headers"] = r.requestInfo.Headers
- r.staticRequestInfo["data"] = r.requestInfo.Data
+ r.staticRequestInfo["body"] = r.requestInfo.Body
r.staticRequestInfo["auth"] = nil
- if r.requestInfo.AuthRecord != nil {
- authData := r.requestInfo.AuthRecord.PublicExport()
- // always add the record email no matter of the emailVisibility field
- authData[schema.FieldNameEmail] = r.requestInfo.AuthRecord.Email()
- r.staticRequestInfo["auth"] = authData
+ if r.requestInfo.Auth != nil {
+ authClone := r.requestInfo.Auth.Clone()
+ r.staticRequestInfo["auth"] = authClone.
+ Unhide(authClone.Collection().Fields.FieldNames()...).
+ IgnoreEmailVisibility(true).
+ PublicExport()
}
}
@@ -150,10 +124,10 @@ func (r *RecordFieldResolver) UpdateQuery(query *dbx.SelectQuery) error {
// @request.query.filter
// @request.headers.x_token
// @request.auth.someRelation.name
-// @request.data.someRelation.name
-// @request.data.someField
-// @request.data.someSelect:each
-// @request.data.someField:isset
+// @request.body.someRelation.name
+// @request.body.someField
+// @request.body.someSelect:each
+// @request.body.someField:isset
// @collection.product.name
func (r *RecordFieldResolver) Resolve(fieldName string) (*search.ResolverResult, error) {
return parseAndRun(fieldName, r)
@@ -161,7 +135,7 @@ func (r *RecordFieldResolver) Resolve(fieldName string) (*search.ResolverResult,
func (r *RecordFieldResolver) resolveStaticRequestField(path ...string) (*search.ResolverResult, error) {
if len(path) == 0 {
- return nil, fmt.Errorf("at least one path key should be provided")
+ return nil, errors.New("at least one path key should be provided")
}
lastProp, modifier, err := splitModifier(path[len(path)-1])
@@ -172,7 +146,10 @@ func (r *RecordFieldResolver) resolveStaticRequestField(path ...string) (*search
path[len(path)-1] = lastProp
// extract value
- resultVal, err := extractNestedMapVal(r.staticRequestInfo, path...)
+ resultVal, err := extractNestedVal(r.staticRequestInfo, path...)
+ if err != nil {
+ r.app.Logger().Debug("resolveStaticRequestField graceful fallback", "error", err.Error())
+ }
if modifier == issetModifier {
if err != nil {
@@ -191,8 +168,8 @@ func (r *RecordFieldResolver) resolveStaticRequestField(path ...string) (*search
// check if it is a number field and explicitly try to cast to
// float in case of a numeric string value was used
// (this usually the case when the data is from a multipart/form-data request)
- field := r.baseCollection.Schema.GetFieldByName(path[len(path)-1])
- if field != nil && field.Type == schema.FieldTypeNumber {
+ field := r.baseCollection.Fields.GetByName(path[len(path)-1])
+ if field != nil && field.Type() == FieldTypeNumber {
if nv, err := strconv.ParseFloat(v, 64); err == nil {
resultVal = nv
}
@@ -216,7 +193,7 @@ func (r *RecordFieldResolver) resolveStaticRequestField(path ...string) (*search
resultVal = val
}
- placeholder := "f" + security.PseudorandomString(5)
+ placeholder := "f" + security.PseudorandomString(6)
return &search.ResolverResult{
Identifier: "{:" + placeholder + "}",
@@ -224,22 +201,12 @@ func (r *RecordFieldResolver) resolveStaticRequestField(path ...string) (*search
}, nil
}
-func (r *RecordFieldResolver) loadCollection(collectionNameOrId string) (*models.Collection, error) {
- // return already loaded
- for _, collection := range r.loadedCollections {
- if collection.Id == collectionNameOrId || strings.EqualFold(collection.Name, collectionNameOrId) {
- return collection, nil
- }
+func (r *RecordFieldResolver) loadCollection(collectionNameOrId string) (*Collection, error) {
+ if collectionNameOrId == r.baseCollection.Name || collectionNameOrId == r.baseCollection.Id {
+ return r.baseCollection, nil
}
- // load collection
- collection, err := r.dao.FindCollectionByNameOrId(collectionNameOrId)
- if err != nil {
- return nil, err
- }
- r.loadedCollections = append(r.loadedCollections, collection)
-
- return collection, nil
+ return getCollectionByModelOrIdentifier(r.app, collectionNameOrId)
}
func (r *RecordFieldResolver) registerJoin(tableName string, tableAlias string, on dbx.Expression) {
@@ -261,11 +228,93 @@ func (r *RecordFieldResolver) registerJoin(tableName string, tableAlias string,
r.joins = append(r.joins, join)
}
-func extractNestedMapVal(m map[string]any, keys ...string) (any, error) {
+type mapExtractor interface {
+ AsMap() map[string]any
+}
+
+func extractNestedVal(rawData any, keys ...string) (any, error) {
if len(keys) == 0 {
- return nil, fmt.Errorf("at least one key should be provided")
+ return nil, errors.New("at least one key should be provided")
}
+ switch m := rawData.(type) {
+ // maps
+ case map[string]any:
+ return mapVal(m, keys...)
+ case map[string]string:
+ return mapVal(m, keys...)
+ case map[string]bool:
+ return mapVal(m, keys...)
+ case map[string]float32:
+ return mapVal(m, keys...)
+ case map[string]float64:
+ return mapVal(m, keys...)
+ case map[string]int:
+ return mapVal(m, keys...)
+ case map[string]int8:
+ return mapVal(m, keys...)
+ case map[string]int16:
+ return mapVal(m, keys...)
+ case map[string]int32:
+ return mapVal(m, keys...)
+ case map[string]int64:
+ return mapVal(m, keys...)
+ case map[string]uint:
+ return mapVal(m, keys...)
+ case map[string]uint8:
+ return mapVal(m, keys...)
+ case map[string]uint16:
+ return mapVal(m, keys...)
+ case map[string]uint32:
+ return mapVal(m, keys...)
+ case map[string]uint64:
+ return mapVal(m, keys...)
+ case mapExtractor:
+ return mapVal(m.AsMap(), keys...)
+
+ // slices
+ case []string:
+ return arrVal(m, keys...)
+ case []bool:
+ return arrVal(m, keys...)
+ case []float32:
+ return arrVal(m, keys...)
+ case []float64:
+ return arrVal(m, keys...)
+ case []int:
+ return arrVal(m, keys...)
+ case []int8:
+ return arrVal(m, keys...)
+ case []int16:
+ return arrVal(m, keys...)
+ case []int32:
+ return arrVal(m, keys...)
+ case []int64:
+ return arrVal(m, keys...)
+ case []uint:
+ return arrVal(m, keys...)
+ case []uint8:
+ return arrVal(m, keys...)
+ case []uint16:
+ return arrVal(m, keys...)
+ case []uint32:
+ return arrVal(m, keys...)
+ case []uint64:
+ return arrVal(m, keys...)
+ case []mapExtractor:
+ extracted := make([]any, len(m))
+ for i, v := range m {
+ extracted[i] = v.AsMap()
+ }
+ return arrVal(extracted, keys...)
+ case []any:
+ return arrVal(m, keys...)
+ default:
+ return nil, fmt.Errorf("expected map or array, got %#v", rawData)
+ }
+}
+
+func mapVal[T any](m map[string]T, keys ...string) (any, error) {
result, ok := m[keys[0]]
if !ok {
return nil, fmt.Errorf("invalid key path - missing key %q", keys[0])
@@ -276,11 +325,23 @@ func extractNestedMapVal(m map[string]any, keys ...string) (any, error) {
return result, nil
}
- if m, ok = result.(map[string]any); !ok {
- return nil, fmt.Errorf("expected map, got %#v", result)
+ return extractNestedVal(result, keys[1:]...)
+}
+
+func arrVal[T any](m []T, keys ...string) (any, error) {
+ idx, err := strconv.Atoi(keys[0])
+ if err != nil || idx < 0 || idx >= len(m) {
+ return nil, fmt.Errorf("invalid key path - invalid or missing array index %q", keys[0])
}
- return extractNestedMapVal(m, keys[1:]...)
+ result := m[idx]
+
+ // end key reached
+ if len(keys) == 1 {
+ return result, nil
+ }
+
+ return extractNestedVal(result, keys[1:]...)
}
func splitModifier(combined string) (string, string, error) {
diff --git a/resolvers/multi_match_subquery.go b/core/record_field_resolver_multi_match.go
similarity index 98%
rename from resolvers/multi_match_subquery.go
rename to core/record_field_resolver_multi_match.go
index 5169903a..bb51efdf 100644
--- a/resolvers/multi_match_subquery.go
+++ b/core/record_field_resolver_multi_match.go
@@ -1,4 +1,4 @@
-package resolvers
+package core
import (
"fmt"
diff --git a/resolvers/record_field_resolve_runner.go b/core/record_field_resolver_runner.go
similarity index 63%
rename from resolvers/record_field_resolve_runner.go
rename to core/record_field_resolver_runner.go
index b4a9b6ab..46c84f1d 100644
--- a/resolvers/record_field_resolve_runner.go
+++ b/core/record_field_resolver_runner.go
@@ -1,15 +1,15 @@
-package resolvers
+package core
import (
"encoding/json"
+ "errors"
"fmt"
+ "reflect"
"regexp"
"strconv"
"strings"
"github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
"github.com/pocketbase/pocketbase/tools/dbutils"
"github.com/pocketbase/pocketbase/tools/inflector"
"github.com/pocketbase/pocketbase/tools/list"
@@ -20,6 +20,17 @@ import (
// maxNestedRels defines the max allowed nested relations depth.
const maxNestedRels = 6
+// list of auth filter fields that don't require join with the auth
+// collection or any other extra checks to be resolved.
+var plainRequestAuthFields = map[string]struct{}{
+ "@request.auth." + FieldNameId: {},
+ "@request.auth." + FieldNameCollectionId: {},
+ "@request.auth." + FieldNameCollectionName: {},
+ "@request.auth." + FieldNameEmail: {},
+ "@request.auth." + FieldNameEmailVisibility: {},
+ "@request.auth." + FieldNameVerified: {},
+}
+
// parseAndRun starts a new one-off RecordFieldResolver.Resolve execution.
func parseAndRun(fieldName string, resolver *RecordFieldResolver) (*search.ResolverResult, error) {
r := &runner{
@@ -49,7 +60,7 @@ type runner struct {
func (r *runner) run() (*search.ResolverResult, error) {
if r.used {
- return nil, fmt.Errorf("the runner was already used")
+ return nil, errors.New("the runner was already used")
}
if len(r.resolver.allowedFields) > 0 && !list.ExistInSliceWithRegex(r.fieldName, r.resolver.allowedFields) {
@@ -77,32 +88,30 @@ func (r *runner) run() (*search.ResolverResult, error) {
return r.processRequestAuthField()
}
- if strings.HasPrefix(r.fieldName, "@request.data.") && len(r.activeProps) > 2 {
+ if strings.HasPrefix(r.fieldName, "@request.body.") && len(r.activeProps) > 2 {
name, modifier, err := splitModifier(r.activeProps[2])
if err != nil {
return nil, err
}
- dataField := r.resolver.baseCollection.Schema.GetFieldByName(name)
- if dataField == nil {
+ bodyField := r.resolver.baseCollection.Fields.GetByName(name)
+ if bodyField == nil {
return r.resolver.resolveStaticRequestField(r.activeProps[1:]...)
}
- dataField.InitOptions()
-
- // check for data relation field
- if dataField.Type == schema.FieldTypeRelation && len(r.activeProps) > 3 {
- return r.processRequestInfoRelationField(dataField)
+ // check for body relation field
+ if bodyField.Type() == FieldTypeRelation && len(r.activeProps) > 3 {
+ return r.processRequestInfoRelationField(bodyField)
}
- // check for data arrayble fields ":each" modifier
- if modifier == eachModifier && list.ExistInSlice(dataField.Type, schema.ArraybleFieldTypes()) && len(r.activeProps) == 3 {
- return r.processRequestInfoEachModifier(dataField)
+ // check for body arrayable fields ":each" modifier
+ if modifier == eachModifier && len(r.activeProps) == 3 {
+ return r.processRequestInfoEachModifier(bodyField)
}
- // check for data arrayble fields ":length" modifier
- if modifier == lengthModifier && list.ExistInSlice(dataField.Type, schema.ArraybleFieldTypes()) && len(r.activeProps) == 3 {
- return r.processRequestInfoLengthModifier(dataField)
+ // check for body arrayable fields ":length" modifier
+ if modifier == lengthModifier && len(r.activeProps) == 3 {
+ return r.processRequestInfoLengthModifier(bodyField)
}
}
@@ -184,18 +193,17 @@ func (r *runner) processCollectionField() (*search.ResolverResult, error) {
func (r *runner) processRequestAuthField() (*search.ResolverResult, error) {
// plain auth field
// ---
- if list.ExistInSlice(r.fieldName, plainRequestAuthFields) {
+ if _, ok := plainRequestAuthFields[r.fieldName]; ok {
return r.resolver.resolveStaticRequestField(r.activeProps[1:]...)
}
// resolve the auth collection field
// ---
- if r.resolver.requestInfo == nil || r.resolver.requestInfo.AuthRecord == nil || r.resolver.requestInfo.AuthRecord.Collection() == nil {
+ if r.resolver.requestInfo == nil || r.resolver.requestInfo.Auth == nil || r.resolver.requestInfo.Auth.Collection() == nil {
return &search.ResolverResult{Identifier: "NULL"}, nil
}
- collection := r.resolver.requestInfo.AuthRecord.Collection()
- r.resolver.loadedCollections = append(r.resolver.loadedCollections, collection)
+ collection := r.resolver.requestInfo.Auth.Collection()
r.activeCollectionName = collection.Name
r.activeTableAlias = "__auth_" + inflector.Columnify(r.activeCollectionName)
@@ -206,7 +214,7 @@ func (r *runner) processRequestAuthField() (*search.ResolverResult, error) {
r.activeTableAlias,
dbx.HashExp{
// aka. __auth_users.id = :userId
- (r.activeTableAlias + ".id"): r.resolver.requestInfo.AuthRecord.Id,
+ (r.activeTableAlias + ".id"): r.resolver.requestInfo.Auth.Id,
},
)
@@ -218,7 +226,7 @@ func (r *runner) processRequestAuthField() (*search.ResolverResult, error) {
tableName: inflector.Columnify(r.activeCollectionName),
tableAlias: r.multiMatchActiveTableAlias,
on: dbx.HashExp{
- (r.multiMatchActiveTableAlias + ".id"): r.resolver.requestInfo.AuthRecord.Id,
+ (r.multiMatchActiveTableAlias + ".id"): r.resolver.requestInfo.Auth.Id,
},
},
)
@@ -230,40 +238,68 @@ func (r *runner) processRequestAuthField() (*search.ResolverResult, error) {
return r.processActiveProps()
}
-func (r *runner) processRequestInfoLengthModifier(dataField *schema.SchemaField) (*search.ResolverResult, error) {
- dataItems := list.ToUniqueStringSlice(r.resolver.requestInfo.Data[dataField.Name])
+// note: nil value is returned as empty slice
+func toSlice(value any) []any {
+ if value == nil {
+ return []any{}
+ }
+
+ rv := reflect.ValueOf(value)
+
+ kind := rv.Kind()
+ if kind != reflect.Slice && kind != reflect.Array {
+ return []any{value}
+ }
+
+ rvLen := rv.Len()
+
+ result := make([]interface{}, rvLen)
+
+ for i := 0; i < rvLen; i++ {
+ result[i] = rv.Index(i).Interface()
+ }
+
+ return result
+}
+
+func (r *runner) processRequestInfoLengthModifier(bodyField Field) (*search.ResolverResult, error) {
+ if _, ok := bodyField.(MultiValuer); !ok {
+ return nil, fmt.Errorf("field %q doesn't support multivalue operations", bodyField.GetName())
+ }
+
+ bodyItems := toSlice(r.resolver.requestInfo.Body[bodyField.GetName()])
result := &search.ResolverResult{
- Identifier: fmt.Sprintf("%d", len(dataItems)),
+ Identifier: strconv.Itoa(len(bodyItems)),
}
return result, nil
}
-func (r *runner) processRequestInfoEachModifier(dataField *schema.SchemaField) (*search.ResolverResult, error) {
- options, ok := dataField.Options.(schema.MultiValuer)
+func (r *runner) processRequestInfoEachModifier(bodyField Field) (*search.ResolverResult, error) {
+ multiValuer, ok := bodyField.(MultiValuer)
if !ok {
- return nil, fmt.Errorf("field %q options are not initialized or doesn't support multivaluer operations", dataField.Name)
+ return nil, fmt.Errorf("field %q doesn't support multivalue operations", bodyField.GetName())
}
- dataItems := list.ToUniqueStringSlice(r.resolver.requestInfo.Data[dataField.Name])
- rawJson, err := json.Marshal(dataItems)
+ bodyItems := toSlice(r.resolver.requestInfo.Body[bodyField.GetName()])
+ bodyItemsRaw, err := json.Marshal(bodyItems)
if err != nil {
return nil, fmt.Errorf("cannot serialize the data for field %q", r.activeProps[2])
}
- placeholder := "dataEach" + security.PseudorandomString(4)
- cleanFieldName := inflector.Columnify(dataField.Name)
+ placeholder := "dataEach" + security.PseudorandomString(6)
+ cleanFieldName := inflector.Columnify(bodyField.GetName())
jeTable := fmt.Sprintf("json_each({:%s})", placeholder)
jeAlias := "__dataEach_" + cleanFieldName + "_je"
r.resolver.registerJoin(jeTable, jeAlias, nil)
result := &search.ResolverResult{
Identifier: fmt.Sprintf("[[%s.value]]", jeAlias),
- Params: dbx.Params{placeholder: rawJson},
+ Params: dbx.Params{placeholder: bodyItemsRaw},
}
- if options.IsMultiple() {
+ if multiValuer.IsMultiple() {
r.withMultiMatch = true
}
@@ -276,7 +312,7 @@ func (r *runner) processRequestInfoEachModifier(dataField *schema.SchemaField) (
tableName: jeTable2,
tableAlias: jeAlias2,
})
- r.multiMatch.params[placeholder2] = rawJson
+ r.multiMatch.params[placeholder2] = bodyItemsRaw
r.multiMatch.valueIdentifier = fmt.Sprintf("[[%s.value]]", jeAlias2)
result.MultiMatchSubQuery = r.multiMatch
@@ -285,27 +321,27 @@ func (r *runner) processRequestInfoEachModifier(dataField *schema.SchemaField) (
return result, nil
}
-func (r *runner) processRequestInfoRelationField(dataField *schema.SchemaField) (*search.ResolverResult, error) {
- options, ok := dataField.Options.(*schema.RelationOptions)
+func (r *runner) processRequestInfoRelationField(bodyField Field) (*search.ResolverResult, error) {
+ relField, ok := bodyField.(*RelationField)
if !ok {
- return nil, fmt.Errorf("failed to initialize data field %q options", dataField.Name)
+ return nil, fmt.Errorf("failed to initialize data relation field %q", bodyField.GetName())
}
- dataRelCollection, err := r.resolver.loadCollection(options.CollectionId)
+ dataRelCollection, err := r.resolver.loadCollection(relField.CollectionId)
if err != nil {
- return nil, fmt.Errorf("failed to load collection %q from data field %q", options.CollectionId, dataField.Name)
+ return nil, fmt.Errorf("failed to load collection %q from data field %q", relField.CollectionId, relField.Name)
}
var dataRelIds []string
- if r.resolver.requestInfo != nil && len(r.resolver.requestInfo.Data) != 0 {
- dataRelIds = list.ToUniqueStringSlice(r.resolver.requestInfo.Data[dataField.Name])
+ if r.resolver.requestInfo != nil && len(r.resolver.requestInfo.Body) != 0 {
+ dataRelIds = list.ToUniqueStringSlice(r.resolver.requestInfo.Body[relField.Name])
}
if len(dataRelIds) == 0 {
return &search.ResolverResult{Identifier: "NULL"}, nil
}
r.activeCollectionName = dataRelCollection.Name
- r.activeTableAlias = inflector.Columnify("__data_" + dataRelCollection.Name + "_" + dataField.Name)
+ r.activeTableAlias = inflector.Columnify("__data_" + dataRelCollection.Name + "_" + relField.Name)
// join the data rel collection to the main collection
r.resolver.registerJoin(
@@ -317,12 +353,12 @@ func (r *runner) processRequestInfoRelationField(dataField *schema.SchemaField)
),
)
- if options.IsMultiple() {
+ if relField.IsMultiple() {
r.withMultiMatch = true
}
// join the data rel collection to the multi-match subquery
- r.multiMatchActiveTableAlias = inflector.Columnify("__data_mm_" + dataRelCollection.Name + "_" + dataField.Name)
+ r.multiMatchActiveTableAlias = inflector.Columnify("__data_mm_" + dataRelCollection.Name + "_" + relField.Name)
r.multiMatch.joins = append(
r.multiMatch.joins,
&join{
@@ -336,7 +372,7 @@ func (r *runner) processRequestInfoRelationField(dataField *schema.SchemaField)
)
// leave only the data relation fields
- // aka. @request.data.someRel.fieldA.fieldB -> fieldA.fieldB
+ // aka. @request.body.someRel.fieldA.fieldB -> fieldA.fieldB
r.activeProps = r.activeProps[3:]
return r.processActiveProps()
@@ -355,131 +391,13 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
// last prop
if i == totalProps-1 {
- // system field, aka. internal model prop
- // (always available but not part of the collection schema)
- // -------------------------------------------------------
- if list.ExistInSlice(prop, resolvableSystemFieldNames(collection)) {
- result := &search.ResolverResult{
- Identifier: fmt.Sprintf("[[%s.%s]]", r.activeTableAlias, inflector.Columnify(prop)),
- }
-
- // allow querying only auth records with emails marked as public
- if prop == schema.FieldNameEmail && !r.allowHiddenFields {
- result.AfterBuild = func(expr dbx.Expression) dbx.Expression {
- return dbx.Enclose(dbx.And(expr, dbx.NewExp(fmt.Sprintf(
- "[[%s.%s]] = TRUE",
- r.activeTableAlias,
- schema.FieldNameEmailVisibility,
- ))))
- }
- }
-
- if r.withMultiMatch {
- r.multiMatch.valueIdentifier = fmt.Sprintf("[[%s.%s]]", r.multiMatchActiveTableAlias, inflector.Columnify(prop))
- result.MultiMatchSubQuery = r.multiMatch
- }
-
- return result, nil
- }
-
- name, modifier, err := splitModifier(prop)
- if err != nil {
- return nil, err
- }
-
- field := collection.Schema.GetFieldByName(name)
- if field == nil {
- if r.nullifyMisingField {
- return &search.ResolverResult{Identifier: "NULL"}, nil
- }
- return nil, fmt.Errorf("unknown field %q", name)
- }
-
- cleanFieldName := inflector.Columnify(field.Name)
-
- // arrayable fields with ":length" modifier
- // -------------------------------------------------------
- if modifier == lengthModifier && list.ExistInSlice(field.Type, schema.ArraybleFieldTypes()) {
- jePair := r.activeTableAlias + "." + cleanFieldName
-
- result := &search.ResolverResult{
- Identifier: dbutils.JsonArrayLength(jePair),
- }
-
- if r.withMultiMatch {
- jePair2 := r.multiMatchActiveTableAlias + "." + cleanFieldName
- r.multiMatch.valueIdentifier = dbutils.JsonArrayLength(jePair2)
- result.MultiMatchSubQuery = r.multiMatch
- }
-
- return result, nil
- }
-
- // arrayable fields with ":each" modifier
- // -------------------------------------------------------
- if modifier == eachModifier && list.ExistInSlice(field.Type, schema.ArraybleFieldTypes()) {
- jePair := r.activeTableAlias + "." + cleanFieldName
- jeAlias := r.activeTableAlias + "_" + cleanFieldName + "_je"
- r.resolver.registerJoin(dbutils.JsonEach(jePair), jeAlias, nil)
-
- result := &search.ResolverResult{
- Identifier: fmt.Sprintf("[[%s.value]]", jeAlias),
- }
-
- options, ok := field.Options.(schema.MultiValuer)
- if !ok {
- return nil, fmt.Errorf("field %q options are not initialized or doesn't multivaluer arrayable operations", prop)
- }
-
- if options.IsMultiple() {
- r.withMultiMatch = true
- }
-
- if r.withMultiMatch {
- jePair2 := r.multiMatchActiveTableAlias + "." + cleanFieldName
- jeAlias2 := r.multiMatchActiveTableAlias + "_" + cleanFieldName + "_je"
-
- r.multiMatch.joins = append(r.multiMatch.joins, &join{
- tableName: dbutils.JsonEach(jePair2),
- tableAlias: jeAlias2,
- })
- r.multiMatch.valueIdentifier = fmt.Sprintf("[[%s.value]]", jeAlias2)
-
- result.MultiMatchSubQuery = r.multiMatch
- }
-
- return result, nil
- }
-
- // default
- // -------------------------------------------------------
- result := &search.ResolverResult{
- Identifier: fmt.Sprintf("[[%s.%s]]", r.activeTableAlias, cleanFieldName),
- }
-
- if r.withMultiMatch {
- r.multiMatch.valueIdentifier = fmt.Sprintf("[[%s.%s]]", r.multiMatchActiveTableAlias, cleanFieldName)
- result.MultiMatchSubQuery = r.multiMatch
- }
-
- // wrap in json_extract to ensure that top-level primitives
- // stored as json work correctly when compared to their SQL equivalent
- // (https://github.com/pocketbase/pocketbase/issues/4068)
- if field.Type == schema.FieldTypeJson {
- result.NoCoalesce = true
- result.Identifier = dbutils.JsonExtract(r.activeTableAlias+"."+cleanFieldName, "")
- if r.withMultiMatch {
- r.multiMatch.valueIdentifier = dbutils.JsonExtract(r.multiMatchActiveTableAlias+"."+cleanFieldName, "")
- }
- }
-
- return result, nil
+ return r.processLastProp(collection, prop)
}
- field := collection.Schema.GetFieldByName(prop)
+ field := collection.Fields.GetByName(prop)
// json field -> treat the rest of the props as json path
- if field != nil && field.Type == schema.FieldTypeJson {
+ if field != nil && field.Type() == FieldTypeJSON {
var jsonPath strings.Builder
for j, p := range r.activeProps[i+1:] {
if _, err := strconv.Atoi(p); err == nil {
@@ -497,11 +415,11 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
result := &search.ResolverResult{
NoCoalesce: true,
- Identifier: dbutils.JsonExtract(r.activeTableAlias+"."+inflector.Columnify(prop), jsonPathStr),
+ Identifier: dbutils.JSONExtract(r.activeTableAlias+"."+inflector.Columnify(prop), jsonPathStr),
}
if r.withMultiMatch {
- r.multiMatch.valueIdentifier = dbutils.JsonExtract(r.multiMatchActiveTableAlias+"."+inflector.Columnify(prop), jsonPathStr)
+ r.multiMatch.valueIdentifier = dbutils.JSONExtract(r.multiMatchActiveTableAlias+"."+inflector.Columnify(prop), jsonPathStr)
result.MultiMatchSubQuery = r.multiMatch
}
@@ -531,37 +449,36 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
return nil, fmt.Errorf("failed to load back relation field %q collection", prop)
}
- backField := backCollection.Schema.GetFieldByName(parts[2])
+ backField := backCollection.Fields.GetByName(parts[2])
if backField == nil {
if r.nullifyMisingField {
return &search.ResolverResult{Identifier: "NULL"}, nil
}
return nil, fmt.Errorf("missing back relation field %q", parts[2])
}
- if backField.Type != schema.FieldTypeRelation {
+ if backField.Type() != FieldTypeRelation {
return nil, fmt.Errorf("invalid back relation field %q", parts[2])
}
- backField.InitOptions()
- backFieldOptions, ok := backField.Options.(*schema.RelationOptions)
+ backRelField, ok := backField.(*RelationField)
if !ok {
- return nil, fmt.Errorf("failed to initialize back relation field %q options", backField.Name)
+ return nil, fmt.Errorf("failed to initialize back relation field %q", backField.GetName())
}
- if backFieldOptions.CollectionId != collection.Id {
- return nil, fmt.Errorf("invalid back relation field %q collection reference", backField.Name)
+ if backRelField.CollectionId != collection.Id {
+ return nil, fmt.Errorf("invalid back relation field %q collection reference", backField.GetName())
}
// join the back relation to the main query
// ---
cleanProp := inflector.Columnify(prop)
- cleanBackFieldName := inflector.Columnify(backField.Name)
+ cleanBackFieldName := inflector.Columnify(backRelField.Name)
newTableAlias := r.activeTableAlias + "_" + cleanProp
newCollectionName := inflector.Columnify(backCollection.Name)
- isBackRelMultiple := backFieldOptions.IsMultiple()
+ isBackRelMultiple := backRelField.IsMultiple()
if !isBackRelMultiple {
// additionally check if the rel field has a single column unique index
- isBackRelMultiple = !dbutils.HasSingleColumnUniqueIndex(backField.Name, backCollection.Indexes)
+ isBackRelMultiple = !dbutils.HasSingleColumnUniqueIndex(backRelField.Name, backCollection.Indexes)
}
if !isBackRelMultiple {
@@ -579,7 +496,7 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
"[[%s.id]] IN (SELECT [[%s.value]] FROM %s {{%s}})",
r.activeTableAlias,
jeAlias,
- dbutils.JsonEach(newTableAlias+"."+cleanBackFieldName),
+ dbutils.JSONEach(newTableAlias+"."+cleanBackFieldName),
jeAlias,
)),
)
@@ -617,7 +534,7 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
"[[%s.id]] IN (SELECT [[%s.value]] FROM %s {{%s}})",
r.multiMatchActiveTableAlias,
jeAlias2,
- dbutils.JsonEach(newTableAlias2+"."+cleanBackFieldName),
+ dbutils.JSONEach(newTableAlias2+"."+cleanBackFieldName),
jeAlias2,
)),
},
@@ -632,29 +549,36 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
// -----------------------------------------------------------
// check for direct relation
- if field.Type != schema.FieldTypeRelation {
+ if field.Type() != FieldTypeRelation {
return nil, fmt.Errorf("field %q is not a valid relation", prop)
}
// join the relation to the main query
// ---
- field.InitOptions()
- options, ok := field.Options.(*schema.RelationOptions)
+ relField, ok := field.(*RelationField)
if !ok {
- return nil, fmt.Errorf("failed to initialize field %q options", prop)
+ return nil, fmt.Errorf("failed to initialize relation field %q", prop)
}
- relCollection, relErr := r.resolver.loadCollection(options.CollectionId)
+ relCollection, relErr := r.resolver.loadCollection(relField.CollectionId)
if relErr != nil {
return nil, fmt.Errorf("failed to load field %q collection", prop)
}
- cleanFieldName := inflector.Columnify(field.Name)
+ // "id" lookups optimization for single relations to avoid unnecessary joins,
+ // aka. "user.id" and "user" should produce the same query identifier
+ if !relField.IsMultiple() &&
+ // the penultimate prop is "id"
+ i == totalProps-2 && r.activeProps[i+1] == FieldNameId {
+ return r.processLastProp(collection, relField.Name)
+ }
+
+ cleanFieldName := inflector.Columnify(relField.Name)
prefixedFieldName := r.activeTableAlias + "." + cleanFieldName
newTableAlias := r.activeTableAlias + "_" + cleanFieldName
newCollectionName := relCollection.Name
- if !options.IsMultiple() {
+ if !relField.IsMultiple() {
r.resolver.registerJoin(
inflector.Columnify(newCollectionName),
newTableAlias,
@@ -662,7 +586,7 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
)
} else {
jeAlias := r.activeTableAlias + "_" + cleanFieldName + "_je"
- r.resolver.registerJoin(dbutils.JsonEach(prefixedFieldName), jeAlias, nil)
+ r.resolver.registerJoin(dbutils.JSONEach(prefixedFieldName), jeAlias, nil)
r.resolver.registerJoin(
inflector.Columnify(newCollectionName),
newTableAlias,
@@ -676,14 +600,14 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
// join the relation to the multi-match subquery
// ---
- if options.IsMultiple() {
+ if relField.IsMultiple() {
r.withMultiMatch = true // enable multimatch if not already
}
newTableAlias2 := r.multiMatchActiveTableAlias + "_" + cleanFieldName
prefixedFieldName2 := r.multiMatchActiveTableAlias + "." + cleanFieldName
- if !options.IsMultiple() {
+ if !relField.IsMultiple() {
r.multiMatch.joins = append(
r.multiMatch.joins,
&join{
@@ -697,7 +621,7 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
r.multiMatch.joins = append(
r.multiMatch.joins,
&join{
- tableName: dbutils.JsonEach(prefixedFieldName2),
+ tableName: dbutils.JSONEach(prefixedFieldName2),
tableAlias: jeAlias2,
},
&join{
@@ -715,18 +639,109 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
return nil, fmt.Errorf("failed to resolve field %q", r.fieldName)
}
-func resolvableSystemFieldNames(collection *models.Collection) []string {
- result := schema.BaseModelFieldNames()
-
- if collection.IsAuth() {
- result = append(
- result,
- schema.FieldNameUsername,
- schema.FieldNameVerified,
- schema.FieldNameEmailVisibility,
- schema.FieldNameEmail,
- )
+func (r *runner) processLastProp(collection *Collection, prop string) (*search.ResolverResult, error) {
+ name, modifier, err := splitModifier(prop)
+ if err != nil {
+ return nil, err
}
- return result
+ field := collection.Fields.GetByName(name)
+ if field == nil {
+ if r.nullifyMisingField {
+ return &search.ResolverResult{Identifier: "NULL"}, nil
+ }
+ return nil, fmt.Errorf("unknown field %q", name)
+ }
+
+ if field.GetHidden() && !r.allowHiddenFields {
+ return nil, fmt.Errorf("non-filterable field %q", name)
+ }
+
+ multvaluer, isMultivaluer := field.(MultiValuer)
+
+ cleanFieldName := inflector.Columnify(field.GetName())
+
+ // arrayable fields with ":length" modifier
+ // -------------------------------------------------------
+ if modifier == lengthModifier && isMultivaluer {
+ jePair := r.activeTableAlias + "." + cleanFieldName
+
+ result := &search.ResolverResult{
+ Identifier: dbutils.JSONArrayLength(jePair),
+ }
+
+ if r.withMultiMatch {
+ jePair2 := r.multiMatchActiveTableAlias + "." + cleanFieldName
+ r.multiMatch.valueIdentifier = dbutils.JSONArrayLength(jePair2)
+ result.MultiMatchSubQuery = r.multiMatch
+ }
+
+ return result, nil
+ }
+
+ // arrayable fields with ":each" modifier
+ // -------------------------------------------------------
+ if modifier == eachModifier && isMultivaluer {
+ jePair := r.activeTableAlias + "." + cleanFieldName
+ jeAlias := r.activeTableAlias + "_" + cleanFieldName + "_je"
+ r.resolver.registerJoin(dbutils.JSONEach(jePair), jeAlias, nil)
+
+ result := &search.ResolverResult{
+ Identifier: fmt.Sprintf("[[%s.value]]", jeAlias),
+ }
+
+ if multvaluer.IsMultiple() {
+ r.withMultiMatch = true
+ }
+
+ if r.withMultiMatch {
+ jePair2 := r.multiMatchActiveTableAlias + "." + cleanFieldName
+ jeAlias2 := r.multiMatchActiveTableAlias + "_" + cleanFieldName + "_je"
+
+ r.multiMatch.joins = append(r.multiMatch.joins, &join{
+ tableName: dbutils.JSONEach(jePair2),
+ tableAlias: jeAlias2,
+ })
+ r.multiMatch.valueIdentifier = fmt.Sprintf("[[%s.value]]", jeAlias2)
+
+ result.MultiMatchSubQuery = r.multiMatch
+ }
+
+ return result, nil
+ }
+
+ // default
+ // -------------------------------------------------------
+ result := &search.ResolverResult{
+ Identifier: fmt.Sprintf("[[%s.%s]]", r.activeTableAlias, cleanFieldName),
+ }
+
+ if r.withMultiMatch {
+ r.multiMatch.valueIdentifier = fmt.Sprintf("[[%s.%s]]", r.multiMatchActiveTableAlias, cleanFieldName)
+ result.MultiMatchSubQuery = r.multiMatch
+ }
+
+ // allow querying only auth records with emails marked as public
+ if field.GetName() == FieldNameEmail && !r.allowHiddenFields && collection.IsAuth() {
+ result.AfterBuild = func(expr dbx.Expression) dbx.Expression {
+ return dbx.Enclose(dbx.And(expr, dbx.NewExp(fmt.Sprintf(
+ "[[%s.%s]] = TRUE",
+ r.activeTableAlias,
+ FieldNameEmailVisibility,
+ ))))
+ }
+ }
+
+ // wrap in json_extract to ensure that top-level primitives
+ // stored as json work correctly when compared to their SQL equivalent
+ // (https://github.com/pocketbase/pocketbase/issues/4068)
+ if field.Type() == FieldTypeJSON {
+ result.NoCoalesce = true
+ result.Identifier = dbutils.JSONExtract(r.activeTableAlias+"."+cleanFieldName, "")
+ if r.withMultiMatch {
+ r.multiMatch.valueIdentifier = dbutils.JSONExtract(r.multiMatchActiveTableAlias+"."+cleanFieldName, "")
+ }
+ }
+
+ return result, nil
}
diff --git a/resolvers/record_field_resolver_test.go b/core/record_field_resolver_test.go
similarity index 90%
rename from resolvers/record_field_resolver_test.go
rename to core/record_field_resolver_test.go
index bd7c1530..0a709f18 100644
--- a/resolvers/record_field_resolver_test.go
+++ b/core/record_field_resolver_test.go
@@ -1,4 +1,4 @@
-package resolvers_test
+package core_test
import (
"encoding/json"
@@ -6,9 +6,7 @@ import (
"strings"
"testing"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/resolvers"
+ "github.com/pocketbase/pocketbase/core"
"github.com/pocketbase/pocketbase/tests"
"github.com/pocketbase/pocketbase/tools/list"
"github.com/pocketbase/pocketbase/tools/search"
@@ -18,23 +16,23 @@ func TestRecordFieldResolverUpdateQuery(t *testing.T) {
app, _ := tests.NewTestApp()
defer app.Cleanup()
- authRecord, err := app.Dao().FindRecordById("users", "4q1xlclmfloku33")
+ authRecord, err := app.FindRecordById("users", "4q1xlclmfloku33")
if err != nil {
t.Fatal(err)
}
- requestInfo := &models.RequestInfo{
+ requestInfo := &core.RequestInfo{
Context: "ctx",
- Headers: map[string]any{
+ Headers: map[string]string{
"a": "123",
"b": "456",
},
- Query: map[string]any{
- "a": nil,
- "b": 123,
+ Query: map[string]string{
+ "a": "", // to ensure that :isset returns true because the key exists
+ "b": "123",
},
- Data: map[string]any{
- "a": nil,
+ Body: map[string]any{
+ "a": nil, // to ensure that :isset returns true because the key exists
"b": 123,
"number": 10,
"select_many": []string{"optionA", "optionC"},
@@ -48,7 +46,7 @@ func TestRecordFieldResolverUpdateQuery(t *testing.T) {
"rel_one_cascade": "test1",
"rel_one_no_cascade": "test1",
},
- AuthRecord: authRecord,
+ Auth: authRecord,
}
scenarios := []struct {
@@ -73,21 +71,49 @@ func TestRecordFieldResolverUpdateQuery(t *testing.T) {
"SELECT `demo4`.* FROM `demo4` WHERE ([[demo4.title]] = 1 OR [[demo4.title]] IS NOT {:TEST} OR [[demo4.title]] LIKE {:TEST} ESCAPE '\\' OR [[demo4.title]] NOT LIKE {:TEST} ESCAPE '\\' OR [[demo4.title]] > {:TEST} OR [[demo4.title]] >= {:TEST} OR [[demo4.title]] < {:TEST} OR [[demo4.title]] <= {:TEST})",
},
{
- "incomplete rel",
+ "single direct rel",
"demo4",
"self_rel_one > true",
false,
"SELECT `demo4`.* FROM `demo4` WHERE [[demo4.self_rel_one]] > 1",
},
{
- "single rel (self rel)",
+ "single direct rel (with id)",
+ "demo4",
+ "self_rel_one.id > true", // shouldn't have join
+ false,
+ "SELECT `demo4`.* FROM `demo4` WHERE [[demo4.self_rel_one]] > 1",
+ },
+ {
+ "single direct rel (with non-id field)",
+ "demo4",
+ "self_rel_one.created > true", // should have join
+ false,
+ "SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN `demo4` `demo4_self_rel_one` ON [[demo4_self_rel_one.id]] = [[demo4.self_rel_one]] WHERE [[demo4_self_rel_one.created]] > 1",
+ },
+ {
+ "multiple direct rel",
+ "demo4",
+ "self_rel_many ?> true",
+ false,
+ "SELECT `demo4`.* FROM `demo4` WHERE [[demo4.self_rel_many]] > 1",
+ },
+ {
+ "multiple direct rel (with id)",
+ "demo4",
+ "self_rel_many.id ?> true", // should have join
+ false,
+ "SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN json_each(CASE WHEN json_valid([[demo4.self_rel_many]]) THEN [[demo4.self_rel_many]] ELSE json_array([[demo4.self_rel_many]]) END) `demo4_self_rel_many_je` LEFT JOIN `demo4` `demo4_self_rel_many` ON [[demo4_self_rel_many.id]] = [[demo4_self_rel_many_je.value]] WHERE [[demo4_self_rel_many.id]] > 1",
+ },
+ {
+ "nested single rel (self rel)",
"demo4",
"self_rel_one.title > true",
false,
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN `demo4` `demo4_self_rel_one` ON [[demo4_self_rel_one.id]] = [[demo4.self_rel_one]] WHERE [[demo4_self_rel_one.title]] > 1",
},
{
- "single rel (other collection)",
+ "nested single rel (other collection)",
"demo4",
"rel_one_cascade.title > true",
false,
@@ -215,9 +241,9 @@ func TestRecordFieldResolverUpdateQuery(t *testing.T) {
{
"@request.auth fields",
"demo4",
- "@request.auth.id > true || @request.auth.username > true || @request.auth.rel.title > true || @request.data.demo < true || @request.auth.missingA.missingB > false",
+ "@request.auth.id > true || @request.auth.username > true || @request.auth.rel.title > true || @request.body.demo < true || @request.auth.missingA.missingB > false",
false,
- "SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN `users` `__auth_users` ON `__auth_users`.`id`={:p0} LEFT JOIN `demo2` `__auth_users_rel` ON [[__auth_users_rel.id]] = [[__auth_users.rel]] WHERE ({:TEST} > 1 OR {:TEST} > 1 OR [[__auth_users_rel.title]] > 1 OR NULL < 1 OR NULL > 0)",
+ "SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN `users` `__auth_users` ON `__auth_users`.`id`={:p0} LEFT JOIN `demo2` `__auth_users_rel` ON [[__auth_users_rel.id]] = [[__auth_users.rel]] WHERE ({:TEST} > 1 OR [[__auth_users.username]] > 1 OR [[__auth_users_rel.title]] > 1 OR NULL < 1 OR NULL > 0)",
},
{
"@request.* static fields",
@@ -250,41 +276,43 @@ func TestRecordFieldResolverUpdateQuery(t *testing.T) {
{
"isset key",
"demo1",
- "@request.data.a:isset > true ||" +
- "@request.data.b:isset > true ||" +
- "@request.data.c:isset > true ||" +
+ "@request.body.a:isset > true ||" +
+ "@request.body.b:isset > true ||" +
+ "@request.body.c:isset > true ||" +
"@request.query.a:isset > true ||" +
"@request.query.b:isset > true ||" +
- "@request.query.c:isset > true",
+ "@request.query.c:isset > true ||" +
+ "@request.headers.a:isset > true ||" +
+ "@request.headers.c:isset > true",
false,
- "SELECT `demo1`.* FROM `demo1` WHERE (TRUE > 1 OR TRUE > 1 OR FALSE > 1 OR TRUE > 1 OR TRUE > 1 OR FALSE > 1)",
+ "SELECT `demo1`.* FROM `demo1` WHERE (TRUE > 1 OR TRUE > 1 OR FALSE > 1 OR TRUE > 1 OR TRUE > 1 OR FALSE > 1 OR TRUE > 1 OR FALSE > 1)",
},
{
- "@request.data.rel.* fields",
+ "@request.body.rel.* fields",
"demo4",
- "@request.data.rel_one_cascade.title > true &&" +
+ "@request.body.rel_one_cascade.title > true &&" +
// reference the same as rel_one_cascade collection but should use a different join alias
- "@request.data.rel_one_no_cascade.title < true &&" +
+ "@request.body.rel_one_no_cascade.title < true &&" +
// different collection
- "@request.data.self_rel_many.title = true",
+ "@request.body.self_rel_many.title = true",
false,
"SELECT DISTINCT `demo4`.* FROM `demo4` LEFT JOIN `demo3` `__data_demo3_rel_one_cascade` ON [[__data_demo3_rel_one_cascade.id]]={:p0} LEFT JOIN `demo3` `__data_demo3_rel_one_no_cascade` ON [[__data_demo3_rel_one_no_cascade.id]]={:p1} LEFT JOIN `demo4` `__data_demo4_self_rel_many` ON [[__data_demo4_self_rel_many.id]]={:p2} WHERE ([[__data_demo3_rel_one_cascade.title]] > 1 AND [[__data_demo3_rel_one_no_cascade.title]] < 1 AND (([[__data_demo4_self_rel_many.title]] = 1) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__data_mm_demo4_self_rel_many.title]] as [[multiMatchValue]] FROM `demo4` `__mm_demo4` LEFT JOIN `demo4` `__data_mm_demo4_self_rel_many` ON [[__data_mm_demo4_self_rel_many.id]]={:p3} WHERE `__mm_demo4`.`id` = `demo4`.`id`) {{__smTEST}} WHERE NOT ([[__smTEST.multiMatchValue]] = 1)))))",
},
{
- "@request.data.arrayble:each fields",
+ "@request.body.arrayble:each fields",
"demo1",
- "@request.data.select_one:each > true &&" +
- "@request.data.select_one:each ?< true &&" +
- "@request.data.select_many:each > true &&" +
- "@request.data.select_many:each ?< true &&" +
- "@request.data.file_one:each > true &&" +
- "@request.data.file_one:each ?< true &&" +
- "@request.data.file_many:each > true &&" +
- "@request.data.file_many:each ?< true &&" +
- "@request.data.rel_one:each > true &&" +
- "@request.data.rel_one:each ?< true &&" +
- "@request.data.rel_many:each > true &&" +
- "@request.data.rel_many:each ?< true",
+ "@request.body.select_one:each > true &&" +
+ "@request.body.select_one:each ?< true &&" +
+ "@request.body.select_many:each > true &&" +
+ "@request.body.select_many:each ?< true &&" +
+ "@request.body.file_one:each > true &&" +
+ "@request.body.file_one:each ?< true &&" +
+ "@request.body.file_many:each > true &&" +
+ "@request.body.file_many:each ?< true &&" +
+ "@request.body.rel_one:each > true &&" +
+ "@request.body.rel_one:each ?< true &&" +
+ "@request.body.rel_many:each > true &&" +
+ "@request.body.rel_many:each ?< true",
false,
"SELECT DISTINCT `demo1`.* FROM `demo1` LEFT JOIN json_each({:dataEachTEST}) `__dataEach_select_one_je` LEFT JOIN json_each({:dataEachTEST}) `__dataEach_select_many_je` LEFT JOIN json_each({:dataEachTEST}) `__dataEach_file_one_je` LEFT JOIN json_each({:dataEachTEST}) `__dataEach_file_many_je` LEFT JOIN json_each({:dataEachTEST}) `__dataEach_rel_one_je` LEFT JOIN json_each({:dataEachTEST}) `__dataEach_rel_many_je` WHERE ([[__dataEach_select_one_je.value]] > 1 AND [[__dataEach_select_one_je.value]] < 1 AND (([[__dataEach_select_many_je.value]] > 1) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm__dataEach_select_many_je.value]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each({:mmdataEachTEST}) `__mm__dataEach_select_many_je` WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__smTEST}} WHERE ((NOT ([[__smTEST.multiMatchValue]] > 1)) OR ([[__smTEST.multiMatchValue]] IS NULL))))) AND [[__dataEach_select_many_je.value]] < 1 AND [[__dataEach_file_one_je.value]] > 1 AND [[__dataEach_file_one_je.value]] < 1 AND (([[__dataEach_file_many_je.value]] > 1) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm__dataEach_file_many_je.value]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each({:mmdataEachTEST}) `__mm__dataEach_file_many_je` WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__smTEST}} WHERE ((NOT ([[__smTEST.multiMatchValue]] > 1)) OR ([[__smTEST.multiMatchValue]] IS NULL))))) AND [[__dataEach_file_many_je.value]] < 1 AND [[__dataEach_rel_one_je.value]] > 1 AND [[__dataEach_rel_one_je.value]] < 1 AND (([[__dataEach_rel_many_je.value]] > 1) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm__dataEach_rel_many_je.value]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each({:mmdataEachTEST}) `__mm__dataEach_rel_many_je` WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__smTEST}} WHERE ((NOT ([[__smTEST.multiMatchValue]] > 1)) OR ([[__smTEST.multiMatchValue]] IS NULL))))) AND [[__dataEach_rel_many_je.value]] < 1)",
},
@@ -312,7 +340,7 @@ func TestRecordFieldResolverUpdateQuery(t *testing.T) {
"select_one:each != select_many:each &&" +
"select_many:each > select_one:each &&" +
"select_many:each ?< select_one:each &&" +
- "select_many:each = @request.data.select_many:each",
+ "select_many:each = @request.body.select_many:each",
false,
"SELECT DISTINCT `demo1`.* FROM `demo1` LEFT JOIN json_each(CASE WHEN json_valid([[demo1.select_one]]) THEN [[demo1.select_one]] ELSE json_array([[demo1.select_one]]) END) `demo1_select_one_je` LEFT JOIN json_each(CASE WHEN json_valid([[demo1.select_many]]) THEN [[demo1.select_many]] ELSE json_array([[demo1.select_many]]) END) `demo1_select_many_je` LEFT JOIN json_each({:dataEachTEST}) `__dataEach_select_many_je` WHERE (((COALESCE([[demo1_select_one_je.value]], '') IS NOT COALESCE([[demo1_select_many_je.value]], '')) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo1_select_many_je.value]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo1.select_many]]) THEN [[__mm_demo1.select_many]] ELSE json_array([[__mm_demo1.select_many]]) END) `__mm_demo1_select_many_je` WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__smTEST}} WHERE ((NOT (COALESCE([[demo1_select_one_je.value]], '') IS NOT COALESCE([[__smTEST.multiMatchValue]], ''))) OR ([[__smTEST.multiMatchValue]] IS NULL))))) AND (([[demo1_select_many_je.value]] > [[demo1_select_one_je.value]]) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo1_select_many_je.value]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo1.select_many]]) THEN [[__mm_demo1.select_many]] ELSE json_array([[__mm_demo1.select_many]]) END) `__mm_demo1_select_many_je` WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__smTEST}} WHERE ((NOT ([[__smTEST.multiMatchValue]] > [[demo1_select_one_je.value]])) OR ([[__smTEST.multiMatchValue]] IS NULL))))) AND [[demo1_select_many_je.value]] < [[demo1_select_one_je.value]] AND (([[demo1_select_many_je.value]] = [[__dataEach_select_many_je.value]]) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo1_select_many_je.value]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo1.select_many]]) THEN [[__mm_demo1.select_many]] ELSE json_array([[__mm_demo1.select_many]]) END) 
`__mm_demo1_select_many_je` WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__mlTEST}} LEFT JOIN (SELECT [[__mm__dataEach_select_many_je.value]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each({:mmdataEachTEST}) `__mm__dataEach_select_many_je` WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__mrTEST}} WHERE NOT (COALESCE([[__mlTEST.multiMatchValue]], '') = COALESCE([[__mrTEST.multiMatchValue]], ''))))))",
},
@@ -324,36 +352,36 @@ func TestRecordFieldResolverUpdateQuery(t *testing.T) {
"rel_many.rel.title ~ rel_one.email &&" +
"@collection.demo2.active = rel_many.rel.active &&" +
"@collection.demo2.active ?= rel_many.rel.active &&" +
- "rel_many.email > @request.data.rel_many.email",
+ "rel_many.email > @request.body.rel_many.email",
false,
"SELECT DISTINCT `demo1`.* FROM `demo1` LEFT JOIN json_each(CASE WHEN json_valid([[demo1.rel_many]]) THEN [[demo1.rel_many]] ELSE json_array([[demo1.rel_many]]) END) `demo1_rel_many_je` LEFT JOIN `users` `demo1_rel_many` ON [[demo1_rel_many.id]] = [[demo1_rel_many_je.value]] LEFT JOIN `demo2` `demo1_rel_many_rel` ON [[demo1_rel_many_rel.id]] = [[demo1_rel_many.rel]] LEFT JOIN `demo1` `demo1_rel_one` ON [[demo1_rel_one.id]] = [[demo1.rel_one]] LEFT JOIN `demo2` `__collection_demo2` LEFT JOIN `users` `__data_users_rel_many` ON [[__data_users_rel_many.id]] IN ({:p0}, {:p1}) WHERE (((COALESCE([[demo1_rel_many_rel.active]], '') IS NOT COALESCE([[demo1_rel_many.name]], '')) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo1_rel_many_rel.active]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo1.rel_many]]) THEN [[__mm_demo1.rel_many]] ELSE json_array([[__mm_demo1.rel_many]]) END) `__mm_demo1_rel_many_je` LEFT JOIN `users` `__mm_demo1_rel_many` ON [[__mm_demo1_rel_many.id]] = [[__mm_demo1_rel_many_je.value]] LEFT JOIN `demo2` `__mm_demo1_rel_many_rel` ON [[__mm_demo1_rel_many_rel.id]] = [[__mm_demo1_rel_many.rel]] WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__mlTEST}} LEFT JOIN (SELECT [[__mm_demo1_rel_many.name]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo1.rel_many]]) THEN [[__mm_demo1.rel_many]] ELSE json_array([[__mm_demo1.rel_many]]) END) `__mm_demo1_rel_many_je` LEFT JOIN `users` `__mm_demo1_rel_many` ON [[__mm_demo1_rel_many.id]] = [[__mm_demo1_rel_many_je.value]] WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__mrTEST}} WHERE ((NOT (COALESCE([[__mlTEST.multiMatchValue]], '') IS NOT COALESCE([[__mrTEST.multiMatchValue]], ''))) OR ([[__mlTEST.multiMatchValue]] IS NULL) OR ([[__mrTEST.multiMatchValue]] IS NULL))))) AND COALESCE([[demo1_rel_many_rel.active]], '') = COALESCE([[demo1_rel_many.name]], '') AND (([[demo1_rel_many_rel.title]] LIKE ('%' || 
[[demo1_rel_one.email]] || '%') ESCAPE '\\') AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo1_rel_many_rel.title]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo1.rel_many]]) THEN [[__mm_demo1.rel_many]] ELSE json_array([[__mm_demo1.rel_many]]) END) `__mm_demo1_rel_many_je` LEFT JOIN `users` `__mm_demo1_rel_many` ON [[__mm_demo1_rel_many.id]] = [[__mm_demo1_rel_many_je.value]] LEFT JOIN `demo2` `__mm_demo1_rel_many_rel` ON [[__mm_demo1_rel_many_rel.id]] = [[__mm_demo1_rel_many.rel]] WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__smTEST}} WHERE ((NOT ([[__smTEST.multiMatchValue]] LIKE ('%' || [[demo1_rel_one.email]] || '%') ESCAPE '\\')) OR ([[__smTEST.multiMatchValue]] IS NULL))))) AND ((COALESCE([[__collection_demo2.active]], '') = COALESCE([[demo1_rel_many_rel.active]], '')) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm__collection_demo2.active]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN `demo2` `__mm__collection_demo2` WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__mlTEST}} LEFT JOIN (SELECT [[__mm_demo1_rel_many_rel.active]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN json_each(CASE WHEN json_valid([[__mm_demo1.rel_many]]) THEN [[__mm_demo1.rel_many]] ELSE json_array([[__mm_demo1.rel_many]]) END) `__mm_demo1_rel_many_je` LEFT JOIN `users` `__mm_demo1_rel_many` ON [[__mm_demo1_rel_many.id]] = [[__mm_demo1_rel_many_je.value]] LEFT JOIN `demo2` `__mm_demo1_rel_many_rel` ON [[__mm_demo1_rel_many_rel.id]] = [[__mm_demo1_rel_many.rel]] WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__mrTEST}} WHERE NOT (COALESCE([[__mlTEST.multiMatchValue]], '') = COALESCE([[__mrTEST.multiMatchValue]], ''))))) AND COALESCE([[__collection_demo2.active]], '') = COALESCE([[demo1_rel_many_rel.active]], '') AND (((([[demo1_rel_many.email]] > [[__data_users_rel_many.email]]) AND (NOT EXISTS (SELECT 1 FROM (SELECT [[__mm_demo1_rel_many.email]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN 
json_each(CASE WHEN json_valid([[__mm_demo1.rel_many]]) THEN [[__mm_demo1.rel_many]] ELSE json_array([[__mm_demo1.rel_many]]) END) `__mm_demo1_rel_many_je` LEFT JOIN `users` `__mm_demo1_rel_many` ON [[__mm_demo1_rel_many.id]] = [[__mm_demo1_rel_many_je.value]] WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__mlTEST}} LEFT JOIN (SELECT [[__data_mm_users_rel_many.email]] as [[multiMatchValue]] FROM `demo1` `__mm_demo1` LEFT JOIN `users` `__data_mm_users_rel_many` ON [[__data_mm_users_rel_many.id]] IN ({:p2}, {:p3}) WHERE `__mm_demo1`.`id` = `demo1`.`id`) {{__mrTEST}} WHERE ((NOT ([[__mlTEST.multiMatchValue]] > [[__mrTEST.multiMatchValue]])) OR ([[__mlTEST.multiMatchValue]] IS NULL) OR ([[__mrTEST.multiMatchValue]] IS NULL)))))) AND ([[demo1_rel_many.emailVisibility]] = TRUE)))",
},
{
- "@request.data.arrayable:length fields",
+ "@request.body.arrayable:length fields",
"demo1",
- "@request.data.select_one:length > 1 &&" +
- "@request.data.select_one:length ?> 2 &&" +
- "@request.data.select_many:length < 3 &&" +
- "@request.data.select_many:length ?> 4 &&" +
- "@request.data.rel_one:length = 5 &&" +
- "@request.data.rel_one:length ?= 6 &&" +
- "@request.data.rel_many:length != 7 &&" +
- "@request.data.rel_many:length ?!= 8 &&" +
- "@request.data.file_one:length = 9 &&" +
- "@request.data.file_one:length ?= 0 &&" +
- "@request.data.file_many:length != 1 &&" +
- "@request.data.file_many:length ?!= 2",
+ "@request.body.select_one:length > 1 &&" +
+ "@request.body.select_one:length ?> 2 &&" +
+ "@request.body.select_many:length < 3 &&" +
+ "@request.body.select_many:length ?> 4 &&" +
+ "@request.body.rel_one:length = 5 &&" +
+ "@request.body.rel_one:length ?= 6 &&" +
+ "@request.body.rel_many:length != 7 &&" +
+ "@request.body.rel_many:length ?!= 8 &&" +
+ "@request.body.file_one:length = 9 &&" +
+ "@request.body.file_one:length ?= 0 &&" +
+ "@request.body.file_many:length != 1 &&" +
+ "@request.body.file_many:length ?!= 2",
false,
"SELECT `demo1`.* FROM `demo1` WHERE (0 > {:TEST} AND 0 > {:TEST} AND 2 < {:TEST} AND 2 > {:TEST} AND 1 = {:TEST} AND 1 = {:TEST} AND 2 IS NOT {:TEST} AND 2 IS NOT {:TEST} AND 1 = {:TEST} AND 1 = {:TEST} AND 3 IS NOT {:TEST} AND 3 IS NOT {:TEST})",
},
{
"regular arrayable:length fields",
"demo4",
- "@request.data.self_rel_one.self_rel_many:length > 1 &&" +
- "@request.data.self_rel_one.self_rel_many:length ?> 2 &&" +
- "@request.data.rel_many_cascade.files:length ?< 3 &&" +
- "@request.data.rel_many_cascade.files:length < 4 &&" +
- "@request.data.rel_one_cascade.files:length < 4.1 &&" + // to ensure that the join to the same as above table will be aliased
+ "@request.body.self_rel_one.self_rel_many:length > 1 &&" +
+ "@request.body.self_rel_one.self_rel_many:length ?> 2 &&" +
+ "@request.body.rel_many_cascade.files:length ?< 3 &&" +
+ "@request.body.rel_many_cascade.files:length < 4 &&" +
+ "@request.body.rel_one_cascade.files:length < 4.1 &&" + // to ensure that the join to the same table as above will be aliased
"self_rel_one.self_rel_many:length = 5 &&" +
"self_rel_one.self_rel_many:length ?= 6 &&" +
"self_rel_one.rel_many_cascade.files:length != 7 &&" +
@@ -386,14 +414,14 @@ func TestRecordFieldResolverUpdateQuery(t *testing.T) {
for _, s := range scenarios {
t.Run(s.name, func(t *testing.T) {
- collection, err := app.Dao().FindCollectionByNameOrId(s.collectionIdOrName)
+ collection, err := app.FindCollectionByNameOrId(s.collectionIdOrName)
if err != nil {
t.Fatalf("[%s] Failed to load collection %s: %v", s.name, s.collectionIdOrName, err)
}
- query := app.Dao().RecordQuery(collection)
+ query := app.RecordQuery(collection)
- r := resolvers.NewRecordFieldResolver(app.Dao(), collection, requestInfo, s.allowHiddenFields)
+ r := core.NewRecordFieldResolver(app, collection, requestInfo, s.allowHiddenFields)
expr, err := search.FilterData(s.rule).BuildExpr(r)
if err != nil {
@@ -420,25 +448,25 @@ func TestRecordFieldResolverUpdateQuery(t *testing.T) {
}
}
-func TestRecordFieldResolverResolveSchemaFields(t *testing.T) {
+func TestRecordFieldResolverResolveCollectionFields(t *testing.T) {
app, _ := tests.NewTestApp()
defer app.Cleanup()
- collection, err := app.Dao().FindCollectionByNameOrId("demo4")
+ collection, err := app.FindCollectionByNameOrId("demo4")
if err != nil {
t.Fatal(err)
}
- authRecord, err := app.Dao().FindRecordById("users", "4q1xlclmfloku33")
+ authRecord, err := app.FindRecordById("users", "4q1xlclmfloku33")
if err != nil {
t.Fatal(err)
}
- requestInfo := &models.RequestInfo{
- AuthRecord: authRecord,
+ requestInfo := &core.RequestInfo{
+ Auth: authRecord,
}
- r := resolvers.NewRecordFieldResolver(app.Dao(), collection, requestInfo, true)
+ r := core.NewRecordFieldResolver(app, collection, requestInfo, true)
scenarios := []struct {
fieldName string
@@ -529,35 +557,35 @@ func TestRecordFieldResolverResolveStaticRequestInfoFields(t *testing.T) {
app, _ := tests.NewTestApp()
defer app.Cleanup()
- collection, err := app.Dao().FindCollectionByNameOrId("demo1")
+ collection, err := app.FindCollectionByNameOrId("demo1")
if err != nil {
t.Fatal(err)
}
- authRecord, err := app.Dao().FindRecordById("users", "4q1xlclmfloku33")
+ authRecord, err := app.FindRecordById("users", "4q1xlclmfloku33")
if err != nil {
t.Fatal(err)
}
- requestInfo := &models.RequestInfo{
+ requestInfo := &core.RequestInfo{
Context: "ctx",
Method: "get",
- Query: map[string]any{
- "a": 123,
+ Query: map[string]string{
+ "a": "123",
},
- Data: map[string]any{
+ Body: map[string]any{
"number": "10",
"number_unknown": "20",
"b": 456,
"c": map[string]int{"sub": 1},
},
- Headers: map[string]any{
+ Headers: map[string]string{
"d": "789",
},
- AuthRecord: authRecord,
+ Auth: authRecord,
}
- r := resolvers.NewRecordFieldResolver(app.Dao(), collection, requestInfo, true)
+ r := core.NewRecordFieldResolver(app, collection, requestInfo, true)
scenarios := []struct {
fieldName string
@@ -571,34 +599,35 @@ func TestRecordFieldResolverResolveStaticRequestInfoFields(t *testing.T) {
{"@request.context", false, `"ctx"`},
{"@request.method", false, `"get"`},
{"@request.query", true, ``},
- {"@request.query.a", false, `123`},
+ {"@request.query.a", false, `"123"`},
{"@request.query.a.missing", false, ``},
{"@request.headers", true, ``},
{"@request.headers.missing", false, ``},
{"@request.headers.d", false, `"789"`},
- {"@request.headers.d.sub", true, ``},
- {"@request.data", true, ``},
- {"@request.data.b", false, `456`},
- {"@request.data.number", false, `10`}, // number field normalization
- {"@request.data.number_unknown", false, `"20"`}, // no numeric normalizations for unknown fields
- {"@request.data.b.missing", false, ``},
- {"@request.data.c", false, `"{\"sub\":1}"`},
+ {"@request.headers.d.sub", false, ``},
+ {"@request.body", true, ``},
+ {"@request.body.b", false, `456`},
+ {"@request.body.number", false, `10`}, // number field normalization
+ {"@request.body.number_unknown", false, `"20"`}, // no numeric normalizations for unknown fields
+ {"@request.body.b.missing", false, ``},
+ {"@request.body.c", false, `"{\"sub\":1}"`},
{"@request.auth", true, ""},
{"@request.auth.id", false, `"4q1xlclmfloku33"`},
- {"@request.auth.username", false, `"users75657"`},
+ {"@request.auth.collectionId", false, `"` + authRecord.Collection().Id + `"`},
+ {"@request.auth.collectionName", false, `"` + authRecord.Collection().Name + `"`},
{"@request.auth.verified", false, `false`},
{"@request.auth.emailVisibility", false, `false`},
{"@request.auth.email", false, `"test@example.com"`}, // should always be returned no matter of the emailVisibility state
{"@request.auth.missing", false, `NULL`},
}
- for i, s := range scenarios {
+ for _, s := range scenarios {
t.Run(s.fieldName, func(t *testing.T) {
r, err := r.Resolve(s.fieldName)
hasErr := err != nil
if hasErr != s.expectError {
- t.Fatalf("(%d) Expected hasErr %v, got %v (%v)", i, s.expectError, hasErr, err)
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
}
if hasErr {
@@ -609,7 +638,7 @@ func TestRecordFieldResolverResolveStaticRequestInfoFields(t *testing.T) {
// ---
if len(r.Params) == 0 {
if r.Identifier != "NULL" {
- t.Fatalf("(%d) Expected 0 placeholder parameters for %v, got %v", i, r.Identifier, r.Params)
+ t.Fatalf("Expected 0 placeholder parameters for %v, got %v", r.Identifier, r.Params)
}
return
}
@@ -617,7 +646,7 @@ func TestRecordFieldResolverResolveStaticRequestInfoFields(t *testing.T) {
// existing key
// ---
if len(r.Params) != 1 {
- t.Fatalf("(%d) Expected 1 placeholder parameter for %v, got %v", i, r.Identifier, r.Params)
+ t.Fatalf("Expected 1 placeholder parameter for %v, got %v", r.Identifier, r.Params)
}
var paramName string
@@ -628,12 +657,12 @@ func TestRecordFieldResolverResolveStaticRequestInfoFields(t *testing.T) {
}
if r.Identifier != ("{:" + paramName + "}") {
- t.Fatalf("(%d) Expected parameter r.Identifier %q, got %q", i, paramName, r.Identifier)
+ t.Fatalf("Expected parameter r.Identifier %q, got %q", paramName, r.Identifier)
}
encodedParamValue, _ := json.Marshal(paramValue)
if string(encodedParamValue) != s.expectParamValue {
- t.Fatalf("(%d) Expected r.Params %v for %v, got %v", i, s.expectParamValue, r.Identifier, string(encodedParamValue))
+ t.Fatalf("Expected r.Params %#v for %s, got %#v", s.expectParamValue, r.Identifier, string(encodedParamValue))
}
})
}
@@ -642,7 +671,7 @@ func TestRecordFieldResolverResolveStaticRequestInfoFields(t *testing.T) {
if authRecord.EmailVisibility() {
t.Fatal("Expected the original authRecord emailVisibility to remain unchanged")
}
- if v, ok := authRecord.PublicExport()[schema.FieldNameEmail]; ok {
+ if v, ok := authRecord.PublicExport()[core.FieldNameEmail]; ok {
t.Fatalf("Expected the original authRecord email to not be exported, got %q", v)
}
}
diff --git a/core/record_model.go b/core/record_model.go
new file mode 100644
index 00000000..e06599bf
--- /dev/null
+++ b/core/record_model.go
@@ -0,0 +1,1530 @@
+package core
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "maps"
+ "slices"
+ "sort"
+ "strings"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tools/filesystem"
+ "github.com/pocketbase/pocketbase/tools/hook"
+ "github.com/pocketbase/pocketbase/tools/inflector"
+ "github.com/pocketbase/pocketbase/tools/list"
+ "github.com/pocketbase/pocketbase/tools/store"
+ "github.com/pocketbase/pocketbase/tools/types"
+ "github.com/spf13/cast"
+)
+
+// used as a workaround by some fields for persisting local state between various events
+// (for now is kept private and cannot be changed or cloned outside of the core package)
+const internalCustomFieldKeyPrefix = "@pbInternal"
+
+var (
+ _ Model = (*Record)(nil)
+ _ HookTagger = (*Record)(nil)
+ _ DBExporter = (*Record)(nil)
+ _ FilesManager = (*Record)(nil)
+)
+
+type Record struct {
+ collection *Collection
+ originalData map[string]any
+ customVisibility *store.Store[bool]
+ data *store.Store[any]
+ expand *store.Store[any]
+
+ BaseModel
+
+ exportCustomData bool
+ ignoreEmailVisibility bool
+ ignoreUnchangedFields bool
+}
+
+const systemHookIdRecord = "__pbRecordSystemHook__"
+
+func (app *BaseApp) registerRecordHooks() {
+ app.OnModelValidate().Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelEvent) error {
+ if re, ok := newRecordEventFromModelEvent(me); ok {
+ return me.App.OnRecordValidate().Trigger(re, func(re *RecordEvent) error {
+ syncModelEventWithRecordEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelCreate().Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelEvent) error {
+ if re, ok := newRecordEventFromModelEvent(me); ok {
+ return me.App.OnRecordCreate().Trigger(re, func(re *RecordEvent) error {
+ syncModelEventWithRecordEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelCreateExecute().Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelEvent) error {
+ if re, ok := newRecordEventFromModelEvent(me); ok {
+ return me.App.OnRecordCreateExecute().Trigger(re, func(re *RecordEvent) error {
+ syncModelEventWithRecordEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelAfterCreateSuccess().Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelEvent) error {
+ if re, ok := newRecordEventFromModelEvent(me); ok {
+ return me.App.OnRecordAfterCreateSuccess().Trigger(re, func(re *RecordEvent) error {
+ syncModelEventWithRecordEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelAfterCreateError().Bind(&hook.Handler[*ModelErrorEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelErrorEvent) error {
+ if re, ok := newRecordErrorEventFromModelErrorEvent(me); ok {
+ return me.App.OnRecordAfterCreateError().Trigger(re, func(re *RecordErrorEvent) error {
+ syncModelErrorEventWithRecordErrorEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelUpdate().Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelEvent) error {
+ if re, ok := newRecordEventFromModelEvent(me); ok {
+ return me.App.OnRecordUpdate().Trigger(re, func(re *RecordEvent) error {
+ syncModelEventWithRecordEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelUpdateExecute().Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelEvent) error {
+ if re, ok := newRecordEventFromModelEvent(me); ok {
+ return me.App.OnRecordUpdateExecute().Trigger(re, func(re *RecordEvent) error {
+ syncModelEventWithRecordEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelAfterUpdateSuccess().Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelEvent) error {
+ if re, ok := newRecordEventFromModelEvent(me); ok {
+ return me.App.OnRecordAfterUpdateSuccess().Trigger(re, func(re *RecordEvent) error {
+ syncModelEventWithRecordEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelAfterUpdateError().Bind(&hook.Handler[*ModelErrorEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelErrorEvent) error {
+ if re, ok := newRecordErrorEventFromModelErrorEvent(me); ok {
+ return me.App.OnRecordAfterUpdateError().Trigger(re, func(re *RecordErrorEvent) error {
+ syncModelErrorEventWithRecordErrorEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelDelete().Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelEvent) error {
+ if re, ok := newRecordEventFromModelEvent(me); ok {
+ return me.App.OnRecordDelete().Trigger(re, func(re *RecordEvent) error {
+ syncModelEventWithRecordEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelDeleteExecute().Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelEvent) error {
+ if re, ok := newRecordEventFromModelEvent(me); ok {
+ return me.App.OnRecordDeleteExecute().Trigger(re, func(re *RecordEvent) error {
+ syncModelEventWithRecordEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelAfterDeleteSuccess().Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelEvent) error {
+ if re, ok := newRecordEventFromModelEvent(me); ok {
+ return me.App.OnRecordAfterDeleteSuccess().Trigger(re, func(re *RecordEvent) error {
+ syncModelEventWithRecordEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ app.OnModelAfterDeleteError().Bind(&hook.Handler[*ModelErrorEvent]{
+ Id: systemHookIdRecord,
+ Func: func(me *ModelErrorEvent) error {
+ if re, ok := newRecordErrorEventFromModelErrorEvent(me); ok {
+ return me.App.OnRecordAfterDeleteError().Trigger(re, func(re *RecordErrorEvent) error {
+ syncModelErrorEventWithRecordErrorEvent(me, re)
+ return me.Next()
+ })
+ }
+
+ return me.Next()
+ },
+ Priority: -99,
+ })
+
+ // ---------------------------------------------------------------
+
+ app.OnRecordValidate().Bind(&hook.Handler[*RecordEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionValidate,
+ func() error {
+ return onRecordValidate(e)
+ },
+ )
+ },
+ Priority: 99,
+ })
+
+ app.OnRecordCreate().Bind(&hook.Handler[*RecordEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionCreate,
+ e.Next,
+ )
+ },
+ Priority: -99,
+ })
+
+ app.OnRecordCreateExecute().Bind(&hook.Handler[*RecordEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionCreateExecute,
+ func() error {
+ return onRecordSaveExecute(e)
+ },
+ )
+ },
+ Priority: 99,
+ })
+
+ app.OnRecordAfterCreateSuccess().Bind(&hook.Handler[*RecordEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionAfterCreate,
+ e.Next,
+ )
+ },
+ Priority: -99,
+ })
+
+ app.OnRecordAfterCreateError().Bind(&hook.Handler[*RecordErrorEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordErrorEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionAfterCreateError,
+ e.Next,
+ )
+ },
+ Priority: -99,
+ })
+
+ app.OnRecordUpdate().Bind(&hook.Handler[*RecordEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionUpdate,
+ e.Next,
+ )
+ },
+ Priority: -99,
+ })
+
+ app.OnRecordUpdateExecute().Bind(&hook.Handler[*RecordEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionUpdateExecute,
+ func() error {
+ return onRecordSaveExecute(e)
+ },
+ )
+ },
+ Priority: 99,
+ })
+
+ app.OnRecordAfterUpdateSuccess().Bind(&hook.Handler[*RecordEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionAfterUpdate,
+ e.Next,
+ )
+ },
+ Priority: -99,
+ })
+
+ app.OnRecordAfterUpdateError().Bind(&hook.Handler[*RecordErrorEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordErrorEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionAfterUpdateError,
+ e.Next,
+ )
+ },
+ Priority: -99,
+ })
+
+ app.OnRecordDelete().Bind(&hook.Handler[*RecordEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionDelete,
+ e.Next,
+ )
+ },
+ Priority: -99,
+ })
+
+ app.OnRecordDeleteExecute().Bind(&hook.Handler[*RecordEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionDeleteExecute,
+ func() error {
+ return onRecordDeleteExecute(e)
+ },
+ )
+ },
+ Priority: 99,
+ })
+
+ app.OnRecordAfterDeleteSuccess().Bind(&hook.Handler[*RecordEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionAfterDelete,
+ e.Next,
+ )
+ },
+ Priority: -99,
+ })
+
+ app.OnRecordAfterDeleteError().Bind(&hook.Handler[*RecordErrorEvent]{
+ Id: systemHookIdRecord,
+ Func: func(e *RecordErrorEvent) error {
+ return e.Record.callFieldInterceptors(
+ e.Context,
+ e.App,
+ InterceptorActionAfterDeleteError,
+ e.Next,
+ )
+ },
+ Priority: -99,
+ })
+}
+
+// -------------------------------------------------------------------
+
+// newRecordFromNullStringMap initializes a single new Record model
+// with data loaded from the provided NullStringMap.
+//
+// Note that this method is intended to load and Scan data from a database row result.
+func newRecordFromNullStringMap(collection *Collection, data dbx.NullStringMap) (*Record, error) {
+ record := NewRecord(collection)
+
+ var fieldName string
+ for _, field := range collection.Fields {
+ fieldName = field.GetName()
+
+ nullString, ok := data[fieldName]
+
+ var value any
+ var err error
+
+ if ok && nullString.Valid {
+ value, err = field.PrepareValue(record, nullString.String)
+ } else {
+ value, err = field.PrepareValue(record, nil)
+ }
+
+ if err != nil {
+ return nil, err
+ }
+
+ // we load only the original data to avoid unnecessary copying the same data into the record.data store
+ // (it is also the reason why we don't invoke PostScan on the record itself)
+ record.originalData[fieldName] = value
+
+ if fieldName == FieldNameId {
+ record.Id = cast.ToString(value)
+ }
+ }
+
+ record.BaseModel.PostScan()
+
+ return record, nil
+}
+
+// newRecordsFromNullStringMaps initializes a new Record model for
+// each row in the provided NullStringMap slice.
+//
+// Note that this method is intended to load and Scan data from a database rows result.
+func newRecordsFromNullStringMaps(collection *Collection, rows []dbx.NullStringMap) ([]*Record, error) {
+ result := make([]*Record, len(rows))
+
+ var err error
+ for i, row := range rows {
+ result[i], err = newRecordFromNullStringMap(collection, row)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return result, nil
+}
+
+// -------------------------------------------------------------------
+
+// NewRecord initializes a new empty Record model.
+func NewRecord(collection *Collection) *Record {
+ record := &Record{
+ collection: collection,
+ data: store.New[any](nil),
+ customVisibility: store.New[bool](nil),
+ originalData: make(map[string]any, len(collection.Fields)),
+ }
+
+ // initialize default field values
+ for _, field := range collection.Fields {
+ if field.GetName() == FieldNameId {
+ continue
+ }
+ value, _ := field.PrepareValue(record, nil)
+ record.originalData[field.GetName()] = value
+ }
+
+ return record
+}
+
// Collection returns the Collection model associated with the current Record model.
//
// NB! The returned collection is only for read purposes and it shouldn't be modified
// because it could have unintended side-effects on other Record models from the same collection
// (the same collection pointer is shared between all records created from it).
func (m *Record) Collection() *Collection {
	return m.collection
}
+
// TableName returns the table name associated with the current Record model
// (the collection name is used directly as the table name).
func (m *Record) TableName() string {
	return m.collection.Name
}
+
+// PostScan implements the [dbx.PostScanner] interface.
+//
+// It essentially refreshes/updates the current Record original state
+// as if the model was fetched from the databases for the first time.
+//
+// Or in other words, it means that m.Original().FieldsData() will have
+// the same values as m.Record().FieldsData().
+func (m *Record) PostScan() error {
+ if m.Id == "" {
+ return errors.New("missing record primary key")
+ }
+
+ if err := m.BaseModel.PostScan(); err != nil {
+ return err
+ }
+
+ m.originalData = m.FieldsData()
+
+ return nil
+}
+
// HookTags returns the hook tags associated with the current record,
// aka. its collection name and collection id.
func (m *Record) HookTags() []string {
	return []string{m.collection.Name, m.collection.Id}
}
+
+// BaseFilesPath returns the storage dir path used by the record.
+func (m *Record) BaseFilesPath() string {
+ id := cast.ToString(m.LastSavedPK())
+ if id == "" {
+ id = m.Id
+ }
+
+ return m.collection.BaseFilesPath() + "/" + id
+}
+
// Original returns a shallow copy of the current record model populated
// with its ORIGINAL db data state (aka. right after PostScan())
// and everything else reset to the defaults.
//
// If record was created using NewRecord() the original will be always
// a blank record (until PostScan() is invoked).
func (m *Record) Original() *Record {
	newRecord := NewRecord(m.collection)

	// clone so that later mutations of the copy don't leak into the current record
	newRecord.originalData = maps.Clone(m.originalData)

	// restore the persisted primary key (if any)
	if newRecord.originalData[FieldNameId] != nil {
		newRecord.lastSavedPK = cast.ToString(newRecord.originalData[FieldNameId])
		newRecord.Id = newRecord.lastSavedPK
	}

	return newRecord
}
+
// Fresh returns a shallow copy of the current record model populated
// with its LATEST data state and everything else reset to the defaults
// (aka. no expand, no unknown fields and with default visibility flags).
func (m *Record) Fresh() *Record {
	// start from the original db state...
	newRecord := m.Original()

	// ...and load the latest fields data on top
	// (note: this will also load the Id field through m.Get)
	newRecord.Load(m.FieldsData())

	return newRecord
}
+
// Clone returns a shallow copy of the current record model with all of
// its collection and unknown fields data, expand and flags copied.
//
// use [Record.Fresh()] instead if you want a copy with only the latest
// collection fields data and everything else reset to the defaults.
func (m *Record) Clone() *Record {
	newRecord := m.Original()

	// copy the export/visibility flags
	newRecord.Id = m.Id
	newRecord.exportCustomData = m.exportCustomData
	newRecord.ignoreEmailVisibility = m.ignoreEmailVisibility
	newRecord.ignoreUnchangedFields = m.ignoreUnchangedFields
	newRecord.customVisibility.Reset(m.customVisibility.GetAll())

	// apply the latest data changes (incl. custom/unknown fields)
	newRecord.Load(m.data.GetAll())

	if m.expand != nil {
		newRecord.SetExpand(m.expand.GetAll())
	}

	return newRecord
}
+
// Expand returns a shallow copy of the current Record model expand data (if any).
//
// The internal expand store is lazily initialized on the first
// SetExpand/MergeExpand call, hence the nil check below.
func (m *Record) Expand() map[string]any {
	if m.expand == nil {
		// return a dummy initialized map to avoid assignment to nil map errors
		return map[string]any{}
	}

	return m.expand.GetAll()
}
+
// SetExpand replaces the current Record's expand with the provided expand arg data (shallow copied).
func (m *Record) SetExpand(expand map[string]any) {
	// lazily initialize the expand store on first use
	if m.expand == nil {
		m.expand = store.New[any](nil)
	}

	m.expand.Reset(expand)
}
+
+// MergeExpand merges recursively the provided expand data into
+// the current model's expand (if any).
+//
+// Note that if an expanded prop with the same key is a slice (old or new expand)
+// then both old and new records will be merged into a new slice (aka. a :merge: [b,c] => [a,b,c]).
+// Otherwise the "old" expanded record will be replace with the "new" one (aka. a :merge: aNew => aNew).
+func (m *Record) MergeExpand(expand map[string]any) {
+ // nothing to merge
+ if len(expand) == 0 {
+ return
+ }
+
+ // no old expand
+ if m.expand == nil {
+ m.expand = store.New(expand)
+ return
+ }
+
+ oldExpand := m.expand.GetAll()
+
+ for key, new := range expand {
+ old, ok := oldExpand[key]
+ if !ok {
+ oldExpand[key] = new
+ continue
+ }
+
+ var wasOldSlice bool
+ var oldSlice []*Record
+ switch v := old.(type) {
+ case *Record:
+ oldSlice = []*Record{v}
+ case []*Record:
+ wasOldSlice = true
+ oldSlice = v
+ default:
+ // invalid old expand data -> assign directly the new
+ // (no matter whether new is valid or not)
+ oldExpand[key] = new
+ continue
+ }
+
+ var wasNewSlice bool
+ var newSlice []*Record
+ switch v := new.(type) {
+ case *Record:
+ newSlice = []*Record{v}
+ case []*Record:
+ wasNewSlice = true
+ newSlice = v
+ default:
+ // invalid new expand data -> skip
+ continue
+ }
+
+ oldIndexed := make(map[string]*Record, len(oldSlice))
+ for _, oldRecord := range oldSlice {
+ oldIndexed[oldRecord.Id] = oldRecord
+ }
+
+ for _, newRecord := range newSlice {
+ oldRecord := oldIndexed[newRecord.Id]
+ if oldRecord != nil {
+ // note: there is no need to update oldSlice since oldRecord is a reference
+ oldRecord.MergeExpand(newRecord.Expand())
+ } else {
+ // missing new entry
+ oldSlice = append(oldSlice, newRecord)
+ }
+ }
+
+ if wasOldSlice || wasNewSlice || len(oldSlice) == 0 {
+ oldExpand[key] = oldSlice
+ } else {
+ oldExpand[key] = oldSlice[0]
+ }
+ }
+
+ m.expand.Reset(oldExpand)
+}
+
+// FieldsData returns a shallow copy ONLY of the collection's fields record's data.
+func (m *Record) FieldsData() map[string]any {
+ result := make(map[string]any, len(m.collection.Fields))
+
+ for _, field := range m.collection.Fields {
+ result[field.GetName()] = m.Get(field.GetName())
+ }
+
+ return result
+}
+
+// CustomData returns a shallow copy ONLY of the custom record fields data,
+// aka. fields that are neither defined by the collection, nor special system ones.
+//
+// Note that custom fields prefixed with "@pbInternal" are always skipped.
+func (m *Record) CustomData() map[string]any {
+ if m.data == nil {
+ return nil
+ }
+
+ fields := m.Collection().Fields
+
+ knownFields := make(map[string]struct{}, len(fields))
+
+ for _, f := range fields {
+ knownFields[f.GetName()] = struct{}{}
+ }
+
+ result := map[string]any{}
+
+ rawData := m.data.GetAll()
+ for k, v := range rawData {
+ if _, ok := knownFields[k]; !ok {
+ // skip internal custom fields
+ if strings.HasPrefix(k, internalCustomFieldKeyPrefix) {
+ continue
+ }
+
+ result[k] = v
+ }
+ }
+
+ return result
+}
+
// WithCustomData toggles the export/serialization of custom data fields
// (false by default).
//
// Returns the same record instance to allow chaining.
func (m *Record) WithCustomData(state bool) *Record {
	m.exportCustomData = state
	return m
}
+
// IgnoreEmailVisibility toggles the flag to ignore the auth record email visibility check.
//
// Returns the same record instance to allow chaining.
func (m *Record) IgnoreEmailVisibility(state bool) *Record {
	m.ignoreEmailVisibility = state
	return m
}
+
// IgnoreUnchangedFields toggles the flag to ignore the unchanged fields
// from the DB export for the UPDATE SQL query.
//
// This could be used if you want to save only the record fields that you've changed
// without overwrite other untouched fields in case of concurrent update.
//
// Returns the same record instance to allow chaining.
func (m *Record) IgnoreUnchangedFields(state bool) *Record {
	m.ignoreUnchangedFields = state
	return m
}
+
// SetRaw sets the provided key-value data pair into the current Record
// model directly as it is WITHOUT NORMALIZATIONS.
//
// See also [Record.Set].
func (m *Record) SetRaw(key string, value any) {
	// keep the exported Id field in sync with the data store
	if key == FieldNameId {
		m.Id = cast.ToString(value)
	}

	m.data.Set(key, value)
}
+
// SetIfFieldExists sets the provided key-value data pair into the current Record model
// ONLY if key is existing Collection field name/modifier.
//
// This method does nothing if key is not a known Collection field name/modifier.
//
// On success returns the matched Field, otherwise - nil.
//
// To set any key-value, including custom/unknown fields, use the [Record.Set] method.
func (m *Record) SetIfFieldExists(key string, value any) Field {
	for _, field := range m.Collection().Fields {
		// custom setters are checked first so that modifier keys
		// (ex. "items+") can be resolved by the field itself
		ff, ok := field.(SetterFinder)
		if ok {
			setter := ff.FindSetter(key)
			if setter != nil {
				setter(m, value)
				return field
			}
		}

		// fallback to the default field PrepareValue method for direct match
		if key == field.GetName() {
			value, _ = field.PrepareValue(m, value)
			m.SetRaw(key, value)
			return field
		}
	}

	return nil
}
+
+// Set sets the provided key-value data pair into the current Record model.
+//
+// If the record collection has field with name matching the provided "key",
+// the value will be further normalized according to the field setter(s).
+func (m *Record) Set(key string, value any) {
+ switch key {
+ case FieldNameExpand: // for backward-compatibility with earlier versions
+ m.SetExpand(cast.ToStringMap(value))
+ default:
+ field := m.SetIfFieldExists(key, value)
+ if field == nil {
+ // custom key - set it without any transformations
+ m.SetRaw(key, value)
+ }
+ }
+}
+
// GetRaw returns the "raw" record value for "key", without applying
// any field getters (see [Record.Get] for the normalized variant).
//
// The lookup order is: the special id field, the latest set data and
// finally the original (db loaded) data.
//
// Returns nil if there is no such key.
func (m *Record) GetRaw(key string) any {
	if key == FieldNameId {
		return m.Id
	}

	if v, ok := m.data.GetOk(key); ok {
		return v
	}

	return m.originalData[key]
}
+
+// Get returns a normalized single record model data value for "key".
+func (m *Record) Get(key string) any {
+ switch key {
+ case FieldNameExpand: // for backward-compatibility with earlier versions
+ return m.Expand()
+ default:
+ for _, field := range m.Collection().Fields {
+ gm, ok := field.(GetterFinder)
+ if !ok {
+ continue // no custom getters
+ }
+
+ getter := gm.FindGetter(key)
+ if getter != nil {
+ return getter(m)
+ }
+ }
+
+ return m.GetRaw(key)
+ }
+}
+
// Load bulk loads the provided data into the current Record model
// (each entry goes through [Record.Set], so field normalizations apply).
func (m *Record) Load(data map[string]any) {
	for k, v := range data {
		m.Set(k, v)
	}
}
+
// GetBool returns the normalized data value for "key" casted to a bool.
func (m *Record) GetBool(key string) bool {
	return cast.ToBool(m.Get(key))
}
+
// GetString returns the normalized data value for "key" casted to a string.
func (m *Record) GetString(key string) string {
	return cast.ToString(m.Get(key))
}
+
// GetInt returns the normalized data value for "key" casted to an int.
func (m *Record) GetInt(key string) int {
	return cast.ToInt(m.Get(key))
}
+
// GetFloat returns the normalized data value for "key" casted to a float64.
func (m *Record) GetFloat(key string) float64 {
	return cast.ToFloat64(m.Get(key))
}
+
// GetDateTime returns the data value for "key" as a DateTime instance
// (a zero DateTime is returned on parse failure - the error is ignored).
func (m *Record) GetDateTime(key string) types.DateTime {
	d, _ := types.ParseDateTime(m.Get(key))
	return d
}
+
// GetStringSlice returns the data value for "key" as a slice of non-zero unique strings.
func (m *Record) GetStringSlice(key string) []string {
	return list.ToUniqueStringSlice(m.Get(key))
}
+
// GetUploadedFiles returns the uploaded files for the provided "file" field key,
// (aka. the current [*filesytem.File] values) so that you can apply further
// validations or modifications (including changing the file name or content before persisting).
//
// Example:
//
//	files := record.GetUploadedFiles("documents")
//	for _, f := range files {
//	    f.Name = "doc_" + f.Name // add a prefix to each file name
//	}
//	app.Save(record) // the files are pointers so the applied changes will transparently reflect on the record value
func (m *Record) GetUploadedFiles(key string) []*filesystem.File {
	// normalize the key to always target the ":uploaded" modifier view of the field
	if !strings.HasSuffix(key, ":uploaded") {
		key += ":uploaded"
	}

	// non-file or empty values simply result in a nil slice
	values, _ := m.Get(key).([]*filesystem.File)

	return values
}
+
+// Retrieves the "key" json field value and unmarshals it into "result".
+//
+// Example
+//
+// result := struct {
+// FirstName string `json:"first_name"`
+// }{}
+// err := m.UnmarshalJSONField("my_field_name", &result)
+func (m *Record) UnmarshalJSONField(key string, result any) error {
+ return json.Unmarshal([]byte(m.GetString(key)), &result)
+}
+
+// ExpandedOne retrieves a single relation Record from the already
+// loaded expand data of the current model.
+//
+// If the requested expand relation is multiple, this method returns
+// only first available Record from the expanded relation.
+//
+// Returns nil if there is no such expand relation loaded.
+func (m *Record) ExpandedOne(relField string) *Record {
+ if m.expand == nil {
+ return nil
+ }
+
+ rel := m.expand.Get(relField)
+
+ switch v := rel.(type) {
+ case *Record:
+ return v
+ case []*Record:
+ if len(v) > 0 {
+ return v[0]
+ }
+ }
+
+ return nil
+}
+
+// ExpandedAll retrieves a slice of relation Records from the already
+// loaded expand data of the current model.
+//
+// If the requested expand relation is single, this method normalizes
+// the return result and will wrap the single model as a slice.
+//
+// Returns nil slice if there is no such expand relation loaded.
+func (m *Record) ExpandedAll(relField string) []*Record {
+ if m.expand == nil {
+ return nil
+ }
+
+ rel := m.expand.Get(relField)
+
+ switch v := rel.(type) {
+ case *Record:
+ return []*Record{v}
+ case []*Record:
+ return v
+ }
+
+ return nil
+}
+
+// FindFileFieldByFile returns the first file type field for which
+// any of the record's data contains the provided filename.
+func (m *Record) FindFileFieldByFile(filename string) *FileField {
+ for _, field := range m.Collection().Fields {
+ if field.Type() != FieldTypeFile {
+ continue
+ }
+
+ f, ok := field.(*FileField)
+ if !ok {
+ continue
+ }
+
+ filenames := m.GetStringSlice(f.GetName())
+ if slices.Contains(filenames, filename) {
+ return f
+ }
+ }
+
+ return nil
+}
+
// DBExport implements the [DBExporter] interface and returns a key-value
// map with the data to be persisted when saving the Record in the database.
//
// When the record is not new and [Record.IgnoreUnchangedFields] is enabled,
// fields whose value matches the original db state are omitted from the export.
func (m *Record) DBExport(app App) (map[string]any, error) {
	result, err := m.dbExport()
	if err != nil {
		return nil, err
	}

	// remove exported fields that haven't changed
	// (with exception of the id column)
	if !m.IsNew() && m.ignoreUnchangedFields {
		oldResult, err := m.Original().dbExport()
		if err != nil {
			return nil, err
		}

		for oldK, oldV := range oldResult {
			// always keep the id (presumably used to target the updated row - confirm against the save query builder)
			if oldK == idColumn {
				continue
			}
			newV, ok := result[oldK]
			if ok && areValuesEqual(newV, oldV) {
				// unchanged -> no need to rewrite the same value
				delete(result, oldK)
			}
		}
	}

	return result, nil
}
+
+func (m *Record) dbExport() (map[string]any, error) {
+ fields := m.Collection().Fields
+
+ result := make(map[string]any, len(fields))
+
+ for _, field := range fields {
+ if f, ok := field.(DriverValuer); ok {
+ v, err := f.DriverValue(m)
+ if err != nil {
+ return nil, err
+ }
+ result[field.GetName()] = v
+ } else {
+ result[field.GetName()] = m.GetRaw(field.GetName())
+ }
+ }
+
+ return result, nil
+}
+
+func areValuesEqual(a any, b any) bool {
+ switch av := a.(type) {
+ case string:
+ bv, ok := b.(string)
+ return ok && bv == av
+ case bool:
+ bv, ok := b.(bool)
+ return ok && bv == av
+ case float32:
+ bv, ok := b.(float32)
+ return ok && bv == av
+ case float64:
+ bv, ok := b.(float64)
+ return ok && bv == av
+ case uint:
+ bv, ok := b.(uint)
+ return ok && bv == av
+ case uint8:
+ bv, ok := b.(uint8)
+ return ok && bv == av
+ case uint16:
+ bv, ok := b.(uint16)
+ return ok && bv == av
+ case uint32:
+ bv, ok := b.(uint32)
+ return ok && bv == av
+ case uint64:
+ bv, ok := b.(uint64)
+ return ok && bv == av
+ case int:
+ bv, ok := b.(int)
+ return ok && bv == av
+ case int8:
+ bv, ok := b.(int8)
+ return ok && bv == av
+ case int16:
+ bv, ok := b.(int16)
+ return ok && bv == av
+ case int32:
+ bv, ok := b.(int32)
+ return ok && bv == av
+ case int64:
+ bv, ok := b.(int64)
+ return ok && bv == av
+ case []byte:
+ bv, ok := b.([]byte)
+ return ok && bytes.Equal(av, bv)
+ case []string:
+ bv, ok := b.([]string)
+ return ok && slices.Equal(av, bv)
+ case []int:
+ bv, ok := b.([]int)
+ return ok && slices.Equal(av, bv)
+ case []int32:
+ bv, ok := b.([]int32)
+ return ok && slices.Equal(av, bv)
+ case []int64:
+ bv, ok := b.([]int64)
+ return ok && slices.Equal(av, bv)
+ case []float32:
+ bv, ok := b.([]float32)
+ return ok && slices.Equal(av, bv)
+ case []float64:
+ bv, ok := b.([]float64)
+ return ok && slices.Equal(av, bv)
+ case types.JSONArray[string]:
+ bv, ok := b.(types.JSONArray[string])
+ return ok && slices.Equal(av, bv)
+ case types.JSONRaw:
+ bv, ok := b.(types.JSONRaw)
+ return ok && bytes.Equal(av, bv)
+ default:
+ aRaw, err := json.Marshal(a)
+ if err != nil {
+ return false
+ }
+
+ bRaw, err := json.Marshal(b)
+ if err != nil {
+ return false
+ }
+
+ return bytes.Equal(aRaw, bRaw)
+ }
+}
+
+// Hide hides the specified fields from the public safe serialization of the record.
+func (record *Record) Hide(fieldNames ...string) *Record {
+ for _, name := range fieldNames {
+ record.customVisibility.Set(name, false)
+ }
+
+ return record
+}
+
+// Unhide forces to unhide the specified fields from the public safe serialization
+// of the record (even when the collection field itself is marked as hidden).
+func (record *Record) Unhide(fieldNames ...string) *Record {
+ for _, name := range fieldNames {
+ record.customVisibility.Set(name, true)
+ }
+
+ return record
+}
+
// PublicExport exports only the record fields that are safe to be public.
//
// To export unknown data fields you need to set record.WithCustomData(true).
//
// For auth records, to force the export of the email field you need to set
// record.IgnoreEmailVisibility(true).
func (record *Record) PublicExport() map[string]any {
	// +3 for the collectionId/collectionName/expand helper props
	export := make(map[string]any, len(record.collection.Fields)+3)

	var isVisible, hasCustomVisibility bool

	customVisibility := record.customVisibility.GetAll()

	// export schema fields
	// (an explicit Hide/Unhide flag always wins over the field's own hidden state)
	for _, f := range record.collection.Fields {
		isVisible, hasCustomVisibility = customVisibility[f.GetName()]
		if !hasCustomVisibility {
			isVisible = !f.GetHidden()
		}

		if !isVisible {
			continue
		}

		export[f.GetName()] = record.Get(f.GetName())
	}

	// export custom fields
	if record.exportCustomData {
		for k, v := range record.CustomData() {
			isVisible, hasCustomVisibility = customVisibility[k]
			if !hasCustomVisibility || isVisible {
				export[k] = v
			}
		}
	}

	if record.Collection().IsAuth() {
		// always hide the password and tokenKey fields
		delete(export, FieldNamePassword)
		delete(export, FieldNameTokenKey)

		// hide the email unless the record explicitly allows it or the check is ignored
		if !record.ignoreEmailVisibility && !record.GetBool(FieldNameEmailVisibility) {
			delete(export, FieldNameEmail)
		}
	}

	// add helper collection reference fields
	isVisible, hasCustomVisibility = customVisibility[FieldNameCollectionId]
	if !hasCustomVisibility || isVisible {
		export[FieldNameCollectionId] = record.collection.Id
	}
	isVisible, hasCustomVisibility = customVisibility[FieldNameCollectionName]
	if !hasCustomVisibility || isVisible {
		export[FieldNameCollectionName] = record.collection.Name
	}

	// add expand (if non-nil)
	isVisible, hasCustomVisibility = customVisibility[FieldNameExpand]
	if (!hasCustomVisibility || isVisible) && record.expand != nil {
		export[FieldNameExpand] = record.expand.GetAll()
	}

	return export
}
+
// MarshalJSON implements the [json.Marshaler] interface.
//
// Only the data exported by `PublicExport()` will be serialized.
//
// (value receiver so that both Record and *Record values serialize the same way)
func (m Record) MarshalJSON() ([]byte, error) {
	return json.Marshal(m.PublicExport())
}
+
+// UnmarshalJSON implements the [json.Unmarshaler] interface.
+func (m *Record) UnmarshalJSON(data []byte) error {
+ result := map[string]any{}
+
+ if err := json.Unmarshal(data, &result); err != nil {
+ return err
+ }
+
+ m.Load(result)
+
+ return nil
+}
+
// ReplaceModifiers returns a new map with applied modifier
// values based on the current record and the specified data.
//
// The resolved modifier keys will be removed.
//
// Multiple modifiers will be applied one after another,
// while reusing the previous base key value result (ex. 1; -5; +2 => -2).
//
// Note that because Go doesn't guarantee the iteration order of maps,
// we would explicitly apply shorter keys first for a more consistent and reproducible behavior.
//
// Example usage:
//
//	newData := record.ReplaceModifiers(data)
//	// record: {"field": 10}
//	// data:   {"field+": 5}
//	// result: {"field": 15}
func (m *Record) ReplaceModifiers(data map[string]any) map[string]any {
	if len(data) == 0 {
		return data
	}

	dataCopy := maps.Clone(data)

	// apply the modifiers on a scratch copy so that the current record is not mutated
	recordCopy := m.Fresh()

	// map iteration order is not guaranteed so apply the keys
	// in a stable (shortest first) order
	sortedDataKeys := make([]string, 0, len(data))
	for k := range data {
		sortedDataKeys = append(sortedDataKeys, k)
	}
	sort.SliceStable(sortedDataKeys, func(i int, j int) bool {
		return len(sortedDataKeys[i]) < len(sortedDataKeys[j])
	})

	for _, k := range sortedDataKeys {
		field := recordCopy.SetIfFieldExists(k, data[k])
		if field != nil {
			// delete the original key in case it is with a modifier (ex. "items+")
			delete(dataCopy, k)

			// store the transformed value under the field name
			dataCopy[field.GetName()] = recordCopy.Get(field.GetName())
		}
	}

	return dataCopy
}
+
+// -------------------------------------------------------------------
+
// callFieldInterceptors invokes actionFunc wrapped with the
// RecordInterceptor implementations of the record's collection fields.
func (m *Record) callFieldInterceptors(
	ctx context.Context,
	app App,
	actionName string,
	actionFunc func() error,
) error {
	// the firing order of the fields doesn't matter
	//
	// each interceptor wraps the previously built func, so calling the
	// final actionFunc runs the interceptors as a chain ending with the
	// original action
	for _, field := range m.Collection().Fields {
		if f, ok := field.(RecordInterceptor); ok {
			oldfn := actionFunc
			actionFunc = func() error {
				return f.Intercept(ctx, app, m, actionName, oldfn)
			}
		}
	}

	return actionFunc()
}
+
+func onRecordValidate(e *RecordEvent) error {
+ errs := validation.Errors{}
+
+ for _, f := range e.Record.Collection().Fields {
+ if err := f.ValidateValue(e.Context, e.App, e.Record); err != nil {
+ errs[f.GetName()] = err
+ }
+ }
+
+ if len(errs) > 0 {
+ return errs
+ }
+
+ return e.Next()
+}
+
// onRecordSaveExecute runs the default record persist logic together with
// the auth collection specific integrity checks.
func onRecordSaveExecute(e *RecordEvent) error {
	if e.Record.Collection().IsAuth() {
		// ensure that the token key is different on password change
		// (presumably to invalidate previously issued tokens - confirm against the token signing code)
		old := e.Record.Original()
		if !e.Record.IsNew() &&
			old.TokenKey() == e.Record.TokenKey() &&
			old.Get(FieldNamePassword) != e.Record.Get(FieldNamePassword) {
			e.Record.RefreshTokenKey()
		}

		// cross-check that the auth record id is unique across all auth collections.
		authCollections, err := e.App.FindAllCollections(CollectionTypeAuth)
		if err != nil {
			return fmt.Errorf("unable to fetch the auth collections for cross-id unique check: %w", err)
		}
		for _, collection := range authCollections {
			if e.Record.Collection().Id == collection.Id {
				continue // skip current collection (sqlite will do the check for us)
			}
			// the fetch error is deliberately ignored - only the record existence matters
			record, _ := e.App.FindRecordById(collection, e.Record.Id)
			if record != nil {
				return validation.Errors{
					FieldNameId: validation.NewError("validation_invalid_auth_id", "Invalid or duplicated auth record id."),
				}
			}
		}
	}

	err := e.Next()
	if err == nil {
		return nil
	}

	// convert unique constraint db failures into user friendly validation errors
	return validators.NormalizeUniqueIndexError(
		err,
		e.Record.Collection().Name,
		e.Record.Collection().Fields.FieldNames(),
	)
}
+
// onRecordDeleteExecute performs the default record delete together with
// the cascade delete/unset of its relation references, all in a single transaction.
func onRecordDeleteExecute(e *RecordEvent) error {
	// fetch rel references (if any)
	//
	// note: the select is outside of the transaction to minimize
	// SQLITE_BUSY errors when mixing read&write in a single transaction
	refs, err := e.App.FindCollectionReferences(e.Record.Collection())
	if err != nil {
		return err
	}

	// temporarily swap the event App with the tx one so that nested
	// handlers operate on the same transaction; restored afterwards
	originalApp := e.App
	txErr := e.App.RunInTransaction(func(txApp App) error {
		e.App = txApp

		// delete the record before the relation references to ensure that there
		// will be no "A<->B" relations to prevent deadlock when calling DeleteRecord recursively
		if err := e.Next(); err != nil {
			return err
		}

		return cascadeRecordDelete(txApp, e.Record, refs)
	})
	e.App = originalApp

	return txErr
}
+
// cascadeRecordDelete triggers cascade deletion for the provided references.
//
// NB! This method is expected to be called from inside of a transaction.
func cascadeRecordDelete(app App, mainRecord *Record, refs map[*Collection][]Field) error {
	// Sort the refs keys to ensure that the cascade events firing order is always the same.
	// This is not necessary for the operation to function correctly but it helps having deterministic output during testing.
	sortedRefKeys := make([]*Collection, 0, len(refs))
	for k := range refs {
		sortedRefKeys = append(sortedRefKeys, k)
	}
	sort.Slice(sortedRefKeys, func(i, j int) bool {
		return sortedRefKeys[i].Name < sortedRefKeys[j].Name
	})

	for _, refCollection := range sortedRefKeys {
		fields, ok := refs[refCollection]

		if refCollection.IsView() || !ok {
			continue // skip missing or view collections
		}

		for _, field := range fields {
			recordTableName := inflector.Columnify(refCollection.Name)
			prefixedFieldName := recordTableName + "." + inflector.Columnify(field.GetName())

			query := app.RecordQuery(refCollection)

			// single relation values can be matched directly;
			// multiple values are stored as a json array and need a json_each scan
			if opt, ok := field.(MultiValuer); !ok || !opt.IsMultiple() {
				query.AndWhere(dbx.HashExp{prefixedFieldName: mainRecord.Id})
			} else {
				query.AndWhere(dbx.Exists(dbx.NewExp(fmt.Sprintf(
					`SELECT 1 FROM json_each(CASE WHEN json_valid([[%s]]) THEN [[%s]] ELSE json_array([[%s]]) END) {{__je__}} WHERE [[__je__.value]]={:jevalue}`,
					prefixedFieldName, prefixedFieldName, prefixedFieldName,
				), dbx.Params{
					"jevalue": mainRecord.Id,
				})))
			}

			// exclude the main record itself on self-references
			if refCollection.Id == mainRecord.Collection().Id {
				query.AndWhere(dbx.Not(dbx.HashExp{recordTableName + ".id": mainRecord.Id}))
			}

			// trigger cascade for each batchSize rel items until there is none
			//
			// note: no OFFSET is used - each processed batch is presumably
			// deleted or updated by deleteRefRecords so the next query
			// returns the remaining matches (TODO confirm)
			batchSize := 4000
			rows := make([]*Record, 0, batchSize)
			for {
				if err := query.Limit(int64(batchSize)).All(&rows); err != nil {
					return err
				}

				total := len(rows)
				if total == 0 {
					break
				}

				err := deleteRefRecords(app, mainRecord, rows, field)
				if err != nil {
					return err
				}

				if total < batchSize {
					break // no more items
				}

				rows = rows[:0] // keep allocated memory
			}
		}
	}

	return nil
}
+
// deleteRefRecords checks if related records has to be deleted (if `CascadeDelete` is set)
// OR
// just unset the record id from any relation field values (if they are not required).
//
// NB! This method is expected to be called from inside of a transaction.
func deleteRefRecords(app App, mainRecord *Record, refRecords []*Record, field Field) error {
	relField, _ := field.(*RelationField)
	if relField == nil {
		return errors.New("only RelationField is supported at the moment, got " + field.Type())
	}

	for _, refRecord := range refRecords {
		ids := refRecord.GetStringSlice(relField.Name)

		// unset the record id
		// (a single removal is enough since GetStringSlice returns unique values)
		for i := len(ids) - 1; i >= 0; i-- {
			if ids[i] == mainRecord.Id {
				ids = append(ids[:i], ids[i+1:]...)
				break
			}
		}

		// cascade delete the reference
		// (only if there are no other active references in case of multiple select)
		if relField.CascadeDelete && len(ids) == 0 {
			if err := app.Delete(refRecord); err != nil {
				return err
			}
			// no further actions are needed (the reference is deleted)
			continue
		}

		// a required relation without remaining ids cannot be saved -> abort the delete
		if relField.Required && len(ids) == 0 {
			return fmt.Errorf("the record cannot be deleted because it is part of a required reference in record %s (%s collection)", refRecord.Id, refRecord.Collection().Name)
		}

		// save the reference changes
		// (without validation because it is possible that another relation field to have a reference to a previous deleted record)
		refRecord.Set(relField.Name, ids)
		if err := app.SaveNoValidate(refRecord); err != nil {
			return err
		}
	}

	return nil
}
diff --git a/core/record_model_auth.go b/core/record_model_auth.go
new file mode 100644
index 00000000..00fae0f8
--- /dev/null
+++ b/core/record_model_auth.go
@@ -0,0 +1,64 @@
+package core
+
+// Email returns the "email" record field value (usually available with Auth collections).
+func (m *Record) Email() string {
+ return m.GetString(FieldNameEmail)
+}
+
+// SetEmail sets the "email" record field value (usually available with Auth collections).
+func (m *Record) SetEmail(email string) {
+ m.Set(FieldNameEmail, email)
+}
+
+// EmailVisibility returns the "emailVisibility" record field value (usually available with Auth collections).
+func (m *Record) EmailVisibility() bool {
+ return m.GetBool(FieldNameEmailVisibility)
+}
+
+// SetEmailVisibility sets the "emailVisibility" record field value (usually available with Auth collections).
+func (m *Record) SetEmailVisibility(visible bool) {
+ m.Set(FieldNameEmailVisibility, visible)
+}
+
+// Verified returns the "verified" record field value (usually available with Auth collections).
+func (m *Record) Verified() bool {
+ return m.GetBool(FieldNameVerified)
+}
+
+// SetVerified sets the "verified" record field value (usually available with Auth collections).
+func (m *Record) SetVerified(verified bool) {
+ m.Set(FieldNameVerified, verified)
+}
+
+// TokenKey returns the "tokenKey" record field value (usually available with Auth collections).
+func (m *Record) TokenKey() string {
+ return m.GetString(FieldNameTokenKey)
+}
+
+// SetTokenKey sets the "tokenKey" record field value (usually available with Auth collections).
+func (m *Record) SetTokenKey(key string) {
+ m.Set(FieldNameTokenKey, key)
+}
+
+// RefreshTokenKey generates and sets a new random auth record "tokenKey"
+// (setting an empty value with the autogenerate modifier triggers the random generation).
+func (m *Record) RefreshTokenKey() {
+ m.Set(FieldNameTokenKey+autogenerateModifier, "")
+}
+
+// SetPassword sets the "password" record field value (usually available with Auth collections).
+func (m *Record) SetPassword(password string) {
+ // note: the tokenKey will be auto changed if necessary before db write
+ m.Set(FieldNamePassword, password)
+}
+
+// ValidatePassword validates a plain password against the "password" record field.
+//
+// Returns false if the password is incorrect
+// (it also returns false when the raw field value is not a *PasswordFieldValue,
+// e.g. when no password has been loaded/set yet).
+func (m *Record) ValidatePassword(password string) bool {
+ pv, ok := m.GetRaw(FieldNamePassword).(*PasswordFieldValue)
+ if !ok {
+ return false
+ }
+
+ return pv.Validate(password)
+}
diff --git a/core/record_model_auth_test.go b/core/record_model_auth_test.go
new file mode 100644
index 00000000..da748f00
--- /dev/null
+++ b/core/record_model_auth_test.go
@@ -0,0 +1,119 @@
+package core_test
+
+import (
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tools/security"
+)
+
+func TestRecordEmail(t *testing.T) {
+ record := core.NewRecord(core.NewAuthCollection("test"))
+
+ if record.Email() != "" {
+ t.Fatalf("Expected email %q, got %q", "", record.Email())
+ }
+
+ email := "test@example.com"
+ record.SetEmail(email)
+
+ if record.Email() != email {
+ t.Fatalf("Expected email %q, got %q", email, record.Email())
+ }
+}
+
+func TestRecordEmailVisibility(t *testing.T) {
+ record := core.NewRecord(core.NewAuthCollection("test"))
+
+ if record.EmailVisibility() != false {
+ t.Fatalf("Expected emailVisibility %v, got %v", false, record.EmailVisibility())
+ }
+
+ record.SetEmailVisibility(true)
+
+ if record.EmailVisibility() != true {
+ t.Fatalf("Expected emailVisibility %v, got %v", true, record.EmailVisibility())
+ }
+}
+
+func TestRecordVerified(t *testing.T) {
+ record := core.NewRecord(core.NewAuthCollection("test"))
+
+ if record.Verified() != false {
+ t.Fatalf("Expected verified %v, got %v", false, record.Verified())
+ }
+
+ record.SetVerified(true)
+
+ if record.Verified() != true {
+ t.Fatalf("Expected verified %v, got %v", true, record.Verified())
+ }
+}
+
+func TestRecordTokenKey(t *testing.T) {
+ record := core.NewRecord(core.NewAuthCollection("test"))
+
+ if record.TokenKey() != "" {
+ t.Fatalf("Expected tokenKey %q, got %q", "", record.TokenKey())
+ }
+
+ tokenKey := "example"
+
+ record.SetTokenKey(tokenKey)
+
+ if record.TokenKey() != tokenKey {
+ t.Fatalf("Expected tokenKey %q, got %q", tokenKey, record.TokenKey())
+ }
+
+ record.RefreshTokenKey()
+
+ if record.TokenKey() == tokenKey {
+ t.Fatalf("Expected tokenKey to be random generated, got %q", tokenKey)
+ }
+
+ if len(record.TokenKey()) != 50 {
+ t.Fatalf("Expected %d characters, got %d", 50, len(record.TokenKey()))
+ }
+}
+
+// TestRecordPassword verifies the SetPassword/ValidatePassword pair:
+// validation must fail before a password is set, succeed (or fail) as
+// expected after setting it, and reject arbitrary other strings.
+func TestRecordPassword(t *testing.T) {
+ scenarios := []struct {
+ name string
+ password string
+ expected bool
+ }{
+ {
+ "empty password",
+ "",
+ false,
+ },
+ {
+ "non-empty password",
+ "123456",
+ true,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ record := core.NewRecord(core.NewAuthCollection("test"))
+
+ if record.ValidatePassword(s.password) {
+ t.Fatal("[before set] Expected password to be invalid")
+ }
+
+ record.SetPassword(s.password)
+
+ result := record.ValidatePassword(s.password)
+
+ if result != s.expected {
+ // note: the expected value must come first in the message
+ t.Fatalf("[after set] Expected ValidatePassword %v, got %v", s.expected, result)
+ }
+
+ // try with a random string to ensure that not any string validates
+ if record.ValidatePassword(security.PseudorandomString(5)) {
+ t.Fatal("[random] Expected password to be invalid")
+ }
+ })
+ }
+}
diff --git a/core/record_model_superusers.go b/core/record_model_superusers.go
new file mode 100644
index 00000000..256d6aee
--- /dev/null
+++ b/core/record_model_superusers.go
@@ -0,0 +1,80 @@
+package core
+
+import (
+ "fmt"
+
+ "github.com/pocketbase/pocketbase/tools/hook"
+ "github.com/pocketbase/pocketbase/tools/router"
+)
+
+const CollectionNameSuperusers = "_superusers"
+
+// registerSuperuserHooks binds the system hooks that enforce the special
+// constraints of the reserved _superusers auth collection and its records.
+func (app *BaseApp) registerSuperuserHooks() {
+ // prevent the deletion of the only remaining superuser
+ app.OnRecordDelete(CollectionNameSuperusers).Bind(&hook.Handler[*RecordEvent]{
+ Id: "pbSuperusersRecordDelete",
+ Func: func(e *RecordEvent) error {
+ originalApp := e.App
+ txErr := e.App.RunInTransaction(func(txApp App) error {
+ e.App = txApp
+
+ total, err := e.App.CountRecords(CollectionNameSuperusers)
+ if err != nil {
+ return fmt.Errorf("failed to fetch total superusers count: %w", err)
+ }
+
+ if total == 1 {
+ return router.NewBadRequestError("You can't delete the only existing superuser", nil)
+ }
+
+ return e.Next()
+ })
+ // restore the original app instance regardless of the tx outcome
+ e.App = originalApp
+
+ return txErr
+ },
+ Priority: -99,
+ })
+
+ // superuser records are always stored as verified
+ recordSaveHandler := &hook.Handler[*RecordEvent]{
+ Id: "pbSuperusersRecordSaveExec",
+ Func: func(e *RecordEvent) error {
+ e.Record.SetVerified(true) // always mark superusers as verified
+ return e.Next()
+ },
+ Priority: -99,
+ }
+ app.OnRecordCreateExecute(CollectionNameSuperusers).Bind(recordSaveHandler)
+ app.OnRecordUpdateExecute(CollectionNameSuperusers).Bind(recordSaveHandler)
+
+ // normalize the collection options on every create/update
+ collectionSaveHandler := &hook.Handler[*CollectionEvent]{
+ Id: "pbSuperusersCollectionSaveExec",
+ Func: func(e *CollectionEvent) error {
+ // don't allow name change even if executed with SaveNoValidate
+ e.Collection.Name = CollectionNameSuperusers
+
+ // for now don't allow superusers OAuth2 since we don't want
+ // to accidentally create a new superuser by just OAuth2 signin
+ e.Collection.OAuth2.Enabled = false
+ e.Collection.OAuth2.Providers = nil
+
+ // force password auth
+ e.Collection.PasswordAuth.Enabled = true
+
+ // for superusers we don't allow for now standalone OTP auth and always require to be combined with MFA
+ if e.Collection.OTP.Enabled {
+ e.Collection.MFA.Enabled = true
+ }
+
+ return e.Next()
+ },
+ Priority: 99,
+ }
+ app.OnCollectionCreateExecute(CollectionNameSuperusers).Bind(collectionSaveHandler)
+ app.OnCollectionUpdateExecute(CollectionNameSuperusers).Bind(collectionSaveHandler)
+}
+
+// IsSuperuser returns whether the current record is a superuser, i.e.
+// whether the record is from the _superusers collection.
+func (m *Record) IsSuperuser() bool {
+ return m.Collection().Name == CollectionNameSuperusers
+}
diff --git a/core/record_model_superusers_test.go b/core/record_model_superusers_test.go
new file mode 100644
index 00000000..db8a70a4
--- /dev/null
+++ b/core/record_model_superusers_test.go
@@ -0,0 +1,48 @@
+package core_test
+
+import (
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestRecordIsSuperUser(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ demo1, err := app.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ superuser, err := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ record *core.Record
+ expected bool
+ }{
+ {demo1, false},
+ {user, false},
+ {superuser, true},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.record.Collection().Name, func(t *testing.T) {
+ result := s.record.IsSuperuser()
+ if result != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
diff --git a/core/record_model_test.go b/core/record_model_test.go
new file mode 100644
index 00000000..d19c7a43
--- /dev/null
+++ b/core/record_model_test.go
@@ -0,0 +1,2080 @@
+package core_test
+
+import (
+ "bytes"
+ "context"
+ "database/sql"
+ "encoding/json"
+ "fmt"
+ "regexp"
+ "slices"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/filesystem"
+ "github.com/pocketbase/pocketbase/tools/types"
+ "github.com/spf13/cast"
+)
+
+func TestNewRecord(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+ collection.Fields.Add(&core.BoolField{Name: "status"})
+
+ m := core.NewRecord(collection)
+
+ rawData, err := json.Marshal(m.FieldsData()) // should be initialized with the defaults
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ expected := `{"id":"","status":false}`
+
+ if str := string(rawData); str != expected {
+ t.Fatalf("Expected schema data\n%v\ngot\n%v", expected, str)
+ }
+}
+
+func TestRecordCollection(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+
+ m := core.NewRecord(collection)
+
+ if m.Collection().Name != collection.Name {
+ t.Fatalf("Expected collection with name %q, got %q", collection.Name, m.Collection().Name)
+ }
+}
+
+func TestRecordTableName(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+
+ m := core.NewRecord(collection)
+
+ if m.TableName() != collection.Name {
+ t.Fatalf("Expected table %q, got %q", collection.Name, m.TableName())
+ }
+}
+
+func TestRecordPostScan(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test_collection")
+ collection.Fields.Add(&core.TextField{Name: "test"})
+
+ m := core.NewRecord(collection)
+
+ // calling PostScan without id
+ err := m.PostScan()
+ if err == nil {
+ t.Fatal("Expected PostScan id error, got nil")
+ }
+
+ m.Id = "test_id"
+ m.Set("test", "abc")
+
+ if v := m.IsNew(); v != true {
+ t.Fatalf("[before PostScan] Expected IsNew %v, got %v", true, v)
+ }
+ if v := m.Original().PK(); v != "" {
+ t.Fatalf("[before PostScan] Expected the original PK to be empty string, got %v", v)
+ }
+ if v := m.Original().Get("test"); v != "" {
+ t.Fatalf("[before PostScan] Expected the original 'test' field to be empty string, got %v", v)
+ }
+
+ err = m.PostScan()
+ if err != nil {
+ t.Fatalf("Expected PostScan nil error, got %v", err)
+ }
+
+ if v := m.IsNew(); v != false {
+ t.Fatalf("[after PostScan] Expected IsNew %v, got %v", false, v)
+ }
+ if v := m.Original().PK(); v != "test_id" {
+ t.Fatalf("[after PostScan] Expected the original PK to be %q, got %v", "test_id", v)
+ }
+ if v := m.Original().Get("test"); v != "abc" {
+ t.Fatalf("[after PostScan] Expected the original 'test' field to be %q, got %v", "abc", v)
+ }
+}
+
+func TestRecordHookTags(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+
+ m := core.NewRecord(collection)
+
+ tags := m.HookTags()
+
+ expectedTags := []string{collection.Id, collection.Name}
+
+ if len(tags) != len(expectedTags) {
+ t.Fatalf("Expected tags\n%v\ngot\n%v", expectedTags, tags)
+ }
+
+ for _, tag := range tags {
+ if !slices.Contains(expectedTags, tag) {
+ t.Errorf("Missing expected tag %q", tag)
+ }
+ }
+}
+
+func TestRecordBaseFilesPath(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+
+ m := core.NewRecord(collection)
+ m.Id = "abc"
+
+ result := m.BaseFilesPath()
+ expected := collection.BaseFilesPath() + "/" + m.Id
+ if result != expected {
+ t.Fatalf("Expected %q, got %q", expected, result)
+ }
+}
+
+func TestRecordOriginal(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ record, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+ originalId := record.Id
+ originalName := record.GetString("name")
+
+ extraFieldsCheck := []string{`"email":`, `"custom":`}
+
+ // change the fields
+ record.Id = "changed"
+ record.Set("name", "name_new")
+ record.Set("custom", "test_custom")
+ record.SetExpand(map[string]any{"test": 123})
+ record.IgnoreEmailVisibility(true)
+ record.IgnoreUnchangedFields(true)
+ record.WithCustomData(true)
+ record.Unhide(record.Collection().Fields.FieldNames()...)
+
+ // ensure that the email visibility and the custom data toggles are active
+ raw, err := record.MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+ for _, f := range extraFieldsCheck {
+ if !strings.Contains(rawStr, f) {
+ t.Fatalf("Expected %s in\n%s", f, rawStr)
+ }
+ }
+
+ // check changes
+ if v := record.GetString("name"); v != "name_new" {
+ t.Fatalf("Expected name to be %q, got %q", "name_new", v)
+ }
+ if v := record.GetString("custom"); v != "test_custom" {
+ t.Fatalf("Expected custom to be %q, got %q", "test_custom", v)
+ }
+
+ // check original
+ if v := record.Original().PK(); v != originalId {
+ t.Fatalf("Expected the original PK to be %q, got %q", originalId, v)
+ }
+ if v := record.Original().Id; v != originalId {
+ t.Fatalf("Expected the original id to be %q, got %q", originalId, v)
+ }
+ if v := record.Original().GetString("name"); v != originalName {
+ t.Fatalf("Expected the original name to be %q, got %q", originalName, v)
+ }
+ if v := record.Original().GetString("custom"); v != "" {
+ t.Fatalf("Expected the original custom to be %q, got %q", "", v)
+ }
+ if v := record.Original().Expand(); len(v) != 0 {
+ t.Fatalf("Expected empty original expand, got\n%v", v)
+ }
+
+ // ensure that the email visibility and the custom flag toggles weren't copied
+ originalRaw, err := record.Original().MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+ originalRawStr := string(originalRaw)
+ for _, f := range extraFieldsCheck {
+ if strings.Contains(originalRawStr, f) {
+ t.Fatalf("Didn't expected %s in original\n%s", f, originalRawStr)
+ }
+ }
+
+ // loading new data shouldn't affect the original state
+ record.Load(map[string]any{"name": "name_new2"})
+
+ if v := record.GetString("name"); v != "name_new2" {
+ t.Fatalf("Expected name to be %q, got %q", "name_new2", v)
+ }
+
+ if v := record.Original().GetString("name"); v != originalName {
+ t.Fatalf("Expected the original name still to be %q, got %q", originalName, v)
+ }
+}
+
+func TestRecordFresh(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ record, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+ originalId := record.Id
+
+ extraFieldsCheck := []string{`"email":`, `"custom":`}
+
+ // change the fields
+ record.Id = "changed"
+ record.Set("name", "name_new")
+ record.Set("custom", "test_custom")
+ record.SetExpand(map[string]any{"test": 123})
+ record.IgnoreEmailVisibility(true)
+ record.IgnoreUnchangedFields(true)
+ record.WithCustomData(true)
+ record.Unhide(record.Collection().Fields.FieldNames()...)
+
+ // ensure that the email visibility and the custom data toggles are active
+ raw, err := record.MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+ for _, f := range extraFieldsCheck {
+ if !strings.Contains(rawStr, f) {
+ t.Fatalf("Expected %s in\n%s", f, rawStr)
+ }
+ }
+
+ // check changes
+ if v := record.GetString("name"); v != "name_new" {
+ t.Fatalf("Expected name to be %q, got %q", "name_new", v)
+ }
+ if v := record.GetString("custom"); v != "test_custom" {
+ t.Fatalf("Expected custom to be %q, got %q", "test_custom", v)
+ }
+
+ // check fresh
+ if v := record.Fresh().LastSavedPK(); v != originalId {
+ t.Fatalf("Expected the fresh LastSavedPK to be %q, got %q", originalId, v)
+ }
+ if v := record.Fresh().PK(); v != record.Id {
+ t.Fatalf("Expected the fresh PK to be %q, got %q", record.Id, v)
+ }
+ if v := record.Fresh().Id; v != record.Id {
+ t.Fatalf("Expected the fresh id to be %q, got %q", record.Id, v)
+ }
+ if v := record.Fresh().GetString("name"); v != record.GetString("name") {
+ t.Fatalf("Expected the fresh name to be %q, got %q", record.GetString("name"), v)
+ }
+ if v := record.Fresh().GetString("custom"); v != "" {
+ t.Fatalf("Expected the fresh custom to be %q, got %q", "", v)
+ }
+ if v := record.Fresh().Expand(); len(v) != 0 {
+ t.Fatalf("Expected empty fresh expand, got\n%v", v)
+ }
+
+ // ensure that the email visibility and the custom flag toggles weren't copied
+ freshRaw, err := record.Fresh().MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+ freshRawStr := string(freshRaw)
+ for _, f := range extraFieldsCheck {
+ if strings.Contains(freshRawStr, f) {
+ t.Fatalf("Didn't expected %s in fresh\n%s", f, freshRawStr)
+ }
+ }
+}
+
+func TestRecordClone(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ record, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+ originalId := record.Id
+
+ extraFieldsCheck := []string{`"email":`, `"custom":`}
+
+ // change the fields
+ record.Id = "changed"
+ record.Set("name", "name_new")
+ record.Set("custom", "test_custom")
+ record.SetExpand(map[string]any{"test": 123})
+ record.IgnoreEmailVisibility(true)
+ record.WithCustomData(true)
+ record.Unhide(record.Collection().Fields.FieldNames()...)
+
+ // ensure that the email visibility and the custom data toggles are active
+ raw, err := record.MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+ for _, f := range extraFieldsCheck {
+ if !strings.Contains(rawStr, f) {
+ t.Fatalf("Expected %s in\n%s", f, rawStr)
+ }
+ }
+
+ // check changes
+ if v := record.GetString("name"); v != "name_new" {
+ t.Fatalf("Expected name to be %q, got %q", "name_new", v)
+ }
+ if v := record.GetString("custom"); v != "test_custom" {
+ t.Fatalf("Expected custom to be %q, got %q", "test_custom", v)
+ }
+
+ // check clone
+ if v := record.Clone().LastSavedPK(); v != originalId {
+ t.Fatalf("Expected the clone LastSavedPK to be %q, got %q", originalId, v)
+ }
+ if v := record.Clone().PK(); v != record.Id {
+ t.Fatalf("Expected the clone PK to be %q, got %q", record.Id, v)
+ }
+ if v := record.Clone().Id; v != record.Id {
+ t.Fatalf("Expected the clone id to be %q, got %q", record.Id, v)
+ }
+ if v := record.Clone().GetString("name"); v != record.GetString("name") {
+ t.Fatalf("Expected the clone name to be %q, got %q", record.GetString("name"), v)
+ }
+ if v := record.Clone().GetString("custom"); v != "test_custom" {
+ t.Fatalf("Expected the clone custom to be %q, got %q", "test_custom", v)
+ }
+ if _, ok := record.Clone().Expand()["test"]; !ok {
+ t.Fatalf("Expected non-empty clone expand")
+ }
+
+ // ensure that the email visibility and the custom data toggles state were copied
+ cloneRaw, err := record.Clone().MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+ cloneRawStr := string(cloneRaw)
+ for _, f := range extraFieldsCheck {
+ if !strings.Contains(cloneRawStr, f) {
+ t.Fatalf("Expected %s in clone\n%s", f, cloneRawStr)
+ }
+ }
+}
+
+// TestRecordExpand verifies that Expand defaults to an initialized empty map,
+// that SetExpand overwrites any previous expand data and that the stored
+// expand is shallow-copied (later mutations of the input map are not visible).
+func TestRecordExpand(t *testing.T) {
+ t.Parallel()
+
+ record := core.NewRecord(core.NewBaseCollection("test"))
+
+ expand := record.Expand()
+ if expand == nil || len(expand) != 0 {
+ t.Fatalf("Expected empty map expand, got %v", expand)
+ }
+
+ data1 := map[string]any{"a": 123, "b": 456}
+ data2 := map[string]any{"c": 123}
+ record.SetExpand(data1)
+ record.SetExpand(data2) // should overwrite the previous call
+
+ // modify the expand map to check for shallow copy
+ data2["d"] = 456
+
+ expand = record.Expand()
+ if len(expand) != 1 {
+ // the "d" key must not leak into the stored expand
+ t.Fatalf("Expected expand with exactly 1 key, got %v", expand)
+ }
+ if v := expand["c"]; v != 123 {
+ t.Fatalf("Expected to find expand.c %v, got %v", 123, v)
+ }
+}
+
+func TestRecordMergeExpand(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+ m := core.NewRecord(collection)
+ m.Id = "m"
+
+ // a
+ a := core.NewRecord(collection)
+ a.Id = "a"
+ a1 := core.NewRecord(collection)
+ a1.Id = "a1"
+ a2 := core.NewRecord(collection)
+ a2.Id = "a2"
+ a3 := core.NewRecord(collection)
+ a3.Id = "a3"
+ a31 := core.NewRecord(collection)
+ a31.Id = "a31"
+ a32 := core.NewRecord(collection)
+ a32.Id = "a32"
+ a.SetExpand(map[string]any{
+ "a1": a1,
+ "a23": []*core.Record{a2, a3},
+ })
+ a3.SetExpand(map[string]any{
+ "a31": a31,
+ "a32": []*core.Record{a32},
+ })
+
+ // b
+ b := core.NewRecord(collection)
+ b.Id = "b"
+ b1 := core.NewRecord(collection)
+ b1.Id = "b1"
+ b.SetExpand(map[string]any{
+ "b1": b1,
+ })
+
+ // c
+ c := core.NewRecord(collection)
+ c.Id = "c"
+
+ // load initial expand
+ m.SetExpand(map[string]any{
+ "a": a,
+ "b": b,
+ "c": []*core.Record{c},
+ })
+
+ // a (new)
+ aNew := core.NewRecord(collection)
+ aNew.Id = a.Id
+ a3New := core.NewRecord(collection)
+ a3New.Id = a3.Id
+ a32New := core.NewRecord(collection)
+ a32New.Id = "a32New"
+ a33New := core.NewRecord(collection)
+ a33New.Id = "a33New"
+ a3New.SetExpand(map[string]any{
+ "a32": []*core.Record{a32New},
+ "a33New": a33New,
+ })
+ aNew.SetExpand(map[string]any{
+ "a23": []*core.Record{a2, a3New},
+ })
+
+ // b (new)
+ bNew := core.NewRecord(collection)
+ bNew.Id = "bNew"
+ dNew := core.NewRecord(collection)
+ dNew.Id = "dNew"
+
+ // merge expands
+ m.MergeExpand(map[string]any{
+ "a": aNew,
+ "b": []*core.Record{bNew},
+ "dNew": dNew,
+ })
+
+ result := m.Expand()
+
+ raw, err := json.Marshal(result)
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+
+ expected := `{"a":{"collectionId":"_pbc_3632233996","collectionName":"test","expand":{"a1":{"collectionId":"_pbc_3632233996","collectionName":"test","id":"a1"},"a23":[{"collectionId":"_pbc_3632233996","collectionName":"test","id":"a2"},{"collectionId":"_pbc_3632233996","collectionName":"test","expand":{"a31":{"collectionId":"_pbc_3632233996","collectionName":"test","id":"a31"},"a32":[{"collectionId":"_pbc_3632233996","collectionName":"test","id":"a32"},{"collectionId":"_pbc_3632233996","collectionName":"test","id":"a32New"}],"a33New":{"collectionId":"_pbc_3632233996","collectionName":"test","id":"a33New"}},"id":"a3"}]},"id":"a"},"b":[{"collectionId":"_pbc_3632233996","collectionName":"test","expand":{"b1":{"collectionId":"_pbc_3632233996","collectionName":"test","id":"b1"}},"id":"b"},{"collectionId":"_pbc_3632233996","collectionName":"test","id":"bNew"}],"c":[{"collectionId":"_pbc_3632233996","collectionName":"test","id":"c"}],"dNew":{"collectionId":"_pbc_3632233996","collectionName":"test","id":"dNew"}}`
+
+ if expected != rawStr {
+ t.Fatalf("Expected \n%v, \ngot \n%v", expected, rawStr)
+ }
+}
+
+func TestRecordMergeExpandNilCheck(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+
+ scenarios := []struct {
+ name string
+ expand map[string]any
+ expected string
+ }{
+ {
+ "nil expand",
+ nil,
+ `{"collectionId":"_pbc_3632233996","collectionName":"test","id":""}`,
+ },
+ {
+ "empty expand",
+ map[string]any{},
+ `{"collectionId":"_pbc_3632233996","collectionName":"test","id":""}`,
+ },
+ {
+ "non-empty expand",
+ map[string]any{"test": core.NewRecord(collection)},
+ `{"collectionId":"_pbc_3632233996","collectionName":"test","expand":{"test":{"collectionId":"_pbc_3632233996","collectionName":"test","id":""}},"id":""}`,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ m := core.NewRecord(collection)
+ m.MergeExpand(s.expand)
+
+ raw, err := json.Marshal(m)
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+
+ if rawStr != s.expected {
+ t.Fatalf("Expected \n%v, \ngot \n%v", s.expected, rawStr)
+ }
+ })
+ }
+}
+
+func TestRecordExpandedOne(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+
+ main := core.NewRecord(collection)
+
+ single := core.NewRecord(collection)
+ single.Id = "single"
+
+ multiple1 := core.NewRecord(collection)
+ multiple1.Id = "multiple1"
+
+ multiple2 := core.NewRecord(collection)
+ multiple2.Id = "multiple2"
+
+ main.SetExpand(map[string]any{
+ "single": single,
+ "multiple": []*core.Record{multiple1, multiple2},
+ })
+
+ if v := main.ExpandedOne("missing"); v != nil {
+ t.Fatalf("Expected nil, got %v", v)
+ }
+
+ if v := main.ExpandedOne("single"); v == nil || v.Id != "single" {
+ t.Fatalf("Expected record with id %q, got %v", "single", v)
+ }
+
+ if v := main.ExpandedOne("multiple"); v == nil || v.Id != "multiple1" {
+ t.Fatalf("Expected record with id %q, got %v", "multiple1", v)
+ }
+}
+
+func TestRecordExpandedAll(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+
+ main := core.NewRecord(collection)
+
+ single := core.NewRecord(collection)
+ single.Id = "single"
+
+ multiple1 := core.NewRecord(collection)
+ multiple1.Id = "multiple1"
+
+ multiple2 := core.NewRecord(collection)
+ multiple2.Id = "multiple2"
+
+ main.SetExpand(map[string]any{
+ "single": single,
+ "multiple": []*core.Record{multiple1, multiple2},
+ })
+
+ if v := main.ExpandedAll("missing"); v != nil {
+ t.Fatalf("Expected nil, got %v", v)
+ }
+
+ if v := main.ExpandedAll("single"); len(v) != 1 || v[0].Id != "single" {
+ t.Fatalf("Expected [single] slice, got %v", v)
+ }
+
+ if v := main.ExpandedAll("multiple"); len(v) != 2 || v[0].Id != "multiple1" || v[1].Id != "multiple2" {
+ t.Fatalf("Expected [multiple1, multiple2] slice, got %v", v)
+ }
+}
+
+func TestRecordFieldsData(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewAuthCollection("test")
+ collection.Fields.Add(&core.TextField{Name: "field1"})
+ collection.Fields.Add(&core.TextField{Name: "field2"})
+
+ m := core.NewRecord(collection)
+ m.Id = "test_id" // direct id assignment
+ m.Set("email", "test@example.com")
+ m.Set("password", "123") // hidden fields should be also returned
+ m.Set("tokenKey", "789")
+ m.Set("field1", 123)
+ m.Set("field2", 456)
+ m.Set("unknown", 789)
+
+ raw, err := json.Marshal(m.FieldsData())
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ expected := `{"email":"test@example.com","emailVisibility":false,"field1":"123","field2":"456","id":"test_id","password":"123","tokenKey":"789","verified":false}`
+
+ if v := string(raw); v != expected {
+ t.Fatalf("Expected\n%v\ngot\n%v", expected, v)
+ }
+}
+
+func TestRecordCustomData(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewAuthCollection("test")
+ collection.Fields.Add(&core.TextField{Name: "field1"})
+ collection.Fields.Add(&core.TextField{Name: "field2"})
+
+ m := core.NewRecord(collection)
+ m.Id = "test_id" // direct id assignment
+ m.Set("email", "test@example.com")
+ m.Set("password", "123") // hidden fields should be also returned
+ m.Set("tokenKey", "789")
+ m.Set("field1", 123)
+ m.Set("field2", 456)
+ m.Set("unknown", 789)
+
+ raw, err := json.Marshal(m.CustomData())
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ expected := `{"unknown":789}`
+
+ if v := string(raw); v != expected {
+ t.Fatalf("Expected\n%v\ngot\n%v", expected, v)
+ }
+}
+
+func TestRecordSetGet(t *testing.T) {
+ t.Parallel()
+
+ f1 := &mockField{}
+ f1.Name = "mock1"
+
+ f2 := &mockField{}
+ f2.Name = "mock2"
+
+ f3 := &mockField{}
+ f3.Name = "mock3"
+
+ collection := core.NewBaseCollection("test")
+ collection.Fields.Add(&core.TextField{Name: "text1"})
+ collection.Fields.Add(&core.TextField{Name: "text2"})
+ collection.Fields.Add(f1)
+ collection.Fields.Add(f2)
+ collection.Fields.Add(f3)
+
+ record := core.NewRecord(collection)
+ record.Set("text1", 123) // should be converted to string using the ScanValue fallback
+ record.SetRaw("text2", 456)
+ record.Set("mock1", 1) // should be converted to string using the setter
+ record.SetRaw("mock2", 1)
+ record.Set("mock3:test", "abc")
+ record.Set("unknown", 789)
+
+ t.Run("GetRaw", func(t *testing.T) {
+ expected := map[string]any{
+ "text1": "123",
+ "text2": 456,
+ "mock1": "1",
+ "mock2": 1,
+ "mock3": "modifier_set",
+ "mock3:test": nil,
+ "unknown": 789,
+ }
+
+ for k, v := range expected {
+ raw := record.GetRaw(k)
+ if raw != v {
+ t.Errorf("Expected %q to be %v, got %v", k, v, raw)
+ }
+ }
+ })
+
+ t.Run("Get", func(t *testing.T) {
+ expected := map[string]any{
+ "text1": "123",
+ "text2": 456,
+ "mock1": "1",
+ "mock2": 1,
+ "mock3": "modifier_set",
+ "mock3:test": "modifier_get",
+ "unknown": 789,
+ }
+
+ for k, v := range expected {
+ get := record.Get(k)
+ if get != v {
+ t.Errorf("Expected %q to be %v, got %v", k, v, get)
+ }
+ }
+ })
+}
+
+func TestRecordLoad(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+ collection.Fields.Add(&core.TextField{Name: "text"})
+
+ record := core.NewRecord(collection)
+ record.Load(map[string]any{
+ "text": 123,
+ "custom": 456,
+ })
+
+ expected := map[string]any{
+ "text": "123",
+ "custom": 456,
+ }
+
+ for k, v := range expected {
+ get := record.Get(k)
+ if get != v {
+ t.Errorf("Expected %q to be %#v, got %#v", k, v, get)
+ }
+ }
+}
+
+func TestRecordGetBool(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ value any
+ expected bool
+ }{
+ {nil, false},
+ {"", false},
+ {0, false},
+ {1, true},
+ {[]string{"true"}, false},
+ {time.Now(), false},
+ {"test", false},
+ {"false", false},
+ {"true", true},
+ {false, false},
+ {true, true},
+ }
+
+ collection := core.NewBaseCollection("test")
+ record := core.NewRecord(collection)
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.value), func(t *testing.T) {
+ record.Set("test", s.value)
+
+ result := record.GetBool("test")
+ if result != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
+
+func TestRecordGetString(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ value any
+ expected string
+ }{
+ {nil, ""},
+ {"", ""},
+ {0, "0"},
+ {1.4, "1.4"},
+ {[]string{"true"}, ""},
+ {map[string]int{"test": 1}, ""},
+ {[]byte("abc"), "abc"},
+ {"test", "test"},
+ {false, "false"},
+ {true, "true"},
+ }
+
+ collection := core.NewBaseCollection("test")
+ record := core.NewRecord(collection)
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.value), func(t *testing.T) {
+ record.Set("test", s.value)
+
+ result := record.GetString("test")
+ if result != s.expected {
+ t.Fatalf("Expected %q, got %q", s.expected, result)
+ }
+ })
+ }
+}
+
+func TestRecordGetInt(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ value any
+ expected int
+ }{
+ {nil, 0},
+ {"", 0},
+ {[]string{"true"}, 0},
+ {map[string]int{"test": 1}, 0},
+ {time.Now(), 0},
+ {"test", 0},
+ {123, 123},
+ {2.4, 2},
+ {"123", 123},
+ {"123.5", 0},
+ {false, 0},
+ {true, 1},
+ }
+
+ collection := core.NewBaseCollection("test")
+ record := core.NewRecord(collection)
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.value), func(t *testing.T) {
+ record.Set("test", s.value)
+
+ result := record.GetInt("test")
+ if result != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
+
+// TestRecordGetFloat verifies Record.GetFloat's casting behavior: ints, floats
+// and numeric strings (including fractional ones) convert to float64, while
+// everything else falls back to 0.
+func TestRecordGetFloat(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ value any
+ expected float64
+ }{
+ {nil, 0},
+ {"", 0},
+ {[]string{"true"}, 0},
+ {map[string]int{"test": 1}, 0},
+ {time.Now(), 0},
+ {"test", 0},
+ {123, 123},
+ {2.4, 2.4},
+ {"123", 123},
+ {"123.5", 123.5},
+ {false, 0},
+ {true, 1},
+ }
+
+ collection := core.NewBaseCollection("test")
+ record := core.NewRecord(collection)
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.value), func(t *testing.T) {
+ record.Set("test", s.value)
+
+ result := record.GetFloat("test")
+ if result != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
+
+// TestRecordGetDateTime verifies Record.GetDateTime parses unix timestamps,
+// datetime strings and time.Time values, and returns the zero time for
+// anything non-parsable.
+func TestRecordGetDateTime(t *testing.T) {
+ t.Parallel()
+
+ nowTime := time.Now()
+ testTime, _ := time.Parse(types.DefaultDateLayout, "2022-01-01 08:00:40.000Z")
+
+ scenarios := []struct {
+ value any
+ expected time.Time
+ }{
+ {nil, time.Time{}},
+ {"", time.Time{}},
+ {false, time.Time{}},
+ {true, time.Time{}},
+ {"test", time.Time{}},
+ {[]string{"true"}, time.Time{}},
+ {map[string]int{"test": 1}, time.Time{}},
+ {1641024040, testTime}, // unix seconds of the testTime instant
+ {"2022-01-01 08:00:40.000", testTime},
+ {nowTime, nowTime},
+ }
+
+ collection := core.NewBaseCollection("test")
+ record := core.NewRecord(collection)
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.value), func(t *testing.T) {
+ record.Set("test", s.value)
+
+ result := record.GetDateTime("test")
+ if !result.Time().Equal(s.expected) {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+ })
+ }
+}
+
+// TestRecordGetStringSlice verifies Record.GetStringSlice normalizes scalar
+// values to single-element slices, parses JSON array strings, stringifies
+// element types and deduplicates repeated entries.
+// Order is not asserted - only the element set and length are compared.
+func TestRecordGetStringSlice(t *testing.T) {
+ t.Parallel()
+
+ nowTime := time.Now()
+
+ scenarios := []struct {
+ value any
+ expected []string
+ }{
+ {nil, []string{}},
+ {"", []string{}},
+ {false, []string{"false"}},
+ {true, []string{"true"}},
+ {nowTime, []string{}},
+ {123, []string{"123"}},
+ {"test", []string{"test"}},
+ {map[string]int{"test": 1}, []string{}},
+ {`["test1", "test2"]`, []string{"test1", "test2"}},
+ {[]int{123, 123, 456}, []string{"123", "456"}}, // duplicates removed
+ {[]string{"test", "test", "123"}, []string{"test", "123"}},
+ }
+
+ collection := core.NewBaseCollection("test")
+ record := core.NewRecord(collection)
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.value), func(t *testing.T) {
+ record.Set("test", s.value)
+
+ result := record.GetStringSlice("test")
+
+ if len(result) != len(s.expected) {
+ t.Fatalf("Expected %d elements, got %d: %v", len(s.expected), len(result), result)
+ }
+
+ for _, v := range result {
+ if !slices.Contains(s.expected, v) {
+ t.Fatalf("Cannot find %v in %v", v, s.expected)
+ }
+ }
+ })
+ }
+}
+
+// TestRecordGetUploadedFiles verifies Record.GetUploadedFiles returns the
+// newly appended (not yet persisted) files for a file field - both via the
+// plain field key and via the explicit ":uploaded" modifier - and nil for
+// missing or non-file keys.
+func TestRecordGetUploadedFiles(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f1, err := filesystem.NewFileFromBytes([]byte("test"), "f1")
+ if err != nil {
+ t.Fatal(err)
+ }
+ f1.Name = "f1" // fix the generated name for a deterministic JSON comparison
+
+ f2, err := filesystem.NewFileFromBytes([]byte("test"), "f2")
+ if err != nil {
+ t.Fatal(err)
+ }
+ f2.Name = "f2"
+
+ record, err := app.FindRecordById("demo3", "lcl9d87w22ml6jy")
+ if err != nil {
+ t.Fatal(err)
+ }
+ record.Set("files+", []any{f1, f2}) // "+" modifier appends to the existing files
+
+ scenarios := []struct {
+ key string
+ expected string
+ }{
+ {
+ "",
+ "null",
+ },
+ {
+ "title",
+ "null",
+ },
+ {
+ "files",
+ `[{"name":"f1","originalName":"f1","size":4},{"name":"f2","originalName":"f2","size":4}]`,
+ },
+ {
+ "files:uploaded",
+ `[{"name":"f1","originalName":"f1","size":4},{"name":"f2","originalName":"f2","size":4}]`,
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.key), func(t *testing.T) {
+ v := record.GetUploadedFiles(s.key)
+
+ raw, err := json.Marshal(v)
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+
+ if rawStr != s.expected {
+ t.Fatalf("Expected\n%s\ngot\n%s", s.expected, rawStr)
+ }
+ })
+ }
+}
+
+// TestRecordUnmarshalJSONField verifies Record.UnmarshalJSONField decodes a
+// JSON field value into a destination pointer, accepting both plain Go values
+// and already JSON-encoded strings.
+// The result is compared via its re-marshaled JSON form.
+func TestRecordUnmarshalJSONField(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+ collection.Fields.Add(&core.JSONField{Name: "field"})
+
+ record := core.NewRecord(collection)
+
+ var testPointer *string
+ var testStr string
+ var testInt int
+ var testBool bool
+ var testSlice []int
+ var testMap map[string]any
+
+ scenarios := []struct {
+ value any
+ destination any
+ expectError bool
+ expectedJSON string
+ }{
+ {nil, testPointer, false, `null`},
+ {nil, testStr, false, `""`},
+ {"", testStr, false, `""`},
+ {1, testInt, false, `1`},
+ {true, testBool, false, `true`},
+ {[]int{1, 2, 3}, testSlice, false, `[1,2,3]`},
+ {map[string]any{"test": 123}, testMap, false, `{"test":123}`},
+ // json encoded values
+ {`null`, testPointer, false, `null`},
+ {`true`, testBool, false, `true`},
+ {`456`, testInt, false, `456`},
+ {`"test"`, testStr, false, `"test"`},
+ {`[4,5,6]`, testSlice, false, `[4,5,6]`},
+ {`{"test":456}`, testMap, false, `{"test":456}`},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.value), func(t *testing.T) {
+ record.Set("field", s.value)
+
+ err := record.UnmarshalJSONField("field", &s.destination)
+ hasErr := err != nil
+
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v", s.expectError, hasErr)
+ }
+
+ raw, _ := json.Marshal(s.destination)
+ if v := string(raw); v != s.expectedJSON {
+ t.Fatalf("Expected %q, got %q", s.expectedJSON, v)
+ }
+ })
+ }
+}
+
+// TestRecordFindFileFieldByFile verifies Record.FindFileFieldByFile locates
+// the file field that contains an exact stored filename (single and
+// multi-select fields) and returns nil for unknown names or non-file fields.
+func TestRecordFindFileFieldByFile(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+ collection.Fields.Add(
+ &core.TextField{Name: "field1"},
+ &core.FileField{Name: "field2", MaxSelect: 1, MaxSize: 1},
+ &core.FileField{Name: "field3", MaxSelect: 2, MaxSize: 1},
+ )
+
+ m := core.NewRecord(collection)
+ m.Set("field1", "test")
+ m.Set("field2", "test.png")
+ m.Set("field3", []string{"test1.png", "test2.png"})
+
+ scenarios := []struct {
+ filename string
+ expectField string
+ }{
+ {"", ""},
+ {"test", ""}, // matches field1's value but field1 is not a file field
+ {"test2", ""},
+ {"test.png", "field2"},
+ {"test2.png", "field3"},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.filename), func(t *testing.T) {
+ result := m.FindFileFieldByFile(s.filename)
+
+ var fieldName string
+ if result != nil {
+ fieldName = result.Name
+ }
+
+ if s.expectField != fieldName {
+ t.Fatalf("Expected field %v, got %v", s.expectField, result)
+ }
+ })
+ }
+}
+
+// TestRecordDBExport verifies Record.DBExport produces only the collection's
+// schema fields (dropping unknown keys), deduplicates multi-relation values,
+// casts invalid typed values, and additionally includes the auth system
+// fields (email, password hash, tokenKey, etc.) for auth collections.
+func TestRecordDBExport(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ f1 := &core.TextField{Name: "field1"}
+ f2 := &core.FileField{Name: "field2", MaxSelect: 1, MaxSize: 1}
+ f3 := &core.SelectField{Name: "field3", MaxSelect: 2, Values: []string{"test1", "test2", "test3"}}
+ f4 := &core.RelationField{Name: "field4", MaxSelect: 2}
+
+ colBase := core.NewBaseCollection("test_base")
+ colBase.Fields.Add(f1, f2, f3, f4)
+
+ colAuth := core.NewAuthCollection("test_auth")
+ colAuth.Fields.Add(f1, f2, f3, f4)
+
+ scenarios := []struct {
+ collection *core.Collection
+ expected string
+ }{
+ {
+ colBase,
+ `{"field1":"test","field2":"test.png","field3":["test1","test2"],"field4":["test11","test12"],"id":"test_id"}`,
+ },
+ {
+ colAuth,
+ `{"email":"test_email","emailVisibility":true,"field1":"test","field2":"test.png","field3":["test1","test2"],"field4":["test11","test12"],"id":"test_id","password":"_TEST_","tokenKey":"test_tokenKey","verified":false}`,
+ },
+ }
+
+ data := map[string]any{
+ "id": "test_id",
+ "field1": "test",
+ "field2": "test.png",
+ "field3": []string{"test1", "test2"},
+ "field4": []string{"test11", "test12", "test11"}, // strip duplicate,
+ "unknown": "test_unknown",
+ "password": "test_passwordHash",
+ "username": "test_username",
+ "emailVisibility": true,
+ "email": "test_email",
+ "verified": "invalid", // should be casted
+ "tokenKey": "test_tokenKey",
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s_%s", i, s.collection.Type, s.collection.Name), func(t *testing.T) {
+ record := core.NewRecord(s.collection)
+
+ record.Load(data)
+
+ result, err := record.DBExport(app)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ raw, err := json.Marshal(result)
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+
+ // replace _TEST_ placeholder with .+ regex pattern
+ // (the password hash value is nondeterministic)
+ pattern := regexp.MustCompile(strings.ReplaceAll(
+ "^"+regexp.QuoteMeta(s.expected)+"$",
+ "_TEST_",
+ `.+`,
+ ))
+
+ if !pattern.MatchString(rawStr) {
+ t.Fatalf("Expected\n%v\ngot\n%v", s.expected, rawStr)
+ }
+ })
+ }
+}
+
+// TestRecordIgnoreUnchangedFields verifies that enabling
+// Record.IgnoreUnchangedFields reduces the DBExport output of an existing
+// record to the id plus only the modified fields, while having no effect on
+// new records (which must always export every column).
+func TestRecordIgnoreUnchangedFields(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ col, err := app.FindCollectionByNameOrId("demo3")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ new := core.NewRecord(col)
+
+ existing, err := app.FindRecordById(col, "mk5fmymtx4wsprk")
+ if err != nil {
+ t.Fatal(err)
+ }
+ existing.Set("title", "test_new")
+ existing.Set("files", existing.Get("files")) // no change
+
+ scenarios := []struct {
+ ignoreUnchangedFields bool
+ record *core.Record
+ expected []string
+ }{
+ {
+ false,
+ new,
+ []string{"id", "created", "updated", "title", "files"},
+ },
+ {
+ true,
+ new,
+ []string{"id", "created", "updated", "title", "files"},
+ },
+ {
+ false,
+ existing,
+ []string{"id", "created", "updated", "title", "files"},
+ },
+ {
+ true,
+ existing,
+ []string{"id", "title"}, // only the actually changed field (+id)
+ },
+ }
+
+ for i, s := range scenarios {
+ action := "create"
+ if !s.record.IsNew() {
+ action = "update"
+ }
+
+ t.Run(fmt.Sprintf("%d_%s_%v", i, action, s.ignoreUnchangedFields), func(t *testing.T) {
+ s.record.IgnoreUnchangedFields(s.ignoreUnchangedFields)
+
+ result, err := s.record.DBExport(app)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if len(result) != len(s.expected) {
+ t.Fatalf("Expected %d keys, got %d:\n%v", len(s.expected), len(result), result)
+ }
+
+ for _, key := range s.expected {
+ if _, ok := result[key]; !ok {
+ t.Fatalf("Missing expected key %q in\n%v", key, result)
+ }
+ }
+ })
+ }
+}
+
+// TestRecordPublicExportAndMarshalJSON verifies the public serialization of a
+// record for base and auth collections under every visibility knob:
+// IgnoreEmailVisibility, WithCustomData, and explicit Hide/Unhide overrides.
+// It also asserts that MarshalJSON and PublicExport produce identical output.
+func TestRecordPublicExportAndMarshalJSON(t *testing.T) {
+ t.Parallel()
+
+ f1 := &core.TextField{Name: "field1"}
+ f2 := &core.FileField{Name: "field2", MaxSelect: 1, MaxSize: 1}
+ f3 := &core.SelectField{Name: "field3", MaxSelect: 2, Values: []string{"test1", "test2", "test3"}}
+ f4 := &core.TextField{Name: "field4", Hidden: true}
+ f5 := &core.TextField{Name: "field5", Hidden: true}
+
+ colBase := core.NewBaseCollection("test_base")
+ colBase.Fields.Add(f1, f2, f3, f4, f5)
+
+ colAuth := core.NewAuthCollection("test_auth")
+ colAuth.Fields.Add(f1, f2, f3, f4, f5)
+
+ scenarios := []struct {
+ name string
+ collection *core.Collection
+ ignoreEmailVisibility bool
+ withCustomData bool
+ hideFields []string
+ unhideFields []string
+ expectedJSON string
+ }{
+ // base
+ {
+ "[base] no extra flags",
+ colBase,
+ false,
+ false,
+ nil,
+ nil,
+ `{"collectionId":"_pbc_3318600878","collectionName":"test_base","expand":{"test":123},"field1":"field_1","field2":"field_2.png","field3":["test1","test2"],"id":"test_id"}`,
+ },
+ {
+ "[base] with email visibility",
+ colBase,
+ true, // should have no effect
+ false,
+ nil,
+ nil,
+ `{"collectionId":"_pbc_3318600878","collectionName":"test_base","expand":{"test":123},"field1":"field_1","field2":"field_2.png","field3":["test1","test2"],"id":"test_id"}`,
+ },
+ {
+ "[base] with custom data",
+ colBase,
+ true, // should have no effect
+ true,
+ nil,
+ nil,
+ `{"collectionId":"_pbc_3318600878","collectionName":"test_base","email":"test_email","emailVisibility":"test_invalid","expand":{"test":123},"field1":"field_1","field2":"field_2.png","field3":["test1","test2"],"id":"test_id","password":"test_passwordHash","tokenKey":"test_tokenKey","unknown":"test_unknown","verified":true}`,
+ },
+ {
+ "[base] with explicit hide and unhide fields",
+ colBase,
+ false,
+ true,
+ []string{"field3", "field1", "expand", "collectionId", "collectionName", "email", "tokenKey", "unknown"},
+ []string{"field4", "@pbInternalAbc"},
+ `{"emailVisibility":"test_invalid","field2":"field_2.png","field4":"field_4","id":"test_id","password":"test_passwordHash","verified":true}`,
+ },
+ {
+ "[base] trying to unhide custom fields without explicit WithCustomData",
+ colBase,
+ false,
+ true,
+ nil,
+ []string{"field5", "@pbInternalAbc", "email", "tokenKey", "unknown"},
+ `{"collectionId":"_pbc_3318600878","collectionName":"test_base","email":"test_email","emailVisibility":"test_invalid","expand":{"test":123},"field1":"field_1","field2":"field_2.png","field3":["test1","test2"],"field5":"field_5","id":"test_id","password":"test_passwordHash","tokenKey":"test_tokenKey","unknown":"test_unknown","verified":true}`,
+ },
+
+ // auth
+ {
+ "[auth] no extra flags",
+ colAuth,
+ false,
+ false,
+ nil,
+ nil,
+ `{"collectionId":"_pbc_4255619734","collectionName":"test_auth","emailVisibility":false,"expand":{"test":123},"field1":"field_1","field2":"field_2.png","field3":["test1","test2"],"id":"test_id","verified":true}`,
+ },
+ {
+ "[auth] with email visibility",
+ colAuth,
+ true,
+ false,
+ nil,
+ nil,
+ `{"collectionId":"_pbc_4255619734","collectionName":"test_auth","email":"test_email","emailVisibility":false,"expand":{"test":123},"field1":"field_1","field2":"field_2.png","field3":["test1","test2"],"id":"test_id","verified":true}`,
+ },
+ {
+ "[auth] with custom data",
+ colAuth,
+ false,
+ true,
+ nil,
+ nil,
+ `{"collectionId":"_pbc_4255619734","collectionName":"test_auth","emailVisibility":false,"expand":{"test":123},"field1":"field_1","field2":"field_2.png","field3":["test1","test2"],"id":"test_id","unknown":"test_unknown","verified":true}`,
+ },
+ {
+ "[auth] with explicit hide and unhide fields",
+ colAuth,
+ true,
+ true,
+ []string{"field3", "field1", "expand", "collectionId", "collectionName", "email", "unknown"},
+ []string{"field4", "@pbInternalAbc"},
+ `{"emailVisibility":false,"field2":"field_2.png","field4":"field_4","id":"test_id","verified":true}`,
+ },
+ {
+ "[auth] trying to unhide custom fields without explicit WithCustomData",
+ colAuth,
+ false,
+ true,
+ nil,
+ []string{"field5", "@pbInternalAbc", "tokenKey", "unknown", "email"}, // emailVisibility:false has higher priority
+ `{"collectionId":"_pbc_4255619734","collectionName":"test_auth","emailVisibility":false,"expand":{"test":123},"field1":"field_1","field2":"field_2.png","field3":["test1","test2"],"field5":"field_5","id":"test_id","unknown":"test_unknown","verified":true}`,
+ },
+ }
+
+ data := map[string]any{
+ "id": "test_id",
+ "field1": "field_1",
+ "field2": "field_2.png",
+ "field3": []string{"test1", "test2"},
+ "field4": "field_4",
+ "field5": "field_5",
+ "expand": map[string]any{"test": 123},
+ "collectionId": "m_id", // should be always ignored
+ "collectionName": "m_name", // should be always ignored
+ "unknown": "test_unknown",
+ "password": "test_passwordHash",
+ "emailVisibility": "test_invalid", // for auth collections should be casted to bool
+ "email": "test_email",
+ "verified": true,
+ "tokenKey": "test_tokenKey",
+ "@pbInternalAbc": "test_custom_inter", // always hidden
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ m := core.NewRecord(s.collection)
+
+ m.Load(data)
+ m.IgnoreEmailVisibility(s.ignoreEmailVisibility)
+ m.WithCustomData(s.withCustomData)
+ m.Unhide(s.unhideFields...)
+ m.Hide(s.hideFields...)
+
+ exportResult, err := json.Marshal(m.PublicExport())
+ if err != nil {
+ t.Fatal(err)
+ }
+ exportResultStr := string(exportResult)
+
+ // MarshalJSON and PublicExport should return the same
+ marshalResult, err := m.MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+ marshalResultStr := string(marshalResult)
+
+ if exportResultStr != marshalResultStr {
+ t.Fatalf("Expected the PublicExport to be the same as MarshalJSON, but got \n%v \nvs \n%v", exportResultStr, marshalResultStr)
+ }
+
+ if exportResultStr != s.expectedJSON {
+ t.Fatalf("Expected json \n%v \ngot \n%v", s.expectedJSON, exportResultStr)
+ }
+ })
+ }
+}
+
+// TestRecordUnmarshalJSON verifies Record.UnmarshalJSON loads raw JSON data,
+// casting values of known schema fields (123 -> "123" for a text field) while
+// keeping unknown keys as their decoded JSON types.
+func TestRecordUnmarshalJSON(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+ collection.Fields.Add(&core.TextField{Name: "text"})
+
+ record := core.NewRecord(collection)
+
+ data := map[string]any{
+ "text": 123,
+ "custom": 456.789,
+ }
+ rawData, err := json.Marshal(data)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ err = record.UnmarshalJSON(rawData)
+ if err != nil {
+ t.Fatalf("Failed to unmarshal: %v", err)
+ }
+
+ expected := map[string]any{
+ "text": "123",
+ "custom": 456.789,
+ }
+
+ for k, v := range expected {
+ get := record.Get(k)
+ if get != v {
+ t.Errorf("Expected %q to be %#v, got %#v", k, v, get)
+ }
+ }
+}
+
+// TestRecordReplaceModifiers verifies Record.ReplaceModifiers resolves field
+// modifier keys (a custom ":test" setter and the numeric "+" increment) into
+// their final values without mutating the record itself.
+func TestRecordReplaceModifiers(t *testing.T) {
+ t.Parallel()
+
+ collection := core.NewBaseCollection("test")
+ collection.Fields.Add(
+ &mockField{core.TextField{Name: "mock"}}, // provides the ":test" modifier
+ &core.NumberField{Name: "number"},
+ )
+
+ originalData := map[string]any{
+ "mock": "a",
+ "number": 2.1,
+ }
+
+ record := core.NewRecord(collection)
+ for k, v := range originalData {
+ record.Set(k, v)
+ }
+
+ result := record.ReplaceModifiers(map[string]any{
+ "mock:test": "b",
+ "number+": 3,
+ })
+
+ expected := map[string]any{
+ "mock": "modifier_set", // value produced by mockField's ":test" setter
+ "number": 5.1, // 2.1 + 3
+ }
+
+ if len(result) != len(expected) {
+ t.Fatalf("Expected\n%v\ngot\n%v", expected, result)
+ }
+
+ for k, v := range expected {
+ if result[k] != v {
+ t.Errorf("Expected %q %#v, got %#v", k, v, result[k])
+ }
+ }
+
+ // ensure that the original data hasn't changed
+ for k, v := range originalData {
+ rv := record.Get(k)
+ if rv != v {
+ t.Errorf("Expected original %q %#v, got %#v", k, v, rv)
+ }
+ }
+}
+
+// TestRecordValidate verifies app.Validate on a record triggers both the id
+// format validation and the individual field validators (text min length,
+// required number), and passes once all constraints are satisfied.
+func TestRecordValidate(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection := core.NewBaseCollection("test")
+
+ collection.Fields.Add(
+ // dummy fields to ensure that its validators are triggered
+ &core.TextField{Name: "f1", Min: 3},
+ &core.NumberField{Name: "f2", Required: true},
+ )
+
+ record := core.NewRecord(collection)
+ record.Id = "!invalid"
+
+ t.Run("no data set", func(t *testing.T) {
+ tests.TestValidationErrors(t, app.Validate(record), []string{"id", "f2"})
+ })
+
+ t.Run("failing the text field min requirement", func(t *testing.T) {
+ record.Set("f1", "a")
+ tests.TestValidationErrors(t, app.Validate(record), []string{"id", "f1", "f2"})
+ })
+
+ t.Run("satisfying the fields validations", func(t *testing.T) {
+ record.Id = strings.Repeat("a", 15)
+ record.Set("f1", "abc")
+ record.Set("f2", 1)
+ tests.TestValidationErrors(t, app.Validate(record), nil)
+ })
+}
+
+// TestRecordSave verifies app.Save for create and update of base, auth and
+// view collection records (view records are read-only and must error), that
+// validators are triggered, and that a successfully saved record serializes
+// identically to its refetched copy.
+func TestRecordSave(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ record func(app core.App) (*core.Record, error)
+ expectError bool
+ }{
+ // trigger validators
+ {
+ name: "create - trigger validators",
+ record: func(app core.App) (*core.Record, error) {
+ c, _ := app.FindCollectionByNameOrId("demo2")
+ record := core.NewRecord(c)
+ return record, nil
+ },
+ expectError: true,
+ },
+ {
+ name: "update - trigger validators",
+ record: func(app core.App) (*core.Record, error) {
+ record, _ := app.FindFirstRecordByData("demo2", "title", "test1")
+ record.Set("title", "")
+ return record, nil
+ },
+ expectError: true,
+ },
+
+ // create
+ {
+ name: "create base record",
+ record: func(app core.App) (*core.Record, error) {
+ c, _ := app.FindCollectionByNameOrId("demo2")
+ record := core.NewRecord(c)
+ record.Set("title", "new_test")
+ return record, nil
+ },
+ expectError: false,
+ },
+ {
+ name: "create auth record",
+ record: func(app core.App) (*core.Record, error) {
+ c, _ := app.FindCollectionByNameOrId("nologin")
+ record := core.NewRecord(c)
+ record.Set("email", "test_new@example.com")
+ record.Set("password", "1234567890")
+ return record, nil
+ },
+ expectError: false,
+ },
+ {
+ name: "create view record",
+ record: func(app core.App) (*core.Record, error) {
+ c, _ := app.FindCollectionByNameOrId("view2")
+ record := core.NewRecord(c)
+ record.Set("state", true)
+ return record, nil
+ },
+ expectError: true, // view records are read-only
+ },
+
+ // update
+ {
+ name: "update base record",
+ record: func(app core.App) (*core.Record, error) {
+ record, _ := app.FindFirstRecordByData("demo2", "title", "test1")
+ record.Set("title", "test_new")
+ return record, nil
+ },
+ expectError: false,
+ },
+ {
+ name: "update auth record",
+ record: func(app core.App) (*core.Record, error) {
+ record, _ := app.FindAuthRecordByEmail("nologin", "test@example.com")
+ record.Set("name", "test_new")
+ record.Set("email", "test_new@example.com")
+ return record, nil
+ },
+ expectError: false,
+ },
+ {
+ name: "update view record",
+ record: func(app core.App) (*core.Record, error) {
+ record, _ := app.FindFirstRecordByData("view2", "state", true)
+ record.Set("state", false)
+ return record, nil
+ },
+ expectError: true, // view records are read-only
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ record, err := s.record(app)
+ if err != nil {
+ t.Fatalf("Failed to retrieve test record: %v", err)
+ }
+
+ saveErr := app.Save(record)
+
+ hasErr := saveErr != nil
+ if hasErr != s.expectError {
+ // note: the expected value is printed first, matching the
+ // message format of the other tests in this file
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, saveErr)
+ }
+
+ if hasErr {
+ return
+ }
+
+ // the record should always have an id after successful Save
+ if record.Id == "" {
+ t.Fatal("Expected record id to be set")
+ }
+
+ if record.IsNew() {
+ t.Fatal("Expected the record to be marked as not new")
+ }
+
+ // refetch and compare the serialization
+ refreshed, err := app.FindRecordById(record.Collection(), record.Id)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ rawRefreshed, err := refreshed.MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ raw, err := record.MarshalJSON()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if !bytes.Equal(raw, rawRefreshed) {
+ t.Fatalf("Expected the refreshed record to be the same as the saved one, got\n%s\nVS\n%s", raw, rawRefreshed)
+ }
+ })
+ }
+}
+
+// TestRecordSaveIdFromOtherCollection verifies that reusing an id that exists
+// in a different collection is allowed for base records but rejected for auth
+// records (their ids must be globally unique), and that a fresh unique id
+// then saves successfully.
+func TestRecordSaveIdFromOtherCollection(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ baseCollection, _ := app.FindCollectionByNameOrId("demo2")
+ authCollection, _ := app.FindCollectionByNameOrId("nologin")
+
+ // base collection test
+ r1 := core.NewRecord(baseCollection)
+ r1.Set("title", "test_new")
+ r1.Set("id", "mk5fmymtx4wsprk") // existing id of demo3 record
+ if err := app.Save(r1); err != nil {
+ t.Fatalf("Expected nil, got error %v", err)
+ }
+
+ // auth collection test
+ r2 := core.NewRecord(authCollection)
+ r2.SetEmail("test_new@example.com")
+ r2.SetPassword("1234567890")
+ r2.Set("id", "gk390qegs4y47wn") // existing id of "clients" record
+ if err := app.Save(r2); err == nil {
+ t.Fatal("Expected error, got nil")
+ }
+
+ // try again with unique id
+ r2.Set("id", strings.Repeat("a", 15))
+ if err := app.Save(r2); err != nil {
+ t.Fatalf("Expected nil, got error %v", err)
+ }
+}
+
+// TestRecordSaveIdUpdateNoValidation verifies that changing the id of an
+// existing record is rejected even by SaveNoValidate, while resaving the
+// record with its original data succeeds.
+func TestRecordSaveIdUpdateNoValidation(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ rec, err := app.FindRecordById("demo3", "7nwo8tuiatetxdm")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ rec.Id = strings.Repeat("a", 15)
+
+ err = app.SaveNoValidate(rec)
+ if err == nil {
+ t.Fatal("Expected save to fail, got nil")
+ }
+
+ // no changes
+ rec.Load(rec.Original().FieldsData())
+ err = app.SaveNoValidate(rec)
+ if err != nil {
+ t.Fatalf("Expected save to succeed, got error %v", err)
+ }
+}
+
+// TestRecordSaveWithChangedPassword verifies that saving an auth record keeps
+// its tokenKey unchanged for regular field updates but regenerates it when
+// the password changes (invalidating previously issued tokens).
+func TestRecordSaveWithChangedPassword(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ record, err := app.FindAuthRecordByEmail("nologin", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ originalTokenKey := record.TokenKey()
+
+ t.Run("no password change shouldn't change the tokenKey", func(t *testing.T) {
+ record.Set("name", "example")
+
+ if err := app.Save(record); err != nil {
+ t.Fatal(err)
+ }
+
+ tokenKey := record.TokenKey()
+ if tokenKey == "" || originalTokenKey != tokenKey {
+ t.Fatalf("Expected tokenKey to not change, got %q VS %q", originalTokenKey, tokenKey)
+ }
+ })
+
+ t.Run("password change should change the tokenKey", func(t *testing.T) {
+ record.Set("password", "1234567890")
+
+ if err := app.Save(record); err != nil {
+ t.Fatal(err)
+ }
+
+ tokenKey := record.TokenKey()
+ if tokenKey == "" || originalTokenKey == tokenKey {
+ t.Fatalf("Expected tokenKey to change, got %q VS %q", originalTokenKey, tokenKey)
+ }
+ })
+}
+
+// TestRecordDelete verifies app.Delete behavior: unsaved records can't be
+// deleted, deleting removes the record and its external auths, deletion is
+// blocked by non-cascade required relations, cascade deletion propagates, and
+// the generated cascade queries reference json rel fields with their
+// table-prefixed identifiers.
+func TestRecordDelete(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ demoCollection, _ := app.FindCollectionByNameOrId("demo2")
+
+ // delete unsaved record
+ // ---
+ rec0 := core.NewRecord(demoCollection)
+ if err := app.Delete(rec0); err == nil {
+ t.Fatal("(rec0) Didn't expect to succeed deleting unsaved record")
+ }
+
+ // delete existing record + external auths
+ // ---
+ rec1, _ := app.FindRecordById("users", "4q1xlclmfloku33")
+ if err := app.Delete(rec1); err != nil {
+ t.Fatalf("(rec1) Expected nil, got error %v", err)
+ }
+ // check if it was really deleted
+ if refreshed, _ := app.FindRecordById(rec1.Collection().Id, rec1.Id); refreshed != nil {
+ t.Fatalf("(rec1) Expected record to be deleted, got %v", refreshed)
+ }
+ // check if the external auths were deleted
+ if auths, _ := app.FindAllExternalAuthsByRecord(rec1); len(auths) > 0 {
+ t.Fatalf("(rec1) Expected external auths to be deleted, got %v", auths)
+ }
+
+ // delete existing record while being part of a non-cascade required relation
+ // ---
+ rec2, _ := app.FindRecordById("demo3", "7nwo8tuiatetxdm")
+ if err := app.Delete(rec2); err == nil {
+ t.Fatalf("(rec2) Expected error, got nil")
+ }
+
+ // delete existing record + cascade
+ // ---
+ // capture every executed query (on both db pools) to inspect the cascade SQL
+ calledQueries := []string{}
+ app.NonconcurrentDB().(*dbx.DB).QueryLogFunc = func(ctx context.Context, t time.Duration, sql string, rows *sql.Rows, err error) {
+ calledQueries = append(calledQueries, sql)
+ }
+ app.DB().(*dbx.DB).QueryLogFunc = func(ctx context.Context, t time.Duration, sql string, rows *sql.Rows, err error) {
+ calledQueries = append(calledQueries, sql)
+ }
+ app.NonconcurrentDB().(*dbx.DB).ExecLogFunc = func(ctx context.Context, t time.Duration, sql string, result sql.Result, err error) {
+ calledQueries = append(calledQueries, sql)
+ }
+ app.DB().(*dbx.DB).ExecLogFunc = func(ctx context.Context, t time.Duration, sql string, result sql.Result, err error) {
+ calledQueries = append(calledQueries, sql)
+ }
+ rec3, _ := app.FindRecordById("users", "oap640cot4yru2s")
+ // delete
+ if err := app.Delete(rec3); err != nil {
+ t.Fatalf("(rec3) Expected nil, got error %v", err)
+ }
+ // check if it was really deleted
+ rec3, _ = app.FindRecordById(rec3.Collection().Id, rec3.Id)
+ if rec3 != nil {
+ t.Fatalf("(rec3) Expected record to be deleted, got %v", rec3)
+ }
+ // check if the operation cascaded
+ rel, _ := app.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if rel != nil {
+ t.Fatalf("(rec3) Expected the delete to cascade, found relation %v", rel)
+ }
+ // ensure that the json rel fields were prefixed
+ joinedQueries := strings.Join(calledQueries, " ")
+ expectedRelManyPart := "SELECT `demo1`.* FROM `demo1` WHERE EXISTS (SELECT 1 FROM json_each(CASE WHEN json_valid([[demo1.rel_many]]) THEN [[demo1.rel_many]] ELSE json_array([[demo1.rel_many]]) END) {{__je__}} WHERE [[__je__.value]]='"
+ if !strings.Contains(joinedQueries, expectedRelManyPart) {
+ t.Fatalf("(rec3) Expected the cascade delete to call the query \n%v, got \n%v", expectedRelManyPart, calledQueries)
+ }
+ expectedRelOnePart := "SELECT `demo1`.* FROM `demo1` WHERE (`demo1`.`rel_one`='"
+ if !strings.Contains(joinedQueries, expectedRelOnePart) {
+ t.Fatalf("(rec3) Expected the cascade delete to call the query \n%v, got \n%v", expectedRelOnePart, calledQueries)
+ }
+}
+
+// TestRecordDeleteBatchProcessing verifies cascade delete over the large
+// mock dataset built by createMockBatchProcessingData: non-cascade relations
+// are unset (c1.b and all c2 records) and cascade relations are deleted
+// (all c3 records), exercising the batched reference-update path.
+func TestRecordDeleteBatchProcessing(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ if err := createMockBatchProcessingData(app); err != nil {
+ t.Fatal(err)
+ }
+
+ // find and delete the first c1 record to trigger cascade
+ mainRecord, _ := app.FindRecordById("c1", "a")
+ if err := app.Delete(mainRecord); err != nil {
+ t.Fatal(err)
+ }
+
+ // check if the main record was deleted
+ _, err := app.FindRecordById(mainRecord.Collection().Id, mainRecord.Id)
+ if err == nil {
+ t.Fatal("The main record wasn't deleted")
+ }
+
+ // check if the c1 b rel field were updated
+ c1RecordB, err := app.FindRecordById("c1", "b")
+ if err != nil || c1RecordB.GetString("rel") != "" {
+ t.Fatalf("Expected c1RecordB.rel to be nil, got %v", c1RecordB.GetString("rel"))
+ }
+
+ // check if the c2 rel fields were updated
+ c2Records, err := app.FindAllRecords("c2", nil)
+ if err != nil || len(c2Records) == 0 {
+ t.Fatalf("Failed to fetch c2 records: %v", err)
+ }
+ for _, r := range c2Records {
+ ids := r.GetStringSlice("rel")
+ if len(ids) != 1 || ids[0] != "b" {
+ t.Fatalf("Expected only 'b' rel id, got %v", ids)
+ }
+ }
+
+ // check if all c3 relations were deleted
+ c3Records, err := app.FindAllRecords("c3", nil)
+ if err != nil {
+ t.Fatalf("Failed to fetch c3 records: %v", err)
+ }
+ if total := len(c3Records); total != 0 {
+ t.Fatalf("Expected c3 records to be deleted, found %d", total)
+ }
+}
+
+// createMockBatchProcessingData seeds three mock collections referencing c1:
+// c1 (self single-rel, non-cascade), c2 (multi-rel, non-cascade) and c3
+// (single-rel, cascade), plus 4500 c2/c3 record pairs so that cascade
+// processing must run in multiple batches.
+func createMockBatchProcessingData(app core.App) error {
+ // create mock collection without relation
+ c1 := core.NewBaseCollection("c1")
+ c1.Id = "c1"
+ c1.Fields.Add(
+ &core.TextField{Name: "text"},
+ &core.RelationField{
+ Name: "rel",
+ MaxSelect: 1,
+ CollectionId: "c1",
+ CascadeDelete: false, // should unset all rel fields
+ },
+ )
+ if err := app.SaveNoValidate(c1); err != nil {
+ return err
+ }
+
+ // create mock collection with a multi-rel field
+ c2 := core.NewBaseCollection("c2")
+ c2.Id = "c2"
+ c2.Fields.Add(
+ &core.TextField{Name: "text"},
+ &core.RelationField{
+ Name: "rel",
+ MaxSelect: 10,
+ CollectionId: "c1",
+ CascadeDelete: false, // should unset all rel fields
+ },
+ )
+ if err := app.SaveNoValidate(c2); err != nil {
+ return err
+ }
+
+ // create mock collection with a single-rel field
+ c3 := core.NewBaseCollection("c3")
+ c3.Id = "c3"
+ c3.Fields.Add(
+ &core.RelationField{
+ Name: "rel",
+ MaxSelect: 1,
+ CollectionId: "c1",
+ CascadeDelete: true, // should delete all c3 records
+ },
+ )
+ if err := app.SaveNoValidate(c3); err != nil {
+ return err
+ }
+
+ // insert mock records
+ c1RecordA := core.NewRecord(c1)
+ c1RecordA.Id = "a"
+ c1RecordA.Set("rel", c1RecordA.Id) // self reference
+ if err := app.SaveNoValidate(c1RecordA); err != nil {
+ return err
+ }
+ c1RecordB := core.NewRecord(c1)
+ c1RecordB.Id = "b"
+ c1RecordB.Set("rel", c1RecordA.Id) // rel to another record from the same collection
+ if err := app.SaveNoValidate(c1RecordB); err != nil {
+ return err
+ }
+ for i := 0; i < 4500; i++ {
+ c2Record := core.NewRecord(c2)
+ c2Record.Set("rel", []string{c1RecordA.Id, c1RecordB.Id})
+ if err := app.SaveNoValidate(c2Record); err != nil {
+ return err
+ }
+
+ c3Record := core.NewRecord(c3)
+ c3Record.Set("rel", c1RecordA.Id)
+ if err := app.SaveNoValidate(c3Record); err != nil {
+ return err
+ }
+ }
+
+ // set the same id as the relation for at least 1 record
+ // to check whether the correct condition will be added
+ c3Record := core.NewRecord(c3)
+ c3Record.Set("rel", c1RecordA.Id)
+ c3Record.Id = c1RecordA.Id
+ if err := app.SaveNoValidate(c3Record); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// -------------------------------------------------------------------
+
+// mockField is a TextField test double that registers a custom ":test"
+// getter/setter modifier, used to exercise Record.ReplaceModifiers.
+type mockField struct {
+ core.TextField
+}
+
+// FindGetter returns a getter for the "<name>:test" key that always yields
+// "modifier_get", and nil for any other key.
+func (f *mockField) FindGetter(key string) core.GetterFunc {
+ switch key {
+ case f.Name + ":test":
+ return func(record *core.Record) any {
+ return "modifier_get"
+ }
+ default:
+ return nil
+ }
+}
+
+// FindSetter returns a plain string-casting setter for the field name itself
+// and a setter for the "<name>:test" modifier that stores the fixed value
+// "modifier_set"; nil for any other key.
+func (f *mockField) FindSetter(key string) core.SetterFunc {
+ switch key {
+ case f.Name:
+ return func(record *core.Record, raw any) {
+ record.SetRaw(f.Name, cast.ToString(raw))
+ }
+ case f.Name + ":test":
+ return func(record *core.Record, raw any) {
+ record.SetRaw(f.Name, "modifier_set")
+ }
+ default:
+ return nil
+ }
+}
diff --git a/core/record_proxy.go b/core/record_proxy.go
new file mode 100644
index 00000000..644ede7d
--- /dev/null
+++ b/core/record_proxy.go
@@ -0,0 +1,32 @@
+package core
+
+// RecordProxy defines an interface for a Record proxy/project model,
+// aka. custom model struct that acts on behalf of the proxied Record to
+// allow for example typed getter/setters for the Record fields.
+//
+// To implement the interface it is usually enough to embed the [BaseRecordProxy] struct.
+type RecordProxy interface {
+ // ProxyRecord returns the proxied Record model.
+ ProxyRecord() *Record
+
+ // SetProxyRecord loads the specified record model into the current proxy.
+ SetProxyRecord(record *Record)
+}
+
+var _ RecordProxy = (*BaseRecordProxy)(nil)
+
+// BaseRecordProxy implements the [RecordProxy] interface and it is intended
+// to be used as embed to custom user provided Record proxy structs.
+type BaseRecordProxy struct {
+ *Record
+}
+
+// ProxyRecord returns the proxied Record model.
+func (m *BaseRecordProxy) ProxyRecord() *Record {
+ return m.Record
+}
+
+// SetProxyRecord loads the specified record model into the current proxy.
+func (m *BaseRecordProxy) SetProxyRecord(record *Record) {
+ m.Record = record
+}
diff --git a/core/record_proxy_test.go b/core/record_proxy_test.go
new file mode 100644
index 00000000..bc8f004b
--- /dev/null
+++ b/core/record_proxy_test.go
@@ -0,0 +1,20 @@
+package core_test
+
+import (
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+)
+
+func TestBaseRecordProxy(t *testing.T) {
+ p := core.BaseRecordProxy{}
+
+ record := core.NewRecord(core.NewBaseCollection("test"))
+ record.Id = "test"
+
+ p.SetProxyRecord(record)
+
+ if p.ProxyRecord() == nil || p.ProxyRecord().Id != p.Id || p.Id != "test" {
+ t.Fatalf("Expected proxy record to be set")
+ }
+}
diff --git a/core/record_query.go b/core/record_query.go
new file mode 100644
index 00000000..fac7313d
--- /dev/null
+++ b/core/record_query.go
@@ -0,0 +1,607 @@
+package core
+
+import (
+ "context"
+ "database/sql"
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/tools/inflector"
+ "github.com/pocketbase/pocketbase/tools/list"
+ "github.com/pocketbase/pocketbase/tools/search"
+ "github.com/pocketbase/pocketbase/tools/security"
+)
+
+var recordProxyType = reflect.TypeOf((*RecordProxy)(nil)).Elem()
+
+// RecordQuery returns a new Record select query from a collection model, id or name.
+//
+// In case a collection id or name is provided and that collection doesn't
+// actually exist, the generated query will be created with a cancelled context
+// and will fail once an executor (Row(), One(), All(), etc.) is called.
+func (app *BaseApp) RecordQuery(collectionModelOrIdentifier any) *dbx.SelectQuery {
+ var tableName string
+
+ collection, collectionErr := getCollectionByModelOrIdentifier(app, collectionModelOrIdentifier)
+ if collection != nil {
+ tableName = collection.Name
+ }
+ if tableName == "" {
+ // update with some fake table name for easier debugging
+ tableName = "@@__invalidCollectionModelOrIdentifier"
+ }
+
+ query := app.DB().Select(app.DB().QuoteSimpleColumnName(tableName) + ".*").From(tableName)
+
+ // in case of an error attach a new context and cancel it immediately with the error
+ if collectionErr != nil {
+ ctx, cancelFunc := context.WithCancelCause(context.Background())
+ query.WithContext(ctx)
+ cancelFunc(collectionErr)
+ }
+
+ return query.WithBuildHook(func(q *dbx.Query) {
+ q.WithExecHook(execLockRetry(app.config.QueryTimeout, defaultMaxLockRetries)).
+ WithOneHook(func(q *dbx.Query, a any, op func(b any) error) error {
+ if a == nil {
+ return op(a)
+ }
+
+ switch v := a.(type) {
+ case *Record:
+ record, err := resolveRecordOneHook(collection, op)
+ if err != nil {
+ return err
+ }
+
+ *v = *record
+
+ return nil
+ case RecordProxy:
+ record, err := resolveRecordOneHook(collection, op)
+ if err != nil {
+ return err
+ }
+
+ v.SetProxyRecord(record)
+ return nil
+ default:
+ return op(a)
+ }
+ }).
+ WithAllHook(func(q *dbx.Query, sliceA any, op func(sliceB any) error) error {
+ if sliceA == nil {
+ return op(sliceA)
+ }
+
+ switch v := sliceA.(type) {
+ case *[]*Record:
+ records, err := resolveRecordAllHook(collection, op)
+ if err != nil {
+ return err
+ }
+
+ *v = records
+
+ return nil
+ case *[]Record:
+ records, err := resolveRecordAllHook(collection, op)
+ if err != nil {
+ return err
+ }
+
+ nonPointers := make([]Record, len(records))
+ for i, r := range records {
+ nonPointers[i] = *r
+ }
+
+ *v = nonPointers
+
+ return nil
+ default: // expects []RecordProxy slice
+ records, err := resolveRecordAllHook(collection, op)
+ if err != nil {
+ return err
+ }
+
+ rv := reflect.ValueOf(v)
+ if rv.Kind() != reflect.Ptr || rv.IsNil() {
+ return errors.New("must be a pointer")
+ }
+
+ rv = dereference(rv)
+
+ if rv.Kind() != reflect.Slice {
+ return errors.New("must be a slice of RecordSetters")
+ }
+
+ // create an empty slice
+ if rv.IsNil() {
+ rv.Set(reflect.MakeSlice(rv.Type(), 0, len(records)))
+ }
+
+ et := rv.Type().Elem()
+
+ var isSliceOfPointers bool
+ if et.Kind() == reflect.Ptr {
+ isSliceOfPointers = true
+ et = et.Elem()
+ }
+
+ if !reflect.PointerTo(et).Implements(recordProxyType) {
+ return op(sliceA)
+ }
+
+ for _, record := range records {
+ ev := reflect.New(et)
+
+ if !ev.CanInterface() {
+ continue
+ }
+
+ ps, ok := ev.Interface().(RecordProxy)
+ if !ok {
+ continue
+ }
+
+ ps.SetProxyRecord(record)
+
+ ev = ev.Elem()
+ if isSliceOfPointers {
+ ev = ev.Addr()
+ }
+
+ rv.Set(reflect.Append(rv, ev))
+ }
+
+ return nil
+ }
+ })
+ })
+}
+
+func resolveRecordOneHook(collection *Collection, op func(dst any) error) (*Record, error) {
+ data := dbx.NullStringMap{}
+ if err := op(&data); err != nil {
+ return nil, err
+ }
+ return newRecordFromNullStringMap(collection, data)
+}
+
+func resolveRecordAllHook(collection *Collection, op func(dst any) error) ([]*Record, error) {
+ data := []dbx.NullStringMap{}
+ if err := op(&data); err != nil {
+ return nil, err
+ }
+ return newRecordsFromNullStringMaps(collection, data)
+}
+
+// dereference returns the underlying value v points to.
+func dereference(v reflect.Value) reflect.Value {
+ for v.Kind() == reflect.Ptr {
+ if v.IsNil() {
+ // initialize with a new value and continue searching
+ v.Set(reflect.New(v.Type().Elem()))
+ }
+ v = v.Elem()
+ }
+ return v
+}
+
+func getCollectionByModelOrIdentifier(app App, collectionModelOrIdentifier any) (*Collection, error) {
+ switch c := collectionModelOrIdentifier.(type) {
+ case *Collection:
+ return c, nil
+ case Collection:
+ return &c, nil
+ case string:
+ return app.FindCachedCollectionByNameOrId(c)
+ default:
+ return nil, errors.New("unknown collection identifier - must be collection model, id or name")
+ }
+}
+
+// -------------------------------------------------------------------
+
+// FindRecordById finds the Record model by its id.
+func (app *BaseApp) FindRecordById(
+ collectionModelOrIdentifier any,
+ recordId string,
+ optFilters ...func(q *dbx.SelectQuery) error,
+) (*Record, error) {
+ collection, err := getCollectionByModelOrIdentifier(app, collectionModelOrIdentifier)
+ if err != nil {
+ return nil, err
+ }
+
+ record := &Record{}
+
+ query := app.RecordQuery(collection).
+ AndWhere(dbx.HashExp{collection.Name + ".id": recordId})
+
+ // apply filter funcs (if any)
+ for _, filter := range optFilters {
+ if filter == nil {
+ continue
+ }
+ if err = filter(query); err != nil {
+ return nil, err
+ }
+ }
+
+ err = query.Limit(1).One(record)
+ if err != nil {
+ return nil, err
+ }
+
+ return record, nil
+}
+
+// FindRecordsByIds finds all records by the specified ids.
+// If no records are found, returns an empty slice.
+func (app *BaseApp) FindRecordsByIds(
+ collectionModelOrIdentifier any,
+ recordIds []string,
+ optFilters ...func(q *dbx.SelectQuery) error,
+) ([]*Record, error) {
+ collection, err := getCollectionByModelOrIdentifier(app, collectionModelOrIdentifier)
+ if err != nil {
+ return nil, err
+ }
+
+ query := app.RecordQuery(collection).
+ AndWhere(dbx.In(
+ collection.Name+".id",
+ list.ToInterfaceSlice(recordIds)...,
+ ))
+
+ for _, filter := range optFilters {
+ if filter == nil {
+ continue
+ }
+ if err = filter(query); err != nil {
+ return nil, err
+ }
+ }
+
+ records := make([]*Record, 0, len(recordIds))
+
+ err = query.All(&records)
+ if err != nil {
+ return nil, err
+ }
+
+ return records, nil
+}
+
+// FindAllRecords finds all records matching specified db expressions.
+//
+// Returns all collection records if no expression is provided.
+//
+// Returns an empty slice if no records are found.
+//
+// Example:
+//
+// // no extra expressions
+// app.FindAllRecords("example")
+//
+// // with extra expressions
+// expr1 := dbx.HashExp{"email": "test@example.com"}
+// expr2 := dbx.NewExp("LOWER(username) = {:username}", dbx.Params{"username": "test"})
+// app.FindAllRecords("example", expr1, expr2)
+func (app *BaseApp) FindAllRecords(collectionModelOrIdentifier any, exprs ...dbx.Expression) ([]*Record, error) {
+ query := app.RecordQuery(collectionModelOrIdentifier)
+
+ for _, expr := range exprs {
+ if expr != nil { // add only the non-nil expressions
+ query.AndWhere(expr)
+ }
+ }
+
+ var records []*Record
+
+ if err := query.All(&records); err != nil {
+ return nil, err
+ }
+
+ return records, nil
+}
+
+// FindFirstRecordByData returns the first found record matching
+// the provided key-value pair.
+func (app *BaseApp) FindFirstRecordByData(collectionModelOrIdentifier any, key string, value any) (*Record, error) {
+ record := &Record{}
+
+ err := app.RecordQuery(collectionModelOrIdentifier).
+ AndWhere(dbx.HashExp{inflector.Columnify(key): value}).
+ Limit(1).
+ One(record)
+ if err != nil {
+ return nil, err
+ }
+
+ return record, nil
+}
+
+// FindRecordsByFilter returns limit number of records matching the
+// provided string filter.
+//
+// NB! Use the last "params" argument to bind untrusted user variables!
+//
+// The filter argument is optional and can be empty string to target
+// all available records.
+//
+// The sort argument is optional and can be empty string OR the same format
+// used in the web APIs, ex. "-created,title".
+//
+// If the limit argument is <= 0, no limit is applied to the query and
+// all matching records are returned.
+//
+// Returns an empty slice if no records are found.
+//
+// Example:
+//
+// app.FindRecordsByFilter(
+// "posts",
+// "title ~ {:title} && visible = {:visible}",
+// "-created",
+// 10,
+// 0,
+// dbx.Params{"title": "lorem ipsum", "visible": true}
+// )
+func (app *BaseApp) FindRecordsByFilter(
+ collectionModelOrIdentifier any,
+ filter string,
+ sort string,
+ limit int,
+ offset int,
+ params ...dbx.Params,
+) ([]*Record, error) {
+ collection, err := getCollectionByModelOrIdentifier(app, collectionModelOrIdentifier)
+ if err != nil {
+ return nil, err
+ }
+
+ q := app.RecordQuery(collection)
+
+ // build a fields resolver and attach the generated conditions to the query
+ // ---
+ resolver := NewRecordFieldResolver(
+ app,
+ collection, // the base collection
+ nil, // no request data
+ true, // allow searching hidden/protected fields like "email"
+ )
+
+ if filter != "" {
+ expr, err := search.FilterData(filter).BuildExpr(resolver, params...)
+ if err != nil {
+ return nil, fmt.Errorf("invalid filter expression: %w", err)
+ }
+ q.AndWhere(expr)
+ }
+
+ if sort != "" {
+ for _, sortField := range search.ParseSortFromString(sort) {
+ expr, err := sortField.BuildExpr(resolver)
+ if err != nil {
+ return nil, err
+ }
+ if expr != "" {
+ q.AndOrderBy(expr)
+ }
+ }
+ }
+
+ resolver.UpdateQuery(q) // attaches any adhoc joins and aliases
+ // ---
+
+ if offset > 0 {
+ q.Offset(int64(offset))
+ }
+
+ if limit > 0 {
+ q.Limit(int64(limit))
+ }
+
+ records := []*Record{}
+
+ if err := q.All(&records); err != nil {
+ return nil, err
+ }
+
+ return records, nil
+}
+
+// FindFirstRecordByFilter returns the first available record matching the provided filter (if any).
+//
+// NB! Use the last params argument to bind untrusted user variables!
+//
+// Returns sql.ErrNoRows if no record is found.
+//
+// Example:
+//
+// app.FindFirstRecordByFilter("posts", "")
+// app.FindFirstRecordByFilter("posts", "slug={:slug} && status='public'", dbx.Params{"slug": "test"})
+func (app *BaseApp) FindFirstRecordByFilter(
+ collectionModelOrIdentifier any,
+ filter string,
+ params ...dbx.Params,
+) (*Record, error) {
+ result, err := app.FindRecordsByFilter(collectionModelOrIdentifier, filter, "", 1, 0, params...)
+ if err != nil {
+ return nil, err
+ }
+
+ if len(result) == 0 {
+ return nil, sql.ErrNoRows
+ }
+
+ return result[0], nil
+}
+
+// CountRecords returns the total number of records in a collection.
+func (app *BaseApp) CountRecords(collectionModelOrIdentifier any, exprs ...dbx.Expression) (int64, error) {
+ var total int64
+
+ q := app.RecordQuery(collectionModelOrIdentifier).Select("count(*)")
+
+ for _, expr := range exprs {
+ if expr != nil { // add only the non-nil expressions
+ q.AndWhere(expr)
+ }
+ }
+
+ err := q.Row(&total)
+
+ return total, err
+}
+
+// FindAuthRecordByToken finds the auth record associated with the provided JWT
+// (auth, file, verifyEmail, changeEmail, passwordReset types).
+//
+// Optionally specify a list of validTypes to check tokens only from those types.
+//
+// Returns an error if the JWT is invalid, expired or not associated to an auth collection record.
+func (app *BaseApp) FindAuthRecordByToken(token string, validTypes ...string) (*Record, error) {
+ if token == "" {
+ return nil, errors.New("missing token")
+ }
+
+ unverifiedClaims, err := security.ParseUnverifiedJWT(token)
+ if err != nil {
+ return nil, err
+ }
+
+ // check required claims
+ id, _ := unverifiedClaims[TokenClaimId].(string)
+ collectionId, _ := unverifiedClaims[TokenClaimCollectionId].(string)
+ tokenType, _ := unverifiedClaims[TokenClaimType].(string)
+ if id == "" || collectionId == "" || tokenType == "" {
+ return nil, errors.New("missing or invalid token claims")
+ }
+
+ // check types (if explicitly set)
+ if len(validTypes) > 0 && !list.ExistInSlice(tokenType, validTypes) {
+ return nil, fmt.Errorf("invalid token type %q, expects %q", tokenType, strings.Join(validTypes, ","))
+ }
+
+ record, err := app.FindRecordById(collectionId, id)
+ if err != nil {
+ return nil, err
+ }
+
+ if !record.Collection().IsAuth() {
+ return nil, errors.New("the token is not associated to an auth collection record")
+ }
+
+ var baseTokenKey string
+ switch tokenType {
+ case TokenTypeAuth:
+ baseTokenKey = record.Collection().AuthToken.Secret
+ case TokenTypeFile:
+ baseTokenKey = record.Collection().FileToken.Secret
+ case TokenTypeVerification:
+ baseTokenKey = record.Collection().VerificationToken.Secret
+ case TokenTypePasswordReset:
+ baseTokenKey = record.Collection().PasswordResetToken.Secret
+ case TokenTypeEmailChange:
+ baseTokenKey = record.Collection().EmailChangeToken.Secret
+ default:
+ return nil, errors.New("unknown token type " + tokenType)
+ }
+
+ secret := record.TokenKey() + baseTokenKey
+
+ // verify token signature
+ _, err = security.ParseJWT(token, secret)
+ if err != nil {
+ return nil, err
+ }
+
+ return record, nil
+}
+
+// FindAuthRecordByEmail finds the auth record associated with the provided email.
+//
+// Returns an error if it is not an auth collection or the record is not found.
+func (app *BaseApp) FindAuthRecordByEmail(collectionModelOrIdentifier any, email string) (*Record, error) {
+ collection, err := getCollectionByModelOrIdentifier(app, collectionModelOrIdentifier)
+ if err != nil {
+ return nil, fmt.Errorf("failed to fetch auth collection: %w", err)
+ }
+ if !collection.IsAuth() {
+ return nil, fmt.Errorf("%q is not an auth collection", collection.Name)
+ }
+
+ record := &Record{}
+
+ err = app.RecordQuery(collection).
+ AndWhere(dbx.HashExp{FieldNameEmail: email}).
+ Limit(1).
+ One(record)
+ if err != nil {
+ return nil, err
+ }
+
+ return record, nil
+}
+
+// CanAccessRecord checks if a record is allowed to be accessed by the
+// specified requestInfo and accessRule.
+//
+// Rule and db checks are ignored in case requestInfo.AuthRecord is a superuser.
+//
+// The returned error indicates that something unexpected happened during
+// the check (eg. invalid rule or db query error).
+//
+// The method always returns false on invalid rule or db query error.
+//
+// Example:
+//
+// requestInfo, _ := e.RequestInfo()
+// record, _ := app.FindRecordById("example", "RECORD_ID")
+// rule := types.Pointer("@request.auth.id != '' || status = 'public'")
+// // ... or use one of the record collection's rule, eg. record.Collection().ViewRule
+//
+// if ok, _ := app.CanAccessRecord(record, requestInfo, rule); ok { ... }
+func (app *BaseApp) CanAccessRecord(record *Record, requestInfo *RequestInfo, accessRule *string) (bool, error) {
+ // superusers can access everything
+ if requestInfo.HasSuperuserAuth() {
+ return true, nil
+ }
+
+ // only superusers can access this record
+ if accessRule == nil {
+ return false, nil
+ }
+
+ // empty public rule, aka. everyone can access
+ if *accessRule == "" {
+ return true, nil
+ }
+
+ var exists bool
+
+ query := app.RecordQuery(record.Collection()).
+ Select("(1)").
+ AndWhere(dbx.HashExp{record.Collection().Name + ".id": record.Id})
+
+ // parse and apply the access rule filter
+ resolver := NewRecordFieldResolver(app, record.Collection(), requestInfo, true)
+ expr, err := search.FilterData(*accessRule).BuildExpr(resolver)
+ if err != nil {
+ return false, err
+ }
+ resolver.UpdateQuery(query)
+
+ err = query.AndWhere(expr).Limit(1).Row(&exists)
+ if err != nil && !errors.Is(err, sql.ErrNoRows) {
+ return false, err
+ }
+
+ return exists, nil
+}
diff --git a/daos/record_expand.go b/core/record_query_expand.go
similarity index 59%
rename from daos/record_expand.go
rename to core/record_query_expand.go
index 857b5eec..79df2969 100644
--- a/daos/record_expand.go
+++ b/core/record_query_expand.go
@@ -1,4 +1,4 @@
-package daos
+package core
import (
"errors"
@@ -8,21 +8,12 @@ import (
"strings"
"github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
"github.com/pocketbase/pocketbase/tools/dbutils"
"github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/security"
- "github.com/pocketbase/pocketbase/tools/types"
)
-// MaxExpandDepth specifies the max allowed nested expand depth path.
-//
-// @todo Consider eventually reusing resolvers.maxNestedRels
-const MaxExpandDepth = 6
-
// ExpandFetchFunc defines the function that is used to fetch the expanded relation records.
-type ExpandFetchFunc func(relCollection *models.Collection, relIds []string) ([]*models.Record, error)
+type ExpandFetchFunc func(relCollection *Collection, relIds []string) ([]*Record, error)
// ExpandRecord expands the relations of a single Record model.
//
@@ -30,8 +21,8 @@ type ExpandFetchFunc func(relCollection *models.Collection, relIds []string) ([]
// that returns all relation records.
//
// Returns a map with the failed expand parameters and their errors.
-func (dao *Dao) ExpandRecord(record *models.Record, expands []string, optFetchFunc ExpandFetchFunc) map[string]error {
- return dao.ExpandRecords([]*models.Record{record}, expands, optFetchFunc)
+func (app *BaseApp) ExpandRecord(record *Record, expands []string, optFetchFunc ExpandFetchFunc) map[string]error {
+ return app.ExpandRecords([]*Record{record}, expands, optFetchFunc)
}
// ExpandRecords expands the relations of the provided Record models list.
@@ -40,13 +31,13 @@ func (dao *Dao) ExpandRecord(record *models.Record, expands []string, optFetchFu
// that returns all relation records.
//
// Returns a map with the failed expand parameters and their errors.
-func (dao *Dao) ExpandRecords(records []*models.Record, expands []string, optFetchFunc ExpandFetchFunc) map[string]error {
+func (app *BaseApp) ExpandRecords(records []*Record, expands []string, optFetchFunc ExpandFetchFunc) map[string]error {
normalized := normalizeExpands(expands)
failed := map[string]error{}
for _, expand := range normalized {
- if err := dao.expandRecords(records, expand, optFetchFunc, 1); err != nil {
+ if err := app.expandRecords(records, expand, optFetchFunc, 1); err != nil {
failed[expand] = err
}
}
@@ -62,24 +53,23 @@ var indirectExpandRegex = regexp.MustCompile(`^(\w+)_via_(\w+)$`)
// notes:
// - if fetchFunc is nil, dao.FindRecordsByIds will be used
// - all records are expected to be from the same collection
-// - if MaxExpandDepth is reached, the function returns nil ignoring the remaining expand path
-func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetchFunc ExpandFetchFunc, recursionLevel int) error {
+// - if maxNestedRels(6) is reached, the function returns nil ignoring the remaining expand path
+func (app *BaseApp) expandRecords(records []*Record, expandPath string, fetchFunc ExpandFetchFunc, recursionLevel int) error {
if fetchFunc == nil {
// load a default fetchFunc
- fetchFunc = func(relCollection *models.Collection, relIds []string) ([]*models.Record, error) {
- return dao.FindRecordsByIds(relCollection.Id, relIds)
+ fetchFunc = func(relCollection *Collection, relIds []string) ([]*Record, error) {
+ return app.FindRecordsByIds(relCollection.Id, relIds)
}
}
- if expandPath == "" || recursionLevel > MaxExpandDepth || len(records) == 0 {
+ if expandPath == "" || recursionLevel > maxNestedRels || len(records) == 0 {
return nil
}
mainCollection := records[0].Collection()
- var relField *schema.SchemaField
- var relFieldOptions *schema.RelationOptions
- var relCollection *models.Collection
+ var relField *RelationField
+ var relCollection *Collection
parts := strings.SplitN(expandPath, ".", 2)
var matches []string
@@ -100,33 +90,27 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
}
if len(matches) == 3 {
- indirectRel, _ := dao.FindCollectionByNameOrId(matches[1])
+ indirectRel, _ := getCollectionByModelOrIdentifier(app, matches[1])
if indirectRel == nil {
return fmt.Errorf("couldn't find back-related collection %q", matches[1])
}
- indirectRelField := indirectRel.Schema.GetFieldByName(matches[2])
- if indirectRelField == nil || indirectRelField.Type != schema.FieldTypeRelation {
+ indirectRelField, _ := indirectRel.Fields.GetByName(matches[2]).(*RelationField)
+ if indirectRelField == nil || indirectRelField.CollectionId != mainCollection.Id {
return fmt.Errorf("couldn't find back-relation field %q in collection %q", matches[2], indirectRel.Name)
}
- indirectRelField.InitOptions()
- indirectRelFieldOptions, _ := indirectRelField.Options.(*schema.RelationOptions)
- if indirectRelFieldOptions == nil || indirectRelFieldOptions.CollectionId != mainCollection.Id {
- return fmt.Errorf("invalid back-relation field path %q", parts[0])
- }
-
// add the related id(s) as a dynamic relation field value to
// allow further expand checks at later stage in a more unified manner
prepErr := func() error {
- q := dao.DB().Select("id").
+ q := app.DB().Select("id").
From(indirectRel.Name).
Limit(1000) // the limit is arbitrary chosen and may change in the future
- if indirectRelFieldOptions.IsMultiple() {
+ if indirectRelField.IsMultiple() {
q.AndWhere(dbx.Exists(dbx.NewExp(fmt.Sprintf(
"SELECT 1 FROM %s je WHERE je.value = {:id}",
- dbutils.JsonEach(indirectRelField.Name),
+ dbutils.JSONEach(indirectRelField.Name),
))))
} else {
q.AndWhere(dbx.NewExp("[[" + indirectRelField.Name + "]] = {:id}"))
@@ -153,36 +137,26 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
return prepErr
}
- relFieldOptions = &schema.RelationOptions{
- MaxSelect: nil,
+ // indirect/back relation
+ relField = &RelationField{
+ Name: parts[0],
+ MaxSelect: 2147483647,
CollectionId: indirectRel.Id,
}
- if dbutils.HasSingleColumnUniqueIndex(indirectRelField.Name, indirectRel.Indexes) {
- relFieldOptions.MaxSelect = types.Pointer(1)
- }
- // indirect/back relation
- relField = &schema.SchemaField{
- Id: "_" + parts[0] + security.PseudorandomString(3),
- Type: schema.FieldTypeRelation,
- Name: parts[0],
- Options: relFieldOptions,
+ if dbutils.HasSingleColumnUniqueIndex(indirectRelField.GetName(), indirectRel.Indexes) {
+ relField.MaxSelect = 1
}
relCollection = indirectRel
} else {
// direct relation
- relField = mainCollection.Schema.GetFieldByName(parts[0])
- if relField == nil || relField.Type != schema.FieldTypeRelation {
- return fmt.Errorf("Couldn't find relation field %q in collection %q.", parts[0], mainCollection.Name)
- }
- relField.InitOptions()
- relFieldOptions, _ = relField.Options.(*schema.RelationOptions)
- if relFieldOptions == nil {
- return fmt.Errorf("Couldn't initialize the options of relation field %q.", parts[0])
+ relField, _ = mainCollection.Fields.GetByName(parts[0]).(*RelationField)
+ if relField == nil {
+ return fmt.Errorf("couldn't find relation field %q in collection %q", parts[0], mainCollection.Name)
}
- relCollection, _ = dao.FindCollectionByNameOrId(relFieldOptions.CollectionId)
+ relCollection, _ = getCollectionByModelOrIdentifier(app, relField.CollectionId)
if relCollection == nil {
- return fmt.Errorf("Couldn't find related collection %q.", relFieldOptions.CollectionId)
+ return fmt.Errorf("couldn't find related collection %q", relField.CollectionId)
}
}
@@ -202,22 +176,28 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
// expand nested fields
if len(parts) > 1 {
- err := dao.expandRecords(rels, parts[1], fetchFunc, recursionLevel+1)
+ err := app.expandRecords(rels, parts[1], fetchFunc, recursionLevel+1)
if err != nil {
return err
}
}
// reindex with the rel id
- indexedRels := make(map[string]*models.Record, len(rels))
+ indexedRels := make(map[string]*Record, len(rels))
for _, rel := range rels {
- indexedRels[rel.GetId()] = rel
+ indexedRels[rel.Id] = rel
}
for _, model := range records {
+ // init expand if not already
+ // (this is done to ensure that the "expand" key will be returned in the response even if empty)
+ if model.expand == nil {
+ model.SetExpand(nil)
+ }
+
relIds := model.GetStringSlice(relField.Name)
- validRels := make([]*models.Record, 0, len(relIds))
+ validRels := make([]*Record, 0, len(relIds))
for _, id := range relIds {
if rel, ok := indexedRels[id]; ok {
validRels = append(validRels, rel)
@@ -231,13 +211,13 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
expandData := model.Expand()
// normalize access to the previously expanded rel records (if any)
- var oldExpandedRels []*models.Record
+ var oldExpandedRels []*Record
switch v := expandData[relField.Name].(type) {
case nil:
// no old expands
- case *models.Record:
- oldExpandedRels = []*models.Record{v}
- case []*models.Record:
+ case *Record:
+ oldExpandedRels = []*Record{v}
+ case []*Record:
oldExpandedRels = v
}
@@ -254,10 +234,10 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
}
// update the expanded data
- if relFieldOptions.MaxSelect != nil && *relFieldOptions.MaxSelect <= 1 {
- expandData[relField.Name] = validRels[0]
- } else {
+ if relField.IsMultiple() {
expandData[relField.Name] = validRels
+ } else {
+ expandData[relField.Name] = validRels[0]
}
model.SetExpand(expandData)
@@ -300,14 +280,3 @@ func normalizeExpands(paths []string) []string {
return list.ToUniqueStringSlice(result)
}
-
-func isRelFieldUnique(collection *models.Collection, fieldName string) bool {
- for _, idx := range collection.Indexes {
- parsed := dbutils.ParseIndex(idx)
- if parsed.Unique && len(parsed.Columns) == 1 && strings.EqualFold(parsed.Columns[0].Name, fieldName) {
- return true
- }
- }
-
- return false
-}
diff --git a/daos/record_expand_test.go b/core/record_query_expand_test.go
similarity index 53%
rename from daos/record_expand_test.go
rename to core/record_query_expand_test.go
index dcb0da1f..04b5cbf1 100644
--- a/daos/record_expand_test.go
+++ b/core/record_query_expand_test.go
@@ -1,4 +1,4 @@
-package daos_test
+package core_test
import (
"encoding/json"
@@ -6,9 +6,7 @@ import (
"strings"
"testing"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
+ "github.com/pocketbase/pocketbase/core"
"github.com/pocketbase/pocketbase/tests"
"github.com/pocketbase/pocketbase/tools/list"
)
@@ -20,21 +18,21 @@ func TestExpandRecords(t *testing.T) {
defer app.Cleanup()
scenarios := []struct {
- testName string
- collectionIdOrName string
- recordIds []string
- expands []string
- fetchFunc daos.ExpandFetchFunc
- expectExpandProps int
- expectExpandFailures int
+ testName string
+ collectionIdOrName string
+ recordIds []string
+ expands []string
+ fetchFunc core.ExpandFetchFunc
+ expectNonemptyExpandProps int
+ expectExpandFailures int
}{
{
"empty records",
"",
[]string{},
[]string{"self_rel_one", "self_rel_many.self_rel_one"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
0,
0,
@@ -44,8 +42,8 @@ func TestExpandRecords(t *testing.T) {
"demo4",
[]string{"i9naidtvr6qsgb4", "qzaqccwrmva4o1n"},
[]string{},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
0,
0,
@@ -55,7 +53,7 @@ func TestExpandRecords(t *testing.T) {
"demo4",
[]string{"i9naidtvr6qsgb4", "qzaqccwrmva4o1n"},
[]string{"self_rel_one", "self_rel_many.self_rel_one"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
return nil, errors.New("test error")
},
0,
@@ -66,8 +64,8 @@ func TestExpandRecords(t *testing.T) {
"demo4",
[]string{"i9naidtvr6qsgb4", "qzaqccwrmva4o1n"},
[]string{"missing"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
0,
1,
@@ -77,8 +75,8 @@ func TestExpandRecords(t *testing.T) {
"demo4",
[]string{"i9naidtvr6qsgb4", "qzaqccwrmva4o1n"},
[]string{"title"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
0,
1,
@@ -88,8 +86,8 @@ func TestExpandRecords(t *testing.T) {
"demo4",
[]string{"i9naidtvr6qsgb4", "qzaqccwrmva4o1n"},
[]string{"rel_one_no_cascade.title"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
0,
1,
@@ -117,8 +115,8 @@ func TestExpandRecords(t *testing.T) {
"self_rel_many", "self_rel_many.",
" self_rel_many ", "",
},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
9,
0,
@@ -132,8 +130,8 @@ func TestExpandRecords(t *testing.T) {
"oap640cot4yru2s", // no rels
},
[]string{"rel"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
2,
0,
@@ -156,8 +154,8 @@ func TestExpandRecords(t *testing.T) {
"demo4",
[]string{"qzaqccwrmva4o1n"},
[]string{"self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
6,
0,
@@ -167,8 +165,8 @@ func TestExpandRecords(t *testing.T) {
"demo3",
[]string{"lcl9d87w22ml6jy"},
[]string{"demo4(rel_one_no_cascade_required)"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
1,
0,
@@ -178,8 +176,8 @@ func TestExpandRecords(t *testing.T) {
"demo3",
[]string{"lcl9d87w22ml6jy"},
[]string{"demo4_via_rel_one_no_cascade_required"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
1,
0,
@@ -191,8 +189,8 @@ func TestExpandRecords(t *testing.T) {
[]string{
"demo4_via_rel_one_no_cascade_required.self_rel_many.self_rel_many.self_rel_one",
},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
5,
0,
@@ -204,8 +202,8 @@ func TestExpandRecords(t *testing.T) {
[]string{
"demo4_via_rel_many_no_cascade_required.self_rel_many.rel_many_no_cascade_required.demo4_via_rel_many_no_cascade_required",
},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
7,
0,
@@ -220,8 +218,8 @@ func TestExpandRecords(t *testing.T) {
"self_rel_many.self_rel_one.rel_many_cascade",
"self_rel_many.self_rel_one.rel_many_no_cascade_required",
},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
5,
0,
@@ -229,21 +227,25 @@ func TestExpandRecords(t *testing.T) {
}
for _, s := range scenarios {
- ids := list.ToUniqueStringSlice(s.recordIds)
- records, _ := app.Dao().FindRecordsByIds(s.collectionIdOrName, ids)
- failed := app.Dao().ExpandRecords(records, s.expands, s.fetchFunc)
+ t.Run(s.testName, func(t *testing.T) {
+ ids := list.ToUniqueStringSlice(s.recordIds)
+ records, _ := app.FindRecordsByIds(s.collectionIdOrName, ids)
+ failed := app.ExpandRecords(records, s.expands, s.fetchFunc)
- if len(failed) != s.expectExpandFailures {
- t.Errorf("[%s] Expected %d failures, got %d: \n%v", s.testName, s.expectExpandFailures, len(failed), failed)
- }
+ if len(failed) != s.expectExpandFailures {
+ t.Errorf("Expected %d failures, got %d\n%v", s.expectExpandFailures, len(failed), failed)
+ }
- encoded, _ := json.Marshal(records)
- encodedStr := string(encoded)
- totalExpandProps := strings.Count(encodedStr, schema.FieldNameExpand)
+ encoded, _ := json.Marshal(records)
+ encodedStr := string(encoded)
+ totalExpandProps := strings.Count(encodedStr, `"`+core.FieldNameExpand+`":`)
+ totalEmptyExpands := strings.Count(encodedStr, `"`+core.FieldNameExpand+`":{}`)
+ totalNonemptyExpands := totalExpandProps - totalEmptyExpands
- if s.expectExpandProps != totalExpandProps {
- t.Errorf("[%s] Expected %d expand props, got %d: \n%v", s.testName, s.expectExpandProps, totalExpandProps, encodedStr)
- }
+ if s.expectNonemptyExpandProps != totalNonemptyExpands {
+ t.Errorf("Expected %d expand props, got %d\n%v", s.expectNonemptyExpandProps, totalNonemptyExpands, encodedStr)
+ }
+ })
}
}
@@ -254,21 +256,21 @@ func TestExpandRecord(t *testing.T) {
defer app.Cleanup()
scenarios := []struct {
- testName string
- collectionIdOrName string
- recordId string
- expands []string
- fetchFunc daos.ExpandFetchFunc
- expectExpandProps int
- expectExpandFailures int
+ testName string
+ collectionIdOrName string
+ recordId string
+ expands []string
+ fetchFunc core.ExpandFetchFunc
+ expectNonemptyExpandProps int
+ expectExpandFailures int
}{
{
"empty expand",
"demo4",
"i9naidtvr6qsgb4",
[]string{},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
0,
0,
@@ -278,7 +280,7 @@ func TestExpandRecord(t *testing.T) {
"demo4",
"i9naidtvr6qsgb4",
[]string{"self_rel_one", "self_rel_many.self_rel_one"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
return nil, errors.New("test error")
},
0,
@@ -289,8 +291,8 @@ func TestExpandRecord(t *testing.T) {
"demo4",
"i9naidtvr6qsgb4",
[]string{"missing"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
0,
1,
@@ -300,8 +302,8 @@ func TestExpandRecord(t *testing.T) {
"demo4",
"i9naidtvr6qsgb4",
[]string{"title"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
0,
1,
@@ -311,8 +313,8 @@ func TestExpandRecord(t *testing.T) {
"demo4",
"qzaqccwrmva4o1n",
[]string{"rel_one_no_cascade.title"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
0,
1,
@@ -327,8 +329,8 @@ func TestExpandRecord(t *testing.T) {
"self_rel_many", "self_rel_many.",
" self_rel_many ", "",
},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
8,
0,
@@ -338,8 +340,8 @@ func TestExpandRecord(t *testing.T) {
"users",
"oap640cot4yru2s",
[]string{"rel"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
0,
0,
@@ -349,8 +351,8 @@ func TestExpandRecord(t *testing.T) {
"demo4",
"qzaqccwrmva4o1n",
[]string{"self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many.self_rel_many"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
6,
0,
@@ -360,8 +362,8 @@ func TestExpandRecord(t *testing.T) {
"demo3",
"lcl9d87w22ml6jy",
[]string{"demo4(rel_one_no_cascade_required)"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
1,
0,
@@ -371,8 +373,8 @@ func TestExpandRecord(t *testing.T) {
"demo3",
"lcl9d87w22ml6jy",
[]string{"demo4_via_rel_one_no_cascade_required"},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
1,
0,
@@ -384,8 +386,8 @@ func TestExpandRecord(t *testing.T) {
[]string{
"demo4(rel_one_no_cascade_required).self_rel_many.self_rel_many.self_rel_one",
},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
5,
0,
@@ -397,8 +399,8 @@ func TestExpandRecord(t *testing.T) {
[]string{
"demo4_via_rel_many_no_cascade_required.self_rel_many.rel_many_no_cascade_required.demo4_via_rel_many_no_cascade_required",
},
- func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
},
7,
0,
@@ -406,44 +408,48 @@ func TestExpandRecord(t *testing.T) {
}
for _, s := range scenarios {
- record, _ := app.Dao().FindRecordById(s.collectionIdOrName, s.recordId)
- failed := app.Dao().ExpandRecord(record, s.expands, s.fetchFunc)
+ t.Run(s.testName, func(t *testing.T) {
+ record, _ := app.FindRecordById(s.collectionIdOrName, s.recordId)
+ failed := app.ExpandRecord(record, s.expands, s.fetchFunc)
- if len(failed) != s.expectExpandFailures {
- t.Errorf("[%s] Expected %d failures, got %d: \n%v", s.testName, s.expectExpandFailures, len(failed), failed)
- }
+ if len(failed) != s.expectExpandFailures {
+ t.Errorf("Expected %d failures, got %d\n%v", s.expectExpandFailures, len(failed), failed)
+ }
- encoded, _ := json.Marshal(record)
- encodedStr := string(encoded)
- totalExpandProps := strings.Count(encodedStr, schema.FieldNameExpand)
+ encoded, _ := json.Marshal(record)
+ encodedStr := string(encoded)
+ totalExpandProps := strings.Count(encodedStr, `"`+core.FieldNameExpand+`":`)
+ totalEmptyExpands := strings.Count(encodedStr, `"`+core.FieldNameExpand+`":{}`)
+ totalNonemptyExpands := totalExpandProps - totalEmptyExpands
- if s.expectExpandProps != totalExpandProps {
- t.Errorf("[%s] Expected %d expand props, got %d: \n%v", s.testName, s.expectExpandProps, totalExpandProps, encodedStr)
- }
+ if s.expectNonemptyExpandProps != totalNonemptyExpands {
+ t.Errorf("Expected %d expand props, got %d\n%v", s.expectNonemptyExpandProps, totalNonemptyExpands, encodedStr)
+ }
+ })
}
}
-func TestIndirectExpandSingeVsArrayResult(t *testing.T) {
+func TestBackRelationExpandSingeVsArrayResult(t *testing.T) {
t.Parallel()
app, _ := tests.NewTestApp()
defer app.Cleanup()
- record, err := app.Dao().FindRecordById("demo3", "7nwo8tuiatetxdm")
+ record, err := app.FindRecordById("demo3", "7nwo8tuiatetxdm")
if err != nil {
t.Fatal(err)
}
// non-unique indirect expand
{
- errs := app.Dao().ExpandRecord(record, []string{"demo4_via_rel_one_cascade"}, func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ errs := app.ExpandRecord(record, []string{"demo4_via_rel_one_cascade"}, func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
})
if len(errs) > 0 {
t.Fatal(errs)
}
- result, ok := record.Expand()["demo4_via_rel_one_cascade"].([]*models.Record)
+ result, ok := record.Expand()["demo4_via_rel_one_cascade"].([]*core.Record)
if !ok {
t.Fatalf("Expected the expanded result to be a slice, got %v", result)
}
@@ -453,26 +459,26 @@ func TestIndirectExpandSingeVsArrayResult(t *testing.T) {
{
// mock a unique constraint for the rel_one_cascade field
// ---
- demo4, err := app.Dao().FindCollectionByNameOrId("demo4")
+ demo4, err := app.FindCollectionByNameOrId("demo4")
if err != nil {
t.Fatal(err)
}
demo4.Indexes = append(demo4.Indexes, "create unique index idx_unique_expand on demo4 (rel_one_cascade)")
- if err := app.Dao().SaveCollection(demo4); err != nil {
+ if err := app.Save(demo4); err != nil {
t.Fatalf("Failed to mock unique constraint: %v", err)
}
// ---
- errs := app.Dao().ExpandRecord(record, []string{"demo4_via_rel_one_cascade"}, func(c *models.Collection, ids []string) ([]*models.Record, error) {
- return app.Dao().FindRecordsByIds(c.Id, ids, nil)
+ errs := app.ExpandRecord(record, []string{"demo4_via_rel_one_cascade"}, func(c *core.Collection, ids []string) ([]*core.Record, error) {
+ return app.FindRecordsByIds(c.Id, ids, nil)
})
if len(errs) > 0 {
t.Fatal(errs)
}
- result, ok := record.Expand()["demo4_via_rel_one_cascade"].(*models.Record)
+ result, ok := record.Expand()["demo4_via_rel_one_cascade"].(*core.Record)
if !ok {
t.Fatalf("Expected the expanded result to be a single model, got %v", result)
}
diff --git a/core/record_query_test.go b/core/record_query_test.go
new file mode 100644
index 00000000..85a8824d
--- /dev/null
+++ b/core/record_query_test.go
@@ -0,0 +1,1143 @@
+package core_test
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "slices"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func TestRecordQueryWithDifferentCollectionValues(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ collection, err := app.FindCollectionByNameOrId("demo1")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ name string
+ collection any
+ expectedTotal int
+ expectError bool
+ }{
+ {"with nil value", nil, 0, true},
+ {"with invalid or missing collection id/name", "missing", 0, true},
+ {"with pointer model", collection, 3, false},
+ {"with value model", *collection, 3, false},
+ {"with name", "demo1", 3, false},
+ {"with id", "wsmn24bux7wo113", 3, false},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ var records []*core.Record
+ err := app.RecordQuery(s.collection).All(&records)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasError %v, got %v", s.expectError, hasErr)
+ }
+
+ if total := len(records); total != s.expectedTotal {
+ t.Fatalf("Expected %d records, got %d", s.expectedTotal, total)
+ }
+ })
+ }
+}
+
+func TestRecordQueryOne(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ name string
+ collection string
+ recordId string
+ model core.Model
+ }{
+ {
+ "record model",
+ "demo1",
+ "84nmscqy84lsi1t",
+ &core.Record{},
+ },
+ {
+ "record proxy",
+ "demo1",
+ "84nmscqy84lsi1t",
+ &struct {
+ core.BaseRecordProxy
+ }{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ collection, err := app.FindCollectionByNameOrId(s.collection)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ q := app.RecordQuery(collection).
+ Where(dbx.HashExp{"id": s.recordId})
+
+ if err := q.One(s.model); err != nil {
+ t.Fatal(err)
+ }
+
+ if s.model.PK() != s.recordId {
+ t.Fatalf("Expected record with id %q, got %q", s.recordId, s.model.PK())
+ }
+ })
+ }
+}
+
+func TestRecordQueryAll(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ type mockRecordProxy struct {
+ core.BaseRecordProxy
+ }
+
+ scenarios := []struct {
+ name string
+ collection string
+ recordIds []any
+ result any
+ }{
+ {
+ "slice of Record models",
+ "demo1",
+ []any{"84nmscqy84lsi1t", "al1h9ijdeojtsjy"},
+ &[]core.Record{},
+ },
+ {
+ "slice of pointer Record models",
+ "demo1",
+ []any{"84nmscqy84lsi1t", "al1h9ijdeojtsjy"},
+ &[]*core.Record{},
+ },
+ {
+ "slice of Record proxies",
+ "demo1",
+ []any{"84nmscqy84lsi1t", "al1h9ijdeojtsjy"},
+ &[]mockRecordProxy{},
+ },
+ {
+ "slice of pointer Record proxies",
+ "demo1",
+ []any{"84nmscqy84lsi1t", "al1h9ijdeojtsjy"},
+ &[]mockRecordProxy{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ collection, err := app.FindCollectionByNameOrId(s.collection)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ q := app.RecordQuery(collection).
+ Where(dbx.HashExp{"id": s.recordIds})
+
+ if err := q.All(s.result); err != nil {
+ t.Fatal(err)
+ }
+
+ raw, err := json.Marshal(s.result)
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+
+ sliceOfMaps := []any{}
+ if err := json.Unmarshal(raw, &sliceOfMaps); err != nil {
+ t.Fatal(err)
+ }
+
+ if len(sliceOfMaps) != len(s.recordIds) {
+ t.Fatalf("Expected %d items, got %d", len(s.recordIds), len(sliceOfMaps))
+ }
+
+ for _, id := range s.recordIds {
+ if !strings.Contains(rawStr, fmt.Sprintf(`"id":%q`, id)) {
+ t.Fatalf("Missing id %q in\n%s", id, rawStr)
+ }
+ }
+ })
+ }
+}
+
+func TestFindRecordById(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ collectionIdOrName string
+ id string
+ filters []func(q *dbx.SelectQuery) error
+ expectError bool
+ }{
+ {"demo2", "missing", nil, true},
+ {"missing", "0yxhwia2amd8gec", nil, true},
+ {"demo2", "0yxhwia2amd8gec", nil, false},
+ {"demo2", "0yxhwia2amd8gec", []func(q *dbx.SelectQuery) error{}, false},
+ {"demo2", "0yxhwia2amd8gec", []func(q *dbx.SelectQuery) error{nil, nil}, false},
+ {"demo2", "0yxhwia2amd8gec", []func(q *dbx.SelectQuery) error{
+ nil,
+ func(q *dbx.SelectQuery) error { return nil },
+ }, false},
+ {"demo2", "0yxhwia2amd8gec", []func(q *dbx.SelectQuery) error{
+ func(q *dbx.SelectQuery) error {
+ q.AndWhere(dbx.HashExp{"title": "missing"})
+ return nil
+ },
+ }, true},
+ {"demo2", "0yxhwia2amd8gec", []func(q *dbx.SelectQuery) error{
+ func(q *dbx.SelectQuery) error {
+ return errors.New("test error")
+ },
+ }, true},
+ {"demo2", "0yxhwia2amd8gec", []func(q *dbx.SelectQuery) error{
+ func(q *dbx.SelectQuery) error {
+ q.AndWhere(dbx.HashExp{"title": "test3"})
+ return nil
+ },
+ }, false},
+ {"demo2", "0yxhwia2amd8gec", []func(q *dbx.SelectQuery) error{
+ func(q *dbx.SelectQuery) error {
+ q.AndWhere(dbx.HashExp{"title": "test3"})
+ return nil
+ },
+ nil,
+ }, false},
+ {"demo2", "0yxhwia2amd8gec", []func(q *dbx.SelectQuery) error{
+ func(q *dbx.SelectQuery) error {
+ q.AndWhere(dbx.HashExp{"title": "test3"})
+ return nil
+ },
+ func(q *dbx.SelectQuery) error {
+ q.AndWhere(dbx.HashExp{"active": false})
+ return nil
+ },
+ }, true},
+ {"sz5l5z67tg7gku0", "0yxhwia2amd8gec", []func(q *dbx.SelectQuery) error{
+ func(q *dbx.SelectQuery) error {
+ q.AndWhere(dbx.HashExp{"title": "test3"})
+ return nil
+ },
+ func(q *dbx.SelectQuery) error {
+ q.AndWhere(dbx.HashExp{"active": true})
+ return nil
+ },
+ }, false},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s_%s_%d", i, s.collectionIdOrName, s.id, len(s.filters)), func(t *testing.T) {
+ record, err := app.FindRecordById(
+ s.collectionIdOrName,
+ s.id,
+ s.filters...,
+ )
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if record != nil && record.Id != s.id {
+ t.Fatalf("Expected record with id %s, got %s", s.id, record.Id)
+ }
+ })
+ }
+}
+
+func TestFindRecordsByIds(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ collectionIdOrName string
+ ids []string
+ filters []func(q *dbx.SelectQuery) error
+ expectTotal int
+ expectError bool
+ }{
+ {"demo2", []string{}, nil, 0, false},
+ {"demo2", []string{""}, nil, 0, false},
+ {"demo2", []string{"missing"}, nil, 0, false},
+ {"missing", []string{"0yxhwia2amd8gec"}, nil, 0, true},
+ {"demo2", []string{"0yxhwia2amd8gec"}, nil, 1, false},
+ {"sz5l5z67tg7gku0", []string{"0yxhwia2amd8gec"}, nil, 1, false},
+ {
+ "demo2",
+ []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
+ nil,
+ 2,
+ false,
+ },
+ {
+ "demo2",
+ []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
+ []func(q *dbx.SelectQuery) error{},
+ 2,
+ false,
+ },
+ {
+ "demo2",
+ []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
+ []func(q *dbx.SelectQuery) error{nil, nil},
+ 2,
+ false,
+ },
+ {
+ "demo2",
+ []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
+ []func(q *dbx.SelectQuery) error{
+ func(q *dbx.SelectQuery) error {
+ return nil // empty filter
+ },
+ },
+ 2,
+ false,
+ },
+ {
+ "demo2",
+ []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
+ []func(q *dbx.SelectQuery) error{
+ func(q *dbx.SelectQuery) error {
+ return nil // empty filter
+ },
+ func(q *dbx.SelectQuery) error {
+ return errors.New("test error")
+ },
+ },
+ 0,
+ true,
+ },
+ {
+ "demo2",
+ []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
+ []func(q *dbx.SelectQuery) error{
+ func(q *dbx.SelectQuery) error {
+ q.AndWhere(dbx.HashExp{"active": true})
+ return nil
+ },
+ nil,
+ },
+ 1,
+ false,
+ },
+ {
+ "sz5l5z67tg7gku0",
+ []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
+ []func(q *dbx.SelectQuery) error{
+ func(q *dbx.SelectQuery) error {
+ q.AndWhere(dbx.HashExp{"active": true})
+ return nil
+ },
+ func(q *dbx.SelectQuery) error {
+ q.AndWhere(dbx.Not(dbx.HashExp{"title": ""}))
+ return nil
+ },
+ },
+ 1,
+ false,
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s_%v_%d", i, s.collectionIdOrName, s.ids, len(s.filters)), func(t *testing.T) {
+ records, err := app.FindRecordsByIds(
+ s.collectionIdOrName,
+ s.ids,
+ s.filters...,
+ )
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if len(records) != s.expectTotal {
+ t.Fatalf("Expected %d records, got %d", s.expectTotal, len(records))
+ }
+
+ for _, r := range records {
+ if !slices.Contains(s.ids, r.Id) {
+ t.Fatalf("Couldn't find id %s in %v", r.Id, s.ids)
+ }
+ }
+ })
+ }
+}
+
+func TestFindAllRecords(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ collectionIdOrName string
+ expressions []dbx.Expression
+ expectIds []string
+ expectError bool
+ }{
+ {
+ "missing",
+ nil,
+ []string{},
+ true,
+ },
+ {
+ "demo2",
+ nil,
+ []string{
+ "achvryl401bhse3",
+ "llvuca81nly1qls",
+ "0yxhwia2amd8gec",
+ },
+ false,
+ },
+ {
+ "demo2",
+ []dbx.Expression{
+ nil,
+ dbx.HashExp{"id": "123"},
+ },
+ []string{},
+ false,
+ },
+ {
+ "sz5l5z67tg7gku0",
+ []dbx.Expression{
+ dbx.Like("title", "test").Match(true, true),
+ dbx.HashExp{"active": true},
+ },
+ []string{
+ "achvryl401bhse3",
+ "0yxhwia2amd8gec",
+ },
+ false,
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, s.collectionIdOrName), func(t *testing.T) {
+ records, err := app.FindAllRecords(s.collectionIdOrName, s.expressions...)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if len(records) != len(s.expectIds) {
+ t.Fatalf("Expected %d records, got %d", len(s.expectIds), len(records))
+ }
+
+ for _, r := range records {
+ if !slices.Contains(s.expectIds, r.Id) {
+ t.Fatalf("Couldn't find id %s in %v", r.Id, s.expectIds)
+ }
+ }
+ })
+ }
+}
+
+func TestFindFirstRecordByData(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ collectionIdOrName string
+ key string
+ value any
+ expectId string
+ expectError bool
+ }{
+ {
+ "missing",
+ "id",
+ "llvuca81nly1qls",
+ "llvuca81nly1qls",
+ true,
+ },
+ {
+ "demo2",
+ "",
+ "llvuca81nly1qls",
+ "",
+ true,
+ },
+ {
+ "demo2",
+ "id",
+ "invalid",
+ "",
+ true,
+ },
+ {
+ "demo2",
+ "id",
+ "llvuca81nly1qls",
+ "llvuca81nly1qls",
+ false,
+ },
+ {
+ "sz5l5z67tg7gku0",
+ "title",
+ "test3",
+ "0yxhwia2amd8gec",
+ false,
+ },
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s_%s_%v", i, s.collectionIdOrName, s.key, s.value), func(t *testing.T) {
+ record, err := app.FindFirstRecordByData(s.collectionIdOrName, s.key, s.value)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if !s.expectError && record.Id != s.expectId {
+ t.Fatalf("Expected record with id %s, got %v", s.expectId, record.Id)
+ }
+ })
+ }
+}
+
+func TestFindRecordsByFilter(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ name string
+ collectionIdOrName string
+ filter string
+ sort string
+ limit int
+ offset int
+ params []dbx.Params
+ expectError bool
+ expectRecordIds []string
+ }{
+ {
+ "missing collection",
+ "missing",
+ "id != ''",
+ "",
+ 0,
+ 0,
+ nil,
+ true,
+ nil,
+ },
+ {
+ "invalid filter",
+ "demo2",
+ "someMissingField > 1",
+ "",
+ 0,
+ 0,
+ nil,
+ true,
+ nil,
+ },
+ {
+ "empty filter",
+ "demo2",
+ "",
+ "",
+ 0,
+ 0,
+ nil,
+ false,
+ []string{
+ "llvuca81nly1qls",
+ "achvryl401bhse3",
+ "0yxhwia2amd8gec",
+ },
+ },
+ {
+ "simple filter",
+ "demo2",
+ "id != ''",
+ "",
+ 0,
+ 0,
+ nil,
+ false,
+ []string{
+ "llvuca81nly1qls",
+ "achvryl401bhse3",
+ "0yxhwia2amd8gec",
+ },
+ },
+ {
+ "multi-condition filter with sort",
+ "demo2",
+ "id != '' && active=true",
+ "-created,title",
+ -1, // should behave the same as 0
+ 0,
+ nil,
+ false,
+ []string{
+ "0yxhwia2amd8gec",
+ "achvryl401bhse3",
+ },
+ },
+ {
+ "with limit and offset",
+ "sz5l5z67tg7gku0",
+ "id != ''",
+ "title",
+ 2,
+ 1,
+ nil,
+ false,
+ []string{
+ "achvryl401bhse3",
+ "0yxhwia2amd8gec",
+ },
+ },
+ {
+ "with placeholder params",
+ "demo2",
+ "active = {:active}",
+ "",
+ 10,
+ 0,
+ []dbx.Params{{"active": false}},
+ false,
+ []string{
+ "llvuca81nly1qls",
+ },
+ },
+ {
+ "with json filter and sort",
+ "demo4",
+ "json_object != null && json_object.a.b = 'test'",
+ "-json_object.a",
+ 10,
+ 0,
+ []dbx.Params{{"active": false}},
+ false,
+ []string{
+ "i9naidtvr6qsgb4",
+ },
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ records, err := app.FindRecordsByFilter(
+ s.collectionIdOrName,
+ s.filter,
+ s.sort,
+ s.limit,
+ s.offset,
+ s.params...,
+ )
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if hasErr {
+ return
+ }
+
+ if len(records) != len(s.expectRecordIds) {
+ t.Fatalf("Expected %d records, got %d", len(s.expectRecordIds), len(records))
+ }
+
+ for i, id := range s.expectRecordIds {
+ if id != records[i].Id {
+ t.Fatalf("Expected record with id %q, got %q at index %d", id, records[i].Id, i)
+ }
+ }
+ })
+ }
+}
+
+func TestFindFirstRecordByFilter(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ name string
+ collectionIdOrName string
+ filter string
+ params []dbx.Params
+ expectError bool
+ expectRecordId string
+ }{
+ {
+ "missing collection",
+ "missing",
+ "id != ''",
+ nil,
+ true,
+ "",
+ },
+ {
+ "invalid filter",
+ "demo2",
+ "someMissingField > 1",
+ nil,
+ true,
+ "",
+ },
+ {
+ "empty filter",
+ "demo2",
+ "",
+ nil,
+ false,
+ "llvuca81nly1qls",
+ },
+ {
+ "valid filter but no matches",
+ "demo2",
+ "id = 'test'",
+ nil,
+ true,
+ "",
+ },
+ {
+ "valid filter and multiple matches",
+ "sz5l5z67tg7gku0",
+ "id != ''",
+ nil,
+ false,
+ "llvuca81nly1qls",
+ },
+ {
+ "with placeholder params",
+ "demo2",
+ "active = {:active}",
+ []dbx.Params{{"active": false}},
+ false,
+ "llvuca81nly1qls",
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ record, err := app.FindFirstRecordByFilter(s.collectionIdOrName, s.filter, s.params...)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if hasErr {
+ return
+ }
+
+ if record.Id != s.expectRecordId {
+ t.Fatalf("Expected record with id %q, got %q", s.expectRecordId, record.Id)
+ }
+ })
+ }
+}
+
+func TestCountRecords(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ name string
+ collectionIdOrName string
+ expressions []dbx.Expression
+ expectTotal int64
+ expectError bool
+ }{
+ {
+ "missing collection",
+ "missing",
+ nil,
+ 0,
+ true,
+ },
+ {
+ "valid collection name",
+ "demo2",
+ nil,
+ 3,
+ false,
+ },
+ {
+ "valid collection id",
+ "sz5l5z67tg7gku0",
+ nil,
+ 3,
+ false,
+ },
+ {
+ "nil expression",
+ "demo2",
+ []dbx.Expression{nil},
+ 3,
+ false,
+ },
+ {
+ "no matches",
+ "demo2",
+ []dbx.Expression{
+ nil,
+ dbx.Like("title", "missing"),
+ dbx.HashExp{"active": true},
+ },
+ 0,
+ false,
+ },
+ {
+ "with matches",
+ "demo2",
+ []dbx.Expression{
+ nil,
+ dbx.Like("title", "test"),
+ dbx.HashExp{"active": true},
+ },
+ 2,
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ total, err := app.CountRecords(s.collectionIdOrName, s.expressions...)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if total != s.expectTotal {
+ t.Fatalf("Expected total %d, got %d", s.expectTotal, total)
+ }
+ })
+ }
+}
+
+func TestFindAuthRecordByToken(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ name string
+ token string
+ types []string
+ expectedId string
+ }{
+ {
+ "empty token",
+ "",
+ nil,
+ "",
+ },
+ {
+ "invalid token",
+ "invalid",
+ nil,
+ "",
+ },
+ {
+ "expired token",
+ "eyJhbGciOiJIUzI1NiJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZXhwIjoxNjQwOTkxNjYxLCJyZWZyZXNoYWJsZSI6dHJ1ZX0.2D3tmqPn3vc5LoqqCz8V-iCDVXo9soYiH0d32G7FQT4",
+ nil,
+ "",
+ },
+ {
+ "valid auth token",
+ "eyJhbGciOiJIUzI1NiJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZXhwIjoyNTI0NjA0NDYxLCJyZWZyZXNoYWJsZSI6dHJ1ZX0.ZT3F0Z3iM-xbGgSG3LEKiEzHrPHr8t8IuHLZGGNuxLo",
+ nil,
+ "4q1xlclmfloku33",
+ },
+ {
+ "valid verification token",
+ "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6ImRjNDlrNmpnZWpuNDBoMyIsImV4cCI6MjUyNDYwNDQ2MSwidHlwZSI6InZlcmlmaWNhdGlvbiIsImNvbGxlY3Rpb25JZCI6ImtwdjcwOXNrMmxxYnFrOCIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSJ9.5GmuZr4vmwk3Cb_3ZZWNxwbE75KZC-j71xxIPR9AsVw",
+ nil,
+ "dc49k6jgejn40h3",
+ },
+ {
+ "auth token with file type only check",
+ "eyJhbGciOiJIUzI1NiJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZXhwIjoyNTI0NjA0NDYxLCJyZWZyZXNoYWJsZSI6dHJ1ZX0.ZT3F0Z3iM-xbGgSG3LEKiEzHrPHr8t8IuHLZGGNuxLo",
+ []string{core.TokenTypeFile},
+ "",
+ },
+ {
+ "auth token with file and auth type check",
+ "eyJhbGciOiJIUzI1NiJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZXhwIjoyNTI0NjA0NDYxLCJyZWZyZXNoYWJsZSI6dHJ1ZX0.ZT3F0Z3iM-xbGgSG3LEKiEzHrPHr8t8IuHLZGGNuxLo",
+ []string{core.TokenTypeFile, core.TokenTypeAuth},
+ "4q1xlclmfloku33",
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ record, err := app.FindAuthRecordByToken(s.token, s.types...)
+
+ hasErr := err != nil
+ expectErr := s.expectedId == ""
+ if hasErr != expectErr {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", expectErr, hasErr, err)
+ }
+
+ if hasErr {
+ return
+ }
+
+ if record.Id != s.expectedId {
+ t.Fatalf("Expected record with id %q, got %q", s.expectedId, record.Id)
+ }
+ })
+ }
+}
+
+func TestFindAuthRecordByEmail(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ collectionIdOrName string
+ email string
+ expectError bool
+ }{
+ {"missing", "test@example.com", true},
+ {"demo2", "test@example.com", true},
+ {"users", "missing@example.com", true},
+ {"users", "test@example.com", false},
+ {"clients", "test2@example.com", false},
+ }
+
+ for _, s := range scenarios {
+ t.Run(fmt.Sprintf("%s_%s", s.collectionIdOrName, s.email), func(t *testing.T) {
+ record, err := app.FindAuthRecordByEmail(s.collectionIdOrName, s.email)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+
+ if hasErr {
+ return
+ }
+
+ if record.Email() != s.email {
+ t.Fatalf("Expected record with email %s, got %s", s.email, record.Email())
+ }
+ })
+ }
+}
+
+func TestCanAccessRecord(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ superuser, err := app.FindAuthRecordByEmail(core.CollectionNameSuperusers, "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ record, err := app.FindRecordById("demo1", "imy661ixudk5izi")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ name string
+ record *core.Record
+ requestInfo *core.RequestInfo
+ rule *string
+ expected bool
+ expectError bool
+ }{
+ {
+ "as superuser with nil rule",
+ record,
+ &core.RequestInfo{
+ Auth: superuser,
+ },
+ nil,
+ true,
+ false,
+ },
+ {
+ "as superuser with non-empty rule",
+ record,
+ &core.RequestInfo{
+ Auth: superuser,
+ },
+ types.Pointer("id = ''"), // the filter rule should be ignored
+ true,
+ false,
+ },
+ {
+ "as superuser with invalid rule",
+ record,
+ &core.RequestInfo{
+ Auth: superuser,
+ },
+ types.Pointer("id ?!@ 1"), // the filter rule should be ignored
+ true,
+ false,
+ },
+ {
+ "as guest with nil rule",
+ record,
+ &core.RequestInfo{},
+ nil,
+ false,
+ false,
+ },
+ {
+ "as guest with empty rule",
+ record,
+ &core.RequestInfo{},
+ types.Pointer(""),
+ true,
+ false,
+ },
+ {
+ "as guest with invalid rule",
+ record,
+ &core.RequestInfo{},
+ types.Pointer("id ?!@ 1"),
+ false,
+ true,
+ },
+ {
+ "as guest with mismatched rule",
+ record,
+ &core.RequestInfo{},
+ types.Pointer("@request.auth.id != ''"),
+ false,
+ false,
+ },
+ {
+ "as guest with matched rule",
+ record,
+ &core.RequestInfo{
+ Body: map[string]any{"test": 1},
+ },
+ types.Pointer("@request.auth.id != '' || @request.body.test = 1"),
+ true,
+ false,
+ },
+ {
+ "as auth record with nil rule",
+ record,
+ &core.RequestInfo{
+ Auth: user,
+ },
+ nil,
+ false,
+ false,
+ },
+ {
+ "as auth record with empty rule",
+ record,
+ &core.RequestInfo{
+ Auth: user,
+ },
+ types.Pointer(""),
+ true,
+ false,
+ },
+ {
+ "as auth record with invalid rule",
+ record,
+ &core.RequestInfo{
+ Auth: user,
+ },
+ types.Pointer("id ?!@ 1"),
+ false,
+ true,
+ },
+ {
+ "as auth record with mismatched rule",
+ record,
+ &core.RequestInfo{
+ Auth: user,
+ Body: map[string]any{"test": 1},
+ },
+ types.Pointer("@request.auth.id != '' && @request.body.test > 1"),
+ false,
+ false,
+ },
+ {
+ "as auth record with matched rule",
+ record,
+ &core.RequestInfo{
+ Auth: user,
+ Body: map[string]any{"test": 2},
+ },
+ types.Pointer("@request.auth.id != '' && @request.body.test > 1"),
+ true,
+ false,
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result, err := app.CanAccessRecord(s.record, s.requestInfo, s.rule)
+
+ if result != s.expected {
+ t.Fatalf("Expected %v, got %v", s.expected, result)
+ }
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
diff --git a/core/record_tokens.go b/core/record_tokens.go
new file mode 100644
index 00000000..e1bf7a95
--- /dev/null
+++ b/core/record_tokens.go
@@ -0,0 +1,165 @@
+package core
+
+import (
+ "errors"
+ "time"
+
+ "github.com/golang-jwt/jwt/v4"
+ "github.com/pocketbase/pocketbase/tools/security"
+)
+
+// Supported record token types
+const (
+ TokenTypeAuth = "auth"
+ TokenTypeFile = "file"
+ TokenTypeVerification = "verification"
+ TokenTypePasswordReset = "passwordReset"
+ TokenTypeEmailChange = "emailChange"
+)
+
+// List with commonly used record token claims
+const (
+ TokenClaimId = "id"
+ TokenClaimType = "type"
+ TokenClaimCollectionId = "collectionId"
+ TokenClaimEmail = "email"
+ TokenClaimNewEmail = "newEmail"
+ TokenClaimRefreshable = "refreshable"
+)
+
+// Common token related errors
+var (
+ ErrNotAuthRecord = errors.New("not an auth collection record")
+ ErrMissingSigningKey = errors.New("missing or invalid signing key")
+)
+
+// NewStaticAuthToken generates and returns a new static record authentication token.
+//
+// Static auth tokens are similar to the regular auth tokens, but are
+// non-refreshable and support custom duration.
+//
+// Zero or negative duration will fall back to the duration from the auth collection settings.
+func (m *Record) NewStaticAuthToken(duration time.Duration) (string, error) {
+ return m.newAuthToken(duration, false)
+}
+
+// NewAuthToken generates and returns a new record authentication token.
+func (m *Record) NewAuthToken() (string, error) {
+ return m.newAuthToken(0, true)
+}
+
+func (m *Record) newAuthToken(duration time.Duration, refreshable bool) (string, error) {
+ if !m.Collection().IsAuth() {
+ return "", ErrNotAuthRecord
+ }
+
+ key := (m.TokenKey() + m.Collection().AuthToken.Secret)
+ if key == "" {
+ return "", ErrMissingSigningKey
+ }
+
+ claims := jwt.MapClaims{
+ TokenClaimType: TokenTypeAuth,
+ TokenClaimId: m.Id,
+ TokenClaimCollectionId: m.Collection().Id,
+ TokenClaimRefreshable: refreshable,
+ }
+
+ if duration <= 0 {
+ duration = m.Collection().AuthToken.DurationTime()
+ }
+
+ return security.NewJWT(claims, key, duration)
+}
+
+// NewVerificationToken generates and returns a new record verification token.
+func (m *Record) NewVerificationToken() (string, error) {
+ if !m.Collection().IsAuth() {
+ return "", ErrNotAuthRecord
+ }
+
+ key := (m.TokenKey() + m.Collection().VerificationToken.Secret)
+ if key == "" {
+ return "", ErrMissingSigningKey
+ }
+
+ return security.NewJWT(
+ jwt.MapClaims{
+ TokenClaimType: TokenTypeVerification,
+ TokenClaimId: m.Id,
+ TokenClaimCollectionId: m.Collection().Id,
+ TokenClaimEmail: m.Email(),
+ },
+ key,
+ m.Collection().VerificationToken.DurationTime(),
+ )
+}
+
+// NewPasswordResetToken generates and returns a new auth record password reset request token.
+func (m *Record) NewPasswordResetToken() (string, error) {
+ if !m.Collection().IsAuth() {
+ return "", ErrNotAuthRecord
+ }
+
+ key := (m.TokenKey() + m.Collection().PasswordResetToken.Secret)
+ if key == "" {
+ return "", ErrMissingSigningKey
+ }
+
+ return security.NewJWT(
+ jwt.MapClaims{
+ TokenClaimType: TokenTypePasswordReset,
+ TokenClaimId: m.Id,
+ TokenClaimCollectionId: m.Collection().Id,
+ TokenClaimEmail: m.Email(),
+ },
+ key,
+ m.Collection().PasswordResetToken.DurationTime(),
+ )
+}
+
+// NewEmailChangeToken generates and returns a new auth record change email request token.
+func (m *Record) NewEmailChangeToken(newEmail string) (string, error) {
+ if !m.Collection().IsAuth() {
+ return "", ErrNotAuthRecord
+ }
+
+ key := (m.TokenKey() + m.Collection().EmailChangeToken.Secret)
+ if key == "" {
+ return "", ErrMissingSigningKey
+ }
+
+ return security.NewJWT(
+ jwt.MapClaims{
+ TokenClaimType: TokenTypeEmailChange,
+ TokenClaimId: m.Id,
+ TokenClaimCollectionId: m.Collection().Id,
+ TokenClaimEmail: m.Email(),
+ TokenClaimNewEmail: newEmail,
+ },
+ key,
+ m.Collection().EmailChangeToken.DurationTime(),
+ )
+}
+
+// NewFileToken generates and returns a new record private file access token.
+func (m *Record) NewFileToken() (string, error) {
+ if !m.Collection().IsAuth() {
+ return "", ErrNotAuthRecord
+ }
+
+ key := (m.TokenKey() + m.Collection().FileToken.Secret)
+ if key == "" {
+ return "", ErrMissingSigningKey
+ }
+
+ return security.NewJWT(
+ jwt.MapClaims{
+ TokenClaimType: TokenTypeFile,
+ TokenClaimId: m.Id,
+ TokenClaimCollectionId: m.Collection().Id,
+ },
+ key,
+ m.Collection().FileToken.DurationTime(),
+ )
+}
diff --git a/core/record_tokens_test.go b/core/record_tokens_test.go
new file mode 100644
index 00000000..34ad909a
--- /dev/null
+++ b/core/record_tokens_test.go
@@ -0,0 +1,176 @@
+package core_test
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/security"
+ "github.com/spf13/cast"
+)
+
+func TestNewStaticAuthToken(t *testing.T) {
+ t.Parallel()
+
+ testRecordToken(t, core.TokenTypeAuth, func(record *core.Record) (string, error) {
+ return record.NewStaticAuthToken(0)
+ }, map[string]any{
+ core.TokenClaimRefreshable: false,
+ })
+}
+
+func TestNewStaticAuthTokenWithCustomDuration(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ var tolerance int64 = 1 // in sec
+
+ durations := []int64{-100, 0, 100}
+
+ for i, d := range durations {
+ t.Run(fmt.Sprintf("%d_%d", i, d), func(t *testing.T) {
+ now := time.Now()
+
+ duration := time.Duration(d) * time.Second
+
+ token, err := user.NewStaticAuthToken(duration)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ claims, err := security.ParseUnverifiedJWT(token)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ exp := cast.ToInt64(claims["exp"])
+
+ expectedDuration := duration
+ // should fallback to the collection setting
+ if expectedDuration <= 0 {
+ expectedDuration = user.Collection().AuthToken.DurationTime()
+ }
+ expectedMinExp := now.Add(expectedDuration).Unix() - tolerance
+ expectedMaxExp := now.Add(expectedDuration).Unix() + tolerance
+
+ if exp < expectedMinExp {
+ t.Fatalf("Expected token exp to be greater than %d, got %d", expectedMinExp, exp)
+ }
+
+ if exp > expectedMaxExp {
+ t.Fatalf("Expected token exp to be less than %d, got %d", expectedMaxExp, exp)
+ }
+ })
+ }
+}
+
+func TestNewAuthToken(t *testing.T) {
+ t.Parallel()
+
+ testRecordToken(t, core.TokenTypeAuth, func(record *core.Record) (string, error) {
+ return record.NewAuthToken()
+ }, map[string]any{
+ core.TokenClaimRefreshable: true,
+ })
+}
+
+func TestNewVerificationToken(t *testing.T) {
+ t.Parallel()
+
+ testRecordToken(t, core.TokenTypeVerification, func(record *core.Record) (string, error) {
+ return record.NewVerificationToken()
+ }, nil)
+}
+
+func TestNewPasswordResetToken(t *testing.T) {
+ t.Parallel()
+
+ testRecordToken(t, core.TokenTypePasswordReset, func(record *core.Record) (string, error) {
+ return record.NewPasswordResetToken()
+ }, nil)
+}
+
+func TestNewEmailChangeToken(t *testing.T) {
+ t.Parallel()
+
+ testRecordToken(t, core.TokenTypeEmailChange, func(record *core.Record) (string, error) {
+ return record.NewEmailChangeToken("new@example.com")
+ }, nil)
+}
+
+func TestNewFileToken(t *testing.T) {
+ t.Parallel()
+
+ testRecordToken(t, core.TokenTypeFile, func(record *core.Record) (string, error) {
+ return record.NewFileToken()
+ }, nil)
+}
+
+func testRecordToken(
+ t *testing.T,
+ tokenType string,
+ tokenFunc func(record *core.Record) (string, error),
+ expectedClaims map[string]any,
+) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ demo1, err := app.FindRecordById("demo1", "84nmscqy84lsi1t")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ user, err := app.FindAuthRecordByEmail("users", "test@example.com")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ t.Run("non-auth record", func(t *testing.T) {
+ _, err = tokenFunc(demo1)
+ if err == nil {
+ t.Fatal("Expected error for non-auth records")
+ }
+ })
+
+ t.Run("auth record", func(t *testing.T) {
+ token, err := tokenFunc(user)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ tokenRecord, _ := app.FindAuthRecordByToken(token, tokenType)
+ if tokenRecord == nil || tokenRecord.Id != user.Id {
+ t.Fatalf("Expected auth record\n%v\ngot\n%v", user, tokenRecord)
+ }
+
+ if len(expectedClaims) > 0 {
+ claims, _ := security.ParseUnverifiedJWT(token)
+ for k, v := range expectedClaims {
+ if claims[k] != v {
+ t.Errorf("Expected claim %q with value %#v, got %#v", k, v, claims[k])
+ }
+ }
+ }
+ })
+
+ t.Run("empty signing key", func(t *testing.T) {
+ user.SetTokenKey("")
+ collection := user.Collection()
+ *collection = core.Collection{}
+ collection.Type = core.CollectionTypeAuth
+
+ _, err := tokenFunc(user)
+ if err == nil {
+ t.Fatal("Expected empty signing key error")
+ }
+ })
+}
diff --git a/core/settings_model.go b/core/settings_model.go
new file mode 100644
index 00000000..c918371a
--- /dev/null
+++ b/core/settings_model.go
@@ -0,0 +1,675 @@
+package core
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "os"
+ "regexp"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/go-ozzo/ozzo-validation/v4/is"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tools/cron"
+ "github.com/pocketbase/pocketbase/tools/hook"
+ "github.com/pocketbase/pocketbase/tools/mailer"
+ "github.com/pocketbase/pocketbase/tools/security"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+const (
+ paramsTable = "_params"
+
+ paramsKeySettings = "settings"
+
+ systemHookIdSettings = "__pbSettingsSystemHook__"
+)
+
+func (app *BaseApp) registerSettingsHooks() {
+ saveFunc := func(me *ModelEvent) error {
+ if err := me.Next(); err != nil {
+ return err
+ }
+
+ if me.Model.PK() == paramsKeySettings {
+ // auto reload the app settings because we don't know whether
+ // the Settings model is the app one or a different one
+ return errors.Join(
+ me.App.Settings().PostScan(),
+ me.App.ReloadSettings(),
+ )
+ }
+
+ return nil
+ }
+
+ app.OnModelAfterCreateSuccess(paramsTable).Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdSettings,
+ Func: saveFunc,
+ Priority: -999,
+ })
+
+ app.OnModelAfterUpdateSuccess(paramsTable).Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdSettings,
+ Func: saveFunc,
+ Priority: -999,
+ })
+
+ app.OnModelDelete(paramsTable).Bind(&hook.Handler[*ModelEvent]{
+ Id: systemHookIdSettings,
+ Func: func(me *ModelEvent) error {
+ if me.Model.PK() == paramsKeySettings {
+ return errors.New("the app params settings cannot be deleted")
+ }
+
+ return me.Next()
+ },
+ Priority: -999,
+ })
+
+ app.OnCollectionUpdate().Bind(&hook.Handler[*CollectionEvent]{
+ Id: systemHookIdSettings,
+ Func: func(e *CollectionEvent) error {
+ oldCollection, err := e.App.FindCachedCollectionByNameOrId(e.Collection.Id)
+ if err != nil {
+ return fmt.Errorf("failed to retrieve old cached collection: %w", err)
+ }
+
+ err = e.Next()
+ if err != nil {
+ return err
+ }
+
+ // update existing rate limit rules on collection rename
+ if oldCollection.Name != e.Collection.Name {
+ var hasChange bool
+
+ rules := e.App.Settings().RateLimits.Rules
+ for i := 0; i < len(rules); i++ {
+ if strings.HasPrefix(rules[i].Label, oldCollection.Name+":") {
+ rules[i].Label = strings.Replace(rules[i].Label, oldCollection.Name+":", e.Collection.Name+":", 1)
+ hasChange = true
+ }
+ }
+
+ if hasChange {
+ e.App.Settings().RateLimits.Rules = rules
+ err = e.App.Save(e.App.Settings())
+ if err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+ },
+ Priority: 99,
+ })
+}
+
+var (
+ _ Model = (*Settings)(nil)
+ _ PostValidator = (*Settings)(nil)
+ _ DBExporter = (*Settings)(nil)
+)
+
+type settings struct {
+ SMTP SMTPConfig `form:"smtp" json:"smtp"`
+ Backups BackupsConfig `form:"backups" json:"backups"`
+ S3 S3Config `form:"s3" json:"s3"`
+ Meta MetaConfig `form:"meta" json:"meta"`
+ Logs LogsConfig `form:"logs" json:"logs"`
+ Batch BatchConfig `form:"batch" json:"batch"`
+ RateLimits RateLimitsConfig `form:"rateLimits" json:"rateLimits"`
+ TrustedProxy TrustedProxyConfig `form:"trustedProxy" json:"trustedProxy"`
+}
+
+// Settings defines the PocketBase app settings.
+type Settings struct {
+ settings
+
+ mu sync.RWMutex
+ isNew bool
+}
+
+func newDefaultSettings() *Settings {
+ return &Settings{
+ isNew: true,
+ settings: settings{
+ Meta: MetaConfig{
+ AppName: "Acme",
+ AppURL: "http://localhost:8090",
+ HideControls: false,
+ SenderName: "Support",
+ SenderAddress: "support@example.com",
+ },
+ Logs: LogsConfig{
+ MaxDays: 5,
+ LogIP: true,
+ },
+ SMTP: SMTPConfig{
+ Enabled: false,
+ Host: "smtp.example.com",
+ Port: 587,
+ Username: "",
+ Password: "",
+ TLS: false,
+ },
+ Backups: BackupsConfig{
+ CronMaxKeep: 3,
+ },
+ Batch: BatchConfig{
+ Enabled: false,
+ MaxRequests: 50,
+ Timeout: 3,
+ },
+ RateLimits: RateLimitsConfig{
+ Enabled: false, // @todo once tested enough enable by default for new installations
+ Rules: []RateLimitRule{
+ {Label: "*:auth", MaxRequests: 2, Duration: 3},
+ {Label: "*:create", MaxRequests: 20, Duration: 5},
+ {Label: "/api/batch", MaxRequests: 3, Duration: 1},
+ {Label: "/api/", MaxRequests: 300, Duration: 10},
+ },
+ },
+ },
+ }
+}
+
+// TableName implements [Model.TableName] interface method.
+func (s *Settings) TableName() string {
+ return paramsTable
+}
+
+// LastSavedPK implements [Model.LastSavedPK] interface method.
+func (s *Settings) LastSavedPK() any {
+ return paramsKeySettings
+}
+
+// PK implements [Model.PK] interface method.
+func (s *Settings) PK() any {
+ return paramsKeySettings
+}
+
+// IsNew implements [Model.IsNew] interface method.
+func (s *Settings) IsNew() bool {
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+
+ return s.isNew
+}
+
+// MarkAsNew implements [Model.MarkAsNew] interface method.
+func (s *Settings) MarkAsNew() {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ s.isNew = true
+}
+
+// MarkAsNotNew implements [Model.MarkAsNotNew] interface method.
+func (s *Settings) MarkAsNotNew() {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ s.isNew = false
+}
+
+// PostScan implements [Model.PostScan] interface method.
+func (s *Settings) PostScan() error {
+ s.MarkAsNotNew()
+ return nil
+}
+
+// String returns a serialized string representation of the current settings.
+func (s *Settings) String() string {
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+
+ raw, _ := json.Marshal(s)
+ return string(raw)
+}
+
+// DBExport prepares and exports the current settings for db persistence.
+func (s *Settings) DBExport(app App) (map[string]any, error) {
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+
+ now := types.NowDateTime()
+
+ result := map[string]any{
+ "id": s.PK(),
+ }
+
+ if s.IsNew() {
+ result["created"] = now
+ }
+ result["updated"] = now
+
+ encoded, err := json.Marshal(s.settings)
+ if err != nil {
+ return nil, err
+ }
+
+ encryptionKey := os.Getenv(app.EncryptionEnv())
+ if encryptionKey != "" {
+ encryptVal, encryptErr := security.Encrypt(encoded, encryptionKey)
+ if encryptErr != nil {
+ return nil, encryptErr
+ }
+
+ result["value"] = encryptVal
+ } else {
+ result["value"] = encoded
+ }
+
+ return result, nil
+}
+
+// PostValidate implements the [PostValidator] interface and defines
+// the Settings model validations.
+func (s *Settings) PostValidate(ctx context.Context, app App) error {
+ s.mu.RLock()
+ defer s.mu.RUnlock()
+
+ return validation.ValidateStructWithContext(ctx, s,
+ validation.Field(&s.Meta),
+ validation.Field(&s.Logs),
+ validation.Field(&s.SMTP),
+ validation.Field(&s.S3),
+ validation.Field(&s.Backups),
+ validation.Field(&s.Batch),
+ validation.Field(&s.RateLimits),
+ validation.Field(&s.TrustedProxy),
+ )
+}
+
+// Merge merges the "other" settings into the current one.
+func (s *Settings) Merge(other *Settings) error {
+ other.mu.RLock()
+ defer other.mu.RUnlock()
+
+ raw, err := json.Marshal(other.settings)
+ if err != nil {
+ return err
+ }
+
+ s.mu.Lock()
+ defer s.mu.Unlock()
+
+ return json.Unmarshal(raw, &s)
+}
+
+// Clone creates a new deep copy of the current settings.
+func (s *Settings) Clone() (*Settings, error) {
+ clone := &Settings{
+ isNew: s.isNew,
+ }
+
+ if err := clone.Merge(s); err != nil {
+ return nil, err
+ }
+
+ return clone, nil
+}
+
+// MarshalJSON implements the [json.Marshaler] interface.
+//
+// Note that sensitive fields (S3 secret, SMTP password, etc.) are excluded.
+func (s *Settings) MarshalJSON() ([]byte, error) {
+ s.mu.RLock()
+ copy := s.settings
+ s.mu.RUnlock()
+
+ sensitiveFields := []*string{
+ ©.SMTP.Password,
+ ©.S3.Secret,
+ ©.Backups.S3.Secret,
+ }
+
+ // mask all sensitive fields
+ for _, v := range sensitiveFields {
+ if v != nil && *v != "" {
+ *v = ""
+ }
+ }
+
+ return json.Marshal(copy)
+}
+
+// -------------------------------------------------------------------
+
+type SMTPConfig struct {
+ Enabled bool `form:"enabled" json:"enabled"`
+ Port int `form:"port" json:"port"`
+ Host string `form:"host" json:"host"`
+ Username string `form:"username" json:"username"`
+ Password string `form:"password" json:"password,omitempty"`
+
+ // SMTP AUTH - PLAIN (default) or LOGIN
+ AuthMethod string `form:"authMethod" json:"authMethod"`
+
+ // Whether to enforce TLS encryption for the mail server connection.
+ //
+ // When set to false StartTLS command is sent, leaving the server
+ // to decide whether to upgrade the connection or not.
+ TLS bool `form:"tls" json:"tls"`
+
+ // LocalName is optional domain name or IP address used for the
+ // EHLO/HELO exchange (if not explicitly set, defaults to "localhost").
+ //
+ // This is required only by some SMTP servers, such as Gmail SMTP-relay.
+ LocalName string `form:"localName" json:"localName"`
+}
+
+// Validate makes SMTPConfig validatable by implementing [validation.Validatable] interface.
+func (c SMTPConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(
+ &c.Host,
+ validation.When(c.Enabled, validation.Required),
+ is.Host,
+ ),
+ validation.Field(
+ &c.Port,
+ validation.When(c.Enabled, validation.Required),
+ validation.Min(0),
+ ),
+ validation.Field(
+ &c.AuthMethod,
+ // don't require it for backward compatibility
+ // (fallback internally to PLAIN)
+ // validation.When(c.Enabled, validation.Required),
+ validation.In(mailer.SMTPAuthLogin, mailer.SMTPAuthPlain),
+ ),
+ validation.Field(&c.LocalName, is.Host),
+ )
+}
+
+// -------------------------------------------------------------------
+
+type S3Config struct {
+ Enabled bool `form:"enabled" json:"enabled"`
+ Bucket string `form:"bucket" json:"bucket"`
+ Region string `form:"region" json:"region"`
+ Endpoint string `form:"endpoint" json:"endpoint"`
+ AccessKey string `form:"accessKey" json:"accessKey"`
+ Secret string `form:"secret" json:"secret,omitempty"`
+ ForcePathStyle bool `form:"forcePathStyle" json:"forcePathStyle"`
+}
+
+// Validate makes S3Config validatable by implementing [validation.Validatable] interface.
+func (c S3Config) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.Endpoint, is.URL, validation.When(c.Enabled, validation.Required)),
+ validation.Field(&c.Bucket, validation.When(c.Enabled, validation.Required)),
+ validation.Field(&c.Region, validation.When(c.Enabled, validation.Required)),
+ validation.Field(&c.AccessKey, validation.When(c.Enabled, validation.Required)),
+ validation.Field(&c.Secret, validation.When(c.Enabled, validation.Required)),
+ )
+}
+
+// -------------------------------------------------------------------
+
+type BatchConfig struct {
+ Enabled bool `form:"enabled" json:"enabled"`
+
+ // MaxRequests is the maximum allowed batch request to execute.
+ MaxRequests int `form:"maxRequests" json:"maxRequests"`
+
+ // Timeout is the max duration in seconds to wait before cancelling the batch transaction.
+ Timeout int64 `form:"timeout" json:"timeout"`
+
+ // MaxBodySize is the maximum allowed batch request body size in bytes.
+ //
+ // If not set, fallbacks to max ~128MB.
+ MaxBodySize int64 `form:"maxBodySize" json:"maxBodySize"`
+}
+
+// Validate makes BatchConfig validatable by implementing [validation.Validatable] interface.
+func (c BatchConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.MaxRequests, validation.When(c.Enabled, validation.Required), validation.Min(0)),
+ validation.Field(&c.Timeout, validation.When(c.Enabled, validation.Required), validation.Min(0)),
+ validation.Field(&c.MaxBodySize, validation.Min(0)),
+ )
+}
+
+// -------------------------------------------------------------------
+
+type BackupsConfig struct {
+ // Cron is a cron expression to schedule auto backups, eg. "* * * * *".
+ //
+ // Leave it empty to disable the auto backups functionality.
+ Cron string `form:"cron" json:"cron"`
+
+ // CronMaxKeep is the max number of cron generated backups to
+ // keep before removing older entries.
+ //
+ // This field works only when the cron config has valid cron expression.
+ CronMaxKeep int `form:"cronMaxKeep" json:"cronMaxKeep"`
+
+ // S3 is an optional S3 storage config specifying where to store the app backups.
+ S3 S3Config `form:"s3" json:"s3"`
+}
+
+// Validate makes BackupsConfig validatable by implementing [validation.Validatable] interface.
+func (c BackupsConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.S3),
+ validation.Field(&c.Cron, validation.By(checkCronExpression)),
+ validation.Field(
+ &c.CronMaxKeep,
+ validation.When(c.Cron != "", validation.Required),
+ validation.Min(1),
+ ),
+ )
+}
+
+func checkCronExpression(value any) error {
+ v, _ := value.(string)
+ if v == "" {
+ return nil // nothing to check
+ }
+
+ _, err := cron.NewSchedule(v)
+ if err != nil {
+ return validation.NewError("validation_invalid_cron", err.Error())
+ }
+
+ return nil
+}
+
+// -------------------------------------------------------------------
+
+type MetaConfig struct {
+ AppName string `form:"appName" json:"appName"`
+ AppURL string `form:"appURL" json:"appURL"`
+ SenderName string `form:"senderName" json:"senderName"`
+ SenderAddress string `form:"senderAddress" json:"senderAddress"`
+ HideControls bool `form:"hideControls" json:"hideControls"`
+}
+
+// Validate makes MetaConfig validatable by implementing [validation.Validatable] interface.
+func (c MetaConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.AppName, validation.Required, validation.Length(1, 255)),
+ validation.Field(&c.AppURL, validation.Required, is.URL),
+ validation.Field(&c.SenderName, validation.Required, validation.Length(1, 255)),
+ validation.Field(&c.SenderAddress, is.EmailFormat, validation.Required),
+ )
+}
+
+// -------------------------------------------------------------------
+
+type LogsConfig struct {
+ MaxDays int `form:"maxDays" json:"maxDays"`
+ MinLevel int `form:"minLevel" json:"minLevel"`
+ LogIP bool `form:"logIP" json:"logIP"`
+ LogAuthId bool `form:"logAuthId" json:"logAuthId"`
+}
+
+// Validate makes LogsConfig validatable by implementing [validation.Validatable] interface.
+func (c LogsConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.MaxDays, validation.Min(0)),
+ )
+}
+
+// -------------------------------------------------------------------
+
+type TrustedProxyConfig struct {
+ // Headers is a list of explicit trusted header(s) to check.
+ Headers []string `form:"headers" json:"headers"`
+
+ // UseLeftmostIP specifies to use the left-mostish IP from the trusted headers.
+ //
+ // Note that this could be insecure when used with X-Forwarded-For header
+ // because some proxies like AWS ELB allow users to prepend their own header value
+ // before appending the trusted ones.
+ UseLeftmostIP bool `form:"useLeftmostIP" json:"useLeftmostIP"`
+}
+
+// MarshalJSON implements the [json.Marshaler] interface.
+func (c TrustedProxyConfig) MarshalJSON() ([]byte, error) {
+ type alias TrustedProxyConfig
+
+ // serialize as empty array
+ if c.Headers == nil {
+ c.Headers = []string{}
+ }
+
+ return json.Marshal(alias(c))
+}
+
+// Validate makes TrustedProxyConfig validatable by implementing [validation.Validatable] interface.
+func (c TrustedProxyConfig) Validate() error {
+ return nil
+}
+
+// -------------------------------------------------------------------
+
+type RateLimitsConfig struct {
+ Rules []RateLimitRule `form:"rules" json:"rules"`
+ Enabled bool `form:"enabled" json:"enabled"`
+}
+
+// FindRateLimitRule returns the first matching rule based on the provided labels.
+func (c *RateLimitsConfig) FindRateLimitRule(searchLabels []string) (RateLimitRule, bool) {
+ var prefixRules []int
+
+ for i, label := range searchLabels {
+ // check for direct match
+ for j := range c.Rules {
+ if label == c.Rules[j].Label {
+ return c.Rules[j], true
+ }
+
+ if i == 0 && strings.HasSuffix(c.Rules[j].Label, "/") {
+ prefixRules = append(prefixRules, j)
+ }
+ }
+
+ // check for prefix match
+ if len(prefixRules) > 0 {
+ for j := range prefixRules {
+ if strings.HasPrefix(label+"/", c.Rules[prefixRules[j]].Label) {
+ return c.Rules[prefixRules[j]], true
+ }
+ }
+ }
+ }
+
+ return RateLimitRule{}, false
+}
+
+// MarshalJSON implements the [json.Marshaler] interface.
+func (c RateLimitsConfig) MarshalJSON() ([]byte, error) {
+ type alias RateLimitsConfig
+
+ // serialize as empty array
+ if c.Rules == nil {
+ c.Rules = []RateLimitRule{}
+ }
+
+ return json.Marshal(alias(c))
+}
+
+// Validate makes RateLimitsConfig validatable by implementing [validation.Validatable] interface.
+func (c RateLimitsConfig) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(
+ &c.Rules,
+ validation.When(c.Enabled, validation.Required),
+ validation.By(checkUniqueRuleLabel),
+ ),
+ )
+}
+
+func checkUniqueRuleLabel(value any) error {
+ rules, ok := value.([]RateLimitRule)
+ if !ok {
+ return validators.ErrUnsupportedValueType
+ }
+
+ labels := make(map[string]struct{}, len(rules))
+
+ for i, rule := range rules {
+ _, ok := labels[rule.Label]
+ if ok {
+ return validation.Errors{
+ strconv.Itoa(i): validation.Errors{
+ "label": validation.NewError("validation_duplicated_rate_limit_tag", "Rate limit tag with label "+rule.Label+" already exists.").
+ SetParams(map[string]any{"label": rule.Label}),
+ },
+ }
+ } else {
+ labels[rule.Label] = struct{}{}
+ }
+ }
+
+ return nil
+}
+
+var rateLimitRuleLabelRegex = regexp.MustCompile(`^(\w+\ \/[\w\/-]*|\/[\w\/-]*|^\w+\:\w+|\*\:\w+|\w+)$`)
+
+type RateLimitRule struct {
+ // Label is the identifier of the current rule.
+ //
+ // It could be a tag, complete path or path prefix (when ends with `/`).
+ //
+ // Example supported labels:
+ // - test_a (plain text "tag")
+ // - users:create
+ // - *:create
+ // - /
+ // - /api
+ // - POST /api/collections/
+ Label string `form:"label" json:"label"`
+
+ // MaxRequests is the max allowed number of requests per Duration.
+ MaxRequests int `form:"maxRequests" json:"maxRequests"`
+
+ // Duration specifies the interval (in seconds) per which to reset
+ // the counted/accumulated rate limiter tokens.
+ Duration int64 `form:"duration" json:"duration"`
+}
+
+// Validate makes RateLimitRule validatable by implementing [validation.Validatable] interface.
+func (c RateLimitRule) Validate() error {
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.Label, validation.Required, validation.Match(rateLimitRuleLabelRegex)),
+ validation.Field(&c.MaxRequests, validation.Required, validation.Min(1)),
+ validation.Field(&c.Duration, validation.Required, validation.Min(1)),
+ )
+}
+
+// DurationTime returns the tag's Duration as [time.Duration].
+func (c RateLimitRule) DurationTime() time.Duration {
+ return time.Duration(c.Duration) * time.Second
+}
diff --git a/core/settings_model_test.go b/core/settings_model_test.go
new file mode 100644
index 00000000..81dca9c5
--- /dev/null
+++ b/core/settings_model_test.go
@@ -0,0 +1,691 @@
+package core_test
+
+import (
+ "encoding/json"
+ "fmt"
+ "strings"
+ "testing"
+ "time"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/mailer"
+)
+
+func TestSettingsDelete(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ err := app.Delete(app.Settings())
+ if err == nil {
+ t.Fatal("Expected settings delete to fail")
+ }
+}
+
+func TestSettingsMerge(t *testing.T) {
+ s1 := &core.Settings{}
+ s1.Meta.AppURL = "app_url" // should be unset
+
+ s2 := &core.Settings{}
+ s2.Meta.AppName = "test"
+ s2.Logs.MaxDays = 123
+ s2.SMTP.Host = "test"
+ s2.SMTP.Enabled = true
+ s2.S3.Enabled = true
+ s2.S3.Endpoint = "test"
+ s2.Backups.Cron = "* * * * *"
+ s2.Batch.Timeout = 15
+
+ if err := s1.Merge(s2); err != nil {
+ t.Fatal(err)
+ }
+
+ s1Encoded, err := json.Marshal(s1)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ s2Encoded, err := json.Marshal(s2)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if string(s1Encoded) != string(s2Encoded) {
+ t.Fatalf("Expected the same serialization, got\n%v\nVS\n%v", string(s1Encoded), string(s2Encoded))
+ }
+}
+
+func TestSettingsClone(t *testing.T) {
+ s1 := &core.Settings{}
+ s1.Meta.AppName = "test_name"
+
+ s2, err := s1.Clone()
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ s1Bytes, err := json.Marshal(s1)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ s2Bytes, err := json.Marshal(s2)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if string(s1Bytes) != string(s2Bytes) {
+ t.Fatalf("Expected equivalent serialization, got %v VS %v", string(s1Bytes), string(s2Bytes))
+ }
+
+ // verify that it is a deep copy
+ s2.Meta.AppName = "new_test_name"
+ if s1.Meta.AppName == s2.Meta.AppName {
+ t.Fatalf("Expected s1 and s2 to have different Meta.AppName, got %s", s1.Meta.AppName)
+ }
+}
+
+func TestSettingsMarshalJSON(t *testing.T) {
+ settings := &core.Settings{}
+
+ // control fields
+ settings.Meta.AppName = "test123"
+ settings.SMTP.Username = "abc"
+
+ // secrets
+ testSecret := "test_secret"
+ settings.SMTP.Password = testSecret
+ settings.S3.Secret = testSecret
+ settings.Backups.S3.Secret = testSecret
+
+ raw, err := json.Marshal(settings)
+ if err != nil {
+ t.Fatal(err)
+ }
+ rawStr := string(raw)
+
+ expected := `{"smtp":{"enabled":false,"port":0,"host":"","username":"abc","authMethod":"","tls":false,"localName":""},"backups":{"cron":"","cronMaxKeep":0,"s3":{"enabled":false,"bucket":"","region":"","endpoint":"","accessKey":"","forcePathStyle":false}},"s3":{"enabled":false,"bucket":"","region":"","endpoint":"","accessKey":"","forcePathStyle":false},"meta":{"appName":"test123","appURL":"","senderName":"","senderAddress":"","hideControls":false},"logs":{"maxDays":0,"minLevel":0,"logIP":false,"logAuthId":false},"batch":{"enabled":false,"maxRequests":0,"timeout":0,"maxBodySize":0},"rateLimits":{"rules":[],"enabled":false},"trustedProxy":{"headers":[],"useLeftmostIP":false}}`
+
+ if rawStr != expected {
+ t.Fatalf("Expected\n%v\ngot\n%v", expected, rawStr)
+ }
+}
+
+func TestSettingsValidate(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ s := app.Settings()
+
+ // set invalid settings data
+ s.Meta.AppName = ""
+ s.Logs.MaxDays = -10
+ s.SMTP.Enabled = true
+ s.SMTP.Host = ""
+ s.S3.Enabled = true
+ s.S3.Endpoint = "invalid"
+ s.Backups.Cron = "invalid"
+ s.Backups.CronMaxKeep = -10
+ s.Batch.Enabled = true
+ s.Batch.MaxRequests = -1
+ s.Batch.Timeout = -1
+ s.RateLimits.Enabled = true
+ s.RateLimits.Rules = nil
+
+ // check if Validate() is triggering the members validate methods.
+ err := app.Validate(s)
+ if err == nil {
+ t.Fatalf("Expected error, got nil")
+ }
+
+ expectations := []string{
+ `"meta":{`,
+ `"logs":{`,
+ `"smtp":{`,
+ `"s3":{`,
+ `"backups":{`,
+ `"batch":{`,
+ `"rateLimits":{`,
+ }
+
+ errBytes, _ := json.Marshal(err)
+ jsonErr := string(errBytes)
+ for _, expected := range expectations {
+ if !strings.Contains(jsonErr, expected) {
+ t.Errorf("Expected error key %s in %v", expected, jsonErr)
+ }
+ }
+}
+
+func TestMetaConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.MetaConfig
+ expectedErrors []string
+ }{
+ {
+ "zero values",
+ core.MetaConfig{},
+ []string{
+ "appName",
+ "appURL",
+ "senderName",
+ "senderAddress",
+ },
+ },
+ {
+ "invalid data",
+ core.MetaConfig{
+ AppName: strings.Repeat("a", 300),
+ AppURL: "test",
+ SenderName: strings.Repeat("a", 300),
+ SenderAddress: "invalid_email",
+ },
+ []string{
+ "appName",
+ "appURL",
+ "senderName",
+ "senderAddress",
+ },
+ },
+ {
+ "valid data",
+ core.MetaConfig{
+ AppName: "test",
+ AppURL: "https://example.com",
+ SenderName: "test",
+ SenderAddress: "test@example.com",
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+func TestLogsConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.LogsConfig
+ expectedErrors []string
+ }{
+ {
+ "zero values",
+ core.LogsConfig{},
+ []string{},
+ },
+ {
+ "invalid data",
+ core.LogsConfig{MaxDays: -1},
+ []string{"maxDays"},
+ },
+ {
+ "valid data",
+ core.LogsConfig{MaxDays: 2},
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+func TestSMTPConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.SMTPConfig
+ expectedErrors []string
+ }{
+ {
+ "zero values (disabled)",
+ core.SMTPConfig{},
+ []string{},
+ },
+ {
+ "zero values (enabled)",
+ core.SMTPConfig{Enabled: true},
+ []string{"host", "port"},
+ },
+ {
+ "invalid data",
+ core.SMTPConfig{
+ Enabled: true,
+ Host: "test:test:test",
+ Port: -10,
+ LocalName: "invalid!",
+ AuthMethod: "invalid",
+ },
+ []string{"host", "port", "authMethod", "localName"},
+ },
+ {
+ "valid data (no explicit auth method and localName)",
+ core.SMTPConfig{
+ Enabled: true,
+ Host: "example.com",
+ Port: 100,
+ TLS: true,
+ },
+ []string{},
+ },
+ {
+ "valid data (explicit auth method and localName)",
+ core.SMTPConfig{
+ Enabled: true,
+ Host: "example.com",
+ Port: 100,
+ AuthMethod: mailer.SMTPAuthLogin,
+ LocalName: "example.com",
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+func TestS3ConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.S3Config
+ expectedErrors []string
+ }{
+ {
+ "zero values (disabled)",
+ core.S3Config{},
+ []string{},
+ },
+ {
+ "zero values (enabled)",
+ core.S3Config{Enabled: true},
+ []string{
+ "bucket",
+ "region",
+ "endpoint",
+ "accessKey",
+ "secret",
+ },
+ },
+ {
+ "invalid data",
+ core.S3Config{
+ Enabled: true,
+ Endpoint: "test:test:test",
+ },
+ []string{
+ "bucket",
+ "region",
+ "endpoint",
+ "accessKey",
+ "secret",
+ },
+ },
+ {
+ "valid data (url endpoint)",
+ core.S3Config{
+ Enabled: true,
+ Endpoint: "https://localhost:8090",
+ Bucket: "test",
+ Region: "test",
+ AccessKey: "test",
+ Secret: "test",
+ },
+ []string{},
+ },
+ {
+ "valid data (hostname endpoint)",
+ core.S3Config{
+ Enabled: true,
+ Endpoint: "example.com",
+ Bucket: "test",
+ Region: "test",
+ AccessKey: "test",
+ Secret: "test",
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+func TestBackupsConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.BackupsConfig
+ expectedErrors []string
+ }{
+ {
+ "zero value",
+ core.BackupsConfig{},
+ []string{},
+ },
+ {
+ "invalid cron",
+ core.BackupsConfig{
+ Cron: "invalid",
+ CronMaxKeep: 0,
+ },
+ []string{"cron", "cronMaxKeep"},
+ },
+ {
+ "invalid enabled S3",
+ core.BackupsConfig{
+ S3: core.S3Config{
+ Enabled: true,
+ },
+ },
+ []string{"s3"},
+ },
+ {
+ "valid data",
+ core.BackupsConfig{
+ S3: core.S3Config{
+ Enabled: true,
+ Endpoint: "example.com",
+ Bucket: "test",
+ Region: "test",
+ AccessKey: "test",
+ Secret: "test",
+ },
+ Cron: "*/10 * * * *",
+ CronMaxKeep: 1,
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+func TestBatchConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.BatchConfig
+ expectedErrors []string
+ }{
+ {
+ "zero value",
+ core.BatchConfig{},
+ []string{},
+ },
+ {
+ "zero value (enabled)",
+ core.BatchConfig{Enabled: true},
+ []string{"maxRequests", "timeout"},
+ },
+ {
+ "invalid data (negative values)",
+ core.BatchConfig{
+ MaxRequests: -1,
+ Timeout: -1,
+ MaxBodySize: -1,
+ },
+ []string{"maxRequests", "timeout", "maxBodySize"},
+ },
+ {
+ "min fields valid data",
+ core.BatchConfig{
+ Enabled: true,
+ MaxRequests: 1,
+ Timeout: 1,
+ },
+ []string{},
+ },
+ {
+ "all fields valid data",
+ core.BatchConfig{
+ Enabled: true,
+ MaxRequests: 10,
+ Timeout: 1,
+ MaxBodySize: 1,
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+func TestRateLimitsConfigValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.RateLimitsConfig
+ expectedErrors []string
+ }{
+ {
+ "zero value (disabled)",
+ core.RateLimitsConfig{},
+ []string{},
+ },
+ {
+ "zero value (enabled)",
+ core.RateLimitsConfig{Enabled: true},
+ []string{"rules"},
+ },
+ {
+ "invalid data",
+ core.RateLimitsConfig{
+ Enabled: true,
+ Rules: []core.RateLimitRule{
+ {
+ Label: "/123abc/",
+ Duration: 1,
+ MaxRequests: 2,
+ },
+ {
+ Label: "!abc",
+ Duration: -1,
+ MaxRequests: -1,
+ },
+ },
+ },
+ []string{"rules"},
+ },
+ {
+ "valid data",
+ core.RateLimitsConfig{
+ Enabled: true,
+ Rules: []core.RateLimitRule{
+ {
+ Label: "123_abc",
+ Duration: 1,
+ MaxRequests: 2,
+ },
+ {
+ Label: "/456-abc",
+ Duration: 1,
+ MaxRequests: 2,
+ },
+ },
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+func TestRateLimitsFindRateLimitRule(t *testing.T) {
+ limits := core.RateLimitsConfig{
+ Rules: []core.RateLimitRule{
+ {Label: "abc"},
+ {Label: "POST /test/a/"},
+ {Label: "/test/a/"},
+ {Label: "POST /test/a"},
+ {Label: "/test/a"},
+ },
+ }
+
+ scenarios := []struct {
+ labels []string
+ expected string
+ }{
+ {[]string{}, ""},
+ {[]string{"missing"}, ""},
+ {[]string{"abc"}, "abc"},
+ {[]string{"/test"}, ""},
+ {[]string{"/test/a"}, "/test/a"},
+ {[]string{"GET /test/a"}, ""},
+ {[]string{"POST /test/a"}, "POST /test/a"},
+ {[]string{"/test/a/b/c"}, "/test/a/"},
+ {[]string{"GET /test/a/b/c"}, ""},
+ {[]string{"POST /test/a/b/c"}, "POST /test/a/"},
+ {[]string{"/test/a", "abc"}, "/test/a"}, // priority checks
+ }
+
+ for _, s := range scenarios {
+ t.Run(strings.Join(s.labels, ""), func(t *testing.T) {
+ rule, ok := limits.FindRateLimitRule(s.labels)
+
+ hasLabel := rule.Label != ""
+ if hasLabel != ok {
+ t.Fatalf("Expected hasLabel %v, got %v", hasLabel, ok)
+ }
+
+ if rule.Label != s.expected {
+ t.Fatalf("Expected rule with label %q, got %q", s.expected, rule.Label)
+ }
+ })
+ }
+}
+
+func TestRateLimitRuleValidate(t *testing.T) {
+ scenarios := []struct {
+ name string
+ config core.RateLimitRule
+ expectedErrors []string
+ }{
+ {
+ "zero value",
+ core.RateLimitRule{},
+ []string{"label", "duration", "maxRequests"},
+ },
+ {
+ "invalid data",
+ core.RateLimitRule{
+ Label: "@abc",
+ Duration: -1,
+ MaxRequests: -1,
+ },
+ []string{"label", "duration", "maxRequests"},
+ },
+ {
+ "valid data (name)",
+ core.RateLimitRule{
+ Label: "abc:123",
+ Duration: 1,
+ MaxRequests: 1,
+ },
+ []string{},
+ },
+ {
+ "valid data (name:action)",
+ core.RateLimitRule{
+ Label: "abc:123",
+ Duration: 1,
+ MaxRequests: 1,
+ },
+ []string{},
+ },
+ {
+ "valid data (*:action)",
+ core.RateLimitRule{
+ Label: "*:123",
+ Duration: 1,
+ MaxRequests: 1,
+ },
+ []string{},
+ },
+ {
+ "valid data (path /a/b)",
+ core.RateLimitRule{
+ Label: "/a/b",
+ Duration: 1,
+ MaxRequests: 1,
+ },
+ []string{},
+ },
+ {
+ "valid data (path POST /a/b)",
+ core.RateLimitRule{
+ Label: "POST /a/b/",
+ Duration: 1,
+ MaxRequests: 1,
+ },
+ []string{},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := s.config.Validate()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
+ }
+}
+
+func TestRateLimitRuleDurationTime(t *testing.T) {
+ scenarios := []struct {
+ config core.RateLimitRule
+ expected time.Duration
+ }{
+ {core.RateLimitRule{}, 0 * time.Second},
+ {core.RateLimitRule{Duration: 1234}, 1234 * time.Second},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%d", i, s.config.Duration), func(t *testing.T) {
+ result := s.config.DurationTime()
+
+ if result != s.expected {
+ t.Fatalf("Expected duration %d, got %d", s.expected, result)
+ }
+ })
+ }
+}
diff --git a/core/settings_query.go b/core/settings_query.go
new file mode 100644
index 00000000..c1883c61
--- /dev/null
+++ b/core/settings_query.go
@@ -0,0 +1,88 @@
+package core
+
+import (
+ "database/sql"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "os"
+
+ "github.com/pocketbase/pocketbase/tools/security"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+type Param struct {
+ BaseModel
+
+ Created types.DateTime `db:"created" json:"created"`
+ Updated types.DateTime `db:"updated" json:"updated"`
+ Value types.JSONRaw `db:"value" json:"value"`
+}
+
+func (m *Param) TableName() string {
+ return paramsTable
+}
+
+// ReloadSettings initializes and reloads the stored application settings.
+//
+// If no settings were stored it will persist the current app ones.
+func (app *BaseApp) ReloadSettings() error {
+ param := &Param{}
+ err := app.ModelQuery(param).Model(paramsKeySettings, param)
+ if err != nil && !errors.Is(err, sql.ErrNoRows) {
+ return err
+ }
+
+ // no settings were previously stored -> save
+ // (ReloadSettings() will be invoked again by a system hook after successful save)
+ if param.Id == "" {
+ // force insert in case the param entry was deleted manually after application start
+ app.Settings().MarkAsNew()
+ return app.Save(app.Settings())
+ }
+
+ event := new(SettingsReloadEvent)
+ event.App = app
+
+ return app.OnSettingsReload().Trigger(event, func(e *SettingsReloadEvent) error {
+ return e.App.Settings().loadParam(e.App, param)
+ })
+}
+
+// loadParam loads the settings from the stored param into the app ones.
+//
+// @todo note that the encryption may get removed in the future since it doesn't
+// really accomplish much and it might be better to find a way to encrypt the backups
+// or implement support for resolving env variables.
+func (s *Settings) loadParam(app App, param *Param) error {
+ // try first without decryption
+ s.mu.Lock()
+ plainDecodeErr := json.Unmarshal(param.Value, s)
+ s.mu.Unlock()
+
+ // failed, try to decrypt
+ if plainDecodeErr != nil {
+ encryptionKey := os.Getenv(app.EncryptionEnv())
+
+ // load without decryption has failed and there is no encryption key to use for decrypt
+ if encryptionKey == "" {
+ return fmt.Errorf("invalid settings db data or missing encryption key %q", app.EncryptionEnv())
+ }
+
+ // decrypt
+ decrypted, decryptErr := security.Decrypt(string(param.Value), encryptionKey)
+ if decryptErr != nil {
+ return decryptErr
+ }
+
+ // decode again
+ s.mu.Lock()
+ decryptedDecodeErr := json.Unmarshal(decrypted, s)
+ s.mu.Unlock()
+ if decryptedDecodeErr != nil {
+ return decryptedDecodeErr
+ }
+ }
+
+ return s.PostScan()
+}
diff --git a/core/settings_query_test.go b/core/settings_query_test.go
new file mode 100644
index 00000000..a7542889
--- /dev/null
+++ b/core/settings_query_test.go
@@ -0,0 +1,159 @@
+package core_test
+
+import (
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tests"
+ "github.com/pocketbase/pocketbase/tools/types"
+)
+
+func TestReloadSettings(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ // cleanup all stored settings
+ // ---
+ if _, err := app.DB().NewQuery("DELETE from _params;").Execute(); err != nil {
+ t.Fatalf("Failed to delete all test settings: %v", err)
+ }
+
+ // check if the new settings are saved in the db
+ // ---
+ app.Settings().Meta.AppName = "test_name_after_delete"
+
+ app.ResetEventCalls()
+ if err := app.ReloadSettings(); err != nil {
+ t.Fatalf("Failed to reload the settings after delete: %v", err)
+ }
+ testEventCalls(t, app, map[string]int{
+ "OnModelCreate": 1,
+ "OnModelCreateExecute": 1,
+ "OnModelAfterCreateSuccess": 1,
+ "OnModelValidate": 1,
+ "OnSettingsReload": 1,
+ })
+
+ param := &core.Param{}
+ err := app.ModelQuery(param).Model("settings", param)
+ if err != nil {
+ t.Fatalf("Expected new settings to be persisted, got %v", err)
+ }
+
+ if !strings.Contains(param.Value.String(), "test_name_after_delete") {
+ t.Fatalf("Expected to find AppName test_name_after_delete in\n%s", param.Value.String())
+ }
+
+ // change the db entry and reload the app settings (ensure that there was no db update)
+ // ---
+ param.Value = types.JSONRaw([]byte(`{"meta": {"appName":"test_name_after_update"}}`))
+ if err := app.Save(param); err != nil {
+ t.Fatalf("Failed to update the test settings: %v", err)
+ }
+
+ app.ResetEventCalls()
+ if err := app.ReloadSettings(); err != nil {
+ t.Fatalf("Failed to reload app settings: %v", err)
+ }
+ testEventCalls(t, app, map[string]int{
+ "OnSettingsReload": 1,
+ })
+
+ // try to reload again without doing any changes
+ // ---
+ app.ResetEventCalls()
+ if err := app.ReloadSettings(); err != nil {
+ t.Fatalf("Failed to reload app settings without change: %v", err)
+ }
+ testEventCalls(t, app, map[string]int{
+ "OnSettingsReload": 1,
+ })
+
+ if app.Settings().Meta.AppName != "test_name_after_update" {
+ t.Fatalf("Expected AppName %q, got %q", "test_name_after_update", app.Settings().Meta.AppName)
+ }
+}
+
+func TestReloadSettingsWithEncryption(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ os.Setenv("pb_test_env", strings.Repeat("a", 32))
+
+ // cleanup all stored settings
+ // ---
+ if _, err := app.DB().NewQuery("DELETE from _params;").Execute(); err != nil {
+ t.Fatalf("Failed to delete all test settings: %v", err)
+ }
+
+ // check if the new settings are saved in the db
+ // ---
+ app.Settings().Meta.AppName = "test_name_after_delete"
+
+ app.ResetEventCalls()
+ if err := app.ReloadSettings(); err != nil {
+ t.Fatalf("Failed to reload the settings after delete: %v", err)
+ }
+ testEventCalls(t, app, map[string]int{
+ "OnModelCreate": 1,
+ "OnModelCreateExecute": 1,
+ "OnModelAfterCreateSuccess": 1,
+ "OnModelValidate": 1,
+ "OnSettingsReload": 1,
+ })
+
+ param := &core.Param{}
+ err := app.ModelQuery(param).Model("settings", param)
+ if err != nil {
+ t.Fatalf("Expected new settings to be persisted, got %v", err)
+ }
+ rawValue := param.Value.String()
+ if rawValue == "" || strings.Contains(rawValue, "test_name") {
+ t.Fatalf("Expected inserted settings to be encrypted, found\n%s", rawValue)
+ }
+
+ // change and reload the app settings (ensure that there was no db update)
+ // ---
+ app.Settings().Meta.AppName = "test_name_after_update"
+ if err := app.Save(app.Settings()); err != nil {
+ t.Fatalf("Failed to update app settings: %v", err)
+ }
+
+ // try to reload again without doing any changes
+ // ---
+ app.ResetEventCalls()
+ if err := app.ReloadSettings(); err != nil {
+ t.Fatalf("Failed to reload app settings: %v", err)
+ }
+ testEventCalls(t, app, map[string]int{
+ "OnSettingsReload": 1,
+ })
+
+ // refetch the settings param to ensure that the new value was stored encrypted
+ err = app.ModelQuery(param).Model("settings", param)
+ if err != nil {
+ t.Fatalf("Expected new settings to be persisted, got %v", err)
+ }
+ rawValue = param.Value.String()
+ if rawValue == "" || strings.Contains(rawValue, "test_name") {
+ t.Fatalf("Expected updated settings to be encrypted, found\n%s", rawValue)
+ }
+
+ if app.Settings().Meta.AppName != "test_name_after_update" {
+ t.Fatalf("Expected AppName %q, got %q", "test_name_after_update", app.Settings().Meta.AppName)
+ }
+}
+
+func testEventCalls(t *testing.T, app *tests.TestApp, events map[string]int) {
+ if len(events) != len(app.EventCalls) {
+ t.Fatalf("Expected events doesn't match:\n%v\ngot\n%v", events, app.EventCalls)
+ }
+
+ for name, total := range events {
+ if v, ok := app.EventCalls[name]; !ok || v != total {
+ t.Fatalf("Expected events doesn't exist or match:\n%v\ngot\n%v", events, app.EventCalls)
+ }
+ }
+}
diff --git a/core/validators/db.go b/core/validators/db.go
new file mode 100644
index 00000000..bf437b1d
--- /dev/null
+++ b/core/validators/db.go
@@ -0,0 +1,78 @@
+package validators
+
+import (
+ "database/sql"
+ "errors"
+ "strings"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/dbx"
+)
+
+// UniqueId checks whether a field string id already exists in the specified table.
+//
+// Example:
+//
+// validation.Field(&form.RelId, validation.By(validators.UniqueId(form.app.DB(), "tbl_example"))
+func UniqueId(db dbx.Builder, tableName string) validation.RuleFunc {
+ return func(value any) error {
+ v, _ := value.(string)
+ if v == "" {
+ return nil // nothing to check
+ }
+
+ var foundId string
+
+ err := db.
+ Select("id").
+ From(tableName).
+ Where(dbx.HashExp{"id": v}).
+ Limit(1).
+ Row(&foundId)
+
+ if (err != nil && !errors.Is(err, sql.ErrNoRows)) || foundId != "" {
+ return validation.NewError("validation_invalid_or_existing_id", "The model id is invalid or already exists.")
+ }
+
+ return nil
+ }
+}
+
+// NormalizeUniqueIndexError attempts to convert a
+// "unique constraint failed" error into a validation.Errors.
+//
+// The provided err is returned as it is without changes if:
+// - err is nil
+// - err is already validation.Errors
+// - err is not "unique constraint failed" error
+func NormalizeUniqueIndexError(err error, tableOrAlias string, fieldNames []string) error {
+ if err == nil {
+ return err
+ }
+
+ //
+ if _, ok := err.(validation.Errors); ok {
+ return err
+ }
+
+ msg := strings.ToLower(err.Error())
+
+ // check for unique constraint failure
+ if strings.Contains(msg, "unique constraint failed") {
+ normalizedErrs := validation.Errors{}
+ msg = strings.ReplaceAll(strings.TrimSpace(msg), ",", " ")
+
+ for _, name := range fieldNames {
+ // blank space to unify multi-columns lookup
+ if strings.Contains(msg+" ", strings.ToLower(tableOrAlias+"."+name)) {
+ normalizedErrs[name] = validation.NewError("validation_not_unique", "Value must be unique")
+ }
+ }
+
+ if len(normalizedErrs) > 0 {
+ return normalizedErrs
+ }
+ }
+
+ return err
+}
diff --git a/core/validators/db_test.go b/core/validators/db_test.go
new file mode 100644
index 00000000..f5ba1f26
--- /dev/null
+++ b/core/validators/db_test.go
@@ -0,0 +1,111 @@
+package validators_test
+
+import (
+ "errors"
+ "fmt"
+ "testing"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tests"
+)
+
+func TestUniqueId(t *testing.T) {
+ t.Parallel()
+
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
+
+ scenarios := []struct {
+ id string
+ tableName string
+ expectError bool
+ }{
+ {"", "", false},
+ {"test", "", true},
+ {"wsmn24bux7wo113", "_collections", true},
+ {"test_unique_id", "unknown_table", true},
+ {"test_unique_id", "_collections", false},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s_%s", i, s.id, s.tableName), func(t *testing.T) {
+ err := validators.UniqueId(app.DB(), s.tableName)(s.id)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestNormalizeUniqueIndexError(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ name string
+ err error
+ table string
+ names []string
+ expectedKeys []string
+ }{
+ {
+ "nil error (no changes)",
+ nil,
+ "test",
+ []string{"a", "b"},
+ nil,
+ },
+ {
+ "non-unique index error (no changes)",
+ errors.New("abc"),
+ "test",
+ []string{"a", "b"},
+ nil,
+ },
+ {
+ "validation error (no changes)",
+ validation.Errors{"c": errors.New("abc")},
+ "test",
+ []string{"a", "b"},
+ []string{"c"},
+ },
+ {
+ "unique index error but mismatched table name",
+ errors.New("UNIQUE constraint failed for fields test.a,test.b"),
+ "example",
+ []string{"a", "b"},
+ nil,
+ },
+ {
+ "unique index error but mismatched fields",
+ errors.New("UNIQUE constraint failed for fields test.a,test.b"),
+ "test",
+ []string{"c", "d"},
+ nil,
+ },
+ {
+ "unique index error with matching table name and fields",
+ errors.New("UNIQUE constraint failed for fields test.a,test.b"),
+ "test",
+ []string{"a", "b", "c"},
+ []string{"a", "b"},
+ },
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.name, func(t *testing.T) {
+ result := validators.NormalizeUniqueIndexError(s.err, s.table, s.names)
+
+ if len(s.expectedKeys) == 0 {
+ if result != s.err {
+ t.Fatalf("Expected no error change, got %v", result)
+ }
+ return
+ }
+
+ tests.TestValidationErrors(t, result, s.expectedKeys)
+ })
+ }
+}
diff --git a/core/validators/equal.go b/core/validators/equal.go
new file mode 100644
index 00000000..9ee673cb
--- /dev/null
+++ b/core/validators/equal.go
@@ -0,0 +1,85 @@
+package validators
+
+import (
+ "reflect"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+)
+
+// Equal checks whether the validated value matches another one from the same type.
+//
+// It expects the compared values to be from the same type and works
+// with booleans, numbers, strings and their pointer variants.
+//
+// If one of the value is pointer, the comparison is based on its
+// underlying value (when possible to determine).
+//
+// Note that empty/zero values are also compared (this differs from other validation.RuleFunc).
+//
+// Example:
+//
+// validation.Field(&form.PasswordConfirm, validation.By(validators.Equal(form.Password)))
+func Equal[T comparable](valueToCompare T) validation.RuleFunc {
+ return func(value any) error {
+ if compareValues(value, valueToCompare) {
+ return nil
+ }
+
+ return validation.NewError("validation_values_mismatch", "Values don't match.")
+ }
+}
+
+func compareValues(a, b any) bool {
+ if a == b {
+ return true
+ }
+
+ if checkIsNil(a) && checkIsNil(b) {
+ return true
+ }
+
+ var result bool
+
+ defer func() {
+ if err := recover(); err != nil {
+ result = false
+ }
+ }()
+
+ reflectA := reflect.ValueOf(a)
+ reflectB := reflect.ValueOf(b)
+
+ dereferencedA := dereference(reflectA)
+ dereferencedB := dereference(reflectB)
+ if dereferencedA.CanInterface() && dereferencedB.CanInterface() {
+ result = dereferencedA.Interface() == dereferencedB.Interface()
+ }
+
+ return result
+}
+
+// note https://github.com/golang/go/issues/51649
+func checkIsNil(value any) bool {
+ if value == nil {
+ return true
+ }
+
+ var result bool
+
+ defer func() {
+ if err := recover(); err != nil {
+ result = false
+ }
+ }()
+
+ result = reflect.ValueOf(value).IsNil()
+
+ return result
+}
+
+func dereference(v reflect.Value) reflect.Value {
+ for v.Kind() == reflect.Pointer {
+ v = v.Elem()
+ }
+ return v
+}
diff --git a/core/validators/equal_test.go b/core/validators/equal_test.go
new file mode 100644
index 00000000..dedb3763
--- /dev/null
+++ b/core/validators/equal_test.go
@@ -0,0 +1,62 @@
+package validators_test
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core/validators"
+)
+
+func TestEqual(t *testing.T) {
+ t.Parallel()
+
+ strA := "abc"
+ strB := "abc"
+ strC := "123"
+ var strNilPtr *string
+ var strNilPtr2 *string
+
+ scenarios := []struct {
+ valA any
+ valB any
+ expectError bool
+ }{
+ {nil, nil, false},
+ {"", "", false},
+ {"", "456", true},
+ {"123", "", true},
+ {"123", "456", true},
+ {"123", "123", false},
+ {true, false, true},
+ {false, true, true},
+ {false, false, false},
+ {true, true, false},
+ {0, 0, false},
+ {0, 1, true},
+ {1, 2, true},
+ {1, 1, false},
+ {&strA, &strA, false},
+ {&strA, &strB, false},
+ {&strA, &strC, true},
+ {"abc", &strA, false},
+ {&strA, "abc", false},
+ {"abc", &strC, true},
+ {"test", 123, true},
+ {nil, 123, true},
+ {nil, strA, true},
+ {nil, &strA, true},
+ {nil, strNilPtr, false},
+ {strNilPtr, strNilPtr2, false},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%v_%v", i, s.valA, s.valB), func(t *testing.T) {
+ err := validators.Equal(s.valA)(s.valB)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
diff --git a/forms/validators/file.go b/core/validators/file.go
similarity index 77%
rename from forms/validators/file.go
rename to core/validators/file.go
index 2177078e..9e008c11 100644
--- a/forms/validators/file.go
+++ b/core/validators/file.go
@@ -9,31 +9,38 @@ import (
"github.com/pocketbase/pocketbase/tools/filesystem"
)
-// UploadedFileSize checks whether the validated `rest.UploadedFile`
+// UploadedFileSize checks whether the validated [*filesystem.File]
// size is no more than the provided maxBytes.
//
// Example:
//
// validation.Field(&form.File, validation.By(validators.UploadedFileSize(1000)))
-func UploadedFileSize(maxBytes int) validation.RuleFunc {
+func UploadedFileSize(maxBytes int64) validation.RuleFunc {
return func(value any) error {
- v, _ := value.(*filesystem.File)
+ v, ok := value.(*filesystem.File)
+ if !ok {
+ return ErrUnsupportedValueType
+ }
+
if v == nil {
return nil // nothing to validate
}
- if int(v.Size) > maxBytes {
+ if v.Size > maxBytes {
return validation.NewError(
"validation_file_size_limit",
fmt.Sprintf("Failed to upload %q - the maximum allowed file size is %v bytes.", v.OriginalName, maxBytes),
- )
+ ).SetParams(map[string]any{
+ "file": v.OriginalName,
+ "maxSize": maxBytes,
+ })
}
return nil
}
}
-// UploadedFileMimeType checks whether the validated `rest.UploadedFile`
+// UploadedFileMimeType checks whether the validated [*filesystem.File]
// mimetype is within the provided allowed mime types.
//
// Example:
@@ -42,7 +49,11 @@ func UploadedFileSize(maxBytes int) validation.RuleFunc {
// validation.Field(&form.File, validation.By(validators.UploadedFileMimeType(validMimeTypes)))
func UploadedFileMimeType(validTypes []string) validation.RuleFunc {
return func(value any) error {
- v, _ := value.(*filesystem.File)
+ v, ok := value.(*filesystem.File)
+ if !ok {
+ return ErrUnsupportedValueType
+ }
+
if v == nil {
return nil // nothing to validate
}
diff --git a/core/validators/file_test.go b/core/validators/file_test.go
new file mode 100644
index 00000000..3af6fe53
--- /dev/null
+++ b/core/validators/file_test.go
@@ -0,0 +1,75 @@
+package validators_test
+
+import (
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core/validators"
+ "github.com/pocketbase/pocketbase/tools/filesystem"
+)
+
+func TestUploadedFileSize(t *testing.T) {
+ t.Parallel()
+
+ file, err := filesystem.NewFileFromBytes([]byte("test"), "test.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ maxBytes int64
+ file *filesystem.File
+ expectError bool
+ }{
+ {0, nil, false},
+ {4, nil, false},
+ {3, file, true}, // all test files have "test" as content
+ {4, file, false},
+ {5, file, false},
+ }
+
+ for _, s := range scenarios {
+ t.Run(fmt.Sprintf("%d", s.maxBytes), func(t *testing.T) {
+ err := validators.UploadedFileSize(s.maxBytes)(s.file)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
+
+func TestUploadedFileMimeType(t *testing.T) {
+ t.Parallel()
+
+ file, err := filesystem.NewFileFromBytes([]byte("test"), "test.png") // the extension shouldn't matter
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ scenarios := []struct {
+ types []string
+ file *filesystem.File
+ expectError bool
+ }{
+ {nil, nil, false},
+ {[]string{"image/jpeg"}, nil, false},
+ {[]string{}, file, true},
+ {[]string{"image/jpeg"}, file, true},
+ // test files are detected as "text/plain; charset=utf-8" content type
+ {[]string{"image/jpeg", "text/plain; charset=utf-8"}, file, false},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%s", i, strings.Join(s.types, ";")), func(t *testing.T) {
+ err := validators.UploadedFileMimeType(s.types)(s.file)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
diff --git a/core/validators/string.go b/core/validators/string.go
new file mode 100644
index 00000000..c0d885b5
--- /dev/null
+++ b/core/validators/string.go
@@ -0,0 +1,29 @@
+package validators
+
+import (
+ "regexp"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+)
+
+// IsRegex checks whether the validated value is a valid regular expression pattern.
+//
+// Example:
+//
+// validation.Field(&form.Pattern, validation.By(validators.IsRegex))
+func IsRegex(value any) error {
+ v, ok := value.(string)
+ if !ok {
+ return ErrUnsupportedValueType
+ }
+
+ if v == "" {
+ return nil // nothing to check
+ }
+
+ if _, err := regexp.Compile(v); err != nil {
+ return validation.NewError("validation_invalid_regex", err.Error())
+ }
+
+ return nil
+}
diff --git a/core/validators/string_test.go b/core/validators/string_test.go
new file mode 100644
index 00000000..dead6df7
--- /dev/null
+++ b/core/validators/string_test.go
@@ -0,0 +1,33 @@
+package validators_test
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/pocketbase/pocketbase/core/validators"
+)
+
+func TestIsRegex(t *testing.T) {
+ t.Parallel()
+
+ scenarios := []struct {
+ val string
+ expectError bool
+ }{
+ {"", false},
+ {`abc`, false},
+ {`\w+`, false},
+ {`\w*((abc+`, true},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#v", i, s.val), func(t *testing.T) {
+ err := validators.IsRegex(s.val)
+
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
+ }
+ })
+ }
+}
diff --git a/core/validators/validators.go b/core/validators/validators.go
new file mode 100644
index 00000000..a4ce3a6f
--- /dev/null
+++ b/core/validators/validators.go
@@ -0,0 +1,40 @@
+// Package validators implements some common custom PocketBase validators.
+package validators
+
+import (
+ "errors"
+ "maps"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+)
+
+var ErrUnsupportedValueType = validation.NewError("validation_unsupported_value_type", "Invalid or unsupported value type.")
+
+// JoinValidationErrors attempts to join the provided [validation.Errors] arguments.
+//
+// If only one of the arguments is [validation.Errors], it returns the first non-empty [validation.Errors].
+//
+// If both arguments are not [validation.Errors] then it returns a combined [errors.Join] error.
+func JoinValidationErrors(errA, errB error) error {
+ vErrA, okA := errA.(validation.Errors)
+ vErrB, okB := errB.(validation.Errors)
+
+ // merge
+ if okA && okB {
+ result := maps.Clone(vErrA)
+ maps.Copy(result, vErrB)
+ if len(result) > 0 {
+ return result
+ }
+ }
+
+ if okA && len(vErrA) > 0 {
+ return vErrA
+ }
+
+ if okB && len(vErrB) > 0 {
+ return vErrB
+ }
+
+ return errors.Join(errA, errB)
+}
diff --git a/core/validators/validators_test.go b/core/validators/validators_test.go
new file mode 100644
index 00000000..0a54fa8e
--- /dev/null
+++ b/core/validators/validators_test.go
@@ -0,0 +1,39 @@
+package validators_test
+
+import (
+ "errors"
+ "fmt"
+ "testing"
+
+ validation "github.com/go-ozzo/ozzo-validation/v4"
+ "github.com/pocketbase/pocketbase/core/validators"
+)
+
+func TestJoinValidationErrors(t *testing.T) {
+ scenarios := []struct {
+ errA error
+ errB error
+ expected string
+ }{
+ {nil, nil, ""},
+ {errors.New("abc"), nil, "abc"},
+ {nil, errors.New("abc"), "abc"},
+ {errors.New("abc"), errors.New("456"), "abc\n456"},
+ {validation.Errors{"test1": errors.New("test1_err")}, nil, "test1: test1_err."},
+ {nil, validation.Errors{"test2": errors.New("test2_err")}, "test2: test2_err."},
+ {validation.Errors{}, errors.New("456"), "\n456"},
+ {errors.New("456"), validation.Errors{}, "456\n"},
+ {validation.Errors{"test1": errors.New("test1_err")}, errors.New("456"), "test1: test1_err."},
+ {errors.New("456"), validation.Errors{"test2": errors.New("test2_err")}, "test2: test2_err."},
+ {validation.Errors{"test1": errors.New("test1_err")}, validation.Errors{"test2": errors.New("test2_err")}, "test1: test1_err; test2: test2_err."},
+ }
+
+ for i, s := range scenarios {
+ t.Run(fmt.Sprintf("%d_%#T_%T", i, s.errA, s.errB), func(t *testing.T) {
+ result := fmt.Sprintf("%v", validators.JoinValidationErrors(s.errA, s.errB))
+ if result != s.expected {
+ t.Fatalf("Expected\n%v\ngot\n%v", s.expected, result)
+ }
+ })
+ }
+}
diff --git a/daos/view.go b/core/view.go
similarity index 66%
rename from daos/view.go
rename to core/view.go
index a7cc7053..da30d8b5 100644
--- a/daos/view.go
+++ b/core/view.go
@@ -1,4 +1,4 @@
-package daos
+package core
import (
"errors"
@@ -8,23 +8,20 @@ import (
"strings"
"github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
+ "github.com/pocketbase/pocketbase/tools/dbutils"
"github.com/pocketbase/pocketbase/tools/inflector"
- "github.com/pocketbase/pocketbase/tools/list"
"github.com/pocketbase/pocketbase/tools/security"
"github.com/pocketbase/pocketbase/tools/tokenizer"
- "github.com/pocketbase/pocketbase/tools/types"
)
// DeleteView drops the specified view name.
//
// This method is a no-op if a view with the provided name doesn't exist.
//
-// Be aware that this method is vulnerable to SQL injection and the
+// NB! Be aware that this method is vulnerable to SQL injection and the
// "name" argument must come only from trusted input!
-func (dao *Dao) DeleteView(name string) error {
- _, err := dao.DB().NewQuery(fmt.Sprintf(
+func (app *BaseApp) DeleteView(name string) error {
+ _, err := app.DB().NewQuery(fmt.Sprintf(
"DROP VIEW IF EXISTS {{%s}}",
name,
)).Execute()
@@ -34,18 +31,18 @@ func (dao *Dao) DeleteView(name string) error {
// SaveView creates (or updates already existing) persistent SQL view.
//
-// Be aware that this method is vulnerable to SQL injection and the
+// NB! Be aware that this method is vulnerable to SQL injection and the
// "selectQuery" argument must come only from trusted input!
-func (dao *Dao) SaveView(name string, selectQuery string) error {
- return dao.RunInTransaction(func(txDao *Dao) error {
+func (app *BaseApp) SaveView(name string, selectQuery string) error {
+ return app.RunInTransaction(func(txApp App) error {
// delete old view (if exists)
- if err := txDao.DeleteView(name); err != nil {
+ if err := txApp.DeleteView(name); err != nil {
return err
}
selectQuery = strings.Trim(strings.TrimSpace(selectQuery), ";")
- // try to eagerly detect multiple inline statements
+ // try to loosely detect multiple inline statements
tk := tokenizer.NewFromString(selectQuery)
tk.Separators(';')
if queryParts, _ := tk.ScanAll(); len(queryParts) > 1 {
@@ -55,18 +52,18 @@ func (dao *Dao) SaveView(name string, selectQuery string) error {
// (re)create the view
//
// note: the query is wrapped in a secondary SELECT as a rudimentary
- // measure to discourage multiple inline sql statements execution.
+ // measure to discourage multiple inline sql statements execution
viewQuery := fmt.Sprintf("CREATE VIEW {{%s}} AS SELECT * FROM (%s)", name, selectQuery)
- if _, err := txDao.DB().NewQuery(viewQuery).Execute(); err != nil {
+ if _, err := txApp.DB().NewQuery(viewQuery).Execute(); err != nil {
return err
}
// fetch the view table info to ensure that the view was created
// because missing tables or columns won't return an error
- if _, err := txDao.TableInfo(name); err != nil {
+ if _, err := txApp.TableInfo(name); err != nil {
// manually cleanup previously created view in case the func
// is called in a nested transaction and the error is discarded
- txDao.DeleteView(name)
+ txApp.DeleteView(name)
return err
}
@@ -75,31 +72,23 @@ func (dao *Dao) SaveView(name string, selectQuery string) error {
})
}
-// CreateViewSchema creates a new view schema from the provided select query.
+// CreateViewFields creates a new FieldsList from the provided select query.
//
// There are some caveats:
// - The select query must have an "id" column.
// - Wildcard ("*") columns are not supported to avoid accidentally leaking sensitive data.
-func (dao *Dao) CreateViewSchema(selectQuery string) (schema.Schema, error) {
- result := schema.NewSchema()
+func (app *BaseApp) CreateViewFields(selectQuery string) (FieldsList, error) {
+ result := NewFieldsList()
- suggestedFields, err := dao.parseQueryToFields(selectQuery)
+ suggestedFields, err := parseQueryToFields(app, selectQuery)
if err != nil {
return result, err
}
// note wrap in a transaction in case the selectQuery contains
// multiple statements allowing us to rollback on any error
- txErr := dao.RunInTransaction(func(txDao *Dao) error {
- tempView := "_temp_" + security.PseudorandomString(5)
- // create a temp view with the provided query
- if err := txDao.SaveView(tempView, selectQuery); err != nil {
- return err
- }
- defer txDao.DeleteView(tempView)
-
- // extract the generated view table info
- info, err := txDao.TableInfo(tempView)
+ txErr := app.RunInTransaction(func(txApp App) error {
+ info, err := getQueryTableInfo(txApp, selectQuery)
if err != nil {
return err
}
@@ -107,15 +96,11 @@ func (dao *Dao) CreateViewSchema(selectQuery string) (schema.Schema, error) {
var hasId bool
for _, row := range info {
- if row.Name == schema.FieldNameId {
+ if row.Name == FieldNameId {
hasId = true
}
- if list.ExistInSlice(row.Name, schema.BaseModelFieldNames()) {
- continue // skip base model fields since they are not part of the schema
- }
-
- var field *schema.SchemaField
+ var field Field
if f, ok := suggestedFields[row.Name]; ok {
field = f.field
@@ -123,7 +108,7 @@ func (dao *Dao) CreateViewSchema(selectQuery string) (schema.Schema, error) {
field = defaultViewField(row.Name)
}
- result.AddField(field)
+ result.Add(field)
}
if !hasId {
@@ -136,14 +121,9 @@ func (dao *Dao) CreateViewSchema(selectQuery string) (schema.Schema, error) {
return result, txErr
}
-// FindRecordByViewFile returns the original models.Record of the
-// provided view collection file.
-func (dao *Dao) FindRecordByViewFile(
- viewCollectionNameOrId string,
- fileFieldName string,
- filename string,
-) (*models.Record, error) {
- view, err := dao.FindCollectionByNameOrId(viewCollectionNameOrId)
+// FindRecordByViewFile returns the original Record of the provided view collection file.
+func (app *BaseApp) FindRecordByViewFile(viewCollectionModelOrIdentifier any, fileFieldName string, filename string) (*Record, error) {
+ view, err := getCollectionByModelOrIdentifier(app, viewCollectionModelOrIdentifier)
if err != nil {
return nil, err
}
@@ -160,7 +140,7 @@ func (dao *Dao) FindRecordByViewFile(
return nil, errors.New("reached the max recursion level of view collection file field queries")
}
- queryFields, err := dao.parseQueryToFields(view.ViewOptions().Query)
+ queryFields, err := parseQueryToFields(app, view.ViewQuery)
if err != nil {
return nil, err
}
@@ -168,13 +148,13 @@ func (dao *Dao) FindRecordByViewFile(
for _, item := range queryFields {
if item.collection == nil ||
item.original == nil ||
- item.field.Name != fileFieldName {
+ item.field.GetName() != fileFieldName {
continue
}
if item.collection.IsView() {
view = item.collection
- fileFieldName = item.original.Name
+ fileFieldName = item.original.GetName()
return findFirstNonViewQueryFileField(level + 1)
}
@@ -189,19 +169,19 @@ func (dao *Dao) FindRecordByViewFile(
return nil, err
}
- cleanFieldName := inflector.Columnify(qf.original.Name)
+ cleanFieldName := inflector.Columnify(qf.original.GetName())
- record := &models.Record{}
+ record := &Record{}
- query := dao.RecordQuery(qf.collection).Limit(1)
+ query := app.RecordQuery(qf.collection).Limit(1)
- if opt, ok := qf.original.Options.(schema.MultiValuer); !ok || !opt.IsMultiple() {
+ if opt, ok := qf.original.(MultiValuer); !ok || !opt.IsMultiple() {
query.AndWhere(dbx.HashExp{cleanFieldName: filename})
} else {
- query.InnerJoin(fmt.Sprintf(
- `json_each(CASE WHEN json_valid([[%s]]) THEN [[%s]] ELSE json_array([[%s]]) END) as {{_je_file}}`,
- cleanFieldName, cleanFieldName, cleanFieldName,
- ), dbx.HashExp{"_je_file.value": filename})
+ query.InnerJoin(
+ fmt.Sprintf(`%s as {{_je_file}}`, dbutils.JSONEach(cleanFieldName)),
+ dbx.HashExp{"_je_file.value": filename},
+ )
}
if err := query.One(record); err != nil {
@@ -217,36 +197,33 @@ func (dao *Dao) FindRecordByViewFile(
type queryField struct {
// field is the final resolved field.
- field *schema.SchemaField
+ field Field
// collection refers to the original field's collection model.
- // It could be nil if the found query field is not from a collection schema.
- collection *models.Collection
+ // It could be nil if the found query field is not from a collection
+ collection *Collection
// original is the original found collection field.
- // It could be nil if the found query field is not from a collection schema.
- original *schema.SchemaField
+ // It could be nil if the found query field is not from a collection
+ original Field
}
-func defaultViewField(name string) *schema.SchemaField {
- return &schema.SchemaField{
- Name: name,
- Type: schema.FieldTypeJson,
- Options: &schema.JsonOptions{
- MaxSize: 1, // the size doesn't matter in this case
- },
+func defaultViewField(name string) Field {
+ return &JSONField{
+ Name: name,
+ MaxSize: 1, // unused for views
}
}
var castRegex = regexp.MustCompile(`(?i)^cast\s*\(.*\s+as\s+(\w+)\s*\)$`)
-func (dao *Dao) parseQueryToFields(selectQuery string) (map[string]*queryField, error) {
+func parseQueryToFields(app App, selectQuery string) (map[string]*queryField, error) {
p := new(identifiersParser)
if err := p.parse(selectQuery); err != nil {
return nil, err
}
- collections, err := dao.findCollectionsByIdentifiers(p.tables)
+ collections, err := findCollectionsByIdentifiers(app, p.tables)
if err != nil {
return nil, err
}
@@ -262,12 +239,24 @@ func (dao *Dao) parseQueryToFields(selectQuery string) (map[string]*queryField,
for _, col := range p.columns {
colLower := strings.ToLower(col.original)
+ // pk (always assume text field for now)
+ if col.alias == FieldNameId {
+ result[col.alias] = &queryField{
+ field: &TextField{
+ Name: col.alias,
+ System: true,
+ Required: true,
+ PrimaryKey: true,
+ },
+ }
+ continue
+ }
+
// numeric aggregations
if strings.HasPrefix(colLower, "count(") || strings.HasPrefix(colLower, "total(") {
result[col.alias] = &queryField{
- field: &schema.SchemaField{
+ field: &NumberField{
Name: col.alias,
- Type: schema.FieldTypeNumber,
},
}
continue
@@ -280,25 +269,22 @@ func (dao *Dao) parseQueryToFields(selectQuery string) (map[string]*queryField,
switch castMatch[1] {
case "real", "integer", "int", "decimal", "numeric":
result[col.alias] = &queryField{
- field: &schema.SchemaField{
+ field: &NumberField{
Name: col.alias,
- Type: schema.FieldTypeNumber,
},
}
continue
case "text":
result[col.alias] = &queryField{
- field: &schema.SchemaField{
+ field: &TextField{
Name: col.alias,
- Type: schema.FieldTypeText,
},
}
continue
case "boolean", "bool":
result[col.alias] = &queryField{
- field: &schema.SchemaField{
+ field: &BoolField{
Name: col.alias,
- Type: schema.FieldTypeBool,
},
}
continue
@@ -308,7 +294,7 @@ func (dao *Dao) parseQueryToFields(selectQuery string) (map[string]*queryField,
parts := strings.Split(col.original, ".")
var fieldName string
- var collection *models.Collection
+ var collection *Collection
if len(parts) == 2 {
fieldName = parts[1]
@@ -318,7 +304,7 @@ func (dao *Dao) parseQueryToFields(selectQuery string) (map[string]*queryField,
collection = collections[mainTable.alias]
}
- // fallback to the default field if the found column is not from a collection schema
+ // fallback to the default field
if collection == nil {
result[col.alias] = &queryField{
field: defaultViewField(col.alias),
@@ -331,83 +317,62 @@ func (dao *Dao) parseQueryToFields(selectQuery string) (map[string]*queryField,
}
// find the first field by name (case insensitive)
- var field *schema.SchemaField
- for _, f := range collection.Schema.Fields() {
- if strings.EqualFold(f.Name, fieldName) {
+ var field Field
+ for _, f := range collection.Fields {
+ if strings.EqualFold(f.GetName(), fieldName) {
field = f
break
}
}
- if field != nil {
- clone := *field
- clone.Id = "" // unset to prevent duplications if the same field is aliased multiple times
- clone.Name = col.alias
- result[col.alias] = &queryField{
- field: &clone,
- collection: collection,
- original: field,
- }
- continue
- }
-
- if fieldName == schema.FieldNameId {
- // convert to relation since it is a direct id reference
- result[col.alias] = &queryField{
- field: &schema.SchemaField{
- Name: col.alias,
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{
- MaxSelect: types.Pointer(1),
- CollectionId: collection.Id,
- },
- },
- collection: collection,
- }
- } else if fieldName == schema.FieldNameCreated || fieldName == schema.FieldNameUpdated {
- result[col.alias] = &queryField{
- field: &schema.SchemaField{
- Name: col.alias,
- Type: schema.FieldTypeDate,
- },
- collection: collection,
- }
- } else if fieldName == schema.FieldNameUsername && collection.IsAuth() {
- result[col.alias] = &queryField{
- field: &schema.SchemaField{
- Name: col.alias,
- Type: schema.FieldTypeText,
- },
- collection: collection,
- }
- } else if fieldName == schema.FieldNameEmail && collection.IsAuth() {
- result[col.alias] = &queryField{
- field: &schema.SchemaField{
- Name: col.alias,
- Type: schema.FieldTypeEmail,
- },
- collection: collection,
- }
- } else if (fieldName == schema.FieldNameVerified || fieldName == schema.FieldNameEmailVisibility) && collection.IsAuth() {
- result[col.alias] = &queryField{
- field: &schema.SchemaField{
- Name: col.alias,
- Type: schema.FieldTypeBool,
- },
- collection: collection,
- }
- } else {
+ // fallback to the default field
+ if field == nil {
result[col.alias] = &queryField{
field: defaultViewField(col.alias),
collection: collection,
}
+ continue
+ }
+
+ // convert to relation since it is an id reference
+ if strings.EqualFold(fieldName, FieldNameId) {
+ result[col.alias] = &queryField{
+ field: &RelationField{
+ Name: col.alias,
+ MaxSelect: 1,
+ CollectionId: collection.Id,
+ },
+ collection: collection,
+ }
+ continue
+ }
+
+ // we fetch a brand new collection object to avoid using reflection
+ // or having a dedicated Clone method for each field type
+ tempCollection, err := app.FindCollectionByNameOrId(collection.Id)
+ if err != nil {
+ return nil, err
+ }
+
+ clone := tempCollection.Fields.GetById(field.GetId())
+ if clone == nil {
+ return nil, fmt.Errorf("missing expected field %q (%q) in collection %q", field.GetName(), field.GetId(), tempCollection.Name)
+ }
+ // set new random id to prevent duplications if the same field is aliased multiple times
+ clone.SetId("_clone_" + security.PseudorandomString(4))
+ clone.SetName(col.alias)
+
+ result[col.alias] = &queryField{
+ original: field,
+ field: clone,
+ collection: collection,
}
}
return result, nil
}
-func (dao *Dao) findCollectionsByIdentifiers(tables []identifier) (map[string]*models.Collection, error) {
+func findCollectionsByIdentifiers(app App, tables []identifier) (map[string]*Collection, error) {
names := make([]any, 0, len(tables))
for _, table := range tables {
@@ -421,10 +386,10 @@ func (dao *Dao) findCollectionsByIdentifiers(tables []identifier) (map[string]*m
return nil, nil
}
- result := make(map[string]*models.Collection, len(names))
- collections := make([]*models.Collection, 0, len(names))
+ result := make(map[string]*Collection, len(names))
+ collections := make([]*Collection, 0, len(names))
- err := dao.CollectionQuery().
+ err := app.CollectionQuery().
AndWhere(dbx.In("name", names...)).
All(&collections)
if err != nil {
@@ -442,12 +407,37 @@ func (dao *Dao) findCollectionsByIdentifiers(tables []identifier) (map[string]*m
return result, nil
}
+func getQueryTableInfo(app App, selectQuery string) ([]*TableInfoRow, error) {
+ tempView := "_temp_" + security.PseudorandomString(6)
+
+ var info []*TableInfoRow
+
+ txErr := app.RunInTransaction(func(txApp App) error {
+ // create a temp view with the provided query
+ err := txApp.SaveView(tempView, selectQuery)
+ if err != nil {
+ return err
+ }
+
+ // extract the generated view table info
+ info, err = txApp.TableInfo(tempView)
+
+ return errors.Join(err, txApp.DeleteView(tempView))
+ })
+
+ if txErr != nil {
+ return nil, txErr
+ }
+
+ return info, nil
+}
+
// -------------------------------------------------------------------
// Raw query identifiers parser
// -------------------------------------------------------------------
-var joinReplaceRegex = regexp.MustCompile(`(?im)\s+(inner join|outer join|left join|right join|join)\s+?`)
-var discardReplaceRegex = regexp.MustCompile(`(?im)\s+(where|group by|having|order|limit|with)\s+?`)
+var joinReplaceRegex = regexp.MustCompile(`(?im)\s+(full\s+outer\s+join|left\s+outer\s+join|right\s+outer\s+join|full\s+join|cross\s+join|inner\s+join|outer\s+join|left\s+join|right\s+join|join)\s+?`)
+var discardReplaceRegex = regexp.MustCompile(`(?im)\s+(where|group\s+by|having|order|limit|with)\s+?`)
var commentsReplaceRegex = regexp.MustCompile(`(?m)(\/\*[\s\S]+\*\/)|(--.+$)`)
type identifier struct {
diff --git a/daos/view_test.go b/core/view_test.go
similarity index 60%
rename from daos/view_test.go
rename to core/view_test.go
index a84a00a9..bd4ec3d6 100644
--- a/daos/view_test.go
+++ b/core/view_test.go
@@ -1,23 +1,20 @@
-package daos_test
+package core_test
import (
"encoding/json"
"fmt"
+ "slices"
"testing"
"github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
"github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/list"
)
func ensureNoTempViews(app core.App, t *testing.T) {
var total int
- err := app.Dao().DB().Select("count(*)").
+ err := app.DB().Select("count(*)").
From("sqlite_schema").
AndWhere(dbx.HashExp{"type": "view"}).
AndWhere(dbx.NewExp(`[[name]] LIKE '%\_temp\_%' ESCAPE '\'`)).
@@ -50,7 +47,7 @@ func TestDeleteView(t *testing.T) {
}
for i, s := range scenarios {
- err := app.Dao().DeleteView(s.viewName)
+ err := app.DeleteView(s.viewName)
hasErr := err != nil
if hasErr != s.expectError {
@@ -84,7 +81,7 @@ func TestSaveView(t *testing.T) {
{
"empty name",
"",
- "select * from _admins",
+ "select * from " + core.CollectionNameSuperusers,
true,
nil,
},
@@ -112,39 +109,40 @@ func TestSaveView(t *testing.T) {
{
"non select query",
"123Test",
- "drop table _admins",
+ "drop table " + core.CollectionNameSuperusers,
true,
nil,
},
{
"multiple select queries",
"123Test",
- "select *, count(id) as c from _admins; select * from demo1;",
+ "select *, count(id) as c from " + core.CollectionNameSuperusers + "; select * from demo1;",
true,
nil,
},
{
"try to break the parent parenthesis",
"123Test",
- "select *, count(id) as c from `_admins`)",
+ "select *, count(id) as c from `" + core.CollectionNameSuperusers + "`)",
true,
nil,
},
{
"simple select query (+ trimmed semicolon)",
"123Test",
- ";select *, count(id) as c from _admins;",
+ ";select *, count(id) as c from " + core.CollectionNameSuperusers + ";",
false,
[]string{
"id", "created", "updated",
- "passwordHash", "tokenKey", "email",
- "lastResetSentAt", "avatar", "c",
+ "password", "tokenKey", "email",
+ "emailVisibility", "verified",
+ "c",
},
},
{
"update old view with new query",
"123Test",
- "select 1 as test from _admins",
+ "select 1 as test from " + core.CollectionNameSuperusers,
false,
[]string{"test"},
},
@@ -152,7 +150,7 @@ func TestSaveView(t *testing.T) {
for _, s := range scenarios {
t.Run(s.scenarioName, func(t *testing.T) {
- err := app.Dao().SaveView(s.viewName, s.query)
+ err := app.SaveView(s.viewName, s.query)
hasErr := err != nil
if hasErr != s.expectError {
@@ -163,7 +161,7 @@ func TestSaveView(t *testing.T) {
return
}
- infoRows, err := app.Dao().TableInfo(s.viewName)
+ infoRows, err := app.TableInfo(s.viewName)
if err != nil {
t.Fatalf("Failed to fetch table info for %s: %v", s.viewName, err)
}
@@ -173,7 +171,7 @@ func TestSaveView(t *testing.T) {
}
for _, row := range infoRows {
- if !list.ExistInSlice(row.Name, s.expectColumns) {
+ if !slices.Contains(s.expectColumns, row.Name) {
t.Fatalf("Missing %q column in %v", row.Name, s.expectColumns)
}
}
@@ -183,14 +181,14 @@ func TestSaveView(t *testing.T) {
ensureNoTempViews(app, t)
}
-func TestCreateViewSchemaWithDiscardedNestedTransaction(t *testing.T) {
+func TestCreateViewFieldsWithDiscardedNestedTransaction(t *testing.T) {
t.Parallel()
app, _ := tests.NewTestApp()
defer app.Cleanup()
- app.Dao().RunInTransaction(func(txDao *daos.Dao) error {
- _, err := txDao.CreateViewSchema("select id from missing")
+ app.RunInTransaction(func(txApp core.App) error {
+ _, err := txApp.CreateViewFields("select id from missing")
if err == nil {
t.Fatal("Expected error, got nil")
}
@@ -201,7 +199,7 @@ func TestCreateViewSchemaWithDiscardedNestedTransaction(t *testing.T) {
ensureNoTempViews(app, t)
}
-func TestCreateViewSchema(t *testing.T) {
+func TestCreateViewFields(t *testing.T) {
t.Parallel()
app, _ := tests.NewTestApp()
@@ -256,8 +254,11 @@ func TestCreateViewSchema(t *testing.T) {
`,
false,
map[string]string{
- "text": schema.FieldTypeText,
- "url": schema.FieldTypeUrl,
+ "id": core.FieldTypeText,
+ "text": core.FieldTypeText,
+ "url": core.FieldTypeURL,
+ "created": core.FieldTypeAutodate,
+ "updated": core.FieldTypeAutodate,
},
},
{
@@ -285,20 +286,23 @@ func TestCreateViewSchema(t *testing.T) {
`,
false,
map[string]string{
- "text": schema.FieldTypeText,
- "bool": schema.FieldTypeBool,
- "url": schema.FieldTypeUrl,
- "select_one": schema.FieldTypeSelect,
- "select_many": schema.FieldTypeSelect,
- "file_one": schema.FieldTypeFile,
- "file_many": schema.FieldTypeFile,
- "number_alias": schema.FieldTypeNumber,
- "email": schema.FieldTypeEmail,
- "datetime": schema.FieldTypeDate,
- "json": schema.FieldTypeJson,
- "rel_one": schema.FieldTypeRelation,
- "rel_many": schema.FieldTypeRelation,
- "single_quoted_column": schema.FieldTypeJson,
+ "id": core.FieldTypeText,
+ "created": core.FieldTypeAutodate,
+ "updated": core.FieldTypeAutodate,
+ "text": core.FieldTypeText,
+ "bool": core.FieldTypeBool,
+ "url": core.FieldTypeURL,
+ "select_one": core.FieldTypeSelect,
+ "select_many": core.FieldTypeSelect,
+ "file_one": core.FieldTypeFile,
+ "file_many": core.FieldTypeFile,
+ "number_alias": core.FieldTypeNumber,
+ "email": core.FieldTypeEmail,
+ "datetime": core.FieldTypeDate,
+ "json": core.FieldTypeJSON,
+ "rel_one": core.FieldTypeRelation,
+ "rel_many": core.FieldTypeRelation,
+ "single_quoted_column": core.FieldTypeJSON,
},
},
{
@@ -306,7 +310,9 @@ func TestCreateViewSchema(t *testing.T) {
"select a.id, b.id as bid, b.created from demo1 as a left join demo2 b",
false,
map[string]string{
- "bid": schema.FieldTypeRelation,
+ "id": core.FieldTypeText,
+ "bid": core.FieldTypeRelation,
+ "created": core.FieldTypeAutodate,
},
},
{
@@ -318,24 +324,25 @@ func TestCreateViewSchema(t *testing.T) {
lj.id cid,
ij.id as did,
a.bool,
- _admins.id as eid,
- _admins.email
+ ` + core.CollectionNameSuperusers + `.id as eid,
+ ` + core.CollectionNameSuperusers + `.email
from demo1 a, demo2 as b
left join demo3 lj on lj.id = 123
inner join demo4 as ij on ij.id = 123
- join _admins
+ join ` + core.CollectionNameSuperusers + `
where 1=1
group by a.id
limit 10
`,
false,
map[string]string{
- "bid": schema.FieldTypeRelation,
- "cid": schema.FieldTypeRelation,
- "did": schema.FieldTypeRelation,
- "bool": schema.FieldTypeBool,
- "eid": schema.FieldTypeJson, // not from collection
- "email": schema.FieldTypeJson, // not from collection
+ "id": core.FieldTypeText,
+ "bid": core.FieldTypeRelation,
+ "cid": core.FieldTypeRelation,
+ "did": core.FieldTypeRelation,
+ "bool": core.FieldTypeBool,
+ "eid": core.FieldTypeRelation,
+ "email": core.FieldTypeEmail,
},
},
{
@@ -359,21 +366,22 @@ func TestCreateViewSchema(t *testing.T) {
from demo1 a`,
false,
map[string]string{
- "count": schema.FieldTypeNumber,
- "total": schema.FieldTypeNumber,
- "cast_int": schema.FieldTypeNumber,
- "cast_integer": schema.FieldTypeNumber,
- "cast_real": schema.FieldTypeNumber,
- "cast_decimal": schema.FieldTypeNumber,
- "cast_numeric": schema.FieldTypeNumber,
- "cast_text": schema.FieldTypeText,
- "cast_bool": schema.FieldTypeBool,
- "cast_boolean": schema.FieldTypeBool,
+ "id": core.FieldTypeText,
+ "count": core.FieldTypeNumber,
+ "total": core.FieldTypeNumber,
+ "cast_int": core.FieldTypeNumber,
+ "cast_integer": core.FieldTypeNumber,
+ "cast_real": core.FieldTypeNumber,
+ "cast_decimal": core.FieldTypeNumber,
+ "cast_numeric": core.FieldTypeNumber,
+ "cast_text": core.FieldTypeText,
+ "cast_bool": core.FieldTypeBool,
+ "cast_boolean": core.FieldTypeBool,
// json because they are nullable
- "sum": schema.FieldTypeJson,
- "avg": schema.FieldTypeJson,
- "min": schema.FieldTypeJson,
- "max": schema.FieldTypeJson,
+ "sum": core.FieldTypeJSON,
+ "avg": core.FieldTypeJSON,
+ "min": core.FieldTypeJSON,
+ "max": core.FieldTypeJSON,
},
},
{
@@ -391,11 +399,12 @@ func TestCreateViewSchema(t *testing.T) {
`,
false,
map[string]string{
- "username": schema.FieldTypeText,
- "email": schema.FieldTypeEmail,
- "emailVisibility": schema.FieldTypeBool,
- "verified": schema.FieldTypeBool,
- "relid": schema.FieldTypeRelation,
+ "id": core.FieldTypeText,
+ "username": core.FieldTypeText,
+ "email": core.FieldTypeEmail,
+ "emailVisibility": core.FieldTypeBool,
+ "verified": core.FieldTypeBool,
+ "relid": core.FieldTypeRelation,
},
},
{
@@ -413,14 +422,15 @@ func TestCreateViewSchema(t *testing.T) {
from demo1`,
false,
map[string]string{
- "id2": schema.FieldTypeRelation,
- "text_alias": schema.FieldTypeText,
- "url_alias": schema.FieldTypeUrl,
- "bool_alias": schema.FieldTypeBool,
- "number_alias": schema.FieldTypeNumber,
- "created_alias": schema.FieldTypeDate,
- "updated_alias": schema.FieldTypeDate,
- "custom": schema.FieldTypeJson,
+ "id": core.FieldTypeText,
+ "id2": core.FieldTypeRelation,
+ "text_alias": core.FieldTypeText,
+ "url_alias": core.FieldTypeURL,
+ "bool_alias": core.FieldTypeBool,
+ "number_alias": core.FieldTypeNumber,
+ "created_alias": core.FieldTypeAutodate,
+ "updated_alias": core.FieldTypeAutodate,
+ "custom": core.FieldTypeJSON,
},
},
{
@@ -432,8 +442,9 @@ func TestCreateViewSchema(t *testing.T) {
from demo1`,
false,
map[string]string{
- "id2": schema.FieldTypeRelation,
- "custom": schema.FieldTypeJson,
+ "id2": core.FieldTypeRelation,
+ "id": core.FieldTypeText,
+ "custom": core.FieldTypeJSON,
},
},
{
@@ -448,46 +459,45 @@ func TestCreateViewSchema(t *testing.T) {
left join demo1 as b`,
false,
map[string]string{
- "alias1": schema.FieldTypeText,
- "alias2": schema.FieldTypeText,
- "alias3": schema.FieldTypeText,
- "alias4": schema.FieldTypeText,
+ "id": core.FieldTypeText,
+ "alias1": core.FieldTypeText,
+ "alias2": core.FieldTypeText,
+ "alias3": core.FieldTypeText,
+ "alias4": core.FieldTypeText,
},
},
}
for _, s := range scenarios {
- result, err := app.Dao().CreateViewSchema(s.query)
+ t.Run(s.name, func(t *testing.T) {
+ result, err := app.CreateViewFields(s.query)
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("[%s] Expected hasErr %v, got %v (%v)", s.name, s.expectError, hasErr, err)
- continue
- }
-
- if hasErr {
- continue
- }
-
- if len(s.expectFields) != len(result.Fields()) {
- serialized, _ := json.Marshal(result)
- t.Errorf("[%s] Expected %d fields, got %d: \n%s", s.name, len(s.expectFields), len(result.Fields()), serialized)
- continue
- }
-
- for name, typ := range s.expectFields {
- field := result.GetFieldByName(name)
-
- if field == nil {
- t.Errorf("[%s] Expected to find field %s, got nil", s.name, name)
- continue
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
}
- if field.Type != typ {
- t.Errorf("[%s] Expected field %s to be %q, got %s", s.name, name, typ, field.Type)
- continue
+ if hasErr {
+ return
}
- }
+
+ if len(s.expectFields) != len(result) {
+ serialized, _ := json.Marshal(result)
+ t.Fatalf("Expected %d fields, got %d: \n%s", len(s.expectFields), len(result), serialized)
+ }
+
+ for name, typ := range s.expectFields {
+ field := result.GetByName(name)
+
+ if field == nil {
+ t.Fatalf("Expected to find field %s, got nil", name)
+ }
+
+ if field.Type() != typ {
+ t.Fatalf("Expected field %s to be %q, got %q", name, typ, field.Type())
+ }
+ }
+ })
}
ensureNoTempViews(app, t)
@@ -499,7 +509,7 @@ func TestFindRecordByViewFile(t *testing.T) {
app, _ := tests.NewTestApp()
defer app.Cleanup()
- prevCollection, err := app.Dao().FindCollectionByNameOrId("demo1")
+ prevCollection, err := app.FindCollectionByNameOrId("demo1")
if err != nil {
t.Fatal(err)
}
@@ -509,22 +519,20 @@ func TestFindRecordByViewFile(t *testing.T) {
// create collection view mocks
fileOneAlias := "file_one one0"
fileManyAlias := "file_many many0"
- mockCollections := make([]*models.Collection, 0, totalLevels)
+ mockCollections := make([]*core.Collection, 0, totalLevels)
for i := 0; i <= totalLevels; i++ {
- view := new(models.Collection)
- view.Type = models.CollectionTypeView
+ view := new(core.Collection)
+ view.Type = core.CollectionTypeView
view.Name = fmt.Sprintf("_test_view%d", i)
- view.SetOptions(&models.CollectionViewOptions{
- Query: fmt.Sprintf(
- "select id, %s, %s from %s",
- fileOneAlias,
- fileManyAlias,
- prevCollection.Name,
- ),
- })
+ view.ViewQuery = fmt.Sprintf(
+ "select id, %s, %s from %s",
+ fileOneAlias,
+ fileManyAlias,
+ prevCollection.Name,
+ )
// save view
- if err := app.Dao().SaveCollection(view); err != nil {
+ if err := app.Save(view); err != nil {
t.Fatalf("Failed to save view%d: %v", i, err)
}
@@ -586,7 +594,6 @@ func TestFindRecordByViewFile(t *testing.T) {
false,
expectedRecordId,
},
-
{
"last view collection before the recursion limit (single file)",
mockCollections[totalLevels-2].Name,
@@ -606,24 +613,25 @@ func TestFindRecordByViewFile(t *testing.T) {
}
for _, s := range scenarios {
- record, err := app.Dao().FindRecordByViewFile(
- s.collectionNameOrId,
- s.fileFieldName,
- s.filename,
- )
+ t.Run(s.name, func(t *testing.T) {
+ record, err := app.FindRecordByViewFile(
+ s.collectionNameOrId,
+ s.fileFieldName,
+ s.filename,
+ )
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("[%s] Expected hasErr %v, got %v (%v)", s.name, s.expectError, hasErr, err)
- continue
- }
+ hasErr := err != nil
+ if hasErr != s.expectError {
+ t.Fatalf("Expected hasErr %v, got %v (%v)", s.expectError, hasErr, err)
+ }
- if hasErr {
- continue
- }
+ if hasErr {
+ return
+ }
- if record.Id != s.expectRecordId {
- t.Errorf("[%s] Expected recordId %q, got %q", s.name, s.expectRecordId, record.Id)
- }
+ if record.Id != s.expectRecordId {
+ t.Fatalf("Expected recordId %q, got %q", s.expectRecordId, record.Id)
+ }
+ })
}
}
diff --git a/daos/admin.go b/daos/admin.go
deleted file mode 100644
index 5b80882b..00000000
--- a/daos/admin.go
+++ /dev/null
@@ -1,128 +0,0 @@
-package daos
-
-import (
- "errors"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-// AdminQuery returns a new Admin select query.
-func (dao *Dao) AdminQuery() *dbx.SelectQuery {
- return dao.ModelQuery(&models.Admin{})
-}
-
-// FindAdminById finds the admin with the provided id.
-func (dao *Dao) FindAdminById(id string) (*models.Admin, error) {
- model := &models.Admin{}
-
- err := dao.AdminQuery().
- AndWhere(dbx.HashExp{"id": id}).
- Limit(1).
- One(model)
-
- if err != nil {
- return nil, err
- }
-
- return model, nil
-}
-
-// FindAdminByEmail finds the admin with the provided email address.
-func (dao *Dao) FindAdminByEmail(email string) (*models.Admin, error) {
- model := &models.Admin{}
-
- err := dao.AdminQuery().
- AndWhere(dbx.HashExp{"email": email}).
- Limit(1).
- One(model)
-
- if err != nil {
- return nil, err
- }
-
- return model, nil
-}
-
-// FindAdminByToken finds the admin associated with the provided JWT.
-//
-// Returns an error if the JWT is invalid or expired.
-func (dao *Dao) FindAdminByToken(token string, baseTokenKey string) (*models.Admin, error) {
- // @todo consider caching the unverified claims
- unverifiedClaims, err := security.ParseUnverifiedJWT(token)
- if err != nil {
- return nil, err
- }
-
- // check required claims
- id, _ := unverifiedClaims["id"].(string)
- if id == "" {
- return nil, errors.New("missing or invalid token claims")
- }
-
- admin, err := dao.FindAdminById(id)
- if err != nil || admin == nil {
- return nil, err
- }
-
- verificationKey := admin.TokenKey + baseTokenKey
-
- // verify token signature
- if _, err := security.ParseJWT(token, verificationKey); err != nil {
- return nil, err
- }
-
- return admin, nil
-}
-
-// TotalAdmins returns the number of existing admin records.
-func (dao *Dao) TotalAdmins() (int, error) {
- var total int
-
- err := dao.AdminQuery().Select("count(*)").Row(&total)
-
- return total, err
-}
-
-// IsAdminEmailUnique checks if the provided email address is not
-// already in use by other admins.
-func (dao *Dao) IsAdminEmailUnique(email string, excludeIds ...string) bool {
- if email == "" {
- return false
- }
-
- query := dao.AdminQuery().Select("count(*)").
- AndWhere(dbx.HashExp{"email": email}).
- Limit(1)
-
- if uniqueExcludeIds := list.NonzeroUniques(excludeIds); len(uniqueExcludeIds) > 0 {
- query.AndWhere(dbx.NotIn("id", list.ToInterfaceSlice(uniqueExcludeIds)...))
- }
-
- var exists bool
-
- return query.Row(&exists) == nil && !exists
-}
-
-// DeleteAdmin deletes the provided Admin model.
-//
-// Returns an error if there is only 1 admin.
-func (dao *Dao) DeleteAdmin(admin *models.Admin) error {
- total, err := dao.TotalAdmins()
- if err != nil {
- return err
- }
-
- if total == 1 {
- return errors.New("you cannot delete the only existing admin")
- }
-
- return dao.Delete(admin)
-}
-
-// SaveAdmin upserts the provided Admin model.
-func (dao *Dao) SaveAdmin(admin *models.Admin) error {
- return dao.Save(admin)
-}
diff --git a/daos/admin_test.go b/daos/admin_test.go
deleted file mode 100644
index 89ab8e8b..00000000
--- a/daos/admin_test.go
+++ /dev/null
@@ -1,274 +0,0 @@
-package daos_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestAdminQuery(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- expected := "SELECT {{_admins}}.* FROM `_admins`"
-
- sql := app.Dao().AdminQuery().Build().SQL()
- if sql != expected {
- t.Errorf("Expected sql %s, got %s", expected, sql)
- }
-}
-
-func TestFindAdminById(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- id string
- expectError bool
- }{
- {" ", true},
- {"missing", true},
- {"9q2trqumvlyr3bd", false},
- }
-
- for i, scenario := range scenarios {
- admin, err := app.Dao().FindAdminById(scenario.id)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- }
-
- if admin != nil && admin.Id != scenario.id {
- t.Errorf("(%d) Expected admin with id %s, got %s", i, scenario.id, admin.Id)
- }
- }
-}
-
-func TestFindAdminByEmail(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- email string
- expectError bool
- }{
- {"", true},
- {"invalid", true},
- {"missing@example.com", true},
- {"test@example.com", false},
- }
-
- for i, scenario := range scenarios {
- admin, err := app.Dao().FindAdminByEmail(scenario.email)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- continue
- }
-
- if !scenario.expectError && admin.Email != scenario.email {
- t.Errorf("(%d) Expected admin with email %s, got %s", i, scenario.email, admin.Email)
- }
- }
-}
-
-func TestFindAdminByToken(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- token string
- baseKey string
- expectedEmail string
- expectError bool
- }{
- // invalid auth token
- {
- "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6InN5d2JoZWNuaDQ2cmhtMCIsInR5cGUiOiJhZG1pbiIsImV4cCI6MTY0MDk5MTY2MX0.qrbkI2TITtFKMP6vrATrBVKPGjEiDIBeQ0mlqPGMVeY",
- app.Settings().AdminAuthToken.Secret,
- "",
- true,
- },
- // expired token
- {
- "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6InN5d2JoZWNuaDQ2cmhtMCIsInR5cGUiOiJhZG1pbiIsImV4cCI6MTY0MDk5MTY2MX0.I7w8iktkleQvC7_UIRpD7rNzcU4OnF7i7SFIUu6lD_4",
- app.Settings().AdminAuthToken.Secret,
- "",
- true,
- },
- // wrong base token (password reset token secret instead of auth secret)
- {
- "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6InN5d2JoZWNuaDQ2cmhtMCIsInR5cGUiOiJhZG1pbiIsImV4cCI6MjIwODk4NTI2MX0.M1m--VOqGyv0d23eeUc0r9xE8ZzHaYVmVFw1VZW6gT8",
- app.Settings().AdminPasswordResetToken.Secret,
- "",
- true,
- },
- // valid token
- {
- "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6InN5d2JoZWNuaDQ2cmhtMCIsInR5cGUiOiJhZG1pbiIsImV4cCI6MjIwODk4NTI2MX0.M1m--VOqGyv0d23eeUc0r9xE8ZzHaYVmVFw1VZW6gT8",
- app.Settings().AdminAuthToken.Secret,
- "test@example.com",
- false,
- },
- }
-
- for i, scenario := range scenarios {
- admin, err := app.Dao().FindAdminByToken(scenario.token, scenario.baseKey)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- continue
- }
-
- if !scenario.expectError && admin.Email != scenario.expectedEmail {
- t.Errorf("(%d) Expected admin model %s, got %s", i, scenario.expectedEmail, admin.Email)
- }
- }
-}
-
-func TestTotalAdmins(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- result1, err := app.Dao().TotalAdmins()
- if err != nil {
- t.Fatal(err)
- }
- if result1 != 3 {
- t.Fatalf("Expected 3 admins, got %d", result1)
- }
-
- // delete all
- app.Dao().DB().NewQuery("delete from {{_admins}}").Execute()
-
- result2, err := app.Dao().TotalAdmins()
- if err != nil {
- t.Fatal(err)
- }
- if result2 != 0 {
- t.Fatalf("Expected 0 admins, got %d", result2)
- }
-}
-
-func TestIsAdminEmailUnique(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- email string
- excludeId string
- expected bool
- }{
- {"", "", false},
- {"test@example.com", "", false},
- {"test2@example.com", "", false},
- {"test3@example.com", "", false},
- {"new@example.com", "", true},
- {"test@example.com", "sywbhecnh46rhm0", true},
- }
-
- for i, scenario := range scenarios {
- result := app.Dao().IsAdminEmailUnique(scenario.email, scenario.excludeId)
- if result != scenario.expected {
- t.Errorf("(%d) Expected %v, got %v", i, scenario.expected, result)
- }
- }
-}
-
-func TestDeleteAdmin(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // try to delete unsaved admin model
- deleteErr0 := app.Dao().DeleteAdmin(&models.Admin{})
- if deleteErr0 == nil {
- t.Fatal("Expected error, got nil")
- }
-
- admin1, err := app.Dao().FindAdminByEmail("test@example.com")
- if err != nil {
- t.Fatal(err)
- }
- admin2, err := app.Dao().FindAdminByEmail("test2@example.com")
- if err != nil {
- t.Fatal(err)
- }
- admin3, err := app.Dao().FindAdminByEmail("test3@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- deleteErr1 := app.Dao().DeleteAdmin(admin1)
- if deleteErr1 != nil {
- t.Fatal(deleteErr1)
- }
-
- deleteErr2 := app.Dao().DeleteAdmin(admin2)
- if deleteErr2 != nil {
- t.Fatal(deleteErr2)
- }
-
- // cannot delete the only remaining admin
- deleteErr3 := app.Dao().DeleteAdmin(admin3)
- if deleteErr3 == nil {
- t.Fatal("Expected delete error, got nil")
- }
-
- total, _ := app.Dao().TotalAdmins()
- if total != 1 {
- t.Fatalf("Expected only 1 admin, got %d", total)
- }
-}
-
-func TestSaveAdmin(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // create
- newAdmin := &models.Admin{}
- newAdmin.Email = "new@example.com"
- newAdmin.SetPassword("123456")
- saveErr1 := app.Dao().SaveAdmin(newAdmin)
- if saveErr1 != nil {
- t.Fatal(saveErr1)
- }
- if newAdmin.Id == "" {
- t.Fatal("Expected admin id to be set")
- }
-
- // update
- existingAdmin, err := app.Dao().FindAdminByEmail("test@example.com")
- if err != nil {
- t.Fatal(err)
- }
- updatedEmail := "test_update@example.com"
- existingAdmin.Email = updatedEmail
- saveErr2 := app.Dao().SaveAdmin(existingAdmin)
- if saveErr2 != nil {
- t.Fatal(saveErr2)
- }
- existingAdmin, _ = app.Dao().FindAdminById(existingAdmin.Id)
- if existingAdmin.Email != updatedEmail {
- t.Fatalf("Expected admin email to be %s, got %s", updatedEmail, existingAdmin.Email)
- }
-}
diff --git a/daos/base.go b/daos/base.go
deleted file mode 100644
index 5698c421..00000000
--- a/daos/base.go
+++ /dev/null
@@ -1,372 +0,0 @@
-// Package daos handles common PocketBase DB model manipulations.
-//
-// Think of daos as DB repository and service layer in one.
-package daos
-
-import (
- "errors"
- "fmt"
- "time"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
-)
-
-// New creates a new Dao instance with the provided db builder
-// (for both async and sync db operations).
-func New(db dbx.Builder) *Dao {
- return NewMultiDB(db, db)
-}
-
-// NewMultiDB creates a new Dao instance with the provided dedicated
-// async and sync db builders.
-func NewMultiDB(concurrentDB, nonconcurrentDB dbx.Builder) *Dao {
- return &Dao{
- concurrentDB: concurrentDB,
- nonconcurrentDB: nonconcurrentDB,
- MaxLockRetries: 8,
- ModelQueryTimeout: 30 * time.Second,
- }
-}
-
-// Dao handles various db operations.
-//
-// You can think of Dao as a repository and service layer in one.
-type Dao struct {
- // in a transaction both refer to the same *dbx.TX instance
- concurrentDB dbx.Builder
- nonconcurrentDB dbx.Builder
-
- // MaxLockRetries specifies the default max "database is locked" auto retry attempts.
- MaxLockRetries int
-
- // ModelQueryTimeout is the default max duration of a running ModelQuery().
- //
- // This field has no effect if an explicit query context is already specified.
- ModelQueryTimeout time.Duration
-
- // write hooks
- BeforeCreateFunc func(eventDao *Dao, m models.Model, action func() error) error
- AfterCreateFunc func(eventDao *Dao, m models.Model) error
- BeforeUpdateFunc func(eventDao *Dao, m models.Model, action func() error) error
- AfterUpdateFunc func(eventDao *Dao, m models.Model) error
- BeforeDeleteFunc func(eventDao *Dao, m models.Model, action func() error) error
- AfterDeleteFunc func(eventDao *Dao, m models.Model) error
-}
-
-// DB returns the default dao db builder (*dbx.DB or *dbx.TX).
-//
-// Currently the default db builder is dao.concurrentDB but that may change in the future.
-func (dao *Dao) DB() dbx.Builder {
- return dao.ConcurrentDB()
-}
-
-// ConcurrentDB returns the dao concurrent (aka. multiple open connections)
-// db builder (*dbx.DB or *dbx.TX).
-//
-// In a transaction the concurrentDB and nonconcurrentDB refer to the same *dbx.TX instance.
-func (dao *Dao) ConcurrentDB() dbx.Builder {
- return dao.concurrentDB
-}
-
-// NonconcurrentDB returns the dao nonconcurrent (aka. single open connection)
-// db builder (*dbx.DB or *dbx.TX).
-//
-// In a transaction the concurrentDB and nonconcurrentDB refer to the same *dbx.TX instance.
-func (dao *Dao) NonconcurrentDB() dbx.Builder {
- return dao.nonconcurrentDB
-}
-
-// Clone returns a new Dao with the same configuration options as the current one.
-func (dao *Dao) Clone() *Dao {
- clone := *dao
-
- return &clone
-}
-
-// WithoutHooks returns a new Dao with the same configuration options
-// as the current one, but without create/update/delete hooks.
-func (dao *Dao) WithoutHooks() *Dao {
- clone := dao.Clone()
-
- clone.BeforeCreateFunc = nil
- clone.AfterCreateFunc = nil
- clone.BeforeUpdateFunc = nil
- clone.AfterUpdateFunc = nil
- clone.BeforeDeleteFunc = nil
- clone.AfterDeleteFunc = nil
-
- return clone
-}
-
-// ModelQuery creates a new preconfigured select query with preset
-// SELECT, FROM and other common fields based on the provided model.
-func (dao *Dao) ModelQuery(m models.Model) *dbx.SelectQuery {
- tableName := m.TableName()
-
- return dao.DB().
- Select("{{" + tableName + "}}.*").
- From(tableName).
- WithBuildHook(func(query *dbx.Query) {
- query.WithExecHook(execLockRetry(dao.ModelQueryTimeout, dao.MaxLockRetries))
- })
-}
-
-// FindById finds a single db record with the specified id and
-// scans the result into m.
-func (dao *Dao) FindById(m models.Model, id string) error {
- return dao.ModelQuery(m).Where(dbx.HashExp{"id": id}).Limit(1).One(m)
-}
-
-type afterCallGroup struct {
- Model models.Model
- EventDao *Dao
- Action string
-}
-
-// RunInTransaction wraps fn into a transaction.
-//
-// It is safe to nest RunInTransaction calls as long as you use the txDao.
-func (dao *Dao) RunInTransaction(fn func(txDao *Dao) error) error {
- switch txOrDB := dao.NonconcurrentDB().(type) {
- case *dbx.Tx:
- // nested transactions are not supported by default
- // so execute the function within the current transaction
- // ---
- // create a new dao with the same hooks to avoid semaphore deadlock when nesting
- txDao := New(txOrDB)
- txDao.MaxLockRetries = dao.MaxLockRetries
- txDao.ModelQueryTimeout = dao.ModelQueryTimeout
- txDao.BeforeCreateFunc = dao.BeforeCreateFunc
- txDao.BeforeUpdateFunc = dao.BeforeUpdateFunc
- txDao.BeforeDeleteFunc = dao.BeforeDeleteFunc
- txDao.AfterCreateFunc = dao.AfterCreateFunc
- txDao.AfterUpdateFunc = dao.AfterUpdateFunc
- txDao.AfterDeleteFunc = dao.AfterDeleteFunc
-
- return fn(txDao)
- case *dbx.DB:
- afterCalls := []afterCallGroup{}
-
- txError := txOrDB.Transactional(func(tx *dbx.Tx) error {
- txDao := New(tx)
-
- if dao.BeforeCreateFunc != nil {
- txDao.BeforeCreateFunc = func(eventDao *Dao, m models.Model, action func() error) error {
- return dao.BeforeCreateFunc(eventDao, m, action)
- }
- }
- if dao.BeforeUpdateFunc != nil {
- txDao.BeforeUpdateFunc = func(eventDao *Dao, m models.Model, action func() error) error {
- return dao.BeforeUpdateFunc(eventDao, m, action)
- }
- }
- if dao.BeforeDeleteFunc != nil {
- txDao.BeforeDeleteFunc = func(eventDao *Dao, m models.Model, action func() error) error {
- return dao.BeforeDeleteFunc(eventDao, m, action)
- }
- }
-
- if dao.AfterCreateFunc != nil {
- txDao.AfterCreateFunc = func(eventDao *Dao, m models.Model) error {
- afterCalls = append(afterCalls, afterCallGroup{m, eventDao, "create"})
- return nil
- }
- }
- if dao.AfterUpdateFunc != nil {
- txDao.AfterUpdateFunc = func(eventDao *Dao, m models.Model) error {
- afterCalls = append(afterCalls, afterCallGroup{m, eventDao, "update"})
- return nil
- }
- }
- if dao.AfterDeleteFunc != nil {
- txDao.AfterDeleteFunc = func(eventDao *Dao, m models.Model) error {
- afterCalls = append(afterCalls, afterCallGroup{m, eventDao, "delete"})
- return nil
- }
- }
-
- return fn(txDao)
- })
- if txError != nil {
- return txError
- }
-
- // execute after event calls on successful transaction
- // (note: using the non-transaction dao to allow following queries in the after hooks)
- var errs []error
- for _, call := range afterCalls {
- var err error
- switch call.Action {
- case "create":
- err = dao.AfterCreateFunc(dao, call.Model)
- case "update":
- err = dao.AfterUpdateFunc(dao, call.Model)
- case "delete":
- err = dao.AfterDeleteFunc(dao, call.Model)
- }
-
- if err != nil {
- errs = append(errs, err)
- }
- }
- if len(errs) > 0 {
- return fmt.Errorf("after transaction errors: %w", errors.Join(errs...))
- }
-
- return nil
- }
-
- return errors.New("failed to start transaction (unknown dao.NonconcurrentDB() instance)")
-}
-
-// Delete deletes the provided model.
-func (dao *Dao) Delete(m models.Model) error {
- if !m.HasId() {
- return errors.New("ID is not set")
- }
-
- return dao.lockRetry(func(retryDao *Dao) error {
- action := func() error {
- if err := retryDao.NonconcurrentDB().Model(m).Delete(); err != nil {
- return err
- }
-
- if retryDao.AfterDeleteFunc != nil {
- retryDao.AfterDeleteFunc(retryDao, m)
- }
-
- return nil
- }
-
- if retryDao.BeforeDeleteFunc != nil {
- return retryDao.BeforeDeleteFunc(retryDao, m, action)
- }
-
- return action()
- })
-}
-
-// Save persists the provided model in the database.
-//
-// If m.IsNew() is true, the method will perform a create, otherwise an update.
-// To explicitly mark a model for update you can use m.MarkAsNotNew().
-func (dao *Dao) Save(m models.Model) error {
- if m.IsNew() {
- return dao.lockRetry(func(retryDao *Dao) error {
- return retryDao.create(m)
- })
- }
-
- return dao.lockRetry(func(retryDao *Dao) error {
- return retryDao.update(m)
- })
-}
-
-func (dao *Dao) update(m models.Model) error {
- if !m.HasId() {
- return errors.New("ID is not set")
- }
-
- if m.GetCreated().IsZero() {
- m.RefreshCreated()
- }
-
- m.RefreshUpdated()
-
- action := func() error {
- if v, ok := any(m).(models.ColumnValueMapper); ok {
- dataMap := v.ColumnValueMap()
-
- _, err := dao.NonconcurrentDB().Update(
- m.TableName(),
- dataMap,
- dbx.HashExp{"id": m.GetId()},
- ).Execute()
-
- if err != nil {
- return err
- }
- } else if err := dao.NonconcurrentDB().Model(m).Update(); err != nil {
- return err
- }
-
- if dao.AfterUpdateFunc != nil {
- return dao.AfterUpdateFunc(dao, m)
- }
-
- return nil
- }
-
- if dao.BeforeUpdateFunc != nil {
- return dao.BeforeUpdateFunc(dao, m, action)
- }
-
- return action()
-}
-
-func (dao *Dao) create(m models.Model) error {
- if !m.HasId() {
- // auto generate id
- m.RefreshId()
- }
-
- // mark the model as "new" since the model now always has an ID
- m.MarkAsNew()
-
- if m.GetCreated().IsZero() {
- m.RefreshCreated()
- }
-
- if m.GetUpdated().IsZero() {
- m.RefreshUpdated()
- }
-
- action := func() error {
- if v, ok := any(m).(models.ColumnValueMapper); ok {
- dataMap := v.ColumnValueMap()
- if _, ok := dataMap["id"]; !ok {
- dataMap["id"] = m.GetId()
- }
-
- _, err := dao.NonconcurrentDB().Insert(m.TableName(), dataMap).Execute()
- if err != nil {
- return err
- }
- } else if err := dao.NonconcurrentDB().Model(m).Insert(); err != nil {
- return err
- }
-
- // clears the "new" model flag
- m.MarkAsNotNew()
-
- if dao.AfterCreateFunc != nil {
- return dao.AfterCreateFunc(dao, m)
- }
-
- return nil
- }
-
- if dao.BeforeCreateFunc != nil {
- return dao.BeforeCreateFunc(dao, m, action)
- }
-
- return action()
-}
-
-func (dao *Dao) lockRetry(op func(retryDao *Dao) error) error {
- retryDao := dao
-
- return baseLockRetry(func(attempt int) error {
- if attempt == 2 {
- // assign new Dao without the before hooks to avoid triggering
- // the already fired before callbacks multiple times
- retryDao = NewMultiDB(dao.concurrentDB, dao.nonconcurrentDB)
- retryDao.AfterCreateFunc = dao.AfterCreateFunc
- retryDao.AfterUpdateFunc = dao.AfterUpdateFunc
- retryDao.AfterDeleteFunc = dao.AfterDeleteFunc
- }
-
- return op(retryDao)
- }, dao.MaxLockRetries)
-}
diff --git a/daos/base_retry_test.go b/daos/base_retry_test.go
deleted file mode 100644
index 72b80478..00000000
--- a/daos/base_retry_test.go
+++ /dev/null
@@ -1,64 +0,0 @@
-package daos
-
-import (
- "errors"
- "testing"
-)
-
-func TestGetDefaultRetryInterval(t *testing.T) {
- t.Parallel()
-
- if i := getDefaultRetryInterval(-1); i.Milliseconds() != 1000 {
- t.Fatalf("Expected 1000ms, got %v", i)
- }
-
- if i := getDefaultRetryInterval(999); i.Milliseconds() != 1000 {
- t.Fatalf("Expected 1000ms, got %v", i)
- }
-
- if i := getDefaultRetryInterval(3); i.Milliseconds() != 500 {
- t.Fatalf("Expected 500ms, got %v", i)
- }
-}
-
-func TestBaseLockRetry(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- err error
- failUntilAttempt int
- expectedAttempts int
- }{
- {nil, 3, 1},
- {errors.New("test"), 3, 1},
- {errors.New("database is locked"), 3, 3},
- }
-
- for i, s := range scenarios {
- lastAttempt := 0
-
- err := baseLockRetry(func(attempt int) error {
- lastAttempt = attempt
-
- if attempt < s.failUntilAttempt {
- return s.err
- }
-
- return nil
- }, s.failUntilAttempt+2)
-
- if lastAttempt != s.expectedAttempts {
- t.Errorf("[%d] Expected lastAttempt to be %d, got %d", i, s.expectedAttempts, lastAttempt)
- }
-
- if s.failUntilAttempt == s.expectedAttempts && err != nil {
- t.Errorf("[%d] Expected nil, got err %v", i, err)
- continue
- }
-
- if s.failUntilAttempt != s.expectedAttempts && s.err != nil && err == nil {
- t.Errorf("[%d] Expected error %q, got nil", i, s.err)
- continue
- }
- }
-}
diff --git a/daos/base_test.go b/daos/base_test.go
deleted file mode 100644
index 9de1ba9d..00000000
--- a/daos/base_test.go
+++ /dev/null
@@ -1,870 +0,0 @@
-package daos_test
-
-import (
- "errors"
- "testing"
- "time"
-
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestNew(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- dao := daos.New(testApp.DB())
-
- if dao.DB() != testApp.DB() {
- t.Fatal("The 2 db instances are different")
- }
-}
-
-func TestNewMultiDB(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- dao := daos.NewMultiDB(testApp.Dao().ConcurrentDB(), testApp.Dao().NonconcurrentDB())
-
- if dao.DB() != testApp.Dao().ConcurrentDB() {
- t.Fatal("[db-concurrentDB] The 2 db instances are different")
- }
-
- if dao.ConcurrentDB() != testApp.Dao().ConcurrentDB() {
- t.Fatal("[concurrentDB-concurrentDB] The 2 db instances are different")
- }
-
- if dao.NonconcurrentDB() != testApp.Dao().NonconcurrentDB() {
- t.Fatal("[nonconcurrentDB-nonconcurrentDB] The 2 db instances are different")
- }
-}
-
-func TestDaoClone(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- hookCalls := map[string]int{}
-
- dao := daos.NewMultiDB(testApp.Dao().ConcurrentDB(), testApp.Dao().NonconcurrentDB())
- dao.MaxLockRetries = 1
- dao.ModelQueryTimeout = 2
- dao.BeforeDeleteFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- hookCalls["BeforeDeleteFunc"]++
- return action()
- }
- dao.BeforeUpdateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- hookCalls["BeforeUpdateFunc"]++
- return action()
- }
- dao.BeforeCreateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- hookCalls["BeforeCreateFunc"]++
- return action()
- }
- dao.AfterDeleteFunc = func(eventDao *daos.Dao, m models.Model) error {
- hookCalls["AfterDeleteFunc"]++
- return nil
- }
- dao.AfterUpdateFunc = func(eventDao *daos.Dao, m models.Model) error {
- hookCalls["AfterUpdateFunc"]++
- return nil
- }
- dao.AfterCreateFunc = func(eventDao *daos.Dao, m models.Model) error {
- hookCalls["AfterCreateFunc"]++
- return nil
- }
-
- clone := dao.Clone()
- clone.MaxLockRetries = 3
- clone.ModelQueryTimeout = 4
- clone.AfterCreateFunc = func(eventDao *daos.Dao, m models.Model) error {
- hookCalls["NewAfterCreateFunc"]++
- return nil
- }
-
- if dao.MaxLockRetries == clone.MaxLockRetries {
- t.Fatal("Expected different MaxLockRetries")
- }
-
- if dao.ModelQueryTimeout == clone.ModelQueryTimeout {
- t.Fatal("Expected different ModelQueryTimeout")
- }
-
- emptyAction := func() error { return nil }
-
- // trigger hooks
- dao.BeforeDeleteFunc(nil, nil, emptyAction)
- dao.BeforeUpdateFunc(nil, nil, emptyAction)
- dao.BeforeCreateFunc(nil, nil, emptyAction)
- dao.AfterDeleteFunc(nil, nil)
- dao.AfterUpdateFunc(nil, nil)
- dao.AfterCreateFunc(nil, nil)
- clone.BeforeDeleteFunc(nil, nil, emptyAction)
- clone.BeforeUpdateFunc(nil, nil, emptyAction)
- clone.BeforeCreateFunc(nil, nil, emptyAction)
- clone.AfterDeleteFunc(nil, nil)
- clone.AfterUpdateFunc(nil, nil)
- clone.AfterCreateFunc(nil, nil)
-
- expectations := []struct {
- hook string
- total int
- }{
- {"BeforeDeleteFunc", 2},
- {"BeforeUpdateFunc", 2},
- {"BeforeCreateFunc", 2},
- {"AfterDeleteFunc", 2},
- {"AfterUpdateFunc", 2},
- {"AfterCreateFunc", 1},
- {"NewAfterCreateFunc", 1},
- }
-
- for _, e := range expectations {
- if hookCalls[e.hook] != e.total {
- t.Errorf("Expected %s to be caleed %d", e.hook, e.total)
- }
- }
-}
-
-func TestDaoWithoutHooks(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- hookCalls := map[string]int{}
-
- dao := daos.NewMultiDB(testApp.Dao().ConcurrentDB(), testApp.Dao().NonconcurrentDB())
- dao.MaxLockRetries = 1
- dao.ModelQueryTimeout = 2
- dao.BeforeDeleteFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- hookCalls["BeforeDeleteFunc"]++
- return action()
- }
- dao.BeforeUpdateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- hookCalls["BeforeUpdateFunc"]++
- return action()
- }
- dao.BeforeCreateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- hookCalls["BeforeCreateFunc"]++
- return action()
- }
- dao.AfterDeleteFunc = func(eventDao *daos.Dao, m models.Model) error {
- hookCalls["AfterDeleteFunc"]++
- return nil
- }
- dao.AfterUpdateFunc = func(eventDao *daos.Dao, m models.Model) error {
- hookCalls["AfterUpdateFunc"]++
- return nil
- }
- dao.AfterCreateFunc = func(eventDao *daos.Dao, m models.Model) error {
- hookCalls["AfterCreateFunc"]++
- return nil
- }
-
- new := dao.WithoutHooks()
-
- if new.MaxLockRetries != dao.MaxLockRetries {
- t.Fatalf("Expected MaxLockRetries %d, got %d", new.Clone().MaxLockRetries, dao.MaxLockRetries)
- }
-
- if new.ModelQueryTimeout != dao.ModelQueryTimeout {
- t.Fatalf("Expected ModelQueryTimeout %d, got %d", new.Clone().ModelQueryTimeout, dao.ModelQueryTimeout)
- }
-
- if new.BeforeDeleteFunc != nil {
- t.Fatal("Expected BeforeDeleteFunc to be nil")
- }
-
- if new.BeforeUpdateFunc != nil {
- t.Fatal("Expected BeforeUpdateFunc to be nil")
- }
-
- if new.BeforeCreateFunc != nil {
- t.Fatal("Expected BeforeCreateFunc to be nil")
- }
-
- if new.AfterDeleteFunc != nil {
- t.Fatal("Expected AfterDeleteFunc to be nil")
- }
-
- if new.AfterUpdateFunc != nil {
- t.Fatal("Expected AfterUpdateFunc to be nil")
- }
-
- if new.AfterCreateFunc != nil {
- t.Fatal("Expected AfterCreateFunc to be nil")
- }
-}
-
-func TestDaoModelQuery(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- dao := daos.New(testApp.DB())
-
- scenarios := []struct {
- model models.Model
- expected string
- }{
- {
- &models.Collection{},
- "SELECT {{_collections}}.* FROM `_collections`",
- },
- {
- &models.Admin{},
- "SELECT {{_admins}}.* FROM `_admins`",
- },
- {
- &models.Request{},
- "SELECT {{_requests}}.* FROM `_requests`",
- },
- }
-
- for i, scenario := range scenarios {
- sql := dao.ModelQuery(scenario.model).Build().SQL()
- if sql != scenario.expected {
- t.Errorf("(%d) Expected select %s, got %s", i, scenario.expected, sql)
- }
- }
-}
-
-func TestDaoModelQueryCancellation(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- dao := daos.New(testApp.DB())
-
- m := &models.Admin{}
-
- if err := dao.ModelQuery(m).One(m); err != nil {
- t.Fatalf("Failed to execute control query: %v", err)
- }
-
- dao.ModelQueryTimeout = 0 * time.Millisecond
- if err := dao.ModelQuery(m).One(m); err == nil {
- t.Fatal("Expected to be cancelled, got nil")
- }
-}
-
-func TestDaoFindById(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- scenarios := []struct {
- model models.Model
- id string
- expectError bool
- }{
- // missing id
- {
- &models.Collection{},
- "missing",
- true,
- },
- // existing collection id
- {
- &models.Collection{},
- "wsmn24bux7wo113",
- false,
- },
- // existing admin id
- {
- &models.Admin{},
- "sbmbsdb40jyxf7h",
- false,
- },
- }
-
- for i, scenario := range scenarios {
- err := testApp.Dao().FindById(scenario.model, scenario.id)
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected %v, got %v", i, scenario.expectError, err)
- }
-
- if !scenario.expectError && scenario.id != scenario.model.GetId() {
- t.Errorf("(%d) Expected model with id %v, got %v", i, scenario.id, scenario.model.GetId())
- }
- }
-}
-
-func TestDaoRunInTransaction(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- // failed nested transaction
- testApp.Dao().RunInTransaction(func(txDao *daos.Dao) error {
- admin, _ := txDao.FindAdminByEmail("test@example.com")
-
- return txDao.RunInTransaction(func(tx2Dao *daos.Dao) error {
- if err := tx2Dao.DeleteAdmin(admin); err != nil {
- t.Fatal(err)
- }
- return errors.New("test error")
- })
- })
-
- // admin should still exist
- admin1, _ := testApp.Dao().FindAdminByEmail("test@example.com")
- if admin1 == nil {
- t.Fatal("Expected admin test@example.com to not be deleted")
- }
-
- // successful nested transaction
- testApp.Dao().RunInTransaction(func(txDao *daos.Dao) error {
- admin, _ := txDao.FindAdminByEmail("test@example.com")
-
- return txDao.RunInTransaction(func(tx2Dao *daos.Dao) error {
- return tx2Dao.DeleteAdmin(admin)
- })
- })
-
- // admin should have been deleted
- admin2, _ := testApp.Dao().FindAdminByEmail("test@example.com")
- if admin2 != nil {
- t.Fatalf("Expected admin test@example.com to be deleted, found %v", admin2)
- }
-}
-
-func TestDaoSaveCreate(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- model := &models.Admin{}
- model.Email = "test_new@example.com"
- model.Avatar = 8
- if err := testApp.Dao().Save(model); err != nil {
- t.Fatal(err)
- }
-
- // refresh
- model, _ = testApp.Dao().FindAdminByEmail("test_new@example.com")
-
- if model.Avatar != 8 {
- t.Fatalf("Expected model avatar field to be 8, got %v", model.Avatar)
- }
-
- expectedHooks := []string{"OnModelBeforeCreate", "OnModelAfterCreate"}
- for _, h := range expectedHooks {
- if v, ok := testApp.EventCalls[h]; !ok || v != 1 {
- t.Fatalf("Expected event %s to be called exactly one time, got %d", h, v)
- }
- }
-}
-
-func TestDaoSaveWithInsertId(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- model := &models.Admin{}
- model.Id = "test"
- model.Email = "test_new@example.com"
- model.MarkAsNew()
- if err := testApp.Dao().Save(model); err != nil {
- t.Fatal(err)
- }
-
- // refresh
- model, _ = testApp.Dao().FindAdminById("test")
-
- if model == nil {
- t.Fatal("Failed to find admin with id 'test'")
- }
-
- expectedHooks := []string{"OnModelBeforeCreate", "OnModelAfterCreate"}
- for _, h := range expectedHooks {
- if v, ok := testApp.EventCalls[h]; !ok || v != 1 {
- t.Fatalf("Expected event %s to be called exactly one time, got %d", h, v)
- }
- }
-}
-
-func TestDaoSaveUpdate(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- model, _ := testApp.Dao().FindAdminByEmail("test@example.com")
-
- model.Avatar = 8
- if err := testApp.Dao().Save(model); err != nil {
- t.Fatal(err)
- }
-
- // refresh
- model, _ = testApp.Dao().FindAdminByEmail("test@example.com")
-
- if model.Avatar != 8 {
- t.Fatalf("Expected model avatar field to be updated to 8, got %v", model.Avatar)
- }
-
- expectedHooks := []string{"OnModelBeforeUpdate", "OnModelAfterUpdate"}
- for _, h := range expectedHooks {
- if v, ok := testApp.EventCalls[h]; !ok || v != 1 {
- t.Fatalf("Expected event %s to be called exactly one time, got %d", h, v)
- }
- }
-}
-
-type dummyColumnValueMapper struct {
- models.Admin
-}
-
-func (a *dummyColumnValueMapper) ColumnValueMap() map[string]any {
- return map[string]any{
- "email": a.Email,
- "passwordHash": a.PasswordHash,
- "tokenKey": "custom_token_key",
- }
-}
-
-func TestDaoSaveWithColumnValueMapper(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- model := &dummyColumnValueMapper{}
- model.Id = "test_mapped_id" // explicitly set an id
- model.Email = "test_mapped_create@example.com"
- model.TokenKey = "test_unmapped_token_key" // not used in the map
- model.SetPassword("123456")
- model.MarkAsNew()
- if err := testApp.Dao().Save(model); err != nil {
- t.Fatal(err)
- }
-
- createdModel, _ := testApp.Dao().FindAdminById("test_mapped_id")
- if createdModel == nil {
- t.Fatal("[create] Failed to find model with id 'test_mapped_id'")
- }
- if createdModel.Email != model.Email {
- t.Fatalf("Expected model with email %q, got %q", model.Email, createdModel.Email)
- }
- if createdModel.TokenKey != "custom_token_key" {
- t.Fatalf("Expected model with tokenKey %q, got %q", "custom_token_key", createdModel.TokenKey)
- }
-
- model.Email = "test_mapped_update@example.com"
- model.Avatar = 9 // not mapped and expect to be ignored
- if err := testApp.Dao().Save(model); err != nil {
- t.Fatal(err)
- }
-
- updatedModel, _ := testApp.Dao().FindAdminById("test_mapped_id")
- if updatedModel == nil {
- t.Fatal("[update] Failed to find model with id 'test_mapped_id'")
- }
- if updatedModel.Email != model.Email {
- t.Fatalf("Expected model with email %q, got %q", model.Email, createdModel.Email)
- }
- if updatedModel.Avatar != 0 {
- t.Fatalf("Expected model avatar 0, got %v", updatedModel.Avatar)
- }
-}
-
-func TestDaoDelete(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- model, _ := testApp.Dao().FindAdminByEmail("test@example.com")
-
- if err := testApp.Dao().Delete(model); err != nil {
- t.Fatal(err)
- }
-
- model, _ = testApp.Dao().FindAdminByEmail("test@example.com")
- if model != nil {
- t.Fatalf("Expected model to be deleted, found %v", model)
- }
-
- expectedHooks := []string{"OnModelBeforeDelete", "OnModelAfterDelete"}
- for _, h := range expectedHooks {
- if v, ok := testApp.EventCalls[h]; !ok || v != 1 {
- t.Fatalf("Expected event %s to be called exactly one time, got %d", h, v)
- }
- }
-}
-
-func TestDaoRetryCreate(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- // init mock retry dao
- retryBeforeCreateHookCalls := 0
- retryAfterCreateHookCalls := 0
- retryDao := daos.New(testApp.DB())
- retryDao.BeforeCreateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- retryBeforeCreateHookCalls++
- return errors.New("database is locked")
- }
- retryDao.AfterCreateFunc = func(eventDao *daos.Dao, m models.Model) error {
- retryAfterCreateHookCalls++
- return nil
- }
-
- model := &models.Admin{Email: "new@example.com"}
- if err := retryDao.Save(model); err != nil {
- t.Fatalf("Expected nil after retry, got error: %v", err)
- }
-
- // the before hook is expected to be called only once because
- // it is ignored after the first "database is locked" error
- if retryBeforeCreateHookCalls != 1 {
- t.Fatalf("Expected before hook calls to be 1, got %d", retryBeforeCreateHookCalls)
- }
-
- if retryAfterCreateHookCalls != 1 {
- t.Fatalf("Expected after hook calls to be 1, got %d", retryAfterCreateHookCalls)
- }
-
- // with non-locking error
- retryBeforeCreateHookCalls = 0
- retryAfterCreateHookCalls = 0
- retryDao.BeforeCreateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- retryBeforeCreateHookCalls++
- return errors.New("non-locking error")
- }
-
- dummy := &models.Admin{Email: "test@example.com"}
- if err := retryDao.Save(dummy); err == nil {
- t.Fatal("Expected error, got nil")
- }
-
- if retryBeforeCreateHookCalls != 1 {
- t.Fatalf("Expected before hook calls to be 1, got %d", retryBeforeCreateHookCalls)
- }
-
- if retryAfterCreateHookCalls != 0 {
- t.Fatalf("Expected after hook calls to be 0, got %d", retryAfterCreateHookCalls)
- }
-}
-
-func TestDaoRetryUpdate(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- model, err := testApp.Dao().FindAdminByEmail("test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- // init mock retry dao
- retryBeforeUpdateHookCalls := 0
- retryAfterUpdateHookCalls := 0
- retryDao := daos.New(testApp.DB())
- retryDao.BeforeUpdateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- retryBeforeUpdateHookCalls++
- return errors.New("database is locked")
- }
- retryDao.AfterUpdateFunc = func(eventDao *daos.Dao, m models.Model) error {
- retryAfterUpdateHookCalls++
- return nil
- }
-
- if err := retryDao.Save(model); err != nil {
- t.Fatalf("Expected nil after retry, got error: %v", err)
- }
-
- // the before hook is expected to be called only once because
- // it is ignored after the first "database is locked" error
- if retryBeforeUpdateHookCalls != 1 {
- t.Fatalf("Expected before hook calls to be 1, got %d", retryBeforeUpdateHookCalls)
- }
-
- if retryAfterUpdateHookCalls != 1 {
- t.Fatalf("Expected after hook calls to be 1, got %d", retryAfterUpdateHookCalls)
- }
-
- // with non-locking error
- retryBeforeUpdateHookCalls = 0
- retryAfterUpdateHookCalls = 0
- retryDao.BeforeUpdateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- retryBeforeUpdateHookCalls++
- return errors.New("non-locking error")
- }
-
- if err := retryDao.Save(model); err == nil {
- t.Fatal("Expected error, got nil")
- }
-
- if retryBeforeUpdateHookCalls != 1 {
- t.Fatalf("Expected before hook calls to be 1, got %d", retryBeforeUpdateHookCalls)
- }
-
- if retryAfterUpdateHookCalls != 0 {
- t.Fatalf("Expected after hook calls to be 0, got %d", retryAfterUpdateHookCalls)
- }
-}
-
-func TestDaoRetryDelete(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- // init mock retry dao
- retryBeforeDeleteHookCalls := 0
- retryAfterDeleteHookCalls := 0
- retryDao := daos.New(testApp.DB())
- retryDao.BeforeDeleteFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- retryBeforeDeleteHookCalls++
- return errors.New("database is locked")
- }
- retryDao.AfterDeleteFunc = func(eventDao *daos.Dao, m models.Model) error {
- retryAfterDeleteHookCalls++
- return nil
- }
-
- model, _ := retryDao.FindAdminByEmail("test@example.com")
- if err := retryDao.Delete(model); err != nil {
- t.Fatalf("Expected nil after retry, got error: %v", err)
- }
-
- // the before hook is expected to be called only once because
- // it is ignored after the first "database is locked" error
- if retryBeforeDeleteHookCalls != 1 {
- t.Fatalf("Expected before hook calls to be 1, got %d", retryBeforeDeleteHookCalls)
- }
-
- if retryAfterDeleteHookCalls != 1 {
- t.Fatalf("Expected after hook calls to be 1, got %d", retryAfterDeleteHookCalls)
- }
-
- // with non-locking error
- retryBeforeDeleteHookCalls = 0
- retryAfterDeleteHookCalls = 0
- retryDao.BeforeDeleteFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- retryBeforeDeleteHookCalls++
- return errors.New("non-locking error")
- }
-
- dummy := &models.Admin{}
- dummy.RefreshId()
- dummy.MarkAsNotNew()
- if err := retryDao.Delete(dummy); err == nil {
- t.Fatal("Expected error, got nil")
- }
-
- if retryBeforeDeleteHookCalls != 1 {
- t.Fatalf("Expected before hook calls to be 1, got %d", retryBeforeDeleteHookCalls)
- }
-
- if retryAfterDeleteHookCalls != 0 {
- t.Fatalf("Expected after hook calls to be 0, got %d", retryAfterDeleteHookCalls)
- }
-}
-
-func TestDaoBeforeHooksError(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- baseDao := testApp.Dao()
-
- baseDao.BeforeCreateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- return errors.New("before_create")
- }
- baseDao.BeforeUpdateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- return errors.New("before_update")
- }
- baseDao.BeforeDeleteFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- return errors.New("before_delete")
- }
-
- existingModel, _ := testApp.Dao().FindAdminByEmail("test@example.com")
-
- // test create error
- // ---
- newModel := &models.Admin{}
- if err := baseDao.Save(newModel); err.Error() != "before_create" {
- t.Fatalf("Expected before_create error, got %v", err)
- }
-
- // test update error
- // ---
- if err := baseDao.Save(existingModel); err.Error() != "before_update" {
- t.Fatalf("Expected before_update error, got %v", err)
- }
-
- // test delete error
- // ---
- if err := baseDao.Delete(existingModel); err.Error() != "before_delete" {
- t.Fatalf("Expected before_delete error, got %v", err)
- }
-}
-
-func TestDaoTransactionHooksCallsOnFailure(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- beforeCreateFuncCalls := 0
- beforeUpdateFuncCalls := 0
- beforeDeleteFuncCalls := 0
- afterCreateFuncCalls := 0
- afterUpdateFuncCalls := 0
- afterDeleteFuncCalls := 0
-
- baseDao := testApp.Dao()
-
- baseDao.BeforeCreateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- beforeCreateFuncCalls++
- return action()
- }
- baseDao.BeforeUpdateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- beforeUpdateFuncCalls++
- return action()
- }
- baseDao.BeforeDeleteFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- beforeDeleteFuncCalls++
- return action()
- }
-
- baseDao.AfterCreateFunc = func(eventDao *daos.Dao, m models.Model) error {
- afterCreateFuncCalls++
- return nil
- }
- baseDao.AfterUpdateFunc = func(eventDao *daos.Dao, m models.Model) error {
- afterUpdateFuncCalls++
- return nil
- }
- baseDao.AfterDeleteFunc = func(eventDao *daos.Dao, m models.Model) error {
- afterDeleteFuncCalls++
- return nil
- }
-
- existingModel, _ := testApp.Dao().FindAdminByEmail("test@example.com")
-
- baseDao.RunInTransaction(func(txDao1 *daos.Dao) error {
- return txDao1.RunInTransaction(func(txDao2 *daos.Dao) error {
- // test create
- // ---
- newModel := &models.Admin{}
- newModel.Email = "test_new1@example.com"
- newModel.SetPassword("123456")
- if err := txDao2.Save(newModel); err != nil {
- t.Fatal(err)
- }
-
- // test update (twice)
- // ---
- if err := txDao2.Save(existingModel); err != nil {
- t.Fatal(err)
- }
- if err := txDao2.Save(existingModel); err != nil {
- t.Fatal(err)
- }
-
- // test delete
- // ---
- if err := txDao2.Delete(existingModel); err != nil {
- t.Fatal(err)
- }
-
- return errors.New("test_tx_error")
- })
- })
-
- if beforeCreateFuncCalls != 1 {
- t.Fatalf("Expected beforeCreateFuncCalls to be called 1 times, got %d", beforeCreateFuncCalls)
- }
- if beforeUpdateFuncCalls != 2 {
- t.Fatalf("Expected beforeUpdateFuncCalls to be called 2 times, got %d", beforeUpdateFuncCalls)
- }
- if beforeDeleteFuncCalls != 1 {
- t.Fatalf("Expected beforeDeleteFuncCalls to be called 1 times, got %d", beforeDeleteFuncCalls)
- }
- if afterCreateFuncCalls != 0 {
- t.Fatalf("Expected afterCreateFuncCalls to be called 0 times, got %d", afterCreateFuncCalls)
- }
- if afterUpdateFuncCalls != 0 {
- t.Fatalf("Expected afterUpdateFuncCalls to be called 0 times, got %d", afterUpdateFuncCalls)
- }
- if afterDeleteFuncCalls != 0 {
- t.Fatalf("Expected afterDeleteFuncCalls to be called 0 times, got %d", afterDeleteFuncCalls)
- }
-}
-
-func TestDaoTransactionHooksCallsOnSuccess(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- beforeCreateFuncCalls := 0
- beforeUpdateFuncCalls := 0
- beforeDeleteFuncCalls := 0
- afterCreateFuncCalls := 0
- afterUpdateFuncCalls := 0
- afterDeleteFuncCalls := 0
-
- baseDao := testApp.Dao()
-
- baseDao.BeforeCreateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- beforeCreateFuncCalls++
- return action()
- }
- baseDao.BeforeUpdateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- beforeUpdateFuncCalls++
- return action()
- }
- baseDao.BeforeDeleteFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- beforeDeleteFuncCalls++
- return action()
- }
-
- baseDao.AfterCreateFunc = func(eventDao *daos.Dao, m models.Model) error {
- afterCreateFuncCalls++
- return nil
- }
- baseDao.AfterUpdateFunc = func(eventDao *daos.Dao, m models.Model) error {
- afterUpdateFuncCalls++
- return nil
- }
- baseDao.AfterDeleteFunc = func(eventDao *daos.Dao, m models.Model) error {
- afterDeleteFuncCalls++
- return nil
- }
-
- existingModel, _ := testApp.Dao().FindAdminByEmail("test@example.com")
-
- baseDao.RunInTransaction(func(txDao1 *daos.Dao) error {
- return txDao1.RunInTransaction(func(txDao2 *daos.Dao) error {
- // test create
- // ---
- newModel := &models.Admin{}
- newModel.Email = "test_new1@example.com"
- newModel.SetPassword("123456")
- if err := txDao2.Save(newModel); err != nil {
- t.Fatal(err)
- }
-
- // test update (twice)
- // ---
- if err := txDao2.Save(existingModel); err != nil {
- t.Fatal(err)
- }
- if err := txDao2.Save(existingModel); err != nil {
- t.Fatal(err)
- }
-
- // test delete
- // ---
- if err := txDao2.Delete(existingModel); err != nil {
- t.Fatal(err)
- }
-
- return nil
- })
- })
-
- if beforeCreateFuncCalls != 1 {
- t.Fatalf("Expected beforeCreateFuncCalls to be called 1 times, got %d", beforeCreateFuncCalls)
- }
- if beforeUpdateFuncCalls != 2 {
- t.Fatalf("Expected beforeUpdateFuncCalls to be called 2 times, got %d", beforeUpdateFuncCalls)
- }
- if beforeDeleteFuncCalls != 1 {
- t.Fatalf("Expected beforeDeleteFuncCalls to be called 1 times, got %d", beforeDeleteFuncCalls)
- }
- if afterCreateFuncCalls != 1 {
- t.Fatalf("Expected afterCreateFuncCalls to be called 1 times, got %d", afterCreateFuncCalls)
- }
- if afterUpdateFuncCalls != 2 {
- t.Fatalf("Expected afterUpdateFuncCalls to be called 2 times, got %d", afterUpdateFuncCalls)
- }
- if afterDeleteFuncCalls != 1 {
- t.Fatalf("Expected afterDeleteFuncCalls to be called 1 times, got %d", afterDeleteFuncCalls)
- }
-}
diff --git a/daos/collection.go b/daos/collection.go
deleted file mode 100644
index 6a241e74..00000000
--- a/daos/collection.go
+++ /dev/null
@@ -1,500 +0,0 @@
-package daos
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
- "strings"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/list"
-)
-
-// CollectionQuery returns a new Collection select query.
-func (dao *Dao) CollectionQuery() *dbx.SelectQuery {
- return dao.ModelQuery(&models.Collection{})
-}
-
-// FindCollectionsByType finds all collections by the given type.
-func (dao *Dao) FindCollectionsByType(collectionType string) ([]*models.Collection, error) {
- collections := []*models.Collection{}
-
- err := dao.CollectionQuery().
- AndWhere(dbx.HashExp{"type": collectionType}).
- OrderBy("created ASC").
- All(&collections)
-
- if err != nil {
- return nil, err
- }
-
- return collections, nil
-}
-
-// FindCollectionByNameOrId finds a single collection by its name (case insensitive) or id.
-func (dao *Dao) FindCollectionByNameOrId(nameOrId string) (*models.Collection, error) {
- model := &models.Collection{}
-
- err := dao.CollectionQuery().
- AndWhere(dbx.NewExp("[[id]] = {:id} OR LOWER([[name]])={:name}", dbx.Params{
- "id": nameOrId,
- "name": strings.ToLower(nameOrId),
- })).
- Limit(1).
- One(model)
-
- if err != nil {
- return nil, err
- }
-
- return model, nil
-}
-
-// IsCollectionNameUnique checks that there is no existing collection
-// with the provided name (case insensitive!).
-//
-// Note: case insensitive check because the name is used also as a table name for the records.
-func (dao *Dao) IsCollectionNameUnique(name string, excludeIds ...string) bool {
- if name == "" {
- return false
- }
-
- query := dao.CollectionQuery().
- Select("count(*)").
- AndWhere(dbx.NewExp("LOWER([[name]])={:name}", dbx.Params{"name": strings.ToLower(name)})).
- Limit(1)
-
- if uniqueExcludeIds := list.NonzeroUniques(excludeIds); len(uniqueExcludeIds) > 0 {
- query.AndWhere(dbx.NotIn("id", list.ToInterfaceSlice(uniqueExcludeIds)...))
- }
-
- var exists bool
-
- return query.Row(&exists) == nil && !exists
-}
-
-// FindCollectionReferences returns information for all
-// relation schema fields referencing the provided collection.
-//
-// If the provided collection has reference to itself then it will be
-// also included in the result. To exclude it, pass the collection id
-// as the excludeId argument.
-func (dao *Dao) FindCollectionReferences(collection *models.Collection, excludeIds ...string) (map[*models.Collection][]*schema.SchemaField, error) {
- collections := []*models.Collection{}
-
- query := dao.CollectionQuery()
-
- if uniqueExcludeIds := list.NonzeroUniques(excludeIds); len(uniqueExcludeIds) > 0 {
- query.AndWhere(dbx.NotIn("id", list.ToInterfaceSlice(uniqueExcludeIds)...))
- }
-
- if err := query.All(&collections); err != nil {
- return nil, err
- }
-
- result := map[*models.Collection][]*schema.SchemaField{}
-
- for _, c := range collections {
- for _, f := range c.Schema.Fields() {
- if f.Type != schema.FieldTypeRelation {
- continue
- }
- f.InitOptions()
- options, _ := f.Options.(*schema.RelationOptions)
- if options != nil && options.CollectionId == collection.Id {
- result[c] = append(result[c], f)
- }
- }
- }
-
- return result, nil
-}
-
-// DeleteCollection deletes the provided Collection model.
-// This method automatically deletes the related collection records table.
-//
-// NB! The collection cannot be deleted, if:
-// - is system collection (aka. collection.System is true)
-// - is referenced as part of a relation field in another collection
-func (dao *Dao) DeleteCollection(collection *models.Collection) error {
- if collection.System {
- return fmt.Errorf("system collection %q cannot be deleted", collection.Name)
- }
-
- // ensure that there aren't any existing references.
- // note: the select is outside of the transaction to prevent SQLITE_LOCKED error when mixing read&write in a single transaction
- result, err := dao.FindCollectionReferences(collection, collection.Id)
- if err != nil {
- return err
- }
- if total := len(result); total > 0 {
- names := make([]string, 0, len(result))
- for ref := range result {
- names = append(names, ref.Name)
- }
- return fmt.Errorf("the collection %q has external relation field references (%s)", collection.Name, strings.Join(names, ", "))
- }
-
- return dao.RunInTransaction(func(txDao *Dao) error {
- // delete the related view or records table
- if collection.IsView() {
- if err := txDao.DeleteView(collection.Name); err != nil {
- return err
- }
- } else {
- if err := txDao.DeleteTable(collection.Name); err != nil {
- return err
- }
- }
-
- // trigger views resave to check for dependencies
- if err := txDao.resaveViewsWithChangedSchema(collection.Id); err != nil {
- return fmt.Errorf("the collection has a view dependency - %w", err)
- }
-
- return txDao.Delete(collection)
- })
-}
-
-// SaveCollection persists the provided Collection model and updates
-// its related records table schema.
-//
-// If collection.IsNew() is true, the method will perform a create, otherwise an update.
-// To explicitly mark a collection for update you can use collection.MarkAsNotNew().
-func (dao *Dao) SaveCollection(collection *models.Collection) error {
- var oldCollection *models.Collection
-
- if !collection.IsNew() {
- // get the existing collection state to compare with the new one
- // note: the select is outside of the transaction to prevent SQLITE_LOCKED error when mixing read&write in a single transaction
- var findErr error
- oldCollection, findErr = dao.FindCollectionByNameOrId(collection.Id)
- if findErr != nil {
- return findErr
- }
- }
-
- txErr := dao.RunInTransaction(func(txDao *Dao) error {
- // set default collection type
- if collection.Type == "" {
- collection.Type = models.CollectionTypeBase
- }
-
- switch collection.Type {
- case models.CollectionTypeView:
- if err := txDao.saveViewCollection(collection, oldCollection); err != nil {
- return err
- }
- default:
- // persist the collection model
- if err := txDao.Save(collection); err != nil {
- return err
- }
-
- // sync the changes with the related records table
- if err := txDao.SyncRecordTableSchema(collection, oldCollection); err != nil {
- return err
- }
- }
-
- return nil
- })
-
- if txErr != nil {
- return txErr
- }
-
- // trigger an update for all views with changed schema as a result of the current collection save
- // (ignoring view errors to allow users to update the query from the UI)
- dao.resaveViewsWithChangedSchema(collection.Id)
-
- return nil
-}
-
-// ImportCollections imports the provided collections list within a single transaction.
-//
-// NB1! If deleteMissing is set, all local collections and schema fields, that are not present
-// in the imported configuration, WILL BE DELETED (including their related records data).
-//
-// NB2! This method doesn't perform validations on the imported collections data!
-// If you need validations, use [forms.CollectionsImport].
-func (dao *Dao) ImportCollections(
- importedCollections []*models.Collection,
- deleteMissing bool,
- afterSync func(txDao *Dao, mappedImported, mappedExisting map[string]*models.Collection) error,
-) error {
- if len(importedCollections) == 0 {
- return errors.New("no collections to import")
- }
-
- return dao.RunInTransaction(func(txDao *Dao) error {
- existingCollections := []*models.Collection{}
- if err := txDao.CollectionQuery().OrderBy("updated ASC").All(&existingCollections); err != nil {
- return err
- }
- mappedExisting := make(map[string]*models.Collection, len(existingCollections))
- for _, existing := range existingCollections {
- mappedExisting[existing.GetId()] = existing
- }
-
- mappedImported := make(map[string]*models.Collection, len(importedCollections))
- for _, imported := range importedCollections {
- // generate id if not set
- if !imported.HasId() {
- imported.MarkAsNew()
- imported.RefreshId()
- }
-
- // set default type if missing
- if imported.Type == "" {
- imported.Type = models.CollectionTypeBase
- }
-
- if existing, ok := mappedExisting[imported.GetId()]; ok {
- imported.MarkAsNotNew()
-
- // preserve original created date
- if !existing.Created.IsZero() {
- imported.Created = existing.Created
- }
-
- // extend existing schema
- if !deleteMissing {
- schemaClone, _ := existing.Schema.Clone()
- for _, f := range imported.Schema.Fields() {
- schemaClone.AddField(f) // add or replace
- }
- imported.Schema = *schemaClone
- }
- } else {
- imported.MarkAsNew()
- }
-
- mappedImported[imported.GetId()] = imported
- }
-
- // delete old collections not available in the new configuration
- // (before saving the imports in case a deleted collection name is being reused)
- if deleteMissing {
- for _, existing := range existingCollections {
- if mappedImported[existing.GetId()] != nil {
- continue // exist
- }
-
- if existing.System {
- return fmt.Errorf("system collection %q cannot be deleted", existing.Name)
- }
-
- // delete the related records table or view
- if existing.IsView() {
- if err := txDao.DeleteView(existing.Name); err != nil {
- return err
- }
- } else {
- if err := txDao.DeleteTable(existing.Name); err != nil {
- return err
- }
- }
-
- // delete the collection
- if err := txDao.Delete(existing); err != nil {
- return err
- }
- }
- }
-
- // upsert imported collections
- for _, imported := range importedCollections {
- if err := txDao.Save(imported); err != nil {
- return err
- }
- }
-
- // sync record tables
- for _, imported := range importedCollections {
- if imported.IsView() {
- continue
- }
-
- existing := mappedExisting[imported.GetId()]
-
- if err := txDao.SyncRecordTableSchema(imported, existing); err != nil {
- return err
- }
- }
-
- // sync views
- for _, imported := range importedCollections {
- if !imported.IsView() {
- continue
- }
-
- existing := mappedExisting[imported.GetId()]
-
- if err := txDao.saveViewCollection(imported, existing); err != nil {
- return err
- }
- }
-
- if afterSync != nil {
- if err := afterSync(txDao, mappedImported, mappedExisting); err != nil {
- return err
- }
- }
-
- return nil
- })
-}
-
-// saveViewCollection persists the provided View collection changes:
-// - deletes the old related SQL view (if any)
-// - creates a new SQL view with the latest newCollection.Options.Query
-// - generates a new schema based on newCollection.Options.Query
-// - updates newCollection.Schema based on the generated view table info and query
-// - saves the newCollection
-//
-// This method returns an error if newCollection is not a "view".
-func (dao *Dao) saveViewCollection(newCollection, oldCollection *models.Collection) error {
- if !newCollection.IsView() {
- return errors.New("not a view collection")
- }
-
- return dao.RunInTransaction(func(txDao *Dao) error {
- query := newCollection.ViewOptions().Query
-
- // generate collection schema from the query
- viewSchema, err := txDao.CreateViewSchema(query)
- if err != nil {
- return err
- }
-
- // delete old renamed view
- if oldCollection != nil {
- if err := txDao.DeleteView(oldCollection.Name); err != nil {
- return err
- }
- }
-
- // wrap view query if necessary
- query, err = txDao.normalizeViewQueryId(query)
- if err != nil {
- return fmt.Errorf("failed to normalize view query id: %w", err)
- }
-
- // (re)create the view
- if err := txDao.SaveView(newCollection.Name, query); err != nil {
- return err
- }
-
- newCollection.Schema = viewSchema
-
- return txDao.Save(newCollection)
- })
-}
-
-// @todo consider removing once custom id types are supported
-//
-// normalizeViewQueryId wraps (if necessary) the provided view query
-// with a subselect to ensure that the id column is a text since
-// currently we don't support non-string model ids
-// (see https://github.com/pocketbase/pocketbase/issues/3110).
-func (dao *Dao) normalizeViewQueryId(query string) (string, error) {
- query = strings.Trim(strings.TrimSpace(query), ";")
-
- parsed, err := dao.parseQueryToFields(query)
- if err != nil {
- return "", err
- }
-
- needWrapping := true
-
- idField := parsed[schema.FieldNameId]
- if idField != nil && idField.field != nil &&
- idField.field.Type != schema.FieldTypeJson &&
- idField.field.Type != schema.FieldTypeNumber &&
- idField.field.Type != schema.FieldTypeBool {
- needWrapping = false
- }
-
- if !needWrapping {
- return query, nil // no changes needed
- }
-
- // raw parse to preserve the columns order
- rawParsed := new(identifiersParser)
- if err := rawParsed.parse(query); err != nil {
- return "", err
- }
-
- columns := make([]string, 0, len(rawParsed.columns))
- for _, col := range rawParsed.columns {
- if col.alias == schema.FieldNameId {
- columns = append(columns, fmt.Sprintf("cast([[%s]] as text) [[%s]]", col.alias, col.alias))
- } else {
- columns = append(columns, "[["+col.alias+"]]")
- }
- }
-
- query = fmt.Sprintf("SELECT %s FROM (%s)", strings.Join(columns, ","), query)
-
- return query, nil
-}
-
-// resaveViewsWithChangedSchema updates all view collections with changed schemas.
-func (dao *Dao) resaveViewsWithChangedSchema(excludeIds ...string) error {
- collections, err := dao.FindCollectionsByType(models.CollectionTypeView)
- if err != nil {
- return err
- }
-
- return dao.RunInTransaction(func(txDao *Dao) error {
- for _, collection := range collections {
- if len(excludeIds) > 0 && list.ExistInSlice(collection.Id, excludeIds) {
- continue
- }
-
- // clone the existing schema so that it is safe for temp modifications
- oldSchema, err := collection.Schema.Clone()
- if err != nil {
- return err
- }
-
- // generate a new schema from the query
- newSchema, err := txDao.CreateViewSchema(collection.ViewOptions().Query)
- if err != nil {
- return err
- }
-
- // unset the schema field ids to exclude from the comparison
- for _, f := range oldSchema.Fields() {
- f.Id = ""
- }
- for _, f := range newSchema.Fields() {
- f.Id = ""
- }
-
- encodedNewSchema, err := json.Marshal(newSchema)
- if err != nil {
- return err
- }
-
- encodedOldSchema, err := json.Marshal(oldSchema)
- if err != nil {
- return err
- }
-
- if bytes.EqualFold(encodedNewSchema, encodedOldSchema) {
- continue // no changes
- }
-
- if err := txDao.saveViewCollection(collection, nil); err != nil {
- return err
- }
- }
-
- return nil
- })
-}
diff --git a/daos/collection_test.go b/daos/collection_test.go
deleted file mode 100644
index 62ad42ae..00000000
--- a/daos/collection_test.go
+++ /dev/null
@@ -1,813 +0,0 @@
-package daos_test
-
-import (
- "encoding/json"
- "errors"
- "strings"
- "testing"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestCollectionQuery(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- expected := "SELECT {{_collections}}.* FROM `_collections`"
-
- sql := app.Dao().CollectionQuery().Build().SQL()
- if sql != expected {
- t.Errorf("Expected sql %s, got %s", expected, sql)
- }
-}
-
-func TestFindCollectionsByType(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- collectionType string
- expectError bool
- expectTotal int
- }{
- {"", false, 0},
- {"unknown", false, 0},
- {models.CollectionTypeAuth, false, 3},
- {models.CollectionTypeBase, false, 5},
- }
-
- for i, scenario := range scenarios {
- collections, err := app.Dao().FindCollectionsByType(scenario.collectionType)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("[%d] Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- }
-
- if len(collections) != scenario.expectTotal {
- t.Errorf("[%d] Expected %d collections, got %d", i, scenario.expectTotal, len(collections))
- }
-
- for _, c := range collections {
- if c.Type != scenario.collectionType {
- t.Errorf("[%d] Expected collection with type %s, got %s: \n%v", i, scenario.collectionType, c.Type, c)
- }
- }
- }
-}
-
-func TestFindCollectionByNameOrId(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- nameOrId string
- expectError bool
- }{
- {"", true},
- {"missing", true},
- {"wsmn24bux7wo113", false},
- {"demo1", false},
- {"DEMO1", false}, // case insensitive check
- }
-
- for i, scenario := range scenarios {
- model, err := app.Dao().FindCollectionByNameOrId(scenario.nameOrId)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("[%d] Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- }
-
- if model != nil && model.Id != scenario.nameOrId && !strings.EqualFold(model.Name, scenario.nameOrId) {
- t.Errorf("[%d] Expected model with identifier %s, got %v", i, scenario.nameOrId, model)
- }
- }
-}
-
-func TestIsCollectionNameUnique(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- name string
- excludeId string
- expected bool
- }{
- {"", "", false},
- {"demo1", "", false},
- {"Demo1", "", false},
- {"new", "", true},
- {"demo1", "wsmn24bux7wo113", true},
- }
-
- for i, scenario := range scenarios {
- result := app.Dao().IsCollectionNameUnique(scenario.name, scenario.excludeId)
- if result != scenario.expected {
- t.Errorf("[%d] Expected %v, got %v", i, scenario.expected, result)
- }
- }
-}
-
-func TestFindCollectionReferences(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, err := app.Dao().FindCollectionByNameOrId("demo3")
- if err != nil {
- t.Fatal(err)
- }
-
- result, err := app.Dao().FindCollectionReferences(
- collection,
- collection.Id,
- // test whether "nonempty" exclude ids condition will be skipped
- "",
- "",
- )
- if err != nil {
- t.Fatal(err)
- }
-
- if len(result) != 1 {
- t.Fatalf("Expected 1 collection, got %d: %v", len(result), result)
- }
-
- expectedFields := []string{
- "rel_one_no_cascade",
- "rel_one_no_cascade_required",
- "rel_one_cascade",
- "rel_one_unique",
- "rel_many_no_cascade",
- "rel_many_no_cascade_required",
- "rel_many_cascade",
- "rel_many_unique",
- }
-
- for col, fields := range result {
- if col.Name != "demo4" {
- t.Fatalf("Expected collection demo4, got %s", col.Name)
- }
- if len(fields) != len(expectedFields) {
- t.Fatalf("Expected fields %v, got %v", expectedFields, fields)
- }
- for i, f := range fields {
- if !list.ExistInSlice(f.Name, expectedFields) {
- t.Fatalf("[%d] Didn't expect field %v", i, f)
- }
- }
- }
-}
-
-func TestDeleteCollection(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- colUnsaved := &models.Collection{}
-
- colAuth, err := app.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- colReferenced, err := app.Dao().FindCollectionByNameOrId("demo2")
- if err != nil {
- t.Fatal(err)
- }
-
- colSystem, err := app.Dao().FindCollectionByNameOrId("demo3")
- if err != nil {
- t.Fatal(err)
- }
- colSystem.System = true
- if err := app.Dao().Save(colSystem); err != nil {
- t.Fatal(err)
- }
-
- colBase, err := app.Dao().FindCollectionByNameOrId("demo1")
- if err != nil {
- t.Fatal(err)
- }
-
- colView1, err := app.Dao().FindCollectionByNameOrId("view1")
- if err != nil {
- t.Fatal(err)
- }
-
- colView2, err := app.Dao().FindCollectionByNameOrId("view2")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- model *models.Collection
- expectError bool
- }{
- {colUnsaved, true},
- {colReferenced, true},
- {colSystem, true},
- {colBase, true}, // depend on view1, view2 and view2
- {colView1, true}, // view2 depend on it
- {colView2, false},
- {colView1, false}, // no longer has dependent collections
- {colBase, false}, // no longer has dependent views
- {colAuth, false}, // should delete also its related external auths
- }
-
- for i, s := range scenarios {
- err := app.Dao().DeleteCollection(s.model)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("[%d] Expected hasErr %v, got %v (%v)", i, s.expectError, hasErr, err)
- continue
- }
-
- if hasErr {
- continue
- }
-
- if app.Dao().HasTable(s.model.Name) {
- t.Errorf("[%d] Expected table/view %s to be deleted", i, s.model.Name)
- }
-
- // check if the external auths were deleted
- if s.model.IsAuth() {
- var total int
- err := app.Dao().ExternalAuthQuery().
- Select("count(*)").
- AndWhere(dbx.HashExp{"collectionId": s.model.Id}).
- Row(&total)
-
- if err != nil || total > 0 {
- t.Fatalf("[%d] Expected external auths to be deleted, got %v (%v)", i, total, err)
- }
- }
- }
-}
-
-func TestSaveCollectionCreate(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection := &models.Collection{
- Name: "new_test",
- Type: models.CollectionTypeBase,
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Type: schema.FieldTypeText,
- Name: "test",
- },
- ),
- }
-
- err := app.Dao().SaveCollection(collection)
- if err != nil {
- t.Fatal(err)
- }
-
- if collection.Id == "" {
- t.Fatal("Expected collection id to be set")
- }
-
- // check if the records table was created
- hasTable := app.Dao().HasTable(collection.Name)
- if !hasTable {
- t.Fatalf("Expected records table %s to be created", collection.Name)
- }
-
- // check if the records table has the schema fields
- columns, err := app.Dao().TableColumns(collection.Name)
- if err != nil {
- t.Fatal(err)
- }
- expectedColumns := []string{"id", "created", "updated", "test"}
- if len(columns) != len(expectedColumns) {
- t.Fatalf("Expected columns %v, got %v", expectedColumns, columns)
- }
- for i, c := range columns {
- if !list.ExistInSlice(c, expectedColumns) {
- t.Fatalf("[%d] Didn't expect record column %s", i, c)
- }
- }
-}
-
-func TestSaveCollectionUpdate(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, err := app.Dao().FindCollectionByNameOrId("demo3")
- if err != nil {
- t.Fatal(err)
- }
-
- // rename an existing schema field and add a new one
- oldField := collection.Schema.GetFieldByName("title")
- oldField.Name = "title_update"
- collection.Schema.AddField(&schema.SchemaField{
- Type: schema.FieldTypeText,
- Name: "test",
- })
-
- saveErr := app.Dao().SaveCollection(collection)
- if saveErr != nil {
- t.Fatal(saveErr)
- }
-
- // check if the records table has the schema fields
- expectedColumns := []string{"id", "created", "updated", "title_update", "test", "files"}
- columns, err := app.Dao().TableColumns(collection.Name)
- if err != nil {
- t.Fatal(err)
- }
- if len(columns) != len(expectedColumns) {
- t.Fatalf("Expected columns %v, got %v", expectedColumns, columns)
- }
- for i, c := range columns {
- if !list.ExistInSlice(c, expectedColumns) {
- t.Fatalf("[%d] Didn't expect record column %s", i, c)
- }
- }
-}
-
-// indirect update of a field used in view should cause view(s) update
-func TestSaveCollectionIndirectViewsUpdate(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, err := app.Dao().FindCollectionByNameOrId("demo1")
- if err != nil {
- t.Fatal(err)
- }
-
- // update MaxSelect fields
- {
- relMany := collection.Schema.GetFieldByName("rel_many")
- relManyOpt := relMany.Options.(*schema.RelationOptions)
- relManyOpt.MaxSelect = types.Pointer(1)
-
- fileOne := collection.Schema.GetFieldByName("file_one")
- fileOneOpt := fileOne.Options.(*schema.FileOptions)
- fileOneOpt.MaxSelect = 10
-
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
- }
-
- // check view1 schema
- {
- view1, err := app.Dao().FindCollectionByNameOrId("view1")
- if err != nil {
- t.Fatal(err)
- }
-
- relMany := view1.Schema.GetFieldByName("rel_many")
- relManyOpt := relMany.Options.(*schema.RelationOptions)
- if relManyOpt.MaxSelect == nil || *relManyOpt.MaxSelect != 1 {
- t.Fatalf("Expected view1.rel_many MaxSelect to be %d, got %v", 1, relManyOpt.MaxSelect)
- }
-
- fileOne := view1.Schema.GetFieldByName("file_one")
- fileOneOpt := fileOne.Options.(*schema.FileOptions)
- if fileOneOpt.MaxSelect != 10 {
- t.Fatalf("Expected view1.file_one MaxSelect to be %d, got %v", 10, fileOneOpt.MaxSelect)
- }
- }
-
- // check view2 schema
- {
- view2, err := app.Dao().FindCollectionByNameOrId("view2")
- if err != nil {
- t.Fatal(err)
- }
-
- relMany := view2.Schema.GetFieldByName("rel_many")
- relManyOpt := relMany.Options.(*schema.RelationOptions)
- if relManyOpt.MaxSelect == nil || *relManyOpt.MaxSelect != 1 {
- t.Fatalf("Expected view2.rel_many MaxSelect to be %d, got %v", 1, relManyOpt.MaxSelect)
- }
- }
-}
-
-func TestSaveCollectionViewWrapping(t *testing.T) {
- t.Parallel()
-
- viewName := "test_wrapping"
-
- scenarios := []struct {
- name string
- query string
- expected string
- }{
- {
- "no wrapping - text field",
- "select text as id, bool from demo1",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (select text as id, bool from demo1)",
- },
- {
- "no wrapping - id field",
- "select text as id, bool from demo1",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (select text as id, bool from demo1)",
- },
- {
- "no wrapping - relation field",
- "select rel_one as id, bool from demo1",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (select rel_one as id, bool from demo1)",
- },
- {
- "no wrapping - select field",
- "select select_many as id, bool from demo1",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (select select_many as id, bool from demo1)",
- },
- {
- "no wrapping - email field",
- "select email as id, bool from demo1",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (select email as id, bool from demo1)",
- },
- {
- "no wrapping - datetime field",
- "select datetime as id, bool from demo1",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (select datetime as id, bool from demo1)",
- },
- {
- "no wrapping - url field",
- "select url as id, bool from demo1",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (select url as id, bool from demo1)",
- },
- {
- "wrapping - bool field",
- "select bool as id, text as txt, url from demo1",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (SELECT cast(`id` as text) `id`,`txt`,`url` FROM (select bool as id, text as txt, url from demo1))",
- },
- {
- "wrapping - bool field (different order)",
- "select text as txt, url, bool as id from demo1",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (SELECT `txt`,`url`,cast(`id` as text) `id` FROM (select text as txt, url, bool as id from demo1))",
- },
- {
- "wrapping - json field",
- "select json as id, text, url from demo1",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (SELECT cast(`id` as text) `id`,`text`,`url` FROM (select json as id, text, url from demo1))",
- },
- {
- "wrapping - numeric id",
- "select 1 as id",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (SELECT cast(`id` as text) `id` FROM (select 1 as id))",
- },
- {
- "wrapping - expresion",
- "select ('test') as id",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (SELECT cast(`id` as text) `id` FROM (select ('test') as id))",
- },
- {
- "no wrapping - cast as text",
- "select cast('test' as text) as id",
- "CREATE VIEW `test_wrapping` AS SELECT * FROM (select cast('test' as text) as id)",
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection := &models.Collection{
- Name: viewName,
- Type: models.CollectionTypeView,
- Options: types.JsonMap{
- "query": s.query,
- },
- }
-
- err := app.Dao().SaveCollection(collection)
- if err != nil {
- t.Fatal(err)
- }
-
- var sql string
-
- rowErr := app.Dao().DB().NewQuery("SELECT sql FROM sqlite_master WHERE type='view' AND name={:name}").
- Bind(dbx.Params{"name": viewName}).
- Row(&sql)
- if rowErr != nil {
- t.Fatalf("Failed to retrieve view sql: %v", rowErr)
- }
-
- if sql != s.expected {
- t.Fatalf("Expected query \n%v, \ngot \n%v", s.expected, sql)
- }
- })
- }
-}
-
-func TestImportCollections(t *testing.T) {
- t.Parallel()
-
- totalCollections := 11
-
- scenarios := []struct {
- name string
- jsonData string
- deleteMissing bool
- beforeRecordsSync func(txDao *daos.Dao, mappedImported, mappedExisting map[string]*models.Collection) error
- expectError bool
- expectCollectionsCount int
- beforeTestFunc func(testApp *tests.TestApp, resultCollections []*models.Collection)
- afterTestFunc func(testApp *tests.TestApp, resultCollections []*models.Collection)
- }{
- {
- name: "empty collections",
- jsonData: `[]`,
- expectError: true,
- expectCollectionsCount: totalCollections,
- },
- {
- name: "minimal collection import",
- jsonData: `[
- {"name": "import_test1", "schema": [{"name":"test", "type": "text"}]},
- {"name": "import_test2", "type": "auth"}
- ]`,
- deleteMissing: false,
- expectError: false,
- expectCollectionsCount: totalCollections + 2,
- },
- {
- name: "minimal collection import + failed beforeRecordsSync",
- jsonData: `[
- {"name": "import_test", "schema": [{"name":"test", "type": "text"}]}
- ]`,
- beforeRecordsSync: func(txDao *daos.Dao, mappedImported, mappedExisting map[string]*models.Collection) error {
- return errors.New("test_error")
- },
- deleteMissing: false,
- expectError: true,
- expectCollectionsCount: totalCollections,
- },
- {
- name: "minimal collection import + successful beforeRecordsSync",
- jsonData: `[
- {"name": "import_test", "schema": [{"name":"test", "type": "text"}]}
- ]`,
- beforeRecordsSync: func(txDao *daos.Dao, mappedImported, mappedExisting map[string]*models.Collection) error {
- return nil
- },
- deleteMissing: false,
- expectError: false,
- expectCollectionsCount: totalCollections + 1,
- },
- {
- name: "new + update + delete system collection",
- jsonData: `[
- {
- "id":"wsmn24bux7wo113",
- "name":"demo",
- "schema":[
- {
- "id":"_2hlxbmp",
- "name":"title",
- "type":"text",
- "system":false,
- "required":true,
- "unique":false,
- "options":{
- "min":3,
- "max":null,
- "pattern":""
- }
- }
- ]
- },
- {
- "name": "import1",
- "schema": [
- {
- "name":"active",
- "type":"bool"
- }
- ]
- }
- ]`,
- deleteMissing: true,
- expectError: true,
- expectCollectionsCount: totalCollections,
- },
- {
- name: "new + update + delete non-system collection",
- jsonData: `[
- {
- "id": "kpv709sk2lqbqk8",
- "system": true,
- "name": "nologin",
- "type": "auth",
- "options": {
- "allowEmailAuth": false,
- "allowOAuth2Auth": false,
- "allowUsernameAuth": false,
- "exceptEmailDomains": [],
- "manageRule": "@request.auth.collectionName = 'users'",
- "minPasswordLength": 8,
- "onlyEmailDomains": [],
- "requireEmail": true
- },
- "listRule": "",
- "viewRule": "",
- "createRule": "",
- "updateRule": "",
- "deleteRule": "",
- "schema": [
- {
- "id": "x8zzktwe",
- "name": "name",
- "type": "text",
- "system": false,
- "required": false,
- "unique": false,
- "options": {
- "min": null,
- "max": null,
- "pattern": ""
- }
- }
- ]
- },
- {
- "id":"wsmn24bux7wo113",
- "name":"demo1_rename",
- "schema":[
- {
- "id":"_2hlxbmp",
- "name":"title",
- "type":"text",
- "system":false,
- "required":true,
- "unique":false,
- "options":{
- "min":3,
- "max":null,
- "pattern":""
- }
- }
- ]
- },
- {
- "id": "test_deleted_collection_name_reuse",
- "name": "demo2",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- },
- {
- "id": "test_new_view",
- "name": "new_view",
- "type": "view",
- "options": {
- "query": "select id from demo2"
- }
- }
- ]`,
- deleteMissing: true,
- expectError: false,
- expectCollectionsCount: 4,
- },
- {
- name: "test with deleteMissing: false",
- jsonData: `[
- {
- "id":"wsmn24bux7wo113",
- "name":"demo1",
- "schema":[
- {
- "id":"_2hlxbmp",
- "name":"title",
- "type":"text",
- "system":false,
- "required":true,
- "unique":false,
- "options":{
- "min":3,
- "max":null,
- "pattern":""
- }
- },
- {
- "id":"_2hlxbmp",
- "name":"field_with_duplicate_id",
- "type":"text",
- "system":false,
- "required":true,
- "unique":false,
- "options":{
- "min":3,
- "max":null,
- "pattern":""
- }
- },
- {
- "id":"abcd_import",
- "name":"new_field",
- "type":"text"
- }
- ]
- },
- {
- "name": "new_import",
- "schema": [
- {
- "id":"abcd_import",
- "name":"active",
- "type":"bool"
- }
- ]
- }
- ]`,
- deleteMissing: false,
- expectError: false,
- expectCollectionsCount: totalCollections + 1,
- afterTestFunc: func(testApp *tests.TestApp, resultCollections []*models.Collection) {
- expectedCollectionFields := map[string]int{
- "nologin": 1,
- "demo1": 15,
- "demo2": 2,
- "demo3": 2,
- "demo4": 13,
- "demo5": 6,
- "new_import": 1,
- }
- for name, expectedCount := range expectedCollectionFields {
- collection, err := testApp.Dao().FindCollectionByNameOrId(name)
- if err != nil {
- t.Fatal(err)
- }
-
- if totalFields := len(collection.Schema.Fields()); totalFields != expectedCount {
- t.Errorf("Expected %d %q fields, got %d", expectedCount, collection.Name, totalFields)
- }
- }
- },
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- importedCollections := []*models.Collection{}
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), &importedCollections)
- if loadErr != nil {
- t.Fatalf("Failed to load data: %v", loadErr)
- }
-
- err := testApp.Dao().ImportCollections(importedCollections, s.deleteMissing, s.beforeRecordsSync)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
- }
-
- // check collections count
- collections := []*models.Collection{}
- if err := testApp.Dao().CollectionQuery().All(&collections); err != nil {
- t.Fatal(err)
- }
- if len(collections) != s.expectCollectionsCount {
- t.Fatalf("Expected %d collections, got %d", s.expectCollectionsCount, len(collections))
- }
-
- if s.afterTestFunc != nil {
- s.afterTestFunc(testApp, collections)
- }
- })
- }
-}
diff --git a/daos/external_auth.go b/daos/external_auth.go
deleted file mode 100644
index a5cfa79e..00000000
--- a/daos/external_auth.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package daos
-
-import (
- "errors"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
-)
-
-// ExternalAuthQuery returns a new ExternalAuth select query.
-func (dao *Dao) ExternalAuthQuery() *dbx.SelectQuery {
- return dao.ModelQuery(&models.ExternalAuth{})
-}
-
-// FindAllExternalAuthsByRecord returns all ExternalAuth models
-// linked to the provided auth record.
-func (dao *Dao) FindAllExternalAuthsByRecord(authRecord *models.Record) ([]*models.ExternalAuth, error) {
- auths := []*models.ExternalAuth{}
-
- err := dao.ExternalAuthQuery().
- AndWhere(dbx.HashExp{
- "collectionId": authRecord.Collection().Id,
- "recordId": authRecord.Id,
- }).
- OrderBy("created ASC").
- All(&auths)
-
- if err != nil {
- return nil, err
- }
-
- return auths, nil
-}
-
-// FindExternalAuthByRecordAndProvider returns the first available
-// ExternalAuth model for the specified record data and provider.
-func (dao *Dao) FindExternalAuthByRecordAndProvider(authRecord *models.Record, provider string) (*models.ExternalAuth, error) {
- model := &models.ExternalAuth{}
-
- err := dao.ExternalAuthQuery().
- AndWhere(dbx.HashExp{
- "collectionId": authRecord.Collection().Id,
- "recordId": authRecord.Id,
- "provider": provider,
- }).
- Limit(1).
- One(model)
-
- if err != nil {
- return nil, err
- }
-
- return model, nil
-}
-
-// FindFirstExternalAuthByExpr returns the first available
-// ExternalAuth model that satisfies the non-nil expression.
-func (dao *Dao) FindFirstExternalAuthByExpr(expr dbx.Expression) (*models.ExternalAuth, error) {
- model := &models.ExternalAuth{}
-
- err := dao.ExternalAuthQuery().
- AndWhere(dbx.Not(dbx.HashExp{"providerId": ""})). // exclude empty providerIds
- AndWhere(expr).
- Limit(1).
- One(model)
-
- if err != nil {
- return nil, err
- }
-
- return model, nil
-}
-
-// SaveExternalAuth upserts the provided ExternalAuth model.
-func (dao *Dao) SaveExternalAuth(model *models.ExternalAuth) error {
- // extra check the model data in case the provider's API response
- // has changed and no longer returns the expected fields
- if model.CollectionId == "" || model.RecordId == "" || model.Provider == "" || model.ProviderId == "" {
- return errors.New("Missing required ExternalAuth fields.")
- }
-
- return dao.Save(model)
-}
-
-// DeleteExternalAuth deletes the provided ExternalAuth model.
-func (dao *Dao) DeleteExternalAuth(model *models.ExternalAuth) error {
- return dao.Delete(model)
-}
diff --git a/daos/external_auth_test.go b/daos/external_auth_test.go
deleted file mode 100644
index 17e55aeb..00000000
--- a/daos/external_auth_test.go
+++ /dev/null
@@ -1,204 +0,0 @@
-package daos_test
-
-import (
- "testing"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestExternalAuthQuery(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- expected := "SELECT {{_externalAuths}}.* FROM `_externalAuths`"
-
- sql := app.Dao().ExternalAuthQuery().Build().SQL()
- if sql != expected {
- t.Errorf("Expected sql %s, got %s", expected, sql)
- }
-}
-
-func TestFindAllExternalAuthsByRecord(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- userId string
- expectedCount int
- }{
- {"oap640cot4yru2s", 0},
- {"4q1xlclmfloku33", 2},
- }
-
- for i, s := range scenarios {
- record, err := app.Dao().FindRecordById("users", s.userId)
- if err != nil {
- t.Errorf("(%d) Unexpected record fetch error %v", i, err)
- continue
- }
-
- auths, err := app.Dao().FindAllExternalAuthsByRecord(record)
- if err != nil {
- t.Errorf("(%d) Unexpected auths fetch error %v", i, err)
- continue
- }
-
- if len(auths) != s.expectedCount {
- t.Errorf("(%d) Expected %d auths, got %d", i, s.expectedCount, len(auths))
- }
-
- for _, auth := range auths {
- if auth.RecordId != record.Id {
- t.Errorf("(%d) Expected all auths to be linked to record id %s, got %v", i, record.Id, auth)
- }
- }
- }
-}
-
-func TestFindFirstExternalAuthByExpr(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- expr dbx.Expression
- expectedId string
- }{
- {dbx.HashExp{"provider": "github", "providerId": ""}, ""},
- {dbx.HashExp{"provider": "github", "providerId": "id1"}, ""},
- {dbx.HashExp{"provider": "github", "providerId": "id2"}, ""},
- {dbx.HashExp{"provider": "google", "providerId": "test123"}, "clmflokuq1xl341"},
- {dbx.HashExp{"provider": "gitlab", "providerId": "test123"}, "dlmflokuq1xl342"},
- }
-
- for i, s := range scenarios {
- auth, err := app.Dao().FindFirstExternalAuthByExpr(s.expr)
-
- hasErr := err != nil
- expectErr := s.expectedId == ""
- if hasErr != expectErr {
- t.Errorf("(%d) Expected hasErr %v, got %v", i, expectErr, err)
- continue
- }
-
- if auth != nil && auth.Id != s.expectedId {
- t.Errorf("(%d) Expected external auth with ID %s, got \n%v", i, s.expectedId, auth)
- }
- }
-}
-
-func TestFindExternalAuthByRecordAndProvider(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- userId string
- provider string
- expectedId string
- }{
- {"bgs820n361vj1qd", "google", ""},
- {"4q1xlclmfloku33", "google", "clmflokuq1xl341"},
- {"4q1xlclmfloku33", "gitlab", "dlmflokuq1xl342"},
- }
-
- for i, s := range scenarios {
- record, err := app.Dao().FindRecordById("users", s.userId)
- if err != nil {
- t.Errorf("(%d) Unexpected record fetch error %v", i, err)
- continue
- }
-
- auth, err := app.Dao().FindExternalAuthByRecordAndProvider(record, s.provider)
-
- hasErr := err != nil
- expectErr := s.expectedId == ""
- if hasErr != expectErr {
- t.Errorf("(%d) Expected hasErr %v, got %v", i, expectErr, err)
- continue
- }
-
- if auth != nil && auth.Id != s.expectedId {
- t.Errorf("(%d) Expected external auth with ID %s, got \n%v", i, s.expectedId, auth)
- }
- }
-}
-
-func TestSaveExternalAuth(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // save with empty provider data
- emptyAuth := &models.ExternalAuth{}
- if err := app.Dao().SaveExternalAuth(emptyAuth); err == nil {
- t.Fatal("Expected error, got nil")
- }
-
- auth := &models.ExternalAuth{
- RecordId: "o1y0dd0spd786md",
- CollectionId: "v851q4r790rhknl",
- Provider: "test",
- ProviderId: "test_id",
- }
-
- if err := app.Dao().SaveExternalAuth(auth); err != nil {
- t.Fatal(err)
- }
-
- // check if it was really saved
- foundAuth, err := app.Dao().FindFirstExternalAuthByExpr(dbx.HashExp{
- "collectionId": "v851q4r790rhknl",
- "provider": "test",
- "providerId": "test_id",
- })
- if err != nil {
- t.Fatal(err)
- }
-
- if auth.Id != foundAuth.Id {
- t.Fatalf("Expected ExternalAuth with id %s, got \n%v", auth.Id, foundAuth)
- }
-}
-
-func TestDeleteExternalAuth(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- record, err := app.Dao().FindRecordById("users", "4q1xlclmfloku33")
- if err != nil {
- t.Fatal(err)
- }
-
- auths, err := app.Dao().FindAllExternalAuthsByRecord(record)
- if err != nil {
- t.Fatal(err)
- }
-
- for _, auth := range auths {
- if err := app.Dao().DeleteExternalAuth(auth); err != nil {
- t.Fatalf("Failed to delete the ExternalAuth relation, got \n%v", err)
- }
- }
-
- // check if the relations were really deleted
- newAuths, err := app.Dao().FindAllExternalAuthsByRecord(record)
- if err != nil {
- t.Fatal(err)
- }
-
- if len(newAuths) != 0 {
- t.Fatalf("Expected all record %s ExternalAuth relations to be deleted, got \n%v", record.Id, newAuths)
- }
-}
diff --git a/daos/log_test.go b/daos/log_test.go
deleted file mode 100644
index 5fc9549d..00000000
--- a/daos/log_test.go
+++ /dev/null
@@ -1,158 +0,0 @@
-package daos_test
-
-import (
- "encoding/json"
- "testing"
- "time"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestLogQuery(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- expected := "SELECT {{_logs}}.* FROM `_logs`"
-
- sql := app.Dao().LogQuery().Build().SQL()
- if sql != expected {
- t.Errorf("Expected sql %s, got %s", expected, sql)
- }
-}
-
-func TestFindLogById(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- tests.MockLogsData(app)
-
- scenarios := []struct {
- id string
- expectError bool
- }{
- {"", true},
- {"invalid", true},
- {"00000000-9f38-44fb-bf82-c8f53b310d91", true},
- {"873f2133-9f38-44fb-bf82-c8f53b310d91", false},
- }
-
- for i, scenario := range scenarios {
- admin, err := app.LogsDao().FindLogById(scenario.id)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- }
-
- if admin != nil && admin.Id != scenario.id {
- t.Errorf("(%d) Expected admin with id %s, got %s", i, scenario.id, admin.Id)
- }
- }
-}
-
-func TestLogsStats(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- tests.MockLogsData(app)
-
- expected := `[{"total":1,"date":"2022-05-01 10:00:00.000Z"},{"total":1,"date":"2022-05-02 10:00:00.000Z"}]`
-
- now := time.Now().UTC().Format(types.DefaultDateLayout)
- exp := dbx.NewExp("[[created]] <= {:date}", dbx.Params{"date": now})
- result, err := app.LogsDao().LogsStats(exp)
- if err != nil {
- t.Fatal(err)
- }
-
- encoded, _ := json.Marshal(result)
- if string(encoded) != expected {
- t.Fatalf("Expected %s, got %s", expected, string(encoded))
- }
-}
-
-func TestDeleteOldLogs(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- tests.MockLogsData(app)
-
- scenarios := []struct {
- date string
- expectedTotal int
- }{
- {"2022-01-01 10:00:00.000Z", 2}, // no logs to delete before that time
- {"2022-05-01 11:00:00.000Z", 1}, // only 1 log should have left
- {"2022-05-03 11:00:00.000Z", 0}, // no more logs should have left
- {"2022-05-04 11:00:00.000Z", 0}, // no more logs should have left
- }
-
- for i, scenario := range scenarios {
- date, dateErr := time.Parse(types.DefaultDateLayout, scenario.date)
- if dateErr != nil {
- t.Errorf("(%d) Date error %v", i, dateErr)
- }
-
- deleteErr := app.LogsDao().DeleteOldLogs(date)
- if deleteErr != nil {
- t.Errorf("(%d) Delete error %v", i, deleteErr)
- }
-
- // check total remaining logs
- var total int
- countErr := app.LogsDao().LogQuery().Select("count(*)").Row(&total)
- if countErr != nil {
- t.Errorf("(%d) Count error %v", i, countErr)
- }
-
- if total != scenario.expectedTotal {
- t.Errorf("(%d) Expected %d remaining logs, got %d", i, scenario.expectedTotal, total)
- }
- }
-}
-
-func TestSaveLog(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- tests.MockLogsData(app)
-
- // create new log
- newLog := &models.Log{}
- newLog.Level = -4
- newLog.Data = types.JsonMap{}
- createErr := app.LogsDao().SaveLog(newLog)
- if createErr != nil {
- t.Fatal(createErr)
- }
-
- // check if it was really created
- existingLog, fetchErr := app.LogsDao().FindLogById(newLog.Id)
- if fetchErr != nil {
- t.Fatal(fetchErr)
- }
-
- existingLog.Level = 4
- updateErr := app.LogsDao().SaveLog(existingLog)
- if updateErr != nil {
- t.Fatal(updateErr)
- }
- // refresh instance to check if it was really updated
- existingLog, _ = app.LogsDao().FindLogById(existingLog.Id)
- if existingLog.Level != 4 {
- t.Fatalf("Expected log level to be %d, got %d", 4, existingLog.Level)
- }
-}
diff --git a/daos/param.go b/daos/param.go
deleted file mode 100644
index 23ba07dd..00000000
--- a/daos/param.go
+++ /dev/null
@@ -1,73 +0,0 @@
-package daos
-
-import (
- "encoding/json"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/security"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-// ParamQuery returns a new Param select query.
-func (dao *Dao) ParamQuery() *dbx.SelectQuery {
- return dao.ModelQuery(&models.Param{})
-}
-
-// FindParamByKey finds the first Param model with the provided key.
-func (dao *Dao) FindParamByKey(key string) (*models.Param, error) {
- param := &models.Param{}
-
- err := dao.ParamQuery().
- AndWhere(dbx.HashExp{"key": key}).
- Limit(1).
- One(param)
-
- if err != nil {
- return nil, err
- }
-
- return param, nil
-}
-
-// SaveParam creates or updates a Param model by the provided key-value pair.
-// The value argument will be encoded as json string.
-//
-// If `optEncryptionKey` is provided it will encrypt the value before storing it.
-func (dao *Dao) SaveParam(key string, value any, optEncryptionKey ...string) error {
- param, _ := dao.FindParamByKey(key)
- if param == nil {
- param = &models.Param{Key: key}
- }
-
- normalizedValue := value
-
- // encrypt if optEncryptionKey is set
- if len(optEncryptionKey) > 0 && optEncryptionKey[0] != "" {
- encoded, encodingErr := json.Marshal(value)
- if encodingErr != nil {
- return encodingErr
- }
-
- encryptVal, encryptErr := security.Encrypt(encoded, optEncryptionKey[0])
- if encryptErr != nil {
- return encryptErr
- }
-
- normalizedValue = encryptVal
- }
-
- encodedValue := types.JsonRaw{}
- if err := encodedValue.Scan(normalizedValue); err != nil {
- return err
- }
-
- param.Value = encodedValue
-
- return dao.Save(param)
-}
-
-// DeleteParam deletes the provided Param model.
-func (dao *Dao) DeleteParam(param *models.Param) error {
- return dao.Delete(param)
-}
diff --git a/daos/param_test.go b/daos/param_test.go
deleted file mode 100644
index ef1b8144..00000000
--- a/daos/param_test.go
+++ /dev/null
@@ -1,160 +0,0 @@
-package daos_test
-
-import (
- "encoding/json"
- "testing"
-
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/security"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestParamQuery(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- expected := "SELECT {{_params}}.* FROM `_params`"
-
- sql := app.Dao().ParamQuery().Build().SQL()
- if sql != expected {
- t.Errorf("Expected sql %s, got %s", expected, sql)
- }
-}
-
-func TestFindParamByKey(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- key string
- expectError bool
- }{
- {"", true},
- {"missing", true},
- {models.ParamAppSettings, false},
- }
-
- for i, scenario := range scenarios {
- param, err := app.Dao().FindParamByKey(scenario.key)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- }
-
- if param != nil && param.Key != scenario.key {
- t.Errorf("(%d) Expected param with identifier %s, got %v", i, scenario.key, param.Key)
- }
- }
-}
-
-func TestSaveParam(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- key string
- value any
- }{
- {"", "demo"},
- {"test", nil},
- {"test", ""},
- {"test", 1},
- {"test", 123},
- {models.ParamAppSettings, map[string]any{"test": 123}},
- }
-
- for i, scenario := range scenarios {
- err := app.Dao().SaveParam(scenario.key, scenario.value)
- if err != nil {
- t.Errorf("(%d) %v", i, err)
- }
-
- jsonRaw := types.JsonRaw{}
- jsonRaw.Scan(scenario.value)
- encodedScenarioValue, err := jsonRaw.MarshalJSON()
- if err != nil {
- t.Errorf("(%d) Encoded error %v", i, err)
- }
-
- // check if the param was really saved
- param, _ := app.Dao().FindParamByKey(scenario.key)
- encodedParamValue, err := param.Value.MarshalJSON()
- if err != nil {
- t.Errorf("(%d) Encoded error %v", i, err)
- }
-
- if string(encodedParamValue) != string(encodedScenarioValue) {
- t.Errorf("(%d) Expected the two values to be equal, got %v vs %v", i, string(encodedParamValue), string(encodedScenarioValue))
- }
- }
-}
-
-func TestSaveParamEncrypted(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- encryptionKey := security.RandomString(32)
- data := map[string]int{"test": 123}
- expected := map[string]int{}
-
- err := app.Dao().SaveParam("test", data, encryptionKey)
- if err != nil {
- t.Fatal(err)
- }
-
- // check if the param was really saved
- param, _ := app.Dao().FindParamByKey("test")
-
- // decrypt
- decrypted, decryptErr := security.Decrypt(string(param.Value), encryptionKey)
- if decryptErr != nil {
- t.Fatal(decryptErr)
- }
-
- // decode
- decryptedDecodeErr := json.Unmarshal(decrypted, &expected)
- if decryptedDecodeErr != nil {
- t.Fatal(decryptedDecodeErr)
- }
-
- // check if the decoded value is correct
- if len(expected) != len(data) || expected["test"] != data["test"] {
- t.Fatalf("Expected %v, got %v", expected, data)
- }
-}
-
-func TestDeleteParam(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // unsaved param
- err1 := app.Dao().DeleteParam(&models.Param{})
- if err1 == nil {
- t.Fatal("Expected error, got nil")
- }
-
- // existing param
- param, _ := app.Dao().FindParamByKey(models.ParamAppSettings)
- err2 := app.Dao().DeleteParam(param)
- if err2 != nil {
- t.Fatalf("Expected nil, got error %v", err2)
- }
-
- // check if it was really deleted
- paramCheck, _ := app.Dao().FindParamByKey(models.ParamAppSettings)
- if paramCheck != nil {
- t.Fatalf("Expected param to be deleted, got %v", paramCheck)
- }
-}
diff --git a/daos/record.go b/daos/record.go
deleted file mode 100644
index 9e2f532a..00000000
--- a/daos/record.go
+++ /dev/null
@@ -1,776 +0,0 @@
-package daos
-
-import (
- "context"
- "database/sql"
- "errors"
- "fmt"
- "sort"
- "strings"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/resolvers"
- "github.com/pocketbase/pocketbase/tools/inflector"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/search"
- "github.com/pocketbase/pocketbase/tools/security"
- "github.com/pocketbase/pocketbase/tools/types"
- "github.com/spf13/cast"
-)
-
-// RecordQuery returns a new Record select query from a collection model, id or name.
-//
-// In case a collection id or name is provided and that collection doesn't
-// actually exists, the generated query will be created with a cancelled context
-// and will fail once an executor (Row(), One(), All(), etc.) is called.
-func (dao *Dao) RecordQuery(collectionModelOrIdentifier any) *dbx.SelectQuery {
- var tableName string
- var collection *models.Collection
- var collectionErr error
- switch c := collectionModelOrIdentifier.(type) {
- case *models.Collection:
- collection = c
- tableName = collection.Name
- case models.Collection:
- collection = &c
- tableName = collection.Name
- case string:
- collection, collectionErr = dao.FindCollectionByNameOrId(c)
- if collection != nil {
- tableName = collection.Name
- }
- default:
- collectionErr = errors.New("unsupported collection identifier, must be collection model, id or name")
- }
-
- // update with some fake table name for easier debugging
- if tableName == "" {
- tableName = "@@__invalidCollectionModelOrIdentifier"
- }
-
- selectCols := fmt.Sprintf("%s.*", dao.DB().QuoteSimpleColumnName(tableName))
-
- query := dao.DB().Select(selectCols).From(tableName)
-
- // in case of an error attach a new context and cancel it immediately with the error
- if collectionErr != nil {
- // @todo consider changing to WithCancelCause when upgrading
- // the min Go requirement to 1.20, so that we can pass the error
- ctx, cancelFunc := context.WithCancel(context.Background())
- query.WithContext(ctx)
- cancelFunc()
- }
-
- return query.WithBuildHook(func(q *dbx.Query) {
- q.WithExecHook(execLockRetry(dao.ModelQueryTimeout, dao.MaxLockRetries)).
- WithOneHook(func(q *dbx.Query, a any, op func(b any) error) error {
- switch v := a.(type) {
- case *models.Record:
- if v == nil {
- return op(a)
- }
-
- row := dbx.NullStringMap{}
- if err := op(&row); err != nil {
- return err
- }
-
- record := models.NewRecordFromNullStringMap(collection, row)
-
- *v = *record
-
- return nil
- default:
- return op(a)
- }
- }).
- WithAllHook(func(q *dbx.Query, sliceA any, op func(sliceB any) error) error {
- switch v := sliceA.(type) {
- case *[]*models.Record:
- if v == nil {
- return op(sliceA)
- }
-
- rows := []dbx.NullStringMap{}
- if err := op(&rows); err != nil {
- return err
- }
-
- records := models.NewRecordsFromNullStringMaps(collection, rows)
-
- *v = records
-
- return nil
- case *[]models.Record:
- if v == nil {
- return op(sliceA)
- }
-
- rows := []dbx.NullStringMap{}
- if err := op(&rows); err != nil {
- return err
- }
-
- records := models.NewRecordsFromNullStringMaps(collection, rows)
-
- nonPointers := make([]models.Record, len(records))
- for i, r := range records {
- nonPointers[i] = *r
- }
-
- *v = nonPointers
-
- return nil
- default:
- return op(sliceA)
- }
- })
- })
-}
-
-// FindRecordById finds the Record model by its id.
-func (dao *Dao) FindRecordById(
- collectionNameOrId string,
- recordId string,
- optFilters ...func(q *dbx.SelectQuery) error,
-) (*models.Record, error) {
- collection, err := dao.FindCollectionByNameOrId(collectionNameOrId)
- if err != nil {
- return nil, err
- }
-
- query := dao.RecordQuery(collection).
- AndWhere(dbx.HashExp{collection.Name + ".id": recordId})
-
- for _, filter := range optFilters {
- if filter == nil {
- continue
- }
- if err := filter(query); err != nil {
- return nil, err
- }
- }
-
- record := &models.Record{}
-
- if err := query.Limit(1).One(record); err != nil {
- return nil, err
- }
-
- return record, nil
-}
-
-// FindRecordsByIds finds all Record models by the provided ids.
-// If no records are found, returns an empty slice.
-func (dao *Dao) FindRecordsByIds(
- collectionNameOrId string,
- recordIds []string,
- optFilters ...func(q *dbx.SelectQuery) error,
-) ([]*models.Record, error) {
- collection, err := dao.FindCollectionByNameOrId(collectionNameOrId)
- if err != nil {
- return nil, err
- }
-
- query := dao.RecordQuery(collection).
- AndWhere(dbx.In(
- collection.Name+".id",
- list.ToInterfaceSlice(recordIds)...,
- ))
-
- for _, filter := range optFilters {
- if filter == nil {
- continue
- }
- if err := filter(query); err != nil {
- return nil, err
- }
- }
-
- records := make([]*models.Record, 0, len(recordIds))
-
- if err := query.All(&records); err != nil {
- return nil, err
- }
-
- return records, nil
-}
-
-// FindRecordsByExpr finds all records by the specified db expression.
-//
-// Returns all collection records if no expressions are provided.
-//
-// Returns an empty slice if no records are found.
-//
-// Example:
-//
-// expr1 := dbx.HashExp{"email": "test@example.com"}
-// expr2 := dbx.NewExp("LOWER(username) = {:username}", dbx.Params{"username": "test"})
-// dao.FindRecordsByExpr("example", expr1, expr2)
-func (dao *Dao) FindRecordsByExpr(collectionNameOrId string, exprs ...dbx.Expression) ([]*models.Record, error) {
- query := dao.RecordQuery(collectionNameOrId)
-
- // add only the non-nil expressions
- for _, expr := range exprs {
- if expr != nil {
- query.AndWhere(expr)
- }
- }
-
- var records []*models.Record
-
- if err := query.All(&records); err != nil {
- return nil, err
- }
-
- return records, nil
-}
-
-// FindFirstRecordByData returns the first found record matching
-// the provided key-value pair.
-func (dao *Dao) FindFirstRecordByData(
- collectionNameOrId string,
- key string,
- value any,
-) (*models.Record, error) {
- record := &models.Record{}
-
- err := dao.RecordQuery(collectionNameOrId).
- AndWhere(dbx.HashExp{inflector.Columnify(key): value}).
- Limit(1).
- One(record)
- if err != nil {
- return nil, err
- }
-
- return record, nil
-}
-
-// FindRecordsByFilter returns limit number of records matching the
-// provided string filter.
-//
-// NB! Use the last "params" argument to bind untrusted user variables!
-//
-// The sort argument is optional and can be empty string OR the same format
-// used in the web APIs, eg. "-created,title".
-//
-// If the limit argument is <= 0, no limit is applied to the query and
-// all matching records are returned.
-//
-// Example:
-//
-// dao.FindRecordsByFilter(
-// "posts",
-// "title ~ {:title} && visible = {:visible}",
-// "-created",
-// 10,
-// 0,
-// dbx.Params{"title": "lorem ipsum", "visible": true}
-// )
-func (dao *Dao) FindRecordsByFilter(
- collectionNameOrId string,
- filter string,
- sort string,
- limit int,
- offset int,
- params ...dbx.Params,
-) ([]*models.Record, error) {
- collection, err := dao.FindCollectionByNameOrId(collectionNameOrId)
- if err != nil {
- return nil, err
- }
-
- q := dao.RecordQuery(collection)
-
- // build a fields resolver and attach the generated conditions to the query
- // ---
- resolver := resolvers.NewRecordFieldResolver(
- dao,
- collection, // the base collection
- nil, // no request data
- true, // allow searching hidden/protected fields like "email"
- )
-
- expr, err := search.FilterData(filter).BuildExpr(resolver, params...)
- if err != nil || expr == nil {
- return nil, errors.New("invalid or empty filter expression")
- }
- q.AndWhere(expr)
-
- if sort != "" {
- for _, sortField := range search.ParseSortFromString(sort) {
- expr, err := sortField.BuildExpr(resolver)
- if err != nil {
- return nil, err
- }
- if expr != "" {
- q.AndOrderBy(expr)
- }
- }
- }
-
- resolver.UpdateQuery(q) // attaches any adhoc joins and aliases
- // ---
-
- if offset > 0 {
- q.Offset(int64(offset))
- }
-
- if limit > 0 {
- q.Limit(int64(limit))
- }
-
- records := []*models.Record{}
-
- if err := q.All(&records); err != nil {
- return nil, err
- }
-
- return records, nil
-}
-
-// FindFirstRecordByFilter returns the first available record matching the provided filter.
-//
-// NB! Use the last params argument to bind untrusted user variables!
-//
-// Example:
-//
-// dao.FindFirstRecordByFilter("posts", "slug={:slug} && status='public'", dbx.Params{"slug": "test"})
-func (dao *Dao) FindFirstRecordByFilter(
- collectionNameOrId string,
- filter string,
- params ...dbx.Params,
-) (*models.Record, error) {
- result, err := dao.FindRecordsByFilter(collectionNameOrId, filter, "", 1, 0, params...)
- if err != nil {
- return nil, err
- }
-
- if len(result) == 0 {
- return nil, sql.ErrNoRows
- }
-
- return result[0], nil
-}
-
-// IsRecordValueUnique checks if the provided key-value pair is a unique Record value.
-//
-// For correctness, if the collection is "auth" and the key is "username",
-// the unique check will be case insensitive.
-//
-// NB! Array values (eg. from multiple select fields) are matched
-// as a serialized json strings (eg. `["a","b"]`), so the value uniqueness
-// depends on the elements order. Or in other words the following values
-// are considered different: `[]string{"a","b"}` and `[]string{"b","a"}`
-func (dao *Dao) IsRecordValueUnique(
- collectionNameOrId string,
- key string,
- value any,
- excludeIds ...string,
-) bool {
- collection, err := dao.FindCollectionByNameOrId(collectionNameOrId)
- if err != nil {
- return false
- }
-
- var expr dbx.Expression
- if collection.IsAuth() && key == schema.FieldNameUsername {
- expr = dbx.NewExp("LOWER([["+schema.FieldNameUsername+"]])={:username}", dbx.Params{
- "username": strings.ToLower(cast.ToString(value)),
- })
- } else {
- var normalizedVal any
- switch val := value.(type) {
- case []string:
- normalizedVal = append(types.JsonArray[string]{}, val...)
- case []any:
- normalizedVal = append(types.JsonArray[any]{}, val...)
- default:
- normalizedVal = val
- }
-
- expr = dbx.HashExp{inflector.Columnify(key): normalizedVal}
- }
-
- query := dao.RecordQuery(collection).
- Select("count(*)").
- AndWhere(expr).
- Limit(1)
-
- if uniqueExcludeIds := list.NonzeroUniques(excludeIds); len(uniqueExcludeIds) > 0 {
- query.AndWhere(dbx.NotIn(collection.Name+".id", list.ToInterfaceSlice(uniqueExcludeIds)...))
- }
-
- var exists bool
-
- return query.Row(&exists) == nil && !exists
-}
-
-// FindAuthRecordByToken finds the auth record associated with the provided JWT.
-//
-// Returns an error if the JWT is invalid, expired or not associated to an auth collection record.
-func (dao *Dao) FindAuthRecordByToken(token string, baseTokenKey string) (*models.Record, error) {
- unverifiedClaims, err := security.ParseUnverifiedJWT(token)
- if err != nil {
- return nil, err
- }
-
- // check required claims
- id, _ := unverifiedClaims["id"].(string)
- collectionId, _ := unverifiedClaims["collectionId"].(string)
- if id == "" || collectionId == "" {
- return nil, errors.New("missing or invalid token claims")
- }
-
- record, err := dao.FindRecordById(collectionId, id)
- if err != nil {
- return nil, err
- }
-
- if !record.Collection().IsAuth() {
- return nil, errors.New("the token is not associated to an auth collection record")
- }
-
- verificationKey := record.TokenKey() + baseTokenKey
-
- // verify token signature
- if _, err := security.ParseJWT(token, verificationKey); err != nil {
- return nil, err
- }
-
- return record, nil
-}
-
-// FindAuthRecordByEmail finds the auth record associated with the provided email.
-//
-// Returns an error if it is not an auth collection or the record is not found.
-func (dao *Dao) FindAuthRecordByEmail(collectionNameOrId string, email string) (*models.Record, error) {
- collection, err := dao.FindCollectionByNameOrId(collectionNameOrId)
- if err != nil {
- return nil, fmt.Errorf("failed to fetch auth collection %q (%w)", collectionNameOrId, err)
- }
- if !collection.IsAuth() {
- return nil, fmt.Errorf("%q is not an auth collection", collectionNameOrId)
- }
-
- record := &models.Record{}
-
- err = dao.RecordQuery(collection).
- AndWhere(dbx.HashExp{schema.FieldNameEmail: email}).
- Limit(1).
- One(record)
- if err != nil {
- return nil, err
- }
-
- return record, nil
-}
-
-// FindAuthRecordByUsername finds the auth record associated with the provided username (case insensitive).
-//
-// Returns an error if it is not an auth collection or the record is not found.
-func (dao *Dao) FindAuthRecordByUsername(collectionNameOrId string, username string) (*models.Record, error) {
- collection, err := dao.FindCollectionByNameOrId(collectionNameOrId)
- if err != nil {
- return nil, fmt.Errorf("failed to fetch auth collection %q (%w)", collectionNameOrId, err)
- }
- if !collection.IsAuth() {
- return nil, fmt.Errorf("%q is not an auth collection", collectionNameOrId)
- }
-
- record := &models.Record{}
-
- err = dao.RecordQuery(collection).
- AndWhere(dbx.NewExp("LOWER([["+schema.FieldNameUsername+"]])={:username}", dbx.Params{
- "username": strings.ToLower(username),
- })).
- Limit(1).
- One(record)
- if err != nil {
- return nil, err
- }
-
- return record, nil
-}
-
-// SuggestUniqueAuthRecordUsername checks if the provided username is unique
-// and return a new "unique" username with appended random numeric part
-// (eg. "existingName" -> "existingName583").
-//
-// The same username will be returned if the provided string is already unique.
-func (dao *Dao) SuggestUniqueAuthRecordUsername(
- collectionNameOrId string,
- baseUsername string,
- excludeIds ...string,
-) string {
- username := baseUsername
-
- for i := 0; i < 10; i++ { // max 10 attempts
- isUnique := dao.IsRecordValueUnique(
- collectionNameOrId,
- schema.FieldNameUsername,
- username,
- excludeIds...,
- )
- if isUnique {
- break // already unique
- }
- username = baseUsername + security.RandomStringWithAlphabet(3+i, "123456789")
- }
-
- return username
-}
-
-// CanAccessRecord checks if a record is allowed to be accessed by the
-// specified requestInfo and accessRule.
-//
-// Rule and db checks are ignored in case requestInfo.Admin is set.
-//
-// The returned error indicate that something unexpected happened during
-// the check (eg. invalid rule or db error).
-//
-// The method always return false on invalid access rule or db error.
-//
-// Example:
-//
-// requestInfo := apis.RequestInfo(c /* echo.Context */)
-// record, _ := dao.FindRecordById("example", "RECORD_ID")
-// rule := types.Pointer("@request.auth.id != '' || status = 'public'")
-// // ... or use one of the record collection's rule, eg. record.Collection().ViewRule
-//
-// if ok, _ := dao.CanAccessRecord(record, requestInfo, rule); ok { ... }
-func (dao *Dao) CanAccessRecord(record *models.Record, requestInfo *models.RequestInfo, accessRule *string) (bool, error) {
- if requestInfo.Admin != nil {
- // admins can access everything
- return true, nil
- }
-
- if accessRule == nil {
- // only admins can access this record
- return false, nil
- }
-
- if *accessRule == "" {
- // empty public rule, aka. everyone can access
- return true, nil
- }
-
- var exists bool
-
- query := dao.RecordQuery(record.Collection()).
- Select("(1)").
- AndWhere(dbx.HashExp{record.Collection().Name + ".id": record.Id})
-
- // parse and apply the access rule filter
- resolver := resolvers.NewRecordFieldResolver(dao, record.Collection(), requestInfo, true)
- expr, err := search.FilterData(*accessRule).BuildExpr(resolver)
- if err != nil {
- return false, err
- }
- resolver.UpdateQuery(query)
- query.AndWhere(expr)
-
- if err := query.Limit(1).Row(&exists); err != nil && !errors.Is(err, sql.ErrNoRows) {
- return false, err
- }
-
- return exists, nil
-}
-
-// SaveRecord persists the provided Record model in the database.
-//
-// If record.IsNew() is true, the method will perform a create, otherwise an update.
-// To explicitly mark a record for update you can use record.MarkAsNotNew().
-func (dao *Dao) SaveRecord(record *models.Record) error {
- if record.Collection().IsAuth() {
- if record.Username() == "" {
- return errors.New("unable to save auth record without username")
- }
-
- // Cross-check that the auth record id is unique for all auth collections.
- // This is to make sure that the filter `@request.auth.id` always returns a unique id.
- authCollections, err := dao.FindCollectionsByType(models.CollectionTypeAuth)
- if err != nil {
- return fmt.Errorf("unable to fetch the auth collections for cross-id unique check: %w", err)
- }
- for _, collection := range authCollections {
- if record.Collection().Id == collection.Id {
- continue // skip current collection (sqlite will do the check for us)
- }
- isUnique := dao.IsRecordValueUnique(collection.Id, schema.FieldNameId, record.Id)
- if !isUnique {
- return errors.New("the auth record ID must be unique across all auth collections")
- }
- }
- }
-
- return dao.Save(record)
-}
-
-// DeleteRecord deletes the provided Record model.
-//
-// This method will also cascade the delete operation to all linked
-// relational records (delete or unset, depending on the rel settings).
-//
-// The delete operation may fail if the record is part of a required
-// reference in another record (aka. cannot be deleted or unset).
-func (dao *Dao) DeleteRecord(record *models.Record) error {
- // fetch rel references (if any)
- //
- // note: the select is outside of the transaction to minimize
- // SQLITE_BUSY errors when mixing read&write in a single transaction
- refs, err := dao.FindCollectionReferences(record.Collection())
- if err != nil {
- return err
- }
-
- return dao.RunInTransaction(func(txDao *Dao) error {
- // manually trigger delete on any linked external auth to ensure
- // that the `OnModel*` hooks are triggered
- if record.Collection().IsAuth() {
- // note: the select is outside of the transaction to minimize
- // SQLITE_BUSY errors when mixing read&write in a single transaction
- externalAuths, err := dao.FindAllExternalAuthsByRecord(record)
- if err != nil {
- return err
- }
- for _, auth := range externalAuths {
- if err := txDao.DeleteExternalAuth(auth); err != nil {
- return err
- }
- }
- }
-
- // delete the record before the relation references to ensure that there
- // will be no "A<->B" relations to prevent deadlock when calling DeleteRecord recursively
- if err := txDao.Delete(record); err != nil {
- return err
- }
-
- return txDao.cascadeRecordDelete(record, refs)
- })
-}
-
-// cascadeRecordDelete triggers cascade deletion for the provided references.
-//
-// NB! This method is expected to be called inside a transaction.
-func (dao *Dao) cascadeRecordDelete(mainRecord *models.Record, refs map[*models.Collection][]*schema.SchemaField) error {
- // @todo consider changing refs to a slice
- //
- // Sort the refs keys to ensure that the cascade events firing order is always the same.
- // This is not necessary for the operation to function correctly but it helps having deterministic output during testing.
- sortedRefKeys := make([]*models.Collection, 0, len(refs))
- for k := range refs {
- sortedRefKeys = append(sortedRefKeys, k)
- }
- sort.Slice(sortedRefKeys, func(i, j int) bool {
- return sortedRefKeys[i].Name < sortedRefKeys[j].Name
- })
-
- for _, refCollection := range sortedRefKeys {
- fields, ok := refs[refCollection]
-
- if refCollection.IsView() || !ok {
- continue // skip missing or view collections
- }
-
- for _, field := range fields {
- recordTableName := inflector.Columnify(refCollection.Name)
- prefixedFieldName := recordTableName + "." + inflector.Columnify(field.Name)
-
- query := dao.RecordQuery(refCollection)
-
- if opt, ok := field.Options.(schema.MultiValuer); !ok || !opt.IsMultiple() {
- query.AndWhere(dbx.HashExp{prefixedFieldName: mainRecord.Id})
- } else {
- query.AndWhere(dbx.Exists(dbx.NewExp(fmt.Sprintf(
- `SELECT 1 FROM json_each(CASE WHEN json_valid([[%s]]) THEN [[%s]] ELSE json_array([[%s]]) END) {{__je__}} WHERE [[__je__.value]]={:jevalue}`,
- prefixedFieldName, prefixedFieldName, prefixedFieldName,
- ), dbx.Params{
- "jevalue": mainRecord.Id,
- })))
- }
-
- if refCollection.Id == mainRecord.Collection().Id {
- query.AndWhere(dbx.Not(dbx.HashExp{recordTableName + ".id": mainRecord.Id}))
- }
-
- // trigger cascade for each batchSize rel items until there is none
- batchSize := 4000
- rows := make([]dbx.NullStringMap, 0, batchSize)
- for {
- if err := query.Limit(int64(batchSize)).All(&rows); err != nil {
- return err
- }
-
- total := len(rows)
- if total == 0 {
- break
- }
-
- refRecords := models.NewRecordsFromNullStringMaps(refCollection, rows)
-
- err := dao.deleteRefRecords(mainRecord, refRecords, field)
- if err != nil {
- return err
- }
-
- if total < batchSize {
- break // no more items
- }
-
- rows = rows[:0] // keep allocated memory
- }
- }
- }
-
- return nil
-}
-
-// deleteRefRecords checks if related records has to be deleted (if `CascadeDelete` is set)
-// OR
-// just unset the record id from any relation field values (if they are not required).
-//
-// NB! This method is expected to be called inside a transaction.
-func (dao *Dao) deleteRefRecords(mainRecord *models.Record, refRecords []*models.Record, field *schema.SchemaField) error {
- options, _ := field.Options.(*schema.RelationOptions)
- if options == nil {
- return errors.New("relation field options are not initialized")
- }
-
- for _, refRecord := range refRecords {
- ids := refRecord.GetStringSlice(field.Name)
-
- // unset the record id
- for i := len(ids) - 1; i >= 0; i-- {
- if ids[i] == mainRecord.Id {
- ids = append(ids[:i], ids[i+1:]...)
- break
- }
- }
-
- // cascade delete the reference
- // (only if there are no other active references in case of multiple select)
- if options.CascadeDelete && len(ids) == 0 {
- if err := dao.DeleteRecord(refRecord); err != nil {
- return err
- }
- // no further actions are needed (the reference is deleted)
- continue
- }
-
- if field.Required && len(ids) == 0 {
- return fmt.Errorf("the record cannot be deleted because it is part of a required reference in record %s (%s collection)", refRecord.Id, refRecord.Collection().Name)
- }
-
- // save the reference changes
- refRecord.Set(field.Name, field.PrepareValue(ids))
- if err := dao.SaveRecord(refRecord); err != nil {
- return err
- }
- }
-
- return nil
-}
diff --git a/daos/record_table_sync.go b/daos/record_table_sync.go
deleted file mode 100644
index 83f45370..00000000
--- a/daos/record_table_sync.go
+++ /dev/null
@@ -1,361 +0,0 @@
-package daos
-
-import (
- "fmt"
- "strconv"
- "strings"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/dbutils"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-// SyncRecordTableSchema compares the two provided collections
-// and applies the necessary related record table changes.
-//
-// If `oldCollection` is null, then only `newCollection` is used to create the record table.
-func (dao *Dao) SyncRecordTableSchema(newCollection *models.Collection, oldCollection *models.Collection) error {
- return dao.RunInTransaction(func(txDao *Dao) error {
- // create
- // -----------------------------------------------------------
- if oldCollection == nil {
- cols := map[string]string{
- schema.FieldNameId: "TEXT PRIMARY KEY DEFAULT ('r'||lower(hex(randomblob(7)))) NOT NULL",
- schema.FieldNameCreated: "TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL",
- schema.FieldNameUpdated: "TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL",
- }
-
- if newCollection.IsAuth() {
- cols[schema.FieldNameUsername] = "TEXT NOT NULL"
- cols[schema.FieldNameEmail] = "TEXT DEFAULT '' NOT NULL"
- cols[schema.FieldNameEmailVisibility] = "BOOLEAN DEFAULT FALSE NOT NULL"
- cols[schema.FieldNameVerified] = "BOOLEAN DEFAULT FALSE NOT NULL"
- cols[schema.FieldNameTokenKey] = "TEXT NOT NULL"
- cols[schema.FieldNamePasswordHash] = "TEXT NOT NULL"
- cols[schema.FieldNameLastResetSentAt] = "TEXT DEFAULT '' NOT NULL"
- cols[schema.FieldNameLastVerificationSentAt] = "TEXT DEFAULT '' NOT NULL"
- cols[schema.FieldNameLastLoginAlertSentAt] = "TEXT DEFAULT '' NOT NULL"
- }
-
- // ensure that the new collection has an id
- if !newCollection.HasId() {
- newCollection.RefreshId()
- newCollection.MarkAsNew()
- }
-
- tableName := newCollection.Name
-
- // add schema field definitions
- for _, field := range newCollection.Schema.Fields() {
- cols[field.Name] = field.ColDefinition()
- }
-
- // create table
- if _, err := txDao.DB().CreateTable(tableName, cols).Execute(); err != nil {
- return err
- }
-
- // add named unique index on the email and tokenKey columns
- if newCollection.IsAuth() {
- _, err := txDao.DB().NewQuery(fmt.Sprintf(
- `
- CREATE UNIQUE INDEX _%s_username_idx ON {{%s}} ([[username]]);
- CREATE UNIQUE INDEX _%s_email_idx ON {{%s}} ([[email]]) WHERE [[email]] != '';
- CREATE UNIQUE INDEX _%s_tokenKey_idx ON {{%s}} ([[tokenKey]]);
- `,
- newCollection.Id, tableName,
- newCollection.Id, tableName,
- newCollection.Id, tableName,
- )).Execute()
- if err != nil {
- return err
- }
- }
-
- return txDao.createCollectionIndexes(newCollection)
- }
-
- // update
- // -----------------------------------------------------------
- oldTableName := oldCollection.Name
- newTableName := newCollection.Name
- oldSchema := oldCollection.Schema
- newSchema := newCollection.Schema
- deletedFieldNames := []string{}
- renamedFieldNames := map[string]string{}
-
- // drop old indexes (if any)
- if err := txDao.dropCollectionIndex(oldCollection); err != nil {
- return err
- }
-
- // check for renamed table
- if !strings.EqualFold(oldTableName, newTableName) {
- _, err := txDao.DB().RenameTable("{{"+oldTableName+"}}", "{{"+newTableName+"}}").Execute()
- if err != nil {
- return err
- }
- }
-
- // check for deleted columns
- for _, oldField := range oldSchema.Fields() {
- if f := newSchema.GetFieldById(oldField.Id); f != nil {
- continue // exist
- }
-
- _, err := txDao.DB().DropColumn(newTableName, oldField.Name).Execute()
- if err != nil {
- return fmt.Errorf("failed to drop column %s - %w", oldField.Name, err)
- }
-
- deletedFieldNames = append(deletedFieldNames, oldField.Name)
- }
-
- // check for new or renamed columns
- toRename := map[string]string{}
- for _, field := range newSchema.Fields() {
- oldField := oldSchema.GetFieldById(field.Id)
- // Note:
- // We are using a temporary column name when adding or renaming columns
- // to ensure that there are no name collisions in case there is
- // names switch/reuse of existing columns (eg. name, title -> title, name).
- // This way we are always doing 1 more rename operation but it provides better dev experience.
-
- if oldField == nil {
- tempName := field.Name + security.PseudorandomString(5)
- toRename[tempName] = field.Name
-
- // add
- _, err := txDao.DB().AddColumn(newTableName, tempName, field.ColDefinition()).Execute()
- if err != nil {
- return fmt.Errorf("failed to add column %s - %w", field.Name, err)
- }
- } else if oldField.Name != field.Name {
- tempName := field.Name + security.PseudorandomString(5)
- toRename[tempName] = field.Name
-
- // rename
- _, err := txDao.DB().RenameColumn(newTableName, oldField.Name, tempName).Execute()
- if err != nil {
- return fmt.Errorf("failed to rename column %s - %w", oldField.Name, err)
- }
-
- renamedFieldNames[oldField.Name] = field.Name
- }
- }
-
- // set the actual columns name
- for tempName, actualName := range toRename {
- _, err := txDao.DB().RenameColumn(newTableName, tempName, actualName).Execute()
- if err != nil {
- return err
- }
- }
-
- if err := txDao.normalizeSingleVsMultipleFieldChanges(newCollection, oldCollection); err != nil {
- return err
- }
-
- return txDao.createCollectionIndexes(newCollection)
- })
-}
-
-func (dao *Dao) normalizeSingleVsMultipleFieldChanges(newCollection, oldCollection *models.Collection) error {
- if newCollection.IsView() || oldCollection == nil {
- return nil // view or not an update
- }
-
- return dao.RunInTransaction(func(txDao *Dao) error {
- // temporary disable the schema error checks to prevent view and trigger errors
- // when "altering" (aka. deleting and recreating) the non-normalized columns
- if _, err := txDao.DB().NewQuery("PRAGMA writable_schema = ON").Execute(); err != nil {
- return err
- }
- // executed with defer to make sure that the pragma is always reverted
- // in case of an error and when nested transactions are used
- defer txDao.DB().NewQuery("PRAGMA writable_schema = RESET").Execute()
-
- for _, newField := range newCollection.Schema.Fields() {
- // allow to continue even if there is no old field for the cases
- // when a new field is added and there are already inserted data
- var isOldMultiple bool
- if oldField := oldCollection.Schema.GetFieldById(newField.Id); oldField != nil {
- if opt, ok := oldField.Options.(schema.MultiValuer); ok {
- isOldMultiple = opt.IsMultiple()
- }
- }
-
- var isNewMultiple bool
- if opt, ok := newField.Options.(schema.MultiValuer); ok {
- isNewMultiple = opt.IsMultiple()
- }
-
- if isOldMultiple == isNewMultiple {
- continue // no change
- }
-
- // update the column definition by:
- // 1. inserting a new column with the new definition
- // 2. copy normalized values from the original column to the new one
- // 3. drop the original column
- // 4. rename the new column to the original column
- // -------------------------------------------------------
-
- originalName := newField.Name
- tempName := "_" + newField.Name + security.PseudorandomString(5)
-
- _, err := txDao.DB().AddColumn(newCollection.Name, tempName, newField.ColDefinition()).Execute()
- if err != nil {
- return err
- }
-
- var copyQuery *dbx.Query
-
- if !isOldMultiple && isNewMultiple {
- // single -> multiple (convert to array)
- copyQuery = txDao.DB().NewQuery(fmt.Sprintf(
- `UPDATE {{%s}} set [[%s]] = (
- CASE
- WHEN COALESCE([[%s]], '') = ''
- THEN '[]'
- ELSE (
- CASE
- WHEN json_valid([[%s]]) AND json_type([[%s]]) == 'array'
- THEN [[%s]]
- ELSE json_array([[%s]])
- END
- )
- END
- )`,
- newCollection.Name,
- tempName,
- originalName,
- originalName,
- originalName,
- originalName,
- originalName,
- ))
- } else {
- // multiple -> single (keep only the last element)
- //
- // note: for file fields the actual file objects are not
- // deleted allowing additional custom handling via migration
- copyQuery = txDao.DB().NewQuery(fmt.Sprintf(
- `UPDATE {{%s}} set [[%s]] = (
- CASE
- WHEN COALESCE([[%s]], '[]') = '[]'
- THEN ''
- ELSE (
- CASE
- WHEN json_valid([[%s]]) AND json_type([[%s]]) == 'array'
- THEN COALESCE(json_extract([[%s]], '$[#-1]'), '')
- ELSE [[%s]]
- END
- )
- END
- )`,
- newCollection.Name,
- tempName,
- originalName,
- originalName,
- originalName,
- originalName,
- originalName,
- ))
- }
-
- // copy the normalized values
- if _, err := copyQuery.Execute(); err != nil {
- return err
- }
-
- // drop the original column
- if _, err := txDao.DB().DropColumn(newCollection.Name, originalName).Execute(); err != nil {
- return err
- }
-
- // rename the new column back to the original
- if _, err := txDao.DB().RenameColumn(newCollection.Name, tempName, originalName).Execute(); err != nil {
- return err
- }
- }
-
- // revert the pragma and reload the schema
- _, revertErr := txDao.DB().NewQuery("PRAGMA writable_schema = RESET").Execute()
-
- return revertErr
- })
-}
-
-func (dao *Dao) dropCollectionIndex(collection *models.Collection) error {
- if collection.IsView() {
- return nil // views don't have indexes
- }
-
- return dao.RunInTransaction(func(txDao *Dao) error {
- for _, raw := range collection.Indexes {
- parsed := dbutils.ParseIndex(raw)
-
- if !parsed.IsValid() {
- continue
- }
-
- if _, err := txDao.DB().NewQuery(fmt.Sprintf("DROP INDEX IF EXISTS [[%s]]", parsed.IndexName)).Execute(); err != nil {
- return err
- }
- }
-
- return nil
- })
-}
-
-func (dao *Dao) createCollectionIndexes(collection *models.Collection) error {
- if collection.IsView() {
- return nil // views don't have indexes
- }
-
- return dao.RunInTransaction(func(txDao *Dao) error {
- // drop new indexes in case a duplicated index name is used
- if err := txDao.dropCollectionIndex(collection); err != nil {
- return err
- }
-
- // upsert new indexes
- //
- // note: we are returning validation errors because the indexes cannot be
- // validated in a form, aka. before persisting the related collection
- // record table changes
- errs := validation.Errors{}
- for i, idx := range collection.Indexes {
- parsed := dbutils.ParseIndex(idx)
-
- // ensure that the index is always for the current collection
- parsed.TableName = collection.Name
-
- if !parsed.IsValid() {
- errs[strconv.Itoa(i)] = validation.NewError(
- "validation_invalid_index_expression",
- "Invalid CREATE INDEX expression.",
- )
- continue
- }
-
- if _, err := txDao.DB().NewQuery(parsed.Build()).Execute(); err != nil {
- errs[strconv.Itoa(i)] = validation.NewError(
- "validation_invalid_index_expression",
- fmt.Sprintf("Failed to create index %s - %v.", parsed.IndexName, err.Error()),
- )
- continue
- }
- }
-
- if len(errs) > 0 {
- return validation.Errors{"indexes": errs}
- }
-
- return nil
- })
-}
diff --git a/daos/record_test.go b/daos/record_test.go
deleted file mode 100644
index 12797128..00000000
--- a/daos/record_test.go
+++ /dev/null
@@ -1,1368 +0,0 @@
-package daos_test
-
-import (
- "context"
- "database/sql"
- "errors"
- "regexp"
- "strings"
- "testing"
- "time"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestRecordQueryWithDifferentCollectionValues(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, err := app.Dao().FindCollectionByNameOrId("demo1")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- name any
- collection any
- expectedTotal int
- expectError bool
- }{
- {"with nil value", nil, 0, true},
- {"with invalid or missing collection id/name", "missing", 0, true},
- {"with pointer model", collection, 3, false},
- {"with value model", *collection, 3, false},
- {"with name", "demo1", 3, false},
- {"with id", "wsmn24bux7wo113", 3, false},
- }
-
- for _, s := range scenarios {
- var records []*models.Record
- err := app.Dao().RecordQuery(s.collection).All(&records)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("[%s] Expected hasError %v, got %v", s.name, s.expectError, hasErr)
- continue
- }
-
- if total := len(records); total != s.expectedTotal {
- t.Errorf("[%s] Expected %d records, got %d", s.name, s.expectedTotal, total)
- }
- }
-}
-
-func TestRecordQueryOneWithRecord(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, err := app.Dao().FindCollectionByNameOrId("demo1")
- if err != nil {
- t.Fatal(err)
- }
-
- id := "84nmscqy84lsi1t"
-
- q := app.Dao().RecordQuery(collection).
- Where(dbx.HashExp{"id": id})
-
- record := &models.Record{}
- if err := q.One(record); err != nil {
- t.Fatal(err)
- }
-
- if record.GetString("id") != id {
- t.Fatalf("Expected record with id %q, got %q", id, record.GetString("id"))
- }
-}
-
-func TestRecordQueryAllWithRecordsSlices(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, err := app.Dao().FindCollectionByNameOrId("demo1")
- if err != nil {
- t.Fatal(err)
- }
-
- id1 := "84nmscqy84lsi1t"
- id2 := "al1h9ijdeojtsjy"
-
- {
- records := []models.Record{}
-
- q := app.Dao().RecordQuery(collection).
- Where(dbx.HashExp{"id": []any{id1, id2}}).
- OrderBy("created asc")
-
- if err := q.All(&records); err != nil {
- t.Fatal(err)
- }
-
- if len(records) != 2 {
- t.Fatalf("Expected %d records, got %d", 2, len(records))
- }
-
- if records[0].Id != id1 {
- t.Fatalf("Expected record with id %q, got %q", id1, records[0].Id)
- }
-
- if records[1].Id != id2 {
- t.Fatalf("Expected record with id %q, got %q", id2, records[1].Id)
- }
- }
-
- {
- records := []*models.Record{}
-
- q := app.Dao().RecordQuery(collection).
- Where(dbx.HashExp{"id": []any{id1, id2}}).
- OrderBy("created asc")
-
- if err := q.All(&records); err != nil {
- t.Fatal(err)
- }
-
- if len(records) != 2 {
- t.Fatalf("Expected %d records, got %d", 2, len(records))
- }
-
- if records[0].Id != id1 {
- t.Fatalf("Expected record with id %q, got %q", id1, records[0].Id)
- }
-
- if records[1].Id != id2 {
- t.Fatalf("Expected record with id %q, got %q", id2, records[1].Id)
- }
- }
-}
-
-func TestFindRecordById(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- collectionIdOrName string
- id string
- filter1 func(q *dbx.SelectQuery) error
- filter2 func(q *dbx.SelectQuery) error
- expectError bool
- }{
- {"demo2", "missing", nil, nil, true},
- {"missing", "0yxhwia2amd8gec", nil, nil, true},
- {"demo2", "0yxhwia2amd8gec", nil, nil, false},
- {"demo2", "0yxhwia2amd8gec", func(q *dbx.SelectQuery) error {
- q.AndWhere(dbx.HashExp{"title": "missing"})
- return nil
- }, nil, true},
- {"demo2", "0yxhwia2amd8gec", func(q *dbx.SelectQuery) error {
- return errors.New("test error")
- }, nil, true},
- {"demo2", "0yxhwia2amd8gec", func(q *dbx.SelectQuery) error {
- q.AndWhere(dbx.HashExp{"title": "test3"})
- return nil
- }, nil, false},
- {"demo2", "0yxhwia2amd8gec", func(q *dbx.SelectQuery) error {
- q.AndWhere(dbx.HashExp{"title": "test3"})
- return nil
- }, func(q *dbx.SelectQuery) error {
- q.AndWhere(dbx.HashExp{"active": false})
- return nil
- }, true},
- {"sz5l5z67tg7gku0", "0yxhwia2amd8gec", func(q *dbx.SelectQuery) error {
- q.AndWhere(dbx.HashExp{"title": "test3"})
- return nil
- }, func(q *dbx.SelectQuery) error {
- q.AndWhere(dbx.HashExp{"active": true})
- return nil
- }, false},
- }
-
- for i, scenario := range scenarios {
- record, err := app.Dao().FindRecordById(
- scenario.collectionIdOrName,
- scenario.id,
- scenario.filter1,
- scenario.filter2,
- )
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- }
-
- if record != nil && record.Id != scenario.id {
- t.Errorf("(%d) Expected record with id %s, got %s", i, scenario.id, record.Id)
- }
- }
-}
-
-func TestFindRecordsByIds(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- collectionIdOrName string
- ids []string
- filter1 func(q *dbx.SelectQuery) error
- filter2 func(q *dbx.SelectQuery) error
- expectTotal int
- expectError bool
- }{
- {"demo2", []string{}, nil, nil, 0, false},
- {"demo2", []string{""}, nil, nil, 0, false},
- {"demo2", []string{"missing"}, nil, nil, 0, false},
- {"missing", []string{"0yxhwia2amd8gec"}, nil, nil, 0, true},
- {"demo2", []string{"0yxhwia2amd8gec"}, nil, nil, 1, false},
- {"sz5l5z67tg7gku0", []string{"0yxhwia2amd8gec"}, nil, nil, 1, false},
- {
- "demo2",
- []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
- nil,
- nil,
- 2,
- false,
- },
- {
- "demo2",
- []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
- func(q *dbx.SelectQuery) error {
- return nil // empty filter
- },
- func(q *dbx.SelectQuery) error {
- return errors.New("test error")
- },
- 0,
- true,
- },
- {
- "demo2",
- []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
- func(q *dbx.SelectQuery) error {
- q.AndWhere(dbx.HashExp{"active": true})
- return nil
- },
- nil,
- 1,
- false,
- },
- {
- "sz5l5z67tg7gku0",
- []string{"0yxhwia2amd8gec", "llvuca81nly1qls"},
- func(q *dbx.SelectQuery) error {
- q.AndWhere(dbx.HashExp{"active": true})
- return nil
- },
- func(q *dbx.SelectQuery) error {
- q.AndWhere(dbx.Not(dbx.HashExp{"title": ""}))
- return nil
- },
- 1,
- false,
- },
- }
-
- for i, scenario := range scenarios {
- records, err := app.Dao().FindRecordsByIds(
- scenario.collectionIdOrName,
- scenario.ids,
- scenario.filter1,
- scenario.filter2,
- )
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- }
-
- if len(records) != scenario.expectTotal {
- t.Errorf("(%d) Expected %d records, got %d", i, scenario.expectTotal, len(records))
- continue
- }
-
- for _, r := range records {
- if !list.ExistInSlice(r.Id, scenario.ids) {
- t.Errorf("(%d) Couldn't find id %s in %v", i, r.Id, scenario.ids)
- }
- }
- }
-}
-
-func TestFindRecordsByExpr(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- collectionIdOrName string
- expressions []dbx.Expression
- expectIds []string
- expectError bool
- }{
- {
- "missing",
- nil,
- []string{},
- true,
- },
- {
- "demo2",
- nil,
- []string{
- "achvryl401bhse3",
- "llvuca81nly1qls",
- "0yxhwia2amd8gec",
- },
- false,
- },
- {
- "demo2",
- []dbx.Expression{
- nil,
- dbx.HashExp{"id": "123"},
- },
- []string{},
- false,
- },
- {
- "sz5l5z67tg7gku0",
- []dbx.Expression{
- dbx.Like("title", "test").Match(true, true),
- dbx.HashExp{"active": true},
- },
- []string{
- "achvryl401bhse3",
- "0yxhwia2amd8gec",
- },
- false,
- },
- }
-
- for i, scenario := range scenarios {
- records, err := app.Dao().FindRecordsByExpr(scenario.collectionIdOrName, scenario.expressions...)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- }
-
- if len(records) != len(scenario.expectIds) {
- t.Errorf("(%d) Expected %d records, got %d", i, len(scenario.expectIds), len(records))
- continue
- }
-
- for _, r := range records {
- if !list.ExistInSlice(r.Id, scenario.expectIds) {
- t.Errorf("(%d) Couldn't find id %s in %v", i, r.Id, scenario.expectIds)
- }
- }
- }
-}
-
-func TestFindFirstRecordByData(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- collectionIdOrName string
- key string
- value any
- expectId string
- expectError bool
- }{
- {
- "missing",
- "id",
- "llvuca81nly1qls",
- "llvuca81nly1qls",
- true,
- },
- {
- "demo2",
- "",
- "llvuca81nly1qls",
- "",
- true,
- },
- {
- "demo2",
- "id",
- "invalid",
- "",
- true,
- },
- {
- "demo2",
- "id",
- "llvuca81nly1qls",
- "llvuca81nly1qls",
- false,
- },
- {
- "sz5l5z67tg7gku0",
- "title",
- "test3",
- "0yxhwia2amd8gec",
- false,
- },
- }
-
- for i, scenario := range scenarios {
- record, err := app.Dao().FindFirstRecordByData(scenario.collectionIdOrName, scenario.key, scenario.value)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- continue
- }
-
- if !scenario.expectError && record.Id != scenario.expectId {
- t.Errorf("(%d) Expected record with id %s, got %v", i, scenario.expectId, record.Id)
- }
- }
-}
-
-func TestFindRecordsByFilter(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- name string
- collectionIdOrName string
- filter string
- sort string
- limit int
- offset int
- params []dbx.Params
- expectError bool
- expectRecordIds []string
- }{
- {
- "missing collection",
- "missing",
- "id != ''",
- "",
- 0,
- 0,
- nil,
- true,
- nil,
- },
- {
- "missing filter",
- "demo2",
- "",
- "",
- 0,
- 0,
- nil,
- true,
- nil,
- },
- {
- "invalid filter",
- "demo2",
- "someMissingField > 1",
- "",
- 0,
- 0,
- nil,
- true,
- nil,
- },
- {
- "simple filter",
- "demo2",
- "id != ''",
- "",
- 0,
- 0,
- nil,
- false,
- []string{
- "llvuca81nly1qls",
- "achvryl401bhse3",
- "0yxhwia2amd8gec",
- },
- },
- {
- "multi-condition filter with sort",
- "demo2",
- "id != '' && active=true",
- "-created,title",
- -1, // should behave the same as 0
- 0,
- nil,
- false,
- []string{
- "0yxhwia2amd8gec",
- "achvryl401bhse3",
- },
- },
- {
- "with limit and offset",
- "demo2",
- "id != ''",
- "title",
- 2,
- 1,
- nil,
- false,
- []string{
- "achvryl401bhse3",
- "0yxhwia2amd8gec",
- },
- },
- {
- "with placeholder params",
- "demo2",
- "active = {:active}",
- "",
- 10,
- 0,
- []dbx.Params{{"active": false}},
- false,
- []string{
- "llvuca81nly1qls",
- },
- },
- {
- "with json filter and sort",
- "demo4",
- "json_object != null && json_object.a.b = 'test'",
- "-json_object.a",
- 10,
- 0,
- []dbx.Params{{"active": false}},
- false,
- []string{
- "i9naidtvr6qsgb4",
- },
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- records, err := app.Dao().FindRecordsByFilter(
- s.collectionIdOrName,
- s.filter,
- s.sort,
- s.limit,
- s.offset,
- s.params...,
- )
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Fatalf("[%s] Expected hasErr to be %v, got %v (%v)", s.name, s.expectError, hasErr, err)
- }
-
- if hasErr {
- return
- }
-
- if len(records) != len(s.expectRecordIds) {
- t.Fatalf("[%s] Expected %d records, got %d", s.name, len(s.expectRecordIds), len(records))
- }
-
- for i, id := range s.expectRecordIds {
- if id != records[i].Id {
- t.Fatalf("[%s] Expected record with id %q, got %q at index %d", s.name, id, records[i].Id, i)
- }
- }
- })
- }
-}
-
-func TestFindFirstRecordByFilter(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- name string
- collectionIdOrName string
- filter string
- params []dbx.Params
- expectError bool
- expectRecordId string
- }{
- {
- "missing collection",
- "missing",
- "id != ''",
- nil,
- true,
- "",
- },
- {
- "missing filter",
- "demo2",
- "",
- nil,
- true,
- "",
- },
- {
- "invalid filter",
- "demo2",
- "someMissingField > 1",
- nil,
- true,
- "",
- },
- {
- "valid filter but no matches",
- "demo2",
- "id = 'test'",
- nil,
- true,
- "",
- },
- {
- "valid filter and multiple matches",
- "demo2",
- "id != ''",
- nil,
- false,
- "llvuca81nly1qls",
- },
- {
- "with placeholder params",
- "demo2",
- "active = {:active}",
- []dbx.Params{{"active": false}},
- false,
- "llvuca81nly1qls",
- },
- }
-
- for _, s := range scenarios {
- record, err := app.Dao().FindFirstRecordByFilter(s.collectionIdOrName, s.filter, s.params...)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("[%s] Expected hasErr to be %v, got %v (%v)", s.name, s.expectError, hasErr, err)
- continue
- }
-
- if hasErr {
- continue
- }
-
- if record.Id != s.expectRecordId {
- t.Errorf("[%s] Expected record with id %q, got %q", s.name, s.expectRecordId, record.Id)
- }
- }
-}
-
-func TestCanAccessRecord(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- admin, err := app.Dao().FindAdminByEmail("test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- authRecord, err := app.Dao().FindAuthRecordByEmail("users", "test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- record, err := app.Dao().FindRecordById("demo1", "imy661ixudk5izi")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- name string
- record *models.Record
- requestInfo *models.RequestInfo
- rule *string
- expected bool
- expectError bool
- }{
- {
- "as admin with nil rule",
- record,
- &models.RequestInfo{
- Admin: admin,
- },
- nil,
- true,
- false,
- },
- {
- "as admin with non-empty rule",
- record,
- &models.RequestInfo{
- Admin: admin,
- },
- types.Pointer("id = ''"), // the filter rule should be ignored
- true,
- false,
- },
- {
- "as admin with invalid rule",
- record,
- &models.RequestInfo{
- Admin: admin,
- },
- types.Pointer("id ?!@ 1"), // the filter rule should be ignored
- true,
- false,
- },
- {
- "as guest with nil rule",
- record,
- &models.RequestInfo{},
- nil,
- false,
- false,
- },
- {
- "as guest with empty rule",
- record,
- &models.RequestInfo{},
- types.Pointer(""),
- true,
- false,
- },
- {
- "as guest with invalid rule",
- record,
- &models.RequestInfo{},
- types.Pointer("id ?!@ 1"),
- false,
- true,
- },
- {
- "as guest with mismatched rule",
- record,
- &models.RequestInfo{},
- types.Pointer("@request.auth.id != ''"),
- false,
- false,
- },
- {
- "as guest with matched rule",
- record,
- &models.RequestInfo{
- Data: map[string]any{"test": 1},
- },
- types.Pointer("@request.auth.id != '' || @request.data.test = 1"),
- true,
- false,
- },
- {
- "as auth record with nil rule",
- record,
- &models.RequestInfo{
- AuthRecord: authRecord,
- },
- nil,
- false,
- false,
- },
- {
- "as auth record with empty rule",
- record,
- &models.RequestInfo{
- AuthRecord: authRecord,
- },
- types.Pointer(""),
- true,
- false,
- },
- {
- "as auth record with invalid rule",
- record,
- &models.RequestInfo{
- AuthRecord: authRecord,
- },
- types.Pointer("id ?!@ 1"),
- false,
- true,
- },
- {
- "as auth record with mismatched rule",
- record,
- &models.RequestInfo{
- AuthRecord: authRecord,
- Data: map[string]any{"test": 1},
- },
- types.Pointer("@request.auth.id != '' && @request.data.test > 1"),
- false,
- false,
- },
- {
- "as auth record with matched rule",
- record,
- &models.RequestInfo{
- AuthRecord: authRecord,
- Data: map[string]any{"test": 2},
- },
- types.Pointer("@request.auth.id != '' && @request.data.test > 1"),
- true,
- false,
- },
- }
-
- for _, s := range scenarios {
- result, err := app.Dao().CanAccessRecord(s.record, s.requestInfo, s.rule)
-
- if result != s.expected {
- t.Errorf("[%s] Expected %v, got %v", s.name, s.expected, result)
- }
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("[%s] Expected hasErr %v, got %v (%v)", s.name, s.expectError, hasErr, err)
- }
- }
-}
-
-func TestIsRecordValueUnique(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- testManyRelsId1 := "bgs820n361vj1qd"
- testManyRelsId2 := "4q1xlclmfloku33"
- testManyRelsId3 := "oap640cot4yru2s"
-
- scenarios := []struct {
- collectionIdOrName string
- key string
- value any
- excludeIds []string
- expected bool
- }{
- {"demo2", "", "", nil, false},
- {"demo2", "", "", []string{""}, false},
- {"demo2", "missing", "unique", nil, false},
- {"demo2", "title", "unique", nil, true},
- {"demo2", "title", "unique", []string{}, true},
- {"demo2", "title", "unique", []string{""}, true},
- {"demo2", "title", "test1", []string{""}, false},
- {"demo2", "title", "test1", []string{"llvuca81nly1qls"}, true},
- {"demo1", "rel_many", []string{testManyRelsId3}, nil, false},
- {"wsmn24bux7wo113", "rel_many", []any{testManyRelsId3}, []string{""}, false},
- {"wsmn24bux7wo113", "rel_many", []any{testManyRelsId3}, []string{"84nmscqy84lsi1t"}, true},
- // mixed json array order
- {"demo1", "rel_many", []string{testManyRelsId1, testManyRelsId3, testManyRelsId2}, nil, true},
- // username special case-insensitive match
- {"users", "username", "test2_username", nil, false},
- {"users", "username", "TEST2_USERNAME", nil, false},
- {"users", "username", "new_username", nil, true},
- {"users", "username", "TEST2_USERNAME", []string{"oap640cot4yru2s"}, true},
- }
-
- for i, scenario := range scenarios {
- result := app.Dao().IsRecordValueUnique(
- scenario.collectionIdOrName,
- scenario.key,
- scenario.value,
- scenario.excludeIds...,
- )
-
- if result != scenario.expected {
- t.Errorf("(%d) Expected %v, got %v", i, scenario.expected, result)
- }
- }
-}
-
-func TestFindAuthRecordByToken(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- token string
- baseKey string
- expectedEmail string
- expectError bool
- }{
- // invalid auth token
- {
- "eyJhbGciOiJIUzI1NiJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZXhwIjoyMjA4OTg1MjYxfQ.H2KKcIXiAfxvuXMFzizo1SgsinDP4hcWhD3pYoP4Nqw",
- app.Settings().RecordAuthToken.Secret,
- "",
- true,
- },
- // expired token
- {
- "eyJhbGciOiJIUzI1NiJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZXhwIjoxNjQwOTkxNjYxfQ.HqvpCpM0RAk3Qu9PfCMuZsk_DKh9UYuzFLwXBMTZd1w",
- app.Settings().RecordAuthToken.Secret,
- "",
- true,
- },
- // wrong base key (password reset token secret instead of auth secret)
- {
- "eyJhbGciOiJIUzI1NiJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZXhwIjoyMjA4OTg1MjYxfQ.UwD8JvkbQtXpymT09d7J6fdA0aP9g4FJ1GPh_ggEkzc",
- app.Settings().RecordPasswordResetToken.Secret,
- "",
- true,
- },
- // valid token and base key but with deleted/missing collection
- {
- "eyJhbGciOiJIUzI1NiJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoibWlzc2luZyIsImV4cCI6MjIwODk4NTI2MX0.0oEHQpdpHp0Nb3VN8La0ssg-SjwWKiRl_k1mUGxdKlU",
- app.Settings().RecordAuthToken.Secret,
- "test@example.com",
- true,
- },
- // valid token
- {
- "eyJhbGciOiJIUzI1NiJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZXhwIjoyMjA4OTg1MjYxfQ.UwD8JvkbQtXpymT09d7J6fdA0aP9g4FJ1GPh_ggEkzc",
- app.Settings().RecordAuthToken.Secret,
- "test@example.com",
- false,
- },
- }
-
- for i, scenario := range scenarios {
- record, err := app.Dao().FindAuthRecordByToken(scenario.token, scenario.baseKey)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- continue
- }
-
- if !scenario.expectError && record.Email() != scenario.expectedEmail {
- t.Errorf("(%d) Expected record model %s, got %s", i, scenario.expectedEmail, record.Email())
- }
- }
-}
-
-func TestFindAuthRecordByEmail(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- collectionIdOrName string
- email string
- expectError bool
- }{
- {"missing", "test@example.com", true},
- {"demo2", "test@example.com", true},
- {"users", "missing@example.com", true},
- {"users", "test@example.com", false},
- {"clients", "test2@example.com", false},
- }
-
- for i, scenario := range scenarios {
- record, err := app.Dao().FindAuthRecordByEmail(scenario.collectionIdOrName, scenario.email)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- continue
- }
-
- if !scenario.expectError && record.Email() != scenario.email {
- t.Errorf("(%d) Expected record with email %s, got %s", i, scenario.email, record.Email())
- }
- }
-}
-
-func TestFindAuthRecordByUsername(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- collectionIdOrName string
- username string
- expectError bool
- }{
- {"missing", "test_username", true},
- {"demo2", "test_username", true},
- {"users", "missing", true},
- {"users", "test2_username", false},
- {"users", "TEST2_USERNAME", false}, // case insensitive check
- {"clients", "clients43362", false},
- }
-
- for i, scenario := range scenarios {
- record, err := app.Dao().FindAuthRecordByUsername(scenario.collectionIdOrName, scenario.username)
-
- hasErr := err != nil
- if hasErr != scenario.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, scenario.expectError, hasErr, err)
- continue
- }
-
- if !scenario.expectError && !strings.EqualFold(record.Username(), scenario.username) {
- t.Errorf("(%d) Expected record with username %s, got %s", i, scenario.username, record.Username())
- }
- }
-}
-
-func TestSuggestUniqueAuthRecordUsername(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- collectionIdOrName string
- baseUsername string
- expectedPattern string
- }{
- // missing collection
- {"missing", "test2_username", `^test2_username\d{12}$`},
- // not an auth collection
- {"demo2", "test2_username", `^test2_username\d{12}$`},
- // auth collection with unique base username
- {"users", "new_username", `^new_username$`},
- {"users", "NEW_USERNAME", `^NEW_USERNAME$`},
- // auth collection with existing username
- {"users", "test2_username", `^test2_username\d{3}$`},
- {"users", "TEST2_USERNAME", `^TEST2_USERNAME\d{3}$`},
- }
-
- for i, scenario := range scenarios {
- username := app.Dao().SuggestUniqueAuthRecordUsername(
- scenario.collectionIdOrName,
- scenario.baseUsername,
- )
-
- pattern, err := regexp.Compile(scenario.expectedPattern)
- if err != nil {
- t.Errorf("[%d] Invalid username pattern %q: %v", i, scenario.expectedPattern, err)
- }
- if !pattern.MatchString(username) {
- t.Fatalf("Expected username to match %s, got username %s", scenario.expectedPattern, username)
- }
- }
-}
-
-func TestSaveRecord(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, _ := app.Dao().FindCollectionByNameOrId("demo2")
-
- // create
- // ---
- r1 := models.NewRecord(collection)
- r1.Set("title", "test_new")
- err1 := app.Dao().SaveRecord(r1)
- if err1 != nil {
- t.Fatal(err1)
- }
- newR1, _ := app.Dao().FindFirstRecordByData(collection.Id, "title", "test_new")
- if newR1 == nil || newR1.Id != r1.Id || newR1.GetString("title") != r1.GetString("title") {
- t.Fatalf("Expected to find record %v, got %v", r1, newR1)
- }
-
- // update
- // ---
- r2, _ := app.Dao().FindFirstRecordByData(collection.Id, "id", "0yxhwia2amd8gec")
- r2.Set("title", "test_update")
- err2 := app.Dao().SaveRecord(r2)
- if err2 != nil {
- t.Fatal(err2)
- }
- newR2, _ := app.Dao().FindFirstRecordByData(collection.Id, "title", "test_update")
- if newR2 == nil || newR2.Id != r2.Id || newR2.GetString("title") != r2.GetString("title") {
- t.Fatalf("Expected to find record %v, got %v", r2, newR2)
- }
-}
-
-func TestSaveRecordWithIdFromOtherCollection(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- baseCollection, _ := app.Dao().FindCollectionByNameOrId("demo2")
- authCollection, _ := app.Dao().FindCollectionByNameOrId("nologin")
-
- // base collection test
- r1 := models.NewRecord(baseCollection)
- r1.Set("title", "test_new")
- r1.Set("id", "mk5fmymtx4wsprk") // existing id of demo3 record
- r1.MarkAsNew()
- if err := app.Dao().SaveRecord(r1); err != nil {
- t.Fatalf("Expected nil, got error %v", err)
- }
-
- // auth collection test
- r2 := models.NewRecord(authCollection)
- r2.Set("username", "test_new")
- r2.Set("id", "gk390qegs4y47wn") // existing id of "clients" record
- r2.MarkAsNew()
- if err := app.Dao().SaveRecord(r2); err == nil {
- t.Fatal("Expected error, got nil")
- }
-
- // try again with unique id
- r2.Set("id", "unique_id")
- if err := app.Dao().SaveRecord(r2); err != nil {
- t.Fatalf("Expected nil, got error %v", err)
- }
-}
-
-func TestDeleteRecord(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- demoCollection, _ := app.Dao().FindCollectionByNameOrId("demo2")
-
- // delete unsaved record
- // ---
- rec0 := models.NewRecord(demoCollection)
- if err := app.Dao().DeleteRecord(rec0); err == nil {
- t.Fatal("(rec0) Didn't expect to succeed deleting unsaved record")
- }
-
- // delete existing record + external auths
- // ---
- rec1, _ := app.Dao().FindRecordById("users", "4q1xlclmfloku33")
- if err := app.Dao().DeleteRecord(rec1); err != nil {
- t.Fatalf("(rec1) Expected nil, got error %v", err)
- }
- // check if it was really deleted
- if refreshed, _ := app.Dao().FindRecordById(rec1.Collection().Id, rec1.Id); refreshed != nil {
- t.Fatalf("(rec1) Expected record to be deleted, got %v", refreshed)
- }
- // check if the external auths were deleted
- if auths, _ := app.Dao().FindAllExternalAuthsByRecord(rec1); len(auths) > 0 {
- t.Fatalf("(rec1) Expected external auths to be deleted, got %v", auths)
- }
-
- // delete existing record while being part of a non-cascade required relation
- // ---
- rec2, _ := app.Dao().FindRecordById("demo3", "7nwo8tuiatetxdm")
- if err := app.Dao().DeleteRecord(rec2); err == nil {
- t.Fatalf("(rec2) Expected error, got nil")
- }
-
- // delete existing record + cascade
- // ---
- calledQueries := []string{}
- app.Dao().NonconcurrentDB().(*dbx.DB).QueryLogFunc = func(ctx context.Context, t time.Duration, sql string, rows *sql.Rows, err error) {
- calledQueries = append(calledQueries, sql)
- }
- app.Dao().ConcurrentDB().(*dbx.DB).QueryLogFunc = func(ctx context.Context, t time.Duration, sql string, rows *sql.Rows, err error) {
- calledQueries = append(calledQueries, sql)
- }
- app.Dao().NonconcurrentDB().(*dbx.DB).ExecLogFunc = func(ctx context.Context, t time.Duration, sql string, result sql.Result, err error) {
- calledQueries = append(calledQueries, sql)
- }
- app.Dao().ConcurrentDB().(*dbx.DB).ExecLogFunc = func(ctx context.Context, t time.Duration, sql string, result sql.Result, err error) {
- calledQueries = append(calledQueries, sql)
- }
- rec3, _ := app.Dao().FindRecordById("users", "oap640cot4yru2s")
- // delete
- if err := app.Dao().DeleteRecord(rec3); err != nil {
- t.Fatalf("(rec3) Expected nil, got error %v", err)
- }
- // check if it was really deleted
- rec3, _ = app.Dao().FindRecordById(rec3.Collection().Id, rec3.Id)
- if rec3 != nil {
- t.Fatalf("(rec3) Expected record to be deleted, got %v", rec3)
- }
- // check if the operation cascaded
- rel, _ := app.Dao().FindRecordById("demo1", "84nmscqy84lsi1t")
- if rel != nil {
- t.Fatalf("(rec3) Expected the delete to cascade, found relation %v", rel)
- }
- // ensure that the json rel fields were prefixed
- joinedQueries := strings.Join(calledQueries, " ")
- expectedRelManyPart := "SELECT `demo1`.* FROM `demo1` WHERE EXISTS (SELECT 1 FROM json_each(CASE WHEN json_valid([[demo1.rel_many]]) THEN [[demo1.rel_many]] ELSE json_array([[demo1.rel_many]]) END) {{__je__}} WHERE [[__je__.value]]='"
- if !strings.Contains(joinedQueries, expectedRelManyPart) {
- t.Fatalf("(rec3) Expected the cascade delete to call the query \n%v, got \n%v", expectedRelManyPart, calledQueries)
- }
- expectedRelOnePart := "SELECT `demo1`.* FROM `demo1` WHERE (`demo1`.`rel_one`='"
- if !strings.Contains(joinedQueries, expectedRelOnePart) {
- t.Fatalf("(rec3) Expected the cascade delete to call the query \n%v, got \n%v", expectedRelOnePart, calledQueries)
- }
-}
-
-func TestDeleteRecordBatchProcessing(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- if err := createMockBatchProcessingData(app.Dao()); err != nil {
- t.Fatal(err)
- }
-
- // find and delete the first c1 record to trigger cascade
- mainRecord, _ := app.Dao().FindRecordById("c1", "a")
- if err := app.Dao().DeleteRecord(mainRecord); err != nil {
- t.Fatal(err)
- }
-
- // check if the main record was deleted
- _, err := app.Dao().FindRecordById(mainRecord.Collection().Id, mainRecord.Id)
- if err == nil {
- t.Fatal("The main record wasn't deleted")
- }
-
- // check if the c1 b rel field were updated
- c1RecordB, err := app.Dao().FindRecordById("c1", "b")
- if err != nil || c1RecordB.GetString("rel") != "" {
- t.Fatalf("Expected c1RecordB.rel to be nil, got %v", c1RecordB.GetString("rel"))
- }
-
- // check if the c2 rel fields were updated
- c2Records, err := app.Dao().FindRecordsByExpr("c2", nil)
- if err != nil || len(c2Records) == 0 {
- t.Fatalf("Failed to fetch c2 records: %v", err)
- }
- for _, r := range c2Records {
- ids := r.GetStringSlice("rel")
- if len(ids) != 1 || ids[0] != "b" {
- t.Fatalf("Expected only 'b' rel id, got %v", ids)
- }
- }
-
- // check if all c3 relations were deleted
- c3Records, err := app.Dao().FindRecordsByExpr("c3", nil)
- if err != nil {
- t.Fatalf("Failed to fetch c3 records: %v", err)
- }
- if total := len(c3Records); total != 0 {
- t.Fatalf("Expected c3 records to be deleted, found %d", total)
- }
-}
-
-func createMockBatchProcessingData(dao *daos.Dao) error {
- // create mock collection without relation
- c1 := &models.Collection{}
- c1.Id = "c1"
- c1.Name = c1.Id
- c1.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "text",
- Type: schema.FieldTypeText,
- },
- // self reference
- &schema.SchemaField{
- Name: "rel",
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{
- MaxSelect: types.Pointer(1),
- CollectionId: "c1",
- CascadeDelete: false, // should unset all rel fields
- },
- },
- )
- if err := dao.SaveCollection(c1); err != nil {
- return err
- }
-
- // create mock collection with a multi-rel field
- c2 := &models.Collection{}
- c2.Id = "c2"
- c2.Name = c2.Id
- c2.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "rel",
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{
- MaxSelect: types.Pointer(10),
- CollectionId: "c1",
- CascadeDelete: false, // should unset all rel fields
- },
- },
- )
- if err := dao.SaveCollection(c2); err != nil {
- return err
- }
-
- // create mock collection with a single-rel field
- c3 := &models.Collection{}
- c3.Id = "c3"
- c3.Name = c3.Id
- c3.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "rel",
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{
- MaxSelect: types.Pointer(1),
- CollectionId: "c1",
- CascadeDelete: true, // should delete all c3 records
- },
- },
- )
- if err := dao.SaveCollection(c3); err != nil {
- return err
- }
-
- // insert mock records
- c1RecordA := models.NewRecord(c1)
- c1RecordA.Id = "a"
- c1RecordA.Set("rel", c1RecordA.Id) // self reference
- if err := dao.Save(c1RecordA); err != nil {
- return err
- }
- c1RecordB := models.NewRecord(c1)
- c1RecordB.Id = "b"
- c1RecordB.Set("rel", c1RecordA.Id) // rel to another record from the same collection
- if err := dao.Save(c1RecordB); err != nil {
- return err
- }
- for i := 0; i < 4500; i++ {
- c2Record := models.NewRecord(c2)
- c2Record.Set("rel", []string{c1RecordA.Id, c1RecordB.Id})
- if err := dao.Save(c2Record); err != nil {
- return err
- }
-
- c3Record := models.NewRecord(c3)
- c3Record.Set("rel", c1RecordA.Id)
- if err := dao.Save(c3Record); err != nil {
- return err
- }
- }
-
- // set the same id as the relation for at least 1 record
- // to check whether the correct condition will be added
- c3Record := models.NewRecord(c3)
- c3Record.Set("rel", c1RecordA.Id)
- c3Record.Id = c1RecordA.Id
- if err := dao.Save(c3Record); err != nil {
- return err
- }
-
- return nil
-}
diff --git a/daos/settings.go b/daos/settings.go
deleted file mode 100644
index f4830e7b..00000000
--- a/daos/settings.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package daos
-
-import (
- "encoding/json"
- "errors"
-
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/settings"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-// FindSettings returns and decode the serialized app settings param value.
-//
-// The method will first try to decode the param value without decryption.
-// If it fails and optEncryptionKey is set, it will try again by first
-// decrypting the value and then decode it again.
-//
-// Returns an error if it fails to decode the stored serialized param value.
-func (dao *Dao) FindSettings(optEncryptionKey ...string) (*settings.Settings, error) {
- param, err := dao.FindParamByKey(models.ParamAppSettings)
- if err != nil {
- return nil, err
- }
-
- result := settings.New()
-
- // try first without decryption
- plainDecodeErr := json.Unmarshal(param.Value, result)
-
- // failed, try to decrypt
- if plainDecodeErr != nil {
- var encryptionKey string
- if len(optEncryptionKey) > 0 && optEncryptionKey[0] != "" {
- encryptionKey = optEncryptionKey[0]
- }
-
- // load without decrypt has failed and there is no encryption key to use for decrypt
- if encryptionKey == "" {
- return nil, errors.New("failed to load the stored app settings - missing or invalid encryption key")
- }
-
- // decrypt
- decrypted, decryptErr := security.Decrypt(string(param.Value), encryptionKey)
- if decryptErr != nil {
- return nil, decryptErr
- }
-
- // decode again
- decryptedDecodeErr := json.Unmarshal(decrypted, result)
- if decryptedDecodeErr != nil {
- return nil, decryptedDecodeErr
- }
- }
-
- return result, nil
-}
-
-// SaveSettings persists the specified settings configuration.
-//
-// If optEncryptionKey is set, then the stored serialized value will be encrypted with it.
-func (dao *Dao) SaveSettings(newSettings *settings.Settings, optEncryptionKey ...string) error {
- return dao.SaveParam(models.ParamAppSettings, newSettings, optEncryptionKey...)
-}
diff --git a/daos/settings_test.go b/daos/settings_test.go
deleted file mode 100644
index 9ae52449..00000000
--- a/daos/settings_test.go
+++ /dev/null
@@ -1,52 +0,0 @@
-package daos_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-func TestSaveAndFindSettings(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- encryptionKey := security.PseudorandomString(32)
-
- // change unencrypted app settings
- app.Settings().Meta.AppName = "save_unencrypted"
- if err := app.Dao().SaveSettings(app.Settings()); err != nil {
- t.Fatal(err)
- }
-
- // check if the change was persisted
- s1, err := app.Dao().FindSettings()
- if err != nil {
- t.Fatalf("Failed to fetch settings: %v", err)
- }
- if s1.Meta.AppName != "save_unencrypted" {
- t.Fatalf("Expected settings to be changed with app name %q, got \n%v", "save_unencrypted", s1)
- }
-
- // make another change but this time provide an encryption key
- app.Settings().Meta.AppName = "save_encrypted"
- if err := app.Dao().SaveSettings(app.Settings(), encryptionKey); err != nil {
- t.Fatal(err)
- }
-
- // try to fetch the settings without encryption key (should fail)
- if s2, err := app.Dao().FindSettings(); err == nil {
- t.Fatalf("Expected FindSettings to fail without an encryption key, got \n%v", s2)
- }
-
- // try again but this time with an encryption key
- s3, err := app.Dao().FindSettings(encryptionKey)
- if err != nil {
- t.Fatalf("Failed to fetch settings with an encryption key %s: %v", encryptionKey, err)
- }
- if s3.Meta.AppName != "save_encrypted" {
- t.Fatalf("Expected settings to be changed with app name %q, got \n%v", "save_encrypted", s3)
- }
-}
diff --git a/daos/table_test.go b/daos/table_test.go
deleted file mode 100644
index 8e7d9980..00000000
--- a/daos/table_test.go
+++ /dev/null
@@ -1,195 +0,0 @@
-package daos_test
-
-import (
- "context"
- "database/sql"
- "encoding/json"
- "testing"
- "time"
-
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/list"
-)
-
-func TestHasTable(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- tableName string
- expected bool
- }{
- {"", false},
- {"test", false},
- {"_admins", true},
- {"demo3", true},
- {"DEMO3", true}, // table names are case insensitives by default
- {"view1", true}, // view
- }
-
- for i, scenario := range scenarios {
- result := app.Dao().HasTable(scenario.tableName)
- if result != scenario.expected {
- t.Errorf("[%d] Expected %v, got %v", i, scenario.expected, result)
- }
- }
-}
-
-func TestTableColumns(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- tableName string
- expected []string
- }{
- {"", nil},
- {"_params", []string{"id", "key", "value", "created", "updated"}},
- }
-
- for i, s := range scenarios {
- columns, _ := app.Dao().TableColumns(s.tableName)
-
- if len(columns) != len(s.expected) {
- t.Errorf("[%d] Expected columns %v, got %v", i, s.expected, columns)
- continue
- }
-
- for _, c := range columns {
- if !list.ExistInSlice(c, s.expected) {
- t.Errorf("[%d] Didn't expect column %s", i, c)
- }
- }
- }
-}
-
-func TestTableInfo(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- tableName string
- expected string
- }{
- {"", "null"},
- {"missing", "null"},
- {
- "_admins",
- `[{"PK":1,"Index":0,"Name":"id","Type":"TEXT","NotNull":false,"DefaultValue":null},{"PK":0,"Index":1,"Name":"avatar","Type":"INTEGER","NotNull":true,"DefaultValue":0},{"PK":0,"Index":2,"Name":"email","Type":"TEXT","NotNull":true,"DefaultValue":null},{"PK":0,"Index":3,"Name":"tokenKey","Type":"TEXT","NotNull":true,"DefaultValue":null},{"PK":0,"Index":4,"Name":"passwordHash","Type":"TEXT","NotNull":true,"DefaultValue":null},{"PK":0,"Index":5,"Name":"lastResetSentAt","Type":"TEXT","NotNull":true,"DefaultValue":""},{"PK":0,"Index":6,"Name":"created","Type":"TEXT","NotNull":true,"DefaultValue":""},{"PK":0,"Index":7,"Name":"updated","Type":"TEXT","NotNull":true,"DefaultValue":""}]`,
- },
- }
-
- for i, s := range scenarios {
- rows, _ := app.Dao().TableInfo(s.tableName)
-
- raw, _ := json.Marshal(rows)
- rawStr := string(raw)
-
- if rawStr != s.expected {
- t.Errorf("[%d] Expected \n%v, \ngot \n%v", i, s.expected, rawStr)
- }
- }
-}
-
-func TestDeleteTable(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- tableName string
- expectError bool
- }{
- {"", true},
- {"test", false}, // missing tables are ignored
- {"_admins", false},
- {"demo3", false},
- }
-
- for i, s := range scenarios {
- err := app.Dao().DeleteTable(s.tableName)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("[%d] Expected hasErr %v, got %v", i, s.expectError, hasErr)
- }
- }
-}
-
-func TestVacuum(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- calledQueries := []string{}
- app.DB().QueryLogFunc = func(ctx context.Context, t time.Duration, sql string, rows *sql.Rows, err error) {
- calledQueries = append(calledQueries, sql)
- }
- app.DB().ExecLogFunc = func(ctx context.Context, t time.Duration, sql string, result sql.Result, err error) {
- calledQueries = append(calledQueries, sql)
- }
-
- if err := app.Dao().Vacuum(); err != nil {
- t.Fatal(err)
- }
-
- if total := len(calledQueries); total != 1 {
- t.Fatalf("Expected 1 query, got %d", total)
- }
-
- if calledQueries[0] != "VACUUM" {
- t.Fatalf("Expected VACUUM query, got %s", calledQueries[0])
- }
-}
-
-func TestTableIndexes(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- table string
- expectError bool
- expectIndexes []string
- }{
- {
- "missing",
- false,
- nil,
- },
- {
- "demo2",
- false,
- []string{"idx_demo2_created", "idx_unique_demo2_title", "idx_demo2_active"},
- },
- }
-
- for _, s := range scenarios {
- result, err := app.Dao().TableIndexes(s.table)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("[%s] Expected hasErr %v, got %v", s.table, s.expectError, hasErr)
- }
-
- if len(s.expectIndexes) != len(result) {
- t.Errorf("[%s] Expected %d indexes, got %d:\n%v", s.table, len(s.expectIndexes), len(result), result)
- continue
- }
-
- for _, name := range s.expectIndexes {
- if result[name] == "" {
- t.Errorf("[%s] Missing index %q in \n%v", s.table, name, result)
- }
- }
- }
-}
diff --git a/examples/base/main.go b/examples/base/main.go
index 13371422..84cae004 100644
--- a/examples/base/main.go
+++ b/examples/base/main.go
@@ -2,10 +2,10 @@ package main
import (
"log"
+ "net/http"
"os"
"path/filepath"
"strings"
- "time"
"github.com/pocketbase/pocketbase"
"github.com/pocketbase/pocketbase/apis"
@@ -13,6 +13,7 @@ import (
"github.com/pocketbase/pocketbase/plugins/ghupdate"
"github.com/pocketbase/pocketbase/plugins/jsvm"
"github.com/pocketbase/pocketbase/plugins/migratecmd"
+ "github.com/pocketbase/pocketbase/tools/hook"
)
func main() {
@@ -42,7 +43,7 @@ func main() {
app.RootCmd.PersistentFlags().IntVar(
&hooksPool,
"hooksPool",
- 25,
+ 20,
"the total prewarm goja.Runtime instances for the JS app hooks execution",
)
@@ -78,21 +79,13 @@ func main() {
"fallback the request to index.html on missing static path (eg. when pretty urls are used with SPA)",
)
- var queryTimeout int
- app.RootCmd.PersistentFlags().IntVar(
- &queryTimeout,
- "queryTimeout",
- 30,
- "the default SELECT queries timeout in seconds",
- )
-
app.RootCmd.ParseFlags(os.Args[1:])
// ---------------------------------------------------------------
// Plugins and hooks:
// ---------------------------------------------------------------
- // load jsvm (hooks and migrations)
+ // load jsvm (pb_hooks and pb_migrations)
jsvm.MustRegister(app, jsvm.Config{
MigrationsDir: migrationsDir,
HooksDir: hooksDir,
@@ -110,17 +103,18 @@ func main() {
// GitHub selfupdate
ghupdate.MustRegister(app, app.RootCmd, ghupdate.Config{})
- app.OnAfterBootstrap().PreAdd(func(e *core.BootstrapEvent) error {
- app.Dao().ModelQueryTimeout = time.Duration(queryTimeout) * time.Second
- return nil
- })
+ // static route to serves files from the provided public dir
+ // (if publicDir exists and the route path is not already defined)
+ app.OnServe().Bind(&hook.Handler[*core.ServeEvent]{
+ Func: func(e *core.ServeEvent) error {
+ if !e.Router.HasRoute(http.MethodGet, "/{path...}") {
+ e.Router.GET("/{path...}", apis.Static(os.DirFS(publicDir), indexFallback))
+ }
- app.OnBeforeServe().Add(func(e *core.ServeEvent) error {
- // serves static files from the provided public dir (if exists)
- e.Router.GET("/*", apis.StaticDirectoryHandler(os.DirFS(publicDir), indexFallback))
- return nil
+ return e.Next()
+ },
+ Priority: 999, // execute as latest as possible to allow users to provide their own route
})
-
if err := app.Start(); err != nil {
log.Fatal(err)
}
diff --git a/forms/admin_login.go b/forms/admin_login.go
deleted file mode 100644
index da4631a7..00000000
--- a/forms/admin_login.go
+++ /dev/null
@@ -1,80 +0,0 @@
-package forms
-
-import (
- "database/sql"
- "errors"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
-)
-
-// AdminLogin is an admin email/pass login form.
-type AdminLogin struct {
- app core.App
- dao *daos.Dao
-
- Identity string `form:"identity" json:"identity"`
- Password string `form:"password" json:"password"`
-}
-
-// NewAdminLogin creates a new [AdminLogin] form initialized with
-// the provided [core.App] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewAdminLogin(app core.App) *AdminLogin {
- return &AdminLogin{
- app: app,
- dao: app.Dao(),
- }
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *AdminLogin) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *AdminLogin) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(&form.Identity, validation.Required, validation.Length(1, 255), is.EmailFormat),
- validation.Field(&form.Password, validation.Required, validation.Length(1, 255)),
- )
-}
-
-// Submit validates and submits the admin form.
-// On success returns the authorized admin model.
-//
-// You can optionally provide a list of InterceptorFunc to
-// further modify the form behavior before persisting it.
-func (form *AdminLogin) Submit(interceptors ...InterceptorFunc[*models.Admin]) (*models.Admin, error) {
- if err := form.Validate(); err != nil {
- return nil, err
- }
-
- admin, fetchErr := form.dao.FindAdminByEmail(form.Identity)
-
- // ignore not found errors to allow custom fetch implementations
- if fetchErr != nil && !errors.Is(fetchErr, sql.ErrNoRows) {
- return nil, fetchErr
- }
-
- interceptorsErr := runInterceptors(admin, func(m *models.Admin) error {
- admin = m
-
- if admin == nil || !admin.ValidatePassword(form.Password) {
- return errors.New("Invalid login credentials.")
- }
-
- return nil
- }, interceptors...)
-
- if interceptorsErr != nil {
- return nil, interceptorsErr
- }
-
- return admin, nil
-}
diff --git a/forms/admin_login_test.go b/forms/admin_login_test.go
deleted file mode 100644
index 3fa4580f..00000000
--- a/forms/admin_login_test.go
+++ /dev/null
@@ -1,100 +0,0 @@
-package forms_test
-
-import (
- "errors"
- "testing"
-
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestAdminLoginValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- form := forms.NewAdminLogin(app)
-
- scenarios := []struct {
- email string
- password string
- expectError bool
- }{
- {"", "", true},
- {"", "1234567890", true},
- {"test@example.com", "", true},
- {"test", "test", true},
- {"missing@example.com", "1234567890", true},
- {"test@example.com", "123456789", true},
- {"test@example.com", "1234567890", false},
- }
-
- for i, s := range scenarios {
- form.Identity = s.email
- form.Password = s.password
-
- admin, err := form.Submit()
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
-
- if !s.expectError && admin == nil {
- t.Errorf("(%d) Expected admin model to be returned, got nil", i)
- }
-
- if admin != nil && admin.Email != s.email {
- t.Errorf("(%d) Expected admin with email %s to be returned, got %v", i, s.email, admin)
- }
- }
-}
-
-func TestAdminLoginInterceptors(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- form := forms.NewAdminLogin(testApp)
- form.Identity = "test@example.com"
- form.Password = "123456"
- var interceptorAdmin *models.Admin
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(admin *models.Admin) error {
- interceptor1Called = true
- return next(admin)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(admin *models.Admin) error {
- interceptorAdmin = admin
- interceptor2Called = true
- return testErr
- }
- }
-
- _, submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorAdmin == nil || interceptorAdmin.Email != form.Identity {
- t.Fatalf("Expected Admin model with email %s, got %v", form.Identity, interceptorAdmin)
- }
-}
diff --git a/forms/admin_password_reset_confirm.go b/forms/admin_password_reset_confirm.go
deleted file mode 100644
index 0c5ee11f..00000000
--- a/forms/admin_password_reset_confirm.go
+++ /dev/null
@@ -1,96 +0,0 @@
-package forms
-
-import (
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/forms/validators"
- "github.com/pocketbase/pocketbase/models"
-)
-
-// AdminPasswordResetConfirm is an admin password reset confirmation form.
-type AdminPasswordResetConfirm struct {
- app core.App
- dao *daos.Dao
-
- Token string `form:"token" json:"token"`
- Password string `form:"password" json:"password"`
- PasswordConfirm string `form:"passwordConfirm" json:"passwordConfirm"`
-}
-
-// NewAdminPasswordResetConfirm creates a new [AdminPasswordResetConfirm]
-// form initialized with from the provided [core.App] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewAdminPasswordResetConfirm(app core.App) *AdminPasswordResetConfirm {
- return &AdminPasswordResetConfirm{
- app: app,
- dao: app.Dao(),
- }
-}
-
-// SetDao replaces the form Dao instance with the provided one.
-//
-// This is useful if you want to use a specific transaction Dao instance
-// instead of the default app.Dao().
-func (form *AdminPasswordResetConfirm) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *AdminPasswordResetConfirm) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(&form.Token, validation.Required, validation.By(form.checkToken)),
- validation.Field(&form.Password, validation.Required, validation.Length(10, 72)),
- validation.Field(&form.PasswordConfirm, validation.Required, validation.By(validators.Compare(form.Password))),
- )
-}
-
-func (form *AdminPasswordResetConfirm) checkToken(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil // nothing to check
- }
-
- admin, err := form.dao.FindAdminByToken(v, form.app.Settings().AdminPasswordResetToken.Secret)
- if err != nil || admin == nil {
- return validation.NewError("validation_invalid_token", "Invalid or expired token.")
- }
-
- return nil
-}
-
-// Submit validates and submits the admin password reset confirmation form.
-// On success returns the updated admin model associated to `form.Token`.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *AdminPasswordResetConfirm) Submit(interceptors ...InterceptorFunc[*models.Admin]) (*models.Admin, error) {
- if err := form.Validate(); err != nil {
- return nil, err
- }
-
- admin, err := form.dao.FindAdminByToken(
- form.Token,
- form.app.Settings().AdminPasswordResetToken.Secret,
- )
- if err != nil {
- return nil, err
- }
-
- if err := admin.SetPassword(form.Password); err != nil {
- return nil, err
- }
-
- interceptorsErr := runInterceptors(admin, func(m *models.Admin) error {
- admin = m
- return form.dao.SaveAdmin(m)
- }, interceptors...)
-
- if interceptorsErr != nil {
- return nil, interceptorsErr
- }
-
- return admin, nil
-}
diff --git a/forms/admin_password_reset_confirm_test.go b/forms/admin_password_reset_confirm_test.go
deleted file mode 100644
index 583fce65..00000000
--- a/forms/admin_password_reset_confirm_test.go
+++ /dev/null
@@ -1,154 +0,0 @@
-package forms_test
-
-import (
- "errors"
- "testing"
-
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-func TestAdminPasswordResetConfirmValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- form := forms.NewAdminPasswordResetConfirm(app)
-
- scenarios := []struct {
- token string
- password string
- passwordConfirm string
- expectError bool
- }{
- {"", "", "", true},
- {"", "123", "", true},
- {"", "", "123", true},
- {"test", "", "", true},
- {"test", "123", "", true},
- {"test", "123", "123", true},
- {
- // expired
- "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6InN5d2JoZWNuaDQ2cmhtMCIsInR5cGUiOiJhZG1pbiIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImV4cCI6MTY0MDk5MTY2MX0.GLwCOsgWTTEKXTK-AyGW838de1OeZGIjfHH0FoRLqZg",
- "1234567890",
- "1234567890",
- true,
- },
- {
- // valid with mismatched passwords
- "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6InN5d2JoZWNuaDQ2cmhtMCIsInR5cGUiOiJhZG1pbiIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImV4cCI6MjIwODk4MTYwMH0.kwFEler6KSMKJNstuaSDvE1QnNdCta5qSnjaIQ0hhhc",
- "1234567890",
- "1234567891",
- true,
- },
- {
- // valid with matching passwords
- "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6InN5d2JoZWNuaDQ2cmhtMCIsInR5cGUiOiJhZG1pbiIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImV4cCI6MjIwODk4MTYwMH0.kwFEler6KSMKJNstuaSDvE1QnNdCta5qSnjaIQ0hhhc",
- "1234567891",
- "1234567891",
- false,
- },
- }
-
- for i, s := range scenarios {
- form.Token = s.token
- form.Password = s.password
- form.PasswordConfirm = s.passwordConfirm
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(m *models.Admin) error {
- interceptorCalls++
- return next(m)
- }
- }
-
- admin, err := form.Submit(interceptor)
-
- // check interceptor calls
- expectInterceptorCalls := 1
- if s.expectError {
- expectInterceptorCalls = 0
- }
- if interceptorCalls != expectInterceptorCalls {
- t.Errorf("[%d] Expected interceptor to be called %d, got %d", i, expectInterceptorCalls, interceptorCalls)
- }
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- continue
- }
-
- if s.expectError {
- continue
- }
-
- claims, _ := security.ParseUnverifiedJWT(s.token)
- tokenAdminId := claims["id"]
-
- if admin.Id != tokenAdminId {
- t.Errorf("(%d) Expected admin with id %s to be returned, got %v", i, tokenAdminId, admin)
- }
-
- if !admin.ValidatePassword(form.Password) {
- t.Errorf("(%d) Expected the admin password to have been updated to %q", i, form.Password)
- }
- }
-}
-
-func TestAdminPasswordResetConfirmInterceptors(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- admin, err := testApp.Dao().FindAdminByEmail("test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewAdminPasswordResetConfirm(testApp)
- form.Token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6InN5d2JoZWNuaDQ2cmhtMCIsInR5cGUiOiJhZG1pbiIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImV4cCI6MjIwODk4MTYwMH0.kwFEler6KSMKJNstuaSDvE1QnNdCta5qSnjaIQ0hhhc"
- form.Password = "1234567891"
- form.PasswordConfirm = "1234567891"
- interceptorTokenKey := admin.TokenKey
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(admin *models.Admin) error {
- interceptor1Called = true
- return next(admin)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(admin *models.Admin) error {
- interceptorTokenKey = admin.TokenKey
- interceptor2Called = true
- return testErr
- }
- }
-
- _, submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorTokenKey == admin.TokenKey {
- t.Fatalf("Expected the form model to be filled before calling the interceptors")
- }
-}
diff --git a/forms/admin_password_reset_request.go b/forms/admin_password_reset_request.go
deleted file mode 100644
index b0568ae7..00000000
--- a/forms/admin_password_reset_request.go
+++ /dev/null
@@ -1,89 +0,0 @@
-package forms
-
-import (
- "errors"
- "fmt"
- "time"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/mails"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-// AdminPasswordResetRequest is an admin password reset request form.
-type AdminPasswordResetRequest struct {
- app core.App
- dao *daos.Dao
- resendThreshold float64 // in seconds
-
- Email string `form:"email" json:"email"`
-}
-
-// NewAdminPasswordResetRequest creates a new [AdminPasswordResetRequest]
-// form initialized with from the provided [core.App] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewAdminPasswordResetRequest(app core.App) *AdminPasswordResetRequest {
- return &AdminPasswordResetRequest{
- app: app,
- dao: app.Dao(),
- resendThreshold: 120, // 2min
- }
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *AdminPasswordResetRequest) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-//
-// This method doesn't verify that admin with `form.Email` exists (this is done on Submit).
-func (form *AdminPasswordResetRequest) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(
- &form.Email,
- validation.Required,
- validation.Length(1, 255),
- is.EmailFormat,
- ),
- )
-}
-
-// Submit validates and submits the form.
-// On success sends a password reset email to the `form.Email` admin.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *AdminPasswordResetRequest) Submit(interceptors ...InterceptorFunc[*models.Admin]) error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- admin, err := form.dao.FindAdminByEmail(form.Email)
- if err != nil {
- return fmt.Errorf("Failed to fetch admin with email %s: %w", form.Email, err)
- }
-
- now := time.Now().UTC()
- lastResetSentAt := admin.LastResetSentAt.Time()
- if now.Sub(lastResetSentAt).Seconds() < form.resendThreshold {
- return errors.New("You have already requested a password reset.")
- }
-
- return runInterceptors(admin, func(m *models.Admin) error {
- if err := mails.SendAdminPasswordReset(form.app, m); err != nil {
- return err
- }
-
- // update last sent timestamp
- m.LastResetSentAt = types.NowDateTime()
-
- return form.dao.SaveAdmin(m)
- }, interceptors...)
-}
diff --git a/forms/admin_password_reset_request_test.go b/forms/admin_password_reset_request_test.go
deleted file mode 100644
index 6d69ccc3..00000000
--- a/forms/admin_password_reset_request_test.go
+++ /dev/null
@@ -1,127 +0,0 @@
-package forms_test
-
-import (
- "errors"
- "testing"
-
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestAdminPasswordResetRequestValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- form := forms.NewAdminPasswordResetRequest(testApp)
-
- scenarios := []struct {
- email string
- expectError bool
- }{
- {"", true},
- {"", true},
- {"invalid", true},
- {"missing@example.com", true},
- {"test@example.com", false},
- {"test@example.com", true}, // already requested
- }
-
- for i, s := range scenarios {
- testApp.TestMailer.TotalSend = 0 // reset
- form.Email = s.email
-
- adminBefore, _ := testApp.Dao().FindAdminByEmail(s.email)
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(m *models.Admin) error {
- interceptorCalls++
- return next(m)
- }
- }
-
- err := form.Submit(interceptor)
-
- // check interceptor calls
- expectInterceptorCalls := 1
- if s.expectError {
- expectInterceptorCalls = 0
- }
- if interceptorCalls != expectInterceptorCalls {
- t.Errorf("[%d] Expected interceptor to be called %d, got %d", i, expectInterceptorCalls, interceptorCalls)
- }
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
-
- adminAfter, _ := testApp.Dao().FindAdminByEmail(s.email)
-
- if !s.expectError && (adminBefore.LastResetSentAt == adminAfter.LastResetSentAt || adminAfter.LastResetSentAt.IsZero()) {
- t.Errorf("(%d) Expected admin.LastResetSentAt to change, got %q", i, adminAfter.LastResetSentAt)
- }
-
- expectedMails := 1
- if s.expectError {
- expectedMails = 0
- }
- if testApp.TestMailer.TotalSend != expectedMails {
- t.Errorf("(%d) Expected %d mail(s) to be sent, got %d", i, expectedMails, testApp.TestMailer.TotalSend)
- }
- }
-}
-
-func TestAdminPasswordResetRequestInterceptors(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- admin, err := testApp.Dao().FindAdminByEmail("test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewAdminPasswordResetRequest(testApp)
- form.Email = admin.Email
- interceptorLastResetSentAt := admin.LastResetSentAt
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(admin *models.Admin) error {
- interceptor1Called = true
- return next(admin)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(admin *models.Admin) error {
- interceptorLastResetSentAt = admin.LastResetSentAt
- interceptor2Called = true
- return testErr
- }
- }
-
- submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorLastResetSentAt.String() != admin.LastResetSentAt.String() {
- t.Fatalf("Expected the form model to NOT be filled before calling the interceptors")
- }
-}
diff --git a/forms/admin_upsert.go b/forms/admin_upsert.go
deleted file mode 100644
index 4afcb67e..00000000
--- a/forms/admin_upsert.go
+++ /dev/null
@@ -1,123 +0,0 @@
-package forms
-
-import (
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/forms/validators"
- "github.com/pocketbase/pocketbase/models"
-)
-
-// AdminUpsert is a [models.Admin] upsert (create/update) form.
-type AdminUpsert struct {
- app core.App
- dao *daos.Dao
- admin *models.Admin
-
- Id string `form:"id" json:"id"`
- Avatar int `form:"avatar" json:"avatar"`
- Email string `form:"email" json:"email"`
- Password string `form:"password" json:"password"`
- PasswordConfirm string `form:"passwordConfirm" json:"passwordConfirm"`
-}
-
-// NewAdminUpsert creates a new [AdminUpsert] form with initializer
-// config created from the provided [core.App] and [models.Admin] instances
-// (for create you could pass a pointer to an empty Admin - `&models.Admin{}`).
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewAdminUpsert(app core.App, admin *models.Admin) *AdminUpsert {
- form := &AdminUpsert{
- app: app,
- dao: app.Dao(),
- admin: admin,
- }
-
- // load defaults
- form.Id = admin.Id
- form.Avatar = admin.Avatar
- form.Email = admin.Email
-
- return form
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *AdminUpsert) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *AdminUpsert) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(
- &form.Id,
- validation.When(
- form.admin.IsNew(),
- validation.Length(models.DefaultIdLength, models.DefaultIdLength),
- validation.Match(idRegex),
- validation.By(validators.UniqueId(form.dao, form.admin.TableName())),
- ).Else(validation.In(form.admin.Id)),
- ),
- validation.Field(
- &form.Avatar,
- validation.Min(0),
- validation.Max(9),
- ),
- validation.Field(
- &form.Email,
- validation.Required,
- validation.Length(1, 255),
- is.EmailFormat,
- validation.By(form.checkUniqueEmail),
- ),
- validation.Field(
- &form.Password,
- validation.When(form.admin.IsNew(), validation.Required),
- validation.Length(10, 72),
- ),
- validation.Field(
- &form.PasswordConfirm,
- validation.When(form.Password != "", validation.Required),
- validation.By(validators.Compare(form.Password)),
- ),
- )
-}
-
-func (form *AdminUpsert) checkUniqueEmail(value any) error {
- v, _ := value.(string)
-
- if form.dao.IsAdminEmailUnique(v, form.admin.Id) {
- return nil
- }
-
- return validation.NewError("validation_admin_email_exists", "Admin email already exists.")
-}
-
-// Submit validates the form and upserts the form admin model.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *AdminUpsert) Submit(interceptors ...InterceptorFunc[*models.Admin]) error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- // custom insertion id can be set only on create
- if form.admin.IsNew() && form.Id != "" {
- form.admin.MarkAsNew()
- form.admin.SetId(form.Id)
- }
-
- form.admin.Avatar = form.Avatar
- form.admin.Email = form.Email
-
- if form.Password != "" {
- form.admin.SetPassword(form.Password)
- }
-
- return runInterceptors(form.admin, func(admin *models.Admin) error {
- return form.dao.SaveAdmin(admin)
- }, interceptors...)
-}
diff --git a/forms/admin_upsert_test.go b/forms/admin_upsert_test.go
deleted file mode 100644
index bb502842..00000000
--- a/forms/admin_upsert_test.go
+++ /dev/null
@@ -1,341 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "testing"
-
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestNewAdminUpsert(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- admin := &models.Admin{}
- admin.Avatar = 3
- admin.Email = "new@example.com"
-
- form := forms.NewAdminUpsert(app, admin)
-
- // test defaults
- if form.Avatar != admin.Avatar {
- t.Errorf("Expected Avatar %d, got %d", admin.Avatar, form.Avatar)
- }
- if form.Email != admin.Email {
- t.Errorf("Expected Email %q, got %q", admin.Email, form.Email)
- }
-}
-
-func TestAdminUpsertValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- id string
- jsonData string
- expectError bool
- }{
- {
- // create empty
- "",
- `{}`,
- true,
- },
- {
- // update empty
- "sywbhecnh46rhm0",
- `{}`,
- false,
- },
- {
- // create failure - existing email
- "",
- `{
- "email": "test@example.com",
- "password": "1234567890",
- "passwordConfirm": "1234567890"
- }`,
- true,
- },
- {
- // create failure - passwords mismatch
- "",
- `{
- "email": "test_new@example.com",
- "password": "1234567890",
- "passwordConfirm": "1234567891"
- }`,
- true,
- },
- {
- // create success
- "",
- `{
- "email": "test_new@example.com",
- "password": "1234567890",
- "passwordConfirm": "1234567890"
- }`,
- false,
- },
- {
- // update failure - existing email
- "sywbhecnh46rhm0",
- `{
- "email": "test2@example.com"
- }`,
- true,
- },
- {
- // update failure - mismatching passwords
- "sywbhecnh46rhm0",
- `{
- "password": "1234567890",
- "passwordConfirm": "1234567891"
- }`,
- true,
- },
- {
- // update success - new email
- "sywbhecnh46rhm0",
- `{
- "email": "test_update@example.com"
- }`,
- false,
- },
- {
- // update success - new password
- "sywbhecnh46rhm0",
- `{
- "password": "1234567890",
- "passwordConfirm": "1234567890"
- }`,
- false,
- },
- }
-
- for i, s := range scenarios {
- isCreate := true
- admin := &models.Admin{}
- if s.id != "" {
- isCreate = false
- admin, _ = app.Dao().FindAdminById(s.id)
- }
- initialTokenKey := admin.TokenKey
-
- form := forms.NewAdminUpsert(app, admin)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Errorf("(%d) Failed to load form data: %v", i, loadErr)
- continue
- }
-
- interceptorCalls := 0
-
- err := form.Submit(func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(m *models.Admin) error {
- interceptorCalls++
- return next(m)
- }
- })
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
-
- foundAdmin, _ := app.Dao().FindAdminByEmail(form.Email)
-
- if !s.expectError && isCreate && foundAdmin == nil {
- t.Errorf("(%d) Expected admin to be created, got nil", i)
- continue
- }
-
- expectInterceptorCall := 1
- if s.expectError {
- expectInterceptorCall = 0
- }
- if interceptorCalls != expectInterceptorCall {
- t.Errorf("(%d) Expected interceptor to be called %d, got %d", i, expectInterceptorCall, interceptorCalls)
- }
-
- if s.expectError {
- continue // skip persistence check
- }
-
- if foundAdmin.Email != form.Email {
- t.Errorf("(%d) Expected email %s, got %s", i, form.Email, foundAdmin.Email)
- }
-
- if foundAdmin.Avatar != form.Avatar {
- t.Errorf("(%d) Expected avatar %d, got %d", i, form.Avatar, foundAdmin.Avatar)
- }
-
- if form.Password != "" && initialTokenKey == foundAdmin.TokenKey {
- t.Errorf("(%d) Expected token key to be renewed when setting a new password", i)
- }
- }
-}
-
-func TestAdminUpsertSubmitInterceptors(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- admin := &models.Admin{}
- form := forms.NewAdminUpsert(app, admin)
- form.Email = "test_new@example.com"
- form.Password = "1234567890"
- form.PasswordConfirm = form.Password
-
- testErr := errors.New("test_error")
- interceptorAdminEmail := ""
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(m *models.Admin) error {
- interceptor1Called = true
- return next(m)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Admin]) forms.InterceptorNextFunc[*models.Admin] {
- return func(m *models.Admin) error {
- interceptorAdminEmail = admin.Email // to check if the record was filled
- interceptor2Called = true
- return testErr
- }
- }
-
- err := form.Submit(interceptor1, interceptor2)
- if err != testErr {
- t.Fatalf("Expected error %v, got %v", testErr, err)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorAdminEmail != form.Email {
- t.Fatalf("Expected the form model to be filled before calling the interceptors")
- }
-}
-
-func TestAdminUpsertWithCustomId(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- existingAdmin, err := app.Dao().FindAdminByEmail("test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- name string
- jsonData string
- collection *models.Admin
- expectError bool
- }{
- {
- "empty data",
- "{}",
- &models.Admin{},
- false,
- },
- {
- "empty id",
- `{"id":""}`,
- &models.Admin{},
- false,
- },
- {
- "id < 15 chars",
- `{"id":"a23"}`,
- &models.Admin{},
- true,
- },
- {
- "id > 15 chars",
- `{"id":"a234567890123456"}`,
- &models.Admin{},
- true,
- },
- {
- "id = 15 chars (invalid chars)",
- `{"id":"a@3456789012345"}`,
- &models.Admin{},
- true,
- },
- {
- "id = 15 chars (valid chars)",
- `{"id":"a23456789012345"}`,
- &models.Admin{},
- false,
- },
- {
- "changing the id of an existing item",
- `{"id":"b23456789012345"}`,
- existingAdmin,
- true,
- },
- {
- "using the same existing item id",
- `{"id":"` + existingAdmin.Id + `"}`,
- existingAdmin,
- false,
- },
- {
- "skipping the id for existing item",
- `{}`,
- existingAdmin,
- false,
- },
- }
-
- for i, scenario := range scenarios {
- form := forms.NewAdminUpsert(app, scenario.collection)
- if form.Email == "" {
- form.Email = fmt.Sprintf("test_id_%d@example.com", i)
- }
- form.Password = "1234567890"
- form.PasswordConfirm = form.Password
-
- // load data
- loadErr := json.Unmarshal([]byte(scenario.jsonData), form)
- if loadErr != nil {
- t.Errorf("[%s] Failed to load form data: %v", scenario.name, loadErr)
- continue
- }
-
- submitErr := form.Submit()
- hasErr := submitErr != nil
-
- if hasErr != scenario.expectError {
- t.Errorf("[%s] Expected hasErr to be %v, got %v (%v)", scenario.name, scenario.expectError, hasErr, submitErr)
- }
-
- if !hasErr && form.Id != "" {
- _, err := app.Dao().FindAdminById(form.Id)
- if err != nil {
- t.Errorf("[%s] Expected to find record with id %s, got %v", scenario.name, form.Id, err)
- }
- }
- }
-}
diff --git a/forms/backup_create.go b/forms/backup_create.go
deleted file mode 100644
index d7737027..00000000
--- a/forms/backup_create.go
+++ /dev/null
@@ -1,79 +0,0 @@
-package forms
-
-import (
- "context"
- "regexp"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/core"
-)
-
-var backupNameRegex = regexp.MustCompile(`^[a-z0-9_-]+\.zip$`)
-
-// BackupCreate is a request form for creating a new app backup.
-type BackupCreate struct {
- app core.App
- ctx context.Context
-
- Name string `form:"name" json:"name"`
-}
-
-// NewBackupCreate creates new BackupCreate request form.
-func NewBackupCreate(app core.App) *BackupCreate {
- return &BackupCreate{
- app: app,
- ctx: context.Background(),
- }
-}
-
-// SetContext replaces the default form context with the provided one.
-func (form *BackupCreate) SetContext(ctx context.Context) {
- form.ctx = ctx
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *BackupCreate) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(
- &form.Name,
- validation.Length(1, 100),
- validation.Match(backupNameRegex),
- validation.By(form.checkUniqueName),
- ),
- )
-}
-
-func (form *BackupCreate) checkUniqueName(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil // nothing to check
- }
-
- fsys, err := form.app.NewBackupsFilesystem()
- if err != nil {
- return err
- }
- defer fsys.Close()
-
- fsys.SetContext(form.ctx)
-
- if exists, err := fsys.Exists(v); err != nil || exists {
- return validation.NewError("validation_backup_name_exists", "The backup file name is invalid or already exists.")
- }
-
- return nil
-}
-
-// Submit validates the form and creates the app backup.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before creating the backup.
-func (form *BackupCreate) Submit(interceptors ...InterceptorFunc[string]) error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- return runInterceptors(form.Name, func(name string) error {
- return form.app.CreateBackup(form.ctx, name)
- }, interceptors...)
-}
diff --git a/forms/backup_create_test.go b/forms/backup_create_test.go
deleted file mode 100644
index 82112ca4..00000000
--- a/forms/backup_create_test.go
+++ /dev/null
@@ -1,104 +0,0 @@
-package forms_test
-
-import (
- "strings"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestBackupCreateValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- name string
- backupName string
- expectedErrors []string
- }{
- {
- "invalid length",
- strings.Repeat("a", 97) + ".zip",
- []string{"name"},
- },
- {
- "valid length + invalid format",
- strings.Repeat("a", 96),
- []string{"name"},
- },
- {
- "valid length + valid format",
- strings.Repeat("a", 96) + ".zip",
- []string{},
- },
- {
- "auto generated name",
- "",
- []string{},
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- fsys, err := app.NewBackupsFilesystem()
- if err != nil {
- t.Fatal(err)
- }
- defer fsys.Close()
-
- form := forms.NewBackupCreate(app)
- form.Name = s.backupName
-
- result := form.Submit()
-
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Fatalf("Failed to parse errors %v", result)
- return
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Fatalf("Expected error keys %v, got %v", s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Fatalf("Missing expected error key %q in %v", k, errs)
- }
- }
-
- // retrieve all created backup files
- files, err := fsys.List("")
- if err != nil {
- t.Fatal("Failed to retrieve backup files")
- return
- }
-
- if result != nil {
- if total := len(files); total != 0 {
- t.Fatalf("Didn't expected backup files, found %d", total)
- }
- return
- }
-
- if total := len(files); total != 1 {
- t.Fatalf("Expected 1 backup file, got %d", total)
- return
- }
-
- if s.backupName == "" {
- prefix := "pb_backup_"
- if !strings.HasPrefix(files[0].Key, prefix) {
- t.Fatalf("Expected the backup file, to have prefix %q: %q", prefix, files[0].Key)
- }
- } else if s.backupName != files[0].Key {
- t.Fatalf("Expected backup file %q, got %q", s.backupName, files[0].Key)
- }
- })
- }
-}
diff --git a/forms/backup_upload.go b/forms/backup_upload.go
deleted file mode 100644
index 8056e691..00000000
--- a/forms/backup_upload.go
+++ /dev/null
@@ -1,85 +0,0 @@
-package forms
-
-import (
- "context"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/forms/validators"
- "github.com/pocketbase/pocketbase/tools/filesystem"
-)
-
-// BackupUpload is a request form for uploading a new app backup.
-type BackupUpload struct {
- app core.App
- ctx context.Context
-
- File *filesystem.File `json:"file"`
-}
-
-// NewBackupUpload creates new BackupUpload request form.
-func NewBackupUpload(app core.App) *BackupUpload {
- return &BackupUpload{
- app: app,
- ctx: context.Background(),
- }
-}
-
-// SetContext replaces the default form upload context with the provided one.
-func (form *BackupUpload) SetContext(ctx context.Context) {
- form.ctx = ctx
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *BackupUpload) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(
- &form.File,
- validation.Required,
- validation.By(validators.UploadedFileMimeType([]string{"application/zip"})),
- validation.By(form.checkUniqueName),
- ),
- )
-}
-
-func (form *BackupUpload) checkUniqueName(value any) error {
- v, _ := value.(*filesystem.File)
- if v == nil {
- return nil // nothing to check
- }
-
- fsys, err := form.app.NewBackupsFilesystem()
- if err != nil {
- return err
- }
- defer fsys.Close()
-
- fsys.SetContext(form.ctx)
-
- if exists, err := fsys.Exists(v.OriginalName); err != nil || exists {
- return validation.NewError("validation_backup_name_exists", "Backup file with the specified name already exists.")
- }
-
- return nil
-}
-
-// Submit validates the form and upload the backup file.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before uploading the backup.
-func (form *BackupUpload) Submit(interceptors ...InterceptorFunc[*filesystem.File]) error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- return runInterceptors(form.File, func(file *filesystem.File) error {
- fsys, err := form.app.NewBackupsFilesystem()
- if err != nil {
- return err
- }
-
- fsys.SetContext(form.ctx)
-
- return fsys.UploadFile(file, file.OriginalName)
- }, interceptors...)
-}
diff --git a/forms/backup_upload_test.go b/forms/backup_upload_test.go
deleted file mode 100644
index f92ceada..00000000
--- a/forms/backup_upload_test.go
+++ /dev/null
@@ -1,120 +0,0 @@
-package forms_test
-
-import (
- "archive/zip"
- "bytes"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/filesystem"
-)
-
-func TestBackupUploadValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- var zb bytes.Buffer
- zw := zip.NewWriter(&zb)
- if err := zw.Close(); err != nil {
- t.Fatal(err)
- }
-
- f0, _ := filesystem.NewFileFromBytes([]byte("test"), "existing")
- f1, _ := filesystem.NewFileFromBytes([]byte("456"), "nozip")
- f2, _ := filesystem.NewFileFromBytes(zb.Bytes(), "existing")
- f3, _ := filesystem.NewFileFromBytes(zb.Bytes(), "zip")
-
- scenarios := []struct {
- name string
- file *filesystem.File
- expectedErrors []string
- }{
- {
- "missing file",
- nil,
- []string{"file"},
- },
- {
- "non-zip file",
- f1,
- []string{"file"},
- },
- {
- "zip file with non-unique name",
- f2,
- []string{"file"},
- },
- {
- "zip file with unique name",
- f3,
- []string{},
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- fsys, err := app.NewBackupsFilesystem()
- if err != nil {
- t.Fatal(err)
- }
- defer fsys.Close()
- // create a dummy backup file to simulate existing backups
- if err := fsys.UploadFile(f0, f0.OriginalName); err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewBackupUpload(app)
- form.File = s.file
-
- result := form.Submit()
-
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Fatalf("Failed to parse errors %v", result)
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Fatalf("Expected error keys %v, got %v", s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Fatalf("Missing expected error key %q in %v", k, errs)
- }
- }
-
- expectedFiles := []*filesystem.File{f0}
- if result == nil {
- expectedFiles = append(expectedFiles, s.file)
- }
-
- // retrieve all uploaded backup files
- files, err := fsys.List("")
- if err != nil {
- t.Fatal("Failed to retrieve backup files")
- }
-
- if len(files) != len(expectedFiles) {
- t.Fatalf("Expected %d files, got %d", len(expectedFiles), len(files))
- }
-
- for _, ef := range expectedFiles {
- exists := false
- for _, f := range files {
- if f.Key == ef.OriginalName {
- exists = true
- break
- }
- }
- if !exists {
- t.Fatalf("Missing expected backup file %v", ef.OriginalName)
- }
- }
- })
- }
-}
diff --git a/forms/base.go b/forms/base.go
deleted file mode 100644
index 64c18883..00000000
--- a/forms/base.go
+++ /dev/null
@@ -1,31 +0,0 @@
-// Package models implements various services used for request data
-// validation and applying changes to existing DB models through the app Dao.
-package forms
-
-import (
- "regexp"
-)
-
-// base ID value regex pattern
-var idRegex = regexp.MustCompile(`^[^\@\#\$\&\|\.\,\'\"\\\/\s]+$`)
-
-// InterceptorNextFunc is a interceptor handler function.
-// Usually used in combination with InterceptorFunc.
-type InterceptorNextFunc[T any] func(t T) error
-
-// InterceptorFunc defines a single interceptor function that
-// will execute the provided next func handler.
-type InterceptorFunc[T any] func(next InterceptorNextFunc[T]) InterceptorNextFunc[T]
-
-// runInterceptors executes the provided list of interceptors.
-func runInterceptors[T any](
- data T,
- next InterceptorNextFunc[T],
- interceptors ...InterceptorFunc[T],
-) error {
- for i := len(interceptors) - 1; i >= 0; i-- {
- next = interceptors[i](next)
- }
-
- return next(data)
-}
diff --git a/forms/collection_upsert.go b/forms/collection_upsert.go
deleted file mode 100644
index eb39a676..00000000
--- a/forms/collection_upsert.go
+++ /dev/null
@@ -1,540 +0,0 @@
-package forms
-
-import (
- "encoding/json"
- "fmt"
- "regexp"
- "strconv"
- "strings"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/forms/validators"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/resolvers"
- "github.com/pocketbase/pocketbase/tools/dbutils"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/search"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-var collectionNameRegex = regexp.MustCompile(`^[a-zA-Z0-9][a-zA-Z0-9_]*$`)
-
-// CollectionUpsert is a [models.Collection] upsert (create/update) form.
-type CollectionUpsert struct {
- app core.App
- dao *daos.Dao
- collection *models.Collection
-
- Id string `form:"id" json:"id"`
- Type string `form:"type" json:"type"`
- Name string `form:"name" json:"name"`
- System bool `form:"system" json:"system"`
- Schema schema.Schema `form:"schema" json:"schema"`
- Indexes types.JsonArray[string] `form:"indexes" json:"indexes"`
- ListRule *string `form:"listRule" json:"listRule"`
- ViewRule *string `form:"viewRule" json:"viewRule"`
- CreateRule *string `form:"createRule" json:"createRule"`
- UpdateRule *string `form:"updateRule" json:"updateRule"`
- DeleteRule *string `form:"deleteRule" json:"deleteRule"`
- Options types.JsonMap `form:"options" json:"options"`
-}
-
-// NewCollectionUpsert creates a new [CollectionUpsert] form with initializer
-// config created from the provided [core.App] and [models.Collection] instances
-// (for create you could pass a pointer to an empty Collection - `&models.Collection{}`).
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewCollectionUpsert(app core.App, collection *models.Collection) *CollectionUpsert {
- form := &CollectionUpsert{
- app: app,
- dao: app.Dao(),
- collection: collection,
- }
-
- // load defaults
- form.Id = form.collection.Id
- form.Type = form.collection.Type
- form.Name = form.collection.Name
- form.System = form.collection.System
- form.Indexes = form.collection.Indexes
- form.ListRule = form.collection.ListRule
- form.ViewRule = form.collection.ViewRule
- form.CreateRule = form.collection.CreateRule
- form.UpdateRule = form.collection.UpdateRule
- form.DeleteRule = form.collection.DeleteRule
- form.Options = form.collection.Options
-
- if form.Type == "" {
- form.Type = models.CollectionTypeBase
- }
-
- clone, _ := form.collection.Schema.Clone()
- if clone != nil && form.Type != models.CollectionTypeView {
- form.Schema = *clone
- } else {
- form.Schema = schema.Schema{}
- }
-
- return form
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *CollectionUpsert) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *CollectionUpsert) Validate() error {
- isAuth := form.Type == models.CollectionTypeAuth
- isView := form.Type == models.CollectionTypeView
-
- // generate schema from the query (overwriting any explicit user defined schema)
- if isView {
- options := models.CollectionViewOptions{}
- if err := decodeOptions(form.Options, &options); err != nil {
- return err
- }
- form.Schema, _ = form.dao.CreateViewSchema(options.Query)
- }
-
- return validation.ValidateStruct(form,
- validation.Field(
- &form.Id,
- validation.When(
- form.collection.IsNew(),
- validation.Length(models.DefaultIdLength, models.DefaultIdLength),
- validation.Match(idRegex),
- validation.By(validators.UniqueId(form.dao, form.collection.TableName())),
- ).Else(validation.In(form.collection.Id)),
- ),
- validation.Field(
- &form.System,
- validation.By(form.ensureNoSystemFlagChange),
- ),
- validation.Field(
- &form.Type,
- validation.Required,
- validation.In(
- models.CollectionTypeBase,
- models.CollectionTypeAuth,
- models.CollectionTypeView,
- ),
- validation.By(form.ensureNoTypeChange),
- ),
- validation.Field(
- &form.Name,
- validation.Required,
- validation.Length(1, 255),
- validation.Match(collectionNameRegex),
- validation.By(form.ensureNoSystemNameChange),
- validation.By(form.checkUniqueName),
- validation.By(form.checkForVia),
- ),
- // validates using the type's own validation rules + some collection's specifics
- validation.Field(
- &form.Schema,
- validation.By(form.checkMinSchemaFields),
- validation.By(form.ensureNoSystemFieldsChange),
- validation.By(form.ensureNoFieldsTypeChange),
- validation.By(form.checkRelationFields),
- validation.When(isAuth, validation.By(form.ensureNoAuthFieldName)),
- ),
- validation.Field(&form.ListRule, validation.By(form.checkRule)),
- validation.Field(&form.ViewRule, validation.By(form.checkRule)),
- validation.Field(
- &form.CreateRule,
- validation.When(isView, validation.Nil),
- validation.By(form.checkRule),
- ),
- validation.Field(
- &form.UpdateRule,
- validation.When(isView, validation.Nil),
- validation.By(form.checkRule),
- ),
- validation.Field(
- &form.DeleteRule,
- validation.When(isView, validation.Nil),
- validation.By(form.checkRule),
- ),
- validation.Field(&form.Indexes, validation.By(form.checkIndexes)),
- validation.Field(&form.Options, validation.By(form.checkOptions)),
- )
-}
-
-func (form *CollectionUpsert) checkForVia(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil
- }
-
- if strings.Contains(strings.ToLower(v), "_via_") {
- return validation.NewError("validation_invalid_name", "The name of the collection cannot contain '_via_'.")
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) checkUniqueName(value any) error {
- v, _ := value.(string)
-
- // ensure unique collection name
- if !form.dao.IsCollectionNameUnique(v, form.collection.Id) {
- return validation.NewError("validation_collection_name_exists", "Collection name must be unique (case insensitive).")
- }
-
- // ensure that the collection name doesn't collide with the id of any collection
- if form.dao.FindById(&models.Collection{}, v) == nil {
- return validation.NewError("validation_collection_name_id_duplicate", "The name must not match an existing collection id.")
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) ensureNoSystemNameChange(value any) error {
- v, _ := value.(string)
-
- if !form.collection.IsNew() && form.collection.System && v != form.collection.Name {
- return validation.NewError("validation_collection_system_name_change", "System collections cannot be renamed.")
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) ensureNoSystemFlagChange(value any) error {
- v, _ := value.(bool)
-
- if !form.collection.IsNew() && v != form.collection.System {
- return validation.NewError("validation_collection_system_flag_change", "System collection state cannot be changed.")
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) ensureNoTypeChange(value any) error {
- v, _ := value.(string)
-
- if !form.collection.IsNew() && v != form.collection.Type {
- return validation.NewError("validation_collection_type_change", "Collection type cannot be changed.")
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) ensureNoFieldsTypeChange(value any) error {
- v, _ := value.(schema.Schema)
-
- for i, field := range v.Fields() {
- oldField := form.collection.Schema.GetFieldById(field.Id)
-
- if oldField != nil && oldField.Type != field.Type {
- return validation.Errors{fmt.Sprint(i): validation.NewError(
- "validation_field_type_change",
- "Field type cannot be changed.",
- )}
- }
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) checkRelationFields(value any) error {
- v, _ := value.(schema.Schema)
-
- for i, field := range v.Fields() {
- if field.Type != schema.FieldTypeRelation {
- continue
- }
-
- options, _ := field.Options.(*schema.RelationOptions)
- if options == nil {
- return validation.Errors{fmt.Sprint(i): validation.Errors{
- "options": validation.NewError(
- "validation_schema_invalid_relation_field_options",
- "The relation field has invalid field options.",
- )},
- }
- }
-
- // prevent collectionId change
- oldField := form.collection.Schema.GetFieldById(field.Id)
- if oldField != nil {
- oldOptions, _ := oldField.Options.(*schema.RelationOptions)
- if oldOptions != nil && oldOptions.CollectionId != options.CollectionId {
- return validation.Errors{fmt.Sprint(i): validation.Errors{
- "options": validation.Errors{
- "collectionId": validation.NewError(
- "validation_field_relation_change",
- "The relation collection cannot be changed.",
- ),
- }},
- }
- }
- }
-
- relCollection, _ := form.dao.FindCollectionByNameOrId(options.CollectionId)
-
- // validate collectionId
- if relCollection == nil || relCollection.Id != options.CollectionId {
- return validation.Errors{fmt.Sprint(i): validation.Errors{
- "options": validation.Errors{
- "collectionId": validation.NewError(
- "validation_field_invalid_relation",
- "The relation collection doesn't exist.",
- ),
- }},
- }
- }
-
- // allow only views to have relations to other views
- // (see https://github.com/pocketbase/pocketbase/issues/3000)
- if form.Type != models.CollectionTypeView && relCollection.IsView() {
- return validation.Errors{fmt.Sprint(i): validation.Errors{
- "options": validation.Errors{
- "collectionId": validation.NewError(
- "validation_field_non_view_base_relation_collection",
- "Non view collections are not allowed to have a view relation.",
- ),
- }},
- }
- }
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) ensureNoAuthFieldName(value any) error {
- v, _ := value.(schema.Schema)
-
- if form.Type != models.CollectionTypeAuth {
- return nil // not an auth collection
- }
-
- authFieldNames := schema.AuthFieldNames()
- // exclude the meta RecordUpsert form fields
- authFieldNames = append(authFieldNames, "password", "passwordConfirm", "oldPassword")
-
- errs := validation.Errors{}
- for i, field := range v.Fields() {
- if list.ExistInSlice(field.Name, authFieldNames) {
- errs[fmt.Sprint(i)] = validation.Errors{
- "name": validation.NewError(
- "validation_reserved_auth_field_name",
- "The field name is reserved and cannot be used.",
- ),
- }
- }
- }
-
- if len(errs) > 0 {
- return errs
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) checkMinSchemaFields(value any) error {
- v, _ := value.(schema.Schema)
-
- switch form.Type {
- case models.CollectionTypeAuth, models.CollectionTypeView:
- return nil // no schema fields constraint
- default:
- if len(v.Fields()) == 0 {
- return validation.ErrRequired
- }
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) ensureNoSystemFieldsChange(value any) error {
- v, _ := value.(schema.Schema)
-
- for _, oldField := range form.collection.Schema.Fields() {
- if !oldField.System {
- continue
- }
-
- newField := v.GetFieldById(oldField.Id)
-
- if newField == nil || oldField.String() != newField.String() {
- return validation.NewError("validation_system_field_change", "System fields cannot be deleted or changed.")
- }
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) checkRule(value any) error {
- v, _ := value.(*string)
- if v == nil || *v == "" {
- return nil // nothing to check
- }
-
- dummy := *form.collection
- dummy.Type = form.Type
- dummy.Schema = form.Schema
- dummy.System = form.System
- dummy.Options = form.Options
-
- r := resolvers.NewRecordFieldResolver(form.dao, &dummy, nil, true)
-
- _, err := search.FilterData(*v).BuildExpr(r)
- if err != nil {
- return validation.NewError("validation_invalid_rule", "Invalid filter rule. Raw error: "+err.Error())
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) checkIndexes(value any) error {
- v, _ := value.(types.JsonArray[string])
-
- if form.Type == models.CollectionTypeView && len(v) > 0 {
- return validation.NewError(
- "validation_indexes_not_supported",
- "The collection doesn't support indexes.",
- )
- }
-
- for i, rawIndex := range v {
- parsed := dbutils.ParseIndex(rawIndex)
-
- if !parsed.IsValid() {
- return validation.Errors{
- strconv.Itoa(i): validation.NewError(
- "validation_invalid_index_expression",
- "Invalid CREATE INDEX expression.",
- ),
- }
- }
-
- // note: we don't check the index table because it is always
- // overwritten by the daos.SyncRecordTableSchema to allow
- // easier partial modifications (eg. changing only the collection name).
- // if !strings.EqualFold(parsed.TableName, form.Name) {
- // return validation.Errors{
- // strconv.Itoa(i): validation.NewError(
- // "validation_invalid_index_table",
- // fmt.Sprintf("The index table must be the same as the collection name."),
- // ),
- // }
- // }
- }
-
- return nil
-}
-
-func (form *CollectionUpsert) checkOptions(value any) error {
- v, _ := value.(types.JsonMap)
-
- switch form.Type {
- case models.CollectionTypeAuth:
- options := models.CollectionAuthOptions{}
- if err := decodeOptions(v, &options); err != nil {
- return err
- }
-
- // check the generic validations
- if err := options.Validate(); err != nil {
- return err
- }
-
- // additional form specific validations
- if err := form.checkRule(options.ManageRule); err != nil {
- return validation.Errors{"manageRule": err}
- }
- case models.CollectionTypeView:
- options := models.CollectionViewOptions{}
- if err := decodeOptions(v, &options); err != nil {
- return err
- }
-
- // check the generic validations
- if err := options.Validate(); err != nil {
- return err
- }
-
- // check the query option
- if _, err := form.dao.CreateViewSchema(options.Query); err != nil {
- return validation.Errors{
- "query": validation.NewError(
- "validation_invalid_view_query",
- fmt.Sprintf("Invalid query - %s", err.Error()),
- ),
- }
- }
- }
-
- return nil
-}
-
-func decodeOptions(options types.JsonMap, result any) error {
- raw, err := options.MarshalJSON()
- if err != nil {
- return validation.NewError("validation_invalid_options", "Invalid options.")
- }
-
- if err := json.Unmarshal(raw, result); err != nil {
- return validation.NewError("validation_invalid_options", "Invalid options.")
- }
-
- return nil
-}
-
-// Submit validates the form and upserts the form's Collection model.
-//
-// On success the related record table schema will be auto updated.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *CollectionUpsert) Submit(interceptors ...InterceptorFunc[*models.Collection]) error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- if form.collection.IsNew() {
- // type can be set only on create
- form.collection.Type = form.Type
-
- // system flag can be set only on create
- form.collection.System = form.System
-
- // custom insertion id can be set only on create
- if form.Id != "" {
- form.collection.MarkAsNew()
- form.collection.SetId(form.Id)
- }
- }
-
- // system collections cannot be renamed
- if form.collection.IsNew() || !form.collection.System {
- form.collection.Name = form.Name
- }
-
- // view schema is autogenerated on save and cannot have indexes
- if !form.collection.IsView() {
- form.collection.Schema = form.Schema
-
- // normalize indexes format
- form.collection.Indexes = make(types.JsonArray[string], len(form.Indexes))
- for i, rawIdx := range form.Indexes {
- form.collection.Indexes[i] = dbutils.ParseIndex(rawIdx).Build()
- }
- }
-
- form.collection.ListRule = form.ListRule
- form.collection.ViewRule = form.ViewRule
- form.collection.CreateRule = form.CreateRule
- form.collection.UpdateRule = form.UpdateRule
- form.collection.DeleteRule = form.DeleteRule
- form.collection.SetOptions(form.Options)
-
- return runInterceptors(form.collection, func(collection *models.Collection) error {
- return form.dao.SaveCollection(collection)
- }, interceptors...)
-}
diff --git a/forms/collection_upsert_test.go b/forms/collection_upsert_test.go
deleted file mode 100644
index 5de00332..00000000
--- a/forms/collection_upsert_test.go
+++ /dev/null
@@ -1,827 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "errors"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/dbutils"
- "github.com/pocketbase/pocketbase/tools/security"
- "github.com/spf13/cast"
-)
-
-func TestNewCollectionUpsert(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection := &models.Collection{}
- collection.Name = "test_name"
- collection.Type = "test_type"
- collection.System = true
- listRule := "test_list"
- collection.ListRule = &listRule
- viewRule := "test_view"
- collection.ViewRule = &viewRule
- createRule := "test_create"
- collection.CreateRule = &createRule
- updateRule := "test_update"
- collection.UpdateRule = &updateRule
- deleteRule := "test_delete"
- collection.DeleteRule = &deleteRule
- collection.Schema = schema.NewSchema(&schema.SchemaField{
- Name: "test",
- Type: schema.FieldTypeText,
- })
-
- form := forms.NewCollectionUpsert(app, collection)
-
- if form.Name != collection.Name {
- t.Errorf("Expected Name %q, got %q", collection.Name, form.Name)
- }
-
- if form.Type != collection.Type {
- t.Errorf("Expected Type %q, got %q", collection.Type, form.Type)
- }
-
- if form.System != collection.System {
- t.Errorf("Expected System %v, got %v", collection.System, form.System)
- }
-
- if form.ListRule != collection.ListRule {
- t.Errorf("Expected ListRule %v, got %v", collection.ListRule, form.ListRule)
- }
-
- if form.ViewRule != collection.ViewRule {
- t.Errorf("Expected ViewRule %v, got %v", collection.ViewRule, form.ViewRule)
- }
-
- if form.CreateRule != collection.CreateRule {
- t.Errorf("Expected CreateRule %v, got %v", collection.CreateRule, form.CreateRule)
- }
-
- if form.UpdateRule != collection.UpdateRule {
- t.Errorf("Expected UpdateRule %v, got %v", collection.UpdateRule, form.UpdateRule)
- }
-
- if form.DeleteRule != collection.DeleteRule {
- t.Errorf("Expected DeleteRule %v, got %v", collection.DeleteRule, form.DeleteRule)
- }
-
- // store previous state and modify the collection schema to verify
- // that the form.Schema is a deep clone
- loadedSchema, _ := collection.Schema.MarshalJSON()
- collection.Schema.AddField(&schema.SchemaField{
- Name: "new_field",
- Type: schema.FieldTypeBool,
- })
-
- formSchema, _ := form.Schema.MarshalJSON()
-
- if string(formSchema) != string(loadedSchema) {
- t.Errorf("Expected Schema %v, got %v", string(loadedSchema), string(formSchema))
- }
-}
-
-func TestCollectionUpsertValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- testName string
- existingName string
- jsonData string
- expectedErrors []string
- }{
- {"empty create (base)", "", "{}", []string{"name", "schema"}},
- {"empty create (auth)", "", `{"type":"auth"}`, []string{"name"}},
- {"empty create (view)", "", `{"type":"view"}`, []string{"name", "options"}},
- {"empty update", "demo2", "{}", []string{}},
- {
- "collection and field with _via_ names",
- "",
- `{
- "name": "a_via_b",
- "schema": [
- {"name":"c_via_d","type":"text"}
- ]
- }`,
- []string{"name", "schema"},
- },
- {
- "create failure",
- "",
- `{
- "name": "test ?!@#$",
- "type": "invalid",
- "system": true,
- "schema": [
- {"name":"","type":"text"}
- ],
- "listRule": "missing = '123'",
- "viewRule": "missing = '123'",
- "createRule": "missing = '123'",
- "updateRule": "missing = '123'",
- "deleteRule": "missing = '123'",
- "indexes": ["create index '' on '' ()"]
- }`,
- []string{"name", "type", "schema", "listRule", "viewRule", "createRule", "updateRule", "deleteRule", "indexes"},
- },
- {
- "create failure - existing name",
- "",
- `{
- "name": "demo1",
- "system": true,
- "schema": [
- {"name":"test","type":"text"}
- ],
- "listRule": "test='123'",
- "viewRule": "test='123'",
- "createRule": "test='123'",
- "updateRule": "test='123'",
- "deleteRule": "test='123'"
- }`,
- []string{"name"},
- },
- {
- "create failure - existing internal table",
- "",
- `{
- "name": "_admins",
- "schema": [
- {"name":"test","type":"text"}
- ]
- }`,
- []string{"name"},
- },
- {
- "create failure - name starting with underscore",
- "",
- `{
- "name": "_test_new",
- "schema": [
- {"name":"test","type":"text"}
- ]
- }`,
- []string{"name"},
- },
- {
- "create failure - duplicated field names (case insensitive)",
- "",
- `{
- "name": "test_new",
- "schema": [
- {"name":"test","type":"text"},
- {"name":"tESt","type":"text"}
- ]
- }`,
- []string{"schema"},
- },
- {
- "create failure - check auth options validators",
- "",
- `{
- "name": "test_new",
- "type": "auth",
- "schema": [
- {"name":"test","type":"text"}
- ],
- "options": { "minPasswordLength": 3 }
- }`,
- []string{"options"},
- },
- {
- "create failure - check view options validators",
- "",
- `{
- "name": "test_new",
- "type": "view",
- "options": { "query": "invalid query" }
- }`,
- []string{"options"},
- },
- {
- "create success",
- "",
- `{
- "name": "test_new",
- "type": "auth",
- "system": true,
- "schema": [
- {"id":"a123456","name":"test1","type":"text"},
- {"id":"b123456","name":"test2","type":"email"},
- {
- "name":"test3",
- "type":"relation",
- "options":{
- "collectionId":"v851q4r790rhknl",
- "displayFields":["name","id","created","updated","username","email","emailVisibility","verified"]
- }
- }
- ],
- "listRule": "test1='123' && verified = true",
- "viewRule": "test1='123' && emailVisibility = true",
- "createRule": "test1='123' && email != ''",
- "updateRule": "test1='123' && username != ''",
- "deleteRule": "test1='123' && id != ''",
- "indexes": ["create index idx_test_new on anything (test1)"]
- }`,
- []string{},
- },
- {
- "update failure - changing field type",
- "test_new",
- `{
- "schema": [
- {"id":"a123456","name":"test1","type":"url"},
- {"id":"b123456","name":"test2","type":"bool"}
- ],
- "indexes": ["create index idx_test_new on test_new (test1)", "invalid"]
- }`,
- []string{"schema", "indexes"},
- },
- {
- "update success - rename fields to existing field names (aka. reusing field names)",
- "test_new",
- `{
- "schema": [
- {"id":"a123456","name":"test2","type":"text"},
- {"id":"b123456","name":"test1","type":"email"}
- ]
- }`,
- []string{},
- },
- {
- "update failure - existing name",
- "demo2",
- `{"name": "demo3"}`,
- []string{"name"},
- },
- {
- "update failure - changing system collection",
- "nologin",
- `{
- "name": "update",
- "system": false,
- "schema": [
- {"id":"koih1lqx","name":"abc","type":"text"}
- ],
- "listRule": "abc = '123'",
- "viewRule": "abc = '123'",
- "createRule": "abc = '123'",
- "updateRule": "abc = '123'",
- "deleteRule": "abc = '123'"
- }`,
- []string{"name", "system"},
- },
- {
- "update failure - changing collection type",
- "demo3",
- `{
- "type": "auth"
- }`,
- []string{"type"},
- },
- {
- "update failure - changing relation collection",
- "users",
- `{
- "schema": [
- {
- "id": "lkeigvv3",
- "name": "rel",
- "type": "relation",
- "options": {
- "collectionId": "wzlqyes4orhoygb",
- "cascadeDelete": false,
- "maxSelect": 1,
- "displayFields": null
- }
- }
- ]
- }`,
- []string{"schema"},
- },
- {
- "update failure - all fields",
- "demo2",
- `{
- "name": "test ?!@#$",
- "type": "invalid",
- "system": true,
- "schema": [
- {"name":"","type":"text"}
- ],
- "listRule": "missing = '123'",
- "viewRule": "missing = '123'",
- "createRule": "missing = '123'",
- "updateRule": "missing = '123'",
- "deleteRule": "missing = '123'",
- "options": {"test": 123},
- "indexes": ["create index '' from demo2 on (id)"]
- }`,
- []string{"name", "type", "system", "schema", "listRule", "viewRule", "createRule", "updateRule", "deleteRule", "indexes"},
- },
- {
- "update success - update all fields",
- "clients",
- `{
- "name": "demo_update",
- "type": "auth",
- "schema": [
- {"id":"_2hlxbmp","name":"test","type":"text"}
- ],
- "listRule": "test='123' && verified = true",
- "viewRule": "test='123' && emailVisibility = true",
- "createRule": "test='123' && email != ''",
- "updateRule": "test='123' && username != ''",
- "deleteRule": "test='123' && id != ''",
- "options": {"minPasswordLength": 10},
- "indexes": [
- "create index idx_clients_test1 on anything (id, email, test)",
- "create unique index idx_clients_test2 on clients (id, username, email)"
- ]
- }`,
- []string{},
- },
- // (fail due to filters old field references)
- {
- "update failure - rename the schema field of the last updated collection",
- "demo_update",
- `{
- "schema": [
- {"id":"_2hlxbmp","name":"test_renamed","type":"text"}
- ]
- }`,
- []string{"listRule", "viewRule", "createRule", "updateRule", "deleteRule"},
- },
- // (cleared filter references)
- {
- "update success - rename the schema field of the last updated collection",
- "demo_update",
- `{
- "schema": [
- {"id":"_2hlxbmp","name":"test_renamed","type":"text"}
- ],
- "listRule": null,
- "viewRule": null,
- "createRule": null,
- "updateRule": null,
- "deleteRule": null,
- "indexes": []
- }`,
- []string{},
- },
- {
- "update success - system collection",
- "nologin",
- `{
- "listRule": "name='123'",
- "viewRule": "name='123'",
- "createRule": "name='123'",
- "updateRule": "name='123'",
- "deleteRule": "name='123'"
- }`,
- []string{},
- },
-
- // view tests
- // -----------------------------------------------------------
- {
- "base->view relation",
- "",
- `{
- "name": "test_view_relation",
- "type": "base",
- "schema": [
- {
- "name": "test",
- "type": "relation",
- "options":{
- "collectionId": "v9gwnfh02gjq1q0"
- }
- }
- ]
- }`,
- []string{"schema"}, // not allowed
- },
- {
- "auth->view relation",
- "",
- `{
- "name": "test_view_relation",
- "type": "auth",
- "schema": [
- {
- "name": "test",
- "type": "relation",
- "options": {
- "collectionId": "v9gwnfh02gjq1q0"
- }
- }
- ]
- }`,
- []string{"schema"}, // not allowed
- },
- {
- "view->view relation",
- "",
- `{
- "name": "test_view_relation",
- "type": "view",
- "options": {
- "query": "select view1.id, view1.id as rel from view1"
- }
- }`,
- []string{}, // allowed
- },
- {
- "view create failure",
- "",
- `{
- "name": "upsert_view",
- "type": "view",
- "listRule": "id='123' && verified = true",
- "viewRule": "id='123' && emailVisibility = true",
- "schema": [
- {"id":"abc123","name":"some invalid field name that will be overwritten !@#$","type":"bool"}
- ],
- "options": {
- "query": "select id, email from users; drop table _admins;"
- },
- "indexes": ["create index idx_test_view on upsert_view (id)"]
- }`,
- []string{
- "listRule",
- "viewRule",
- "options",
- "indexes", // views don't have indexes
- },
- },
- {
- "view create success",
- "",
- `{
- "name": "upsert_view",
- "type": "view",
- "listRule": "id='123' && verified = true",
- "viewRule": "id='123' && emailVisibility = true",
- "schema": [
- {"id":"abc123","name":"some invalid field name that will be overwritten !@#$","type":"bool"}
- ],
- "options": {
- "query": "select id, emailVisibility, verified from users"
- }
- }`,
- []string{
- // "schema", should be overwritten by an autogenerated from the query
- },
- },
- {
- "view update failure (schema autogeneration and rule fields check)",
- "upsert_view",
- `{
- "name": "upsert_view_2",
- "listRule": "id='456' && verified = true",
- "viewRule": "id='456'",
- "createRule": "id='123'",
- "updateRule": "id='123'",
- "deleteRule": "id='123'",
- "schema": [
- {"id":"abc123","name":"verified","type":"bool"}
- ],
- "options": {
- "query": "select 1 as id"
- }
- }`,
- []string{
- "listRule", // missing field (ignoring the old or explicit schema)
- "createRule", // not allowed
- "updateRule", // not allowed
- "deleteRule", // not allowed
- },
- },
- {
- "view update failure (check query identifiers format)",
- "upsert_view",
- `{
- "listRule": null,
- "viewRule": null,
- "options": {
- "query": "select 1 as id, 2 as [invalid!@#]"
- }
- }`,
- []string{
- "schema", // should fail due to invalid field name
- },
- },
- {
- "view update success",
- "upsert_view",
- `{
- "listRule": null,
- "viewRule": null,
- "options": {
- "query": "select 1 as id, 2 as valid"
- }
- }`,
- []string{},
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.testName, func(t *testing.T) {
- collection := &models.Collection{}
- if s.existingName != "" {
- var err error
- collection, err = app.Dao().FindCollectionByNameOrId(s.existingName)
- if err != nil {
- t.Fatal(err)
- }
- }
-
- form := forms.NewCollectionUpsert(app, collection)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Fatalf("Failed to load form data: %v", loadErr)
- }
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Collection]) forms.InterceptorNextFunc[*models.Collection] {
- return func(c *models.Collection) error {
- interceptorCalls++
- return next(c)
- }
- }
-
- // parse errors
- result := form.Submit(interceptor)
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Fatalf("Failed to parse errors %v", result)
- }
-
- // check interceptor calls
- expectInterceptorCalls := 1
- if len(s.expectedErrors) > 0 {
- expectInterceptorCalls = 0
- }
- if interceptorCalls != expectInterceptorCalls {
- t.Fatalf("Expected interceptor to be called %d, got %d", expectInterceptorCalls, interceptorCalls)
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Fatalf("Expected error keys %v, got %v", s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Fatalf("Missing expected error key %q in %v", k, errs)
- }
- }
-
- if len(s.expectedErrors) > 0 {
- return
- }
-
- collection, _ = app.Dao().FindCollectionByNameOrId(form.Name)
- if collection == nil {
- t.Fatalf("Expected to find collection %q, got nil", form.Name)
- }
-
- if form.Name != collection.Name {
- t.Fatalf("Expected Name %q, got %q", collection.Name, form.Name)
- }
-
- if form.Type != collection.Type {
- t.Fatalf("Expected Type %q, got %q", collection.Type, form.Type)
- }
-
- if form.System != collection.System {
- t.Fatalf("Expected System %v, got %v", collection.System, form.System)
- }
-
- if cast.ToString(form.ListRule) != cast.ToString(collection.ListRule) {
- t.Fatalf("Expected ListRule %v, got %v", collection.ListRule, form.ListRule)
- }
-
- if cast.ToString(form.ViewRule) != cast.ToString(collection.ViewRule) {
- t.Fatalf("Expected ViewRule %v, got %v", collection.ViewRule, form.ViewRule)
- }
-
- if cast.ToString(form.CreateRule) != cast.ToString(collection.CreateRule) {
- t.Fatalf("Expected CreateRule %v, got %v", collection.CreateRule, form.CreateRule)
- }
-
- if cast.ToString(form.UpdateRule) != cast.ToString(collection.UpdateRule) {
- t.Fatalf("Expected UpdateRule %v, got %v", collection.UpdateRule, form.UpdateRule)
- }
-
- if cast.ToString(form.DeleteRule) != cast.ToString(collection.DeleteRule) {
- t.Fatalf("Expected DeleteRule %v, got %v", collection.DeleteRule, form.DeleteRule)
- }
-
- rawFormSchema, _ := form.Schema.MarshalJSON()
- rawCollectionSchema, _ := collection.Schema.MarshalJSON()
-
- if len(form.Schema.Fields()) != len(collection.Schema.Fields()) {
- t.Fatalf("Expected Schema \n%v, \ngot \n%v", string(rawCollectionSchema), string(rawFormSchema))
- }
-
- for _, f := range form.Schema.Fields() {
- if collection.Schema.GetFieldByName(f.Name) == nil {
- t.Fatalf("Missing field %s \nin \n%v", f.Name, string(rawFormSchema))
- }
- }
-
- // check indexes (if any)
- allIndexes, _ := app.Dao().TableIndexes(form.Name)
- for _, formIdx := range form.Indexes {
- parsed := dbutils.ParseIndex(formIdx)
- parsed.TableName = form.Name
- normalizedIdx := parsed.Build()
-
- var exists bool
- for _, idx := range allIndexes {
- if dbutils.ParseIndex(idx).Build() == normalizedIdx {
- exists = true
- continue
- }
- }
-
- if !exists {
- t.Fatalf("Missing index %s \nin \n%v", normalizedIdx, allIndexes)
- }
- }
- })
- }
-}
-
-func TestCollectionUpsertSubmitInterceptors(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, err := app.Dao().FindCollectionByNameOrId("demo2")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewCollectionUpsert(app, collection)
- form.Name = "test_new"
-
- testErr := errors.New("test_error")
- interceptorCollectionName := ""
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Collection]) forms.InterceptorNextFunc[*models.Collection] {
- return func(c *models.Collection) error {
- interceptor1Called = true
- return next(c)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Collection]) forms.InterceptorNextFunc[*models.Collection] {
- return func(c *models.Collection) error {
- interceptorCollectionName = collection.Name // to check if the record was filled
- interceptor2Called = true
- return testErr
- }
- }
-
- submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorCollectionName != form.Name {
- t.Fatalf("Expected the form model to be filled before calling the interceptors")
- }
-}
-
-func TestCollectionUpsertWithCustomId(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- existingCollection, err := app.Dao().FindCollectionByNameOrId("demo2")
- if err != nil {
- t.Fatal(err)
- }
-
- newCollection := func() *models.Collection {
- return &models.Collection{
- Name: "c_" + security.PseudorandomString(4),
- Schema: existingCollection.Schema,
- }
- }
-
- scenarios := []struct {
- name string
- jsonData string
- collection *models.Collection
- expectError bool
- }{
- {
- "empty data",
- "{}",
- newCollection(),
- false,
- },
- {
- "empty id",
- `{"id":""}`,
- newCollection(),
- false,
- },
- {
- "id < 15 chars",
- `{"id":"a23"}`,
- newCollection(),
- true,
- },
- {
- "id > 15 chars",
- `{"id":"a234567890123456"}`,
- newCollection(),
- true,
- },
- {
- "id = 15 chars (invalid chars)",
- `{"id":"a@3456789012345"}`,
- newCollection(),
- true,
- },
- {
- "id = 15 chars (valid chars)",
- `{"id":"a23456789012345"}`,
- newCollection(),
- false,
- },
- {
- "changing the id of an existing item",
- `{"id":"b23456789012345"}`,
- existingCollection,
- true,
- },
- {
- "using the same existing item id",
- `{"id":"` + existingCollection.Id + `"}`,
- existingCollection,
- false,
- },
- {
- "skipping the id for existing item",
- `{}`,
- existingCollection,
- false,
- },
- }
-
- for _, s := range scenarios {
- form := forms.NewCollectionUpsert(app, s.collection)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Errorf("[%s] Failed to load form data: %v", s.name, loadErr)
- continue
- }
-
- submitErr := form.Submit()
- hasErr := submitErr != nil
-
- if hasErr != s.expectError {
- t.Errorf("[%s] Expected hasErr to be %v, got %v (%v)", s.name, s.expectError, hasErr, submitErr)
- }
-
- if !hasErr && form.Id != "" {
- _, err := app.Dao().FindCollectionByNameOrId(form.Id)
- if err != nil {
- t.Errorf("[%s] Expected to find record with id %s, got %v", s.name, form.Id, err)
- }
- }
- }
-}
diff --git a/forms/collections_import.go b/forms/collections_import.go
deleted file mode 100644
index bd17833a..00000000
--- a/forms/collections_import.go
+++ /dev/null
@@ -1,132 +0,0 @@
-package forms
-
-import (
- "encoding/json"
- "fmt"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
-)
-
-// CollectionsImport is a form model to bulk import
-// (create, replace and delete) collections from a user provided list.
-type CollectionsImport struct {
- app core.App
- dao *daos.Dao
-
- Collections []*models.Collection `form:"collections" json:"collections"`
- DeleteMissing bool `form:"deleteMissing" json:"deleteMissing"`
-}
-
-// NewCollectionsImport creates a new [CollectionsImport] form with
-// initialized with from the provided [core.App] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewCollectionsImport(app core.App) *CollectionsImport {
- return &CollectionsImport{
- app: app,
- dao: app.Dao(),
- }
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *CollectionsImport) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *CollectionsImport) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(&form.Collections, validation.Required),
- )
-}
-
-// Submit applies the import, aka.:
-// - imports the form collections (create or replace)
-// - sync the collection changes with their related records table
-// - ensures the integrity of the imported structure (aka. run validations for each collection)
-// - if [form.DeleteMissing] is set, deletes all local collections that are not found in the imports list
-//
-// All operations are wrapped in a single transaction that are
-// rollbacked on the first encountered error.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *CollectionsImport) Submit(interceptors ...InterceptorFunc[[]*models.Collection]) error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- return runInterceptors(form.Collections, func(collections []*models.Collection) error {
- return form.dao.RunInTransaction(func(txDao *daos.Dao) error {
- importErr := txDao.ImportCollections(
- collections,
- form.DeleteMissing,
- form.afterSync,
- )
- if importErr == nil {
- return nil
- }
-
- // validation failure
- if err, ok := importErr.(validation.Errors); ok {
- return err
- }
-
- // generic/db failure
- return validation.Errors{"collections": validation.NewError(
- "collections_import_failure",
- "Failed to import the collections configuration. Raw error:\n"+importErr.Error(),
- )}
- })
- }, interceptors...)
-}
-
-func (form *CollectionsImport) afterSync(txDao *daos.Dao, mappedNew, mappedOld map[string]*models.Collection) error {
- // refresh the actual persisted collections list
- refreshedCollections := []*models.Collection{}
- if err := txDao.CollectionQuery().OrderBy("updated ASC").All(&refreshedCollections); err != nil {
- return err
- }
-
- // trigger the validator for each existing collection to
- // ensure that the app is not left in a broken state
- for _, collection := range refreshedCollections {
- upsertModel := mappedOld[collection.GetId()]
- if upsertModel == nil {
- upsertModel = collection
- }
- upsertModel.MarkAsNotNew()
-
- upsertForm := NewCollectionUpsert(form.app, upsertModel)
- upsertForm.SetDao(txDao)
-
- // load form fields with the refreshed collection state
- upsertForm.Id = collection.Id
- upsertForm.Type = collection.Type
- upsertForm.Name = collection.Name
- upsertForm.System = collection.System
- upsertForm.ListRule = collection.ListRule
- upsertForm.ViewRule = collection.ViewRule
- upsertForm.CreateRule = collection.CreateRule
- upsertForm.UpdateRule = collection.UpdateRule
- upsertForm.DeleteRule = collection.DeleteRule
- upsertForm.Schema = collection.Schema
- upsertForm.Options = collection.Options
-
- if err := upsertForm.Validate(); err != nil {
- // serialize the validation error(s)
- serializedErr, _ := json.MarshalIndent(err, "", " ")
-
- return validation.Errors{"collections": validation.NewError(
- "collections_import_validate_failure",
- fmt.Sprintf("Data validations failed for collection %q (%s):\n%s", collection.Name, collection.Id, serializedErr),
- )}
- }
- }
-
- return nil
-}
diff --git a/forms/collections_import_test.go b/forms/collections_import_test.go
deleted file mode 100644
index 33d15ab8..00000000
--- a/forms/collections_import_test.go
+++ /dev/null
@@ -1,511 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "errors"
- "testing"
-
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestCollectionsImportValidate(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- form := forms.NewCollectionsImport(app)
-
- scenarios := []struct {
- collections []*models.Collection
- expectError bool
- }{
- {nil, true},
- {[]*models.Collection{}, true},
- {[]*models.Collection{{}}, false},
- }
-
- for i, s := range scenarios {
- form.Collections = s.collections
-
- err := form.Validate()
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
- }
-}
-
-func TestCollectionsImportSubmit(t *testing.T) {
- t.Parallel()
-
- totalCollections := 11
-
- scenarios := []struct {
- name string
- jsonData string
- expectError bool
- expectCollectionsCount int
- expectEvents map[string]int
- }{
- {
- name: "empty collections",
- jsonData: `{
- "deleteMissing": true,
- "collections": []
- }`,
- expectError: true,
- expectCollectionsCount: totalCollections,
- expectEvents: nil,
- },
- {
- name: "one of the collections has invalid data",
- jsonData: `{
- "collections": [
- {
- "name": "import1",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- },
- {
- "name": "import 2",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- }
- ]
- }`,
- expectError: true,
- expectCollectionsCount: totalCollections,
- expectEvents: map[string]int{
- "OnModelBeforeCreate": 2,
- },
- },
- {
- name: "test empty base collection schema",
- jsonData: `{
- "collections": [
- {
- "name": "import1"
- },
- {
- "name": "import2",
- "type": "auth"
- }
- ]
- }`,
- expectError: true,
- expectCollectionsCount: totalCollections,
- expectEvents: map[string]int{
- "OnModelBeforeCreate": 2,
- },
- },
- {
- name: "all imported collections has valid data",
- jsonData: `{
- "collections": [
- {
- "name": "import1",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- },
- {
- "name": "import2",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- },
- {
- "name": "import3",
- "type": "auth"
- }
- ]
- }`,
- expectError: false,
- expectCollectionsCount: totalCollections + 3,
- expectEvents: map[string]int{
- "OnModelBeforeCreate": 3,
- "OnModelAfterCreate": 3,
- },
- },
- {
- name: "new collection with existing name",
- jsonData: `{
- "collections": [
- {
- "name": "demo2",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- }
- ]
- }`,
- expectError: true,
- expectCollectionsCount: totalCollections,
- expectEvents: map[string]int{
- "OnModelBeforeCreate": 1,
- },
- },
- {
- name: "delete system + modified + new collection",
- jsonData: `{
- "deleteMissing": true,
- "collections": [
- {
- "id":"sz5l5z67tg7gku0",
- "name":"demo2",
- "schema":[
- {
- "id":"_2hlxbmp",
- "name":"title",
- "type":"text",
- "system":false,
- "required":true,
- "unique":false,
- "options":{
- "min":3,
- "max":null,
- "pattern":""
- }
- }
- ]
- },
- {
- "name": "import1",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- }
- ]
- }`,
- expectError: true,
- expectCollectionsCount: totalCollections,
- expectEvents: map[string]int{
- "OnModelBeforeDelete": 1,
- },
- },
- {
- name: "modified + new collection",
- jsonData: `{
- "collections": [
- {
- "id":"sz5l5z67tg7gku0",
- "name":"demo2_rename",
- "schema":[
- {
- "id":"_2hlxbmp",
- "name":"title_new",
- "type":"text",
- "system":false,
- "required":true,
- "unique":false,
- "options":{
- "min":3,
- "max":null,
- "pattern":""
- }
- }
- ]
- },
- {
- "name": "import1",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- },
- {
- "name": "import2",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- }
- ]
- }`,
- expectError: false,
- expectCollectionsCount: totalCollections + 2,
- expectEvents: map[string]int{
- "OnModelBeforeUpdate": 1,
- "OnModelAfterUpdate": 1,
- "OnModelBeforeCreate": 2,
- "OnModelAfterCreate": 2,
- },
- },
- {
- name: "delete non-system + modified + new collection",
- jsonData: `{
- "deleteMissing": true,
- "collections": [
- {
- "id": "kpv709sk2lqbqk8",
- "system": true,
- "name": "nologin",
- "type": "auth",
- "options": {
- "allowEmailAuth": false,
- "allowOAuth2Auth": false,
- "allowUsernameAuth": false,
- "exceptEmailDomains": [],
- "manageRule": "@request.auth.collectionName = 'users'",
- "minPasswordLength": 8,
- "onlyEmailDomains": [],
- "requireEmail": true
- },
- "listRule": "",
- "viewRule": "",
- "createRule": "",
- "updateRule": "",
- "deleteRule": "",
- "schema": [
- {
- "id": "x8zzktwe",
- "name": "name",
- "type": "text",
- "system": false,
- "required": false,
- "unique": false,
- "options": {
- "min": null,
- "max": null,
- "pattern": ""
- }
- }
- ]
- },
- {
- "id":"sz5l5z67tg7gku0",
- "name":"demo2",
- "schema":[
- {
- "id":"_2hlxbmp",
- "name":"title",
- "type":"text",
- "system":false,
- "required":true,
- "unique":false,
- "options":{
- "min":3,
- "max":null,
- "pattern":""
- }
- }
- ]
- },
- {
- "id": "test_deleted_collection_name_reuse",
- "name": "demo1",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- }
- ]
- }`,
- expectError: false,
- expectCollectionsCount: 3,
- expectEvents: map[string]int{
- "OnModelBeforeUpdate": 2,
- "OnModelAfterUpdate": 2,
- "OnModelBeforeCreate": 1,
- "OnModelAfterCreate": 1,
- "OnModelBeforeDelete": totalCollections - 2,
- "OnModelAfterDelete": totalCollections - 2,
- },
- },
- {
- name: "lazy system table name error",
- jsonData: `{
- "collections": [
- {
- "name": "_admins",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- }
- ]
- }`,
- expectError: true,
- expectCollectionsCount: totalCollections,
- expectEvents: map[string]int{
- "OnModelBeforeCreate": 1,
- },
- },
- {
- name: "lazy view evaluation",
- jsonData: `{
- "collections": [
- {
- "name": "view_before",
- "type": "view",
- "options": {
- "query": "select id, active from base_test"
- }
- },
- {
- "name": "base_test",
- "schema": [
- {
- "id":"fz6iql2m",
- "name":"active",
- "type":"bool"
- }
- ]
- },
- {
- "name": "view_after_new",
- "type": "view",
- "options": {
- "query": "select id, active from base_test"
- }
- },
- {
- "name": "view_after_old",
- "type": "view",
- "options": {
- "query": "select id from demo1"
- }
- }
- ]
- }`,
- expectError: false,
- expectCollectionsCount: totalCollections + 4,
- expectEvents: map[string]int{
- "OnModelBeforeUpdate": 3,
- "OnModelAfterUpdate": 3,
- "OnModelBeforeCreate": 4,
- "OnModelAfterCreate": 4,
- },
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- form := forms.NewCollectionsImport(testApp)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Fatalf("Failed to load form data: %v", loadErr)
- }
-
- err := form.Submit()
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Fatalf("Expected hasErr to be %v, got %v (%v)", s.expectError, hasErr, err)
- }
-
- // check collections count
- collections := []*models.Collection{}
- if err := testApp.Dao().CollectionQuery().All(&collections); err != nil {
- t.Fatal(err)
- }
- if len(collections) != s.expectCollectionsCount {
- t.Fatalf("Expected %d collections, got %d", s.expectCollectionsCount, len(collections))
- }
-
- // check events
- if len(testApp.EventCalls) > len(s.expectEvents) {
- t.Fatalf("Expected events %v, got %v", s.expectEvents, testApp.EventCalls)
- }
- for event, expectedCalls := range s.expectEvents {
- actualCalls := testApp.EventCalls[event]
- if actualCalls != expectedCalls {
- t.Fatalf("Expected event %s to be called %d, got %d", event, expectedCalls, actualCalls)
- }
- }
- })
- }
-}
-
-func TestCollectionsImportSubmitInterceptors(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collections := []*models.Collection{}
- if err := app.Dao().CollectionQuery().All(&collections); err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewCollectionsImport(app)
- form.Collections = collections
-
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[[]*models.Collection]) forms.InterceptorNextFunc[[]*models.Collection] {
- return func(imports []*models.Collection) error {
- interceptor1Called = true
- return next(imports)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[[]*models.Collection]) forms.InterceptorNextFunc[[]*models.Collection] {
- return func(imports []*models.Collection) error {
- interceptor2Called = true
- return testErr
- }
- }
-
- submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-}
diff --git a/forms/realtime_subscribe.go b/forms/realtime_subscribe.go
deleted file mode 100644
index fc852fc8..00000000
--- a/forms/realtime_subscribe.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package forms
-
-import (
- validation "github.com/go-ozzo/ozzo-validation/v4"
-)
-
-// RealtimeSubscribe is a realtime subscriptions request form.
-type RealtimeSubscribe struct {
- ClientId string `form:"clientId" json:"clientId"`
- Subscriptions []string `form:"subscriptions" json:"subscriptions"`
-}
-
-// NewRealtimeSubscribe creates new RealtimeSubscribe request form.
-func NewRealtimeSubscribe() *RealtimeSubscribe {
- return &RealtimeSubscribe{}
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *RealtimeSubscribe) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(&form.ClientId, validation.Required, validation.Length(1, 255)),
- )
-}
diff --git a/forms/realtime_subscribe_test.go b/forms/realtime_subscribe_test.go
deleted file mode 100644
index d4f8b1e7..00000000
--- a/forms/realtime_subscribe_test.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package forms_test
-
-import (
- "strings"
- "testing"
-
- "github.com/pocketbase/pocketbase/forms"
-)
-
-func TestRealtimeSubscribeValidate(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- clientId string
- expectError bool
- }{
- {"", true},
- {strings.Repeat("a", 256), true},
- {"test", false},
- }
-
- for i, s := range scenarios {
- form := forms.NewRealtimeSubscribe()
- form.ClientId = s.clientId
-
- err := form.Validate()
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
- }
-}
diff --git a/forms/record_email_change_confirm.go b/forms/record_email_change_confirm.go
deleted file mode 100644
index 79da8b92..00000000
--- a/forms/record_email_change_confirm.go
+++ /dev/null
@@ -1,145 +0,0 @@
-package forms
-
-import (
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-// RecordEmailChangeConfirm is an auth record email change confirmation form.
-type RecordEmailChangeConfirm struct {
- app core.App
- dao *daos.Dao
- collection *models.Collection
-
- Token string `form:"token" json:"token"`
- Password string `form:"password" json:"password"`
-}
-
-// NewRecordEmailChangeConfirm creates a new [RecordEmailChangeConfirm] form
-// initialized with from the provided [core.App] and [models.Collection] instances.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewRecordEmailChangeConfirm(app core.App, collection *models.Collection) *RecordEmailChangeConfirm {
- return &RecordEmailChangeConfirm{
- app: app,
- dao: app.Dao(),
- collection: collection,
- }
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *RecordEmailChangeConfirm) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *RecordEmailChangeConfirm) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(
- &form.Token,
- validation.Required,
- validation.By(form.checkToken),
- ),
- validation.Field(
- &form.Password,
- validation.Required,
- validation.Length(1, 100),
- validation.By(form.checkPassword),
- ),
- )
-}
-
-func (form *RecordEmailChangeConfirm) checkToken(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil // nothing to check
- }
-
- authRecord, _, err := form.parseToken(v)
- if err != nil {
- return err
- }
-
- if authRecord.Collection().Id != form.collection.Id {
- return validation.NewError("validation_token_collection_mismatch", "The provided token is for different auth collection.")
- }
-
- return nil
-}
-
-func (form *RecordEmailChangeConfirm) checkPassword(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil // nothing to check
- }
-
- authRecord, _, _ := form.parseToken(form.Token)
- if authRecord == nil || !authRecord.ValidatePassword(v) {
- return validation.NewError("validation_invalid_password", "Missing or invalid auth record password.")
- }
-
- return nil
-}
-
-func (form *RecordEmailChangeConfirm) parseToken(token string) (*models.Record, string, error) {
- // check token payload
- claims, _ := security.ParseUnverifiedJWT(token)
- newEmail, _ := claims["newEmail"].(string)
- if newEmail == "" {
- return nil, "", validation.NewError("validation_invalid_token_payload", "Invalid token payload - newEmail must be set.")
- }
-
- // ensure that there aren't other users with the new email
- if !form.dao.IsRecordValueUnique(form.collection.Id, schema.FieldNameEmail, newEmail) {
- return nil, "", validation.NewError("validation_existing_token_email", "The new email address is already registered: "+newEmail)
- }
-
- // verify that the token is not expired and its signature is valid
- authRecord, err := form.dao.FindAuthRecordByToken(
- token,
- form.app.Settings().RecordEmailChangeToken.Secret,
- )
- if err != nil || authRecord == nil {
- return nil, "", validation.NewError("validation_invalid_token", "Invalid or expired token.")
- }
-
- return authRecord, newEmail, nil
-}
-
-// Submit validates and submits the auth record email change confirmation form.
-// On success returns the updated auth record associated to `form.Token`.
-//
-// You can optionally provide a list of InterceptorFunc to
-// further modify the form behavior before persisting it.
-func (form *RecordEmailChangeConfirm) Submit(interceptors ...InterceptorFunc[*models.Record]) (*models.Record, error) {
- if err := form.Validate(); err != nil {
- return nil, err
- }
-
- authRecord, newEmail, err := form.parseToken(form.Token)
- if err != nil {
- return nil, err
- }
-
- authRecord.SetEmail(newEmail)
- authRecord.SetVerified(true)
-
- // @todo consider removing if not necessary anymore
- authRecord.RefreshTokenKey() // invalidate old tokens
-
- interceptorsErr := runInterceptors(authRecord, func(m *models.Record) error {
- authRecord = m
- return form.dao.SaveRecord(m)
- }, interceptors...)
-
- if interceptorsErr != nil {
- return nil, interceptorsErr
- }
-
- return authRecord, nil
-}
diff --git a/forms/record_email_change_confirm_test.go b/forms/record_email_change_confirm_test.go
deleted file mode 100644
index 90ca6f86..00000000
--- a/forms/record_email_change_confirm_test.go
+++ /dev/null
@@ -1,204 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "errors"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-func TestRecordEmailChangeConfirmValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- jsonData string
- expectedErrors []string
- }{
- // empty payload
- {"{}", []string{"token", "password"}},
- // empty data
- {
- `{"token": "", "password": ""}`,
- []string{"token", "password"},
- },
- // invalid token payload
- {
- `{
- "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZXhwIjoyMjA4OTg1MjYxfQ.quDgaCi2rGTRx3qO06CrFvHdeCua_5J7CCVWSaFhkus",
- "password": "123456"
- }`,
- []string{"token", "password"},
- },
- // expired token
- {
- `{
- "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZW1haWwiOiJ0ZXN0QGV4YW1wbGUuY29tIiwibmV3RW1haWwiOiJ0ZXN0X25ld0BleGFtcGxlLmNvbSIsImV4cCI6MTYwOTQ1NTY2MX0.n1OJXJEACMNPT9aMTO48cVJexIiZEtHsz4UNBIfMcf4",
- "password": "123456"
- }`,
- []string{"token", "password"},
- },
- // existing new email
- {
- `{
- "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZW1haWwiOiJ0ZXN0QGV4YW1wbGUuY29tIiwibmV3RW1haWwiOiJ0ZXN0MkBleGFtcGxlLmNvbSIsImV4cCI6MjIwODk4NTI2MX0.Q_o6zpc2URggTU0mWv2CS0rIPbQhFdmrjZ-ASwHh1Ww",
- "password": "1234567890"
- }`,
- []string{"token", "password"},
- },
- // wrong confirmation password
- {
- `{
- "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZW1haWwiOiJ0ZXN0QGV4YW1wbGUuY29tIiwibmV3RW1haWwiOiJ0ZXN0X25ld0BleGFtcGxlLmNvbSIsImV4cCI6MjIwODk4NTI2MX0.hmR7Ye23C68tS1LgHgYgT7NBJczTad34kzcT4sqW3FY",
- "password": "123456"
- }`,
- []string{"password"},
- },
- // valid data
- {
- `{
- "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZW1haWwiOiJ0ZXN0QGV4YW1wbGUuY29tIiwibmV3RW1haWwiOiJ0ZXN0X25ld0BleGFtcGxlLmNvbSIsImV4cCI6MjIwODk4NTI2MX0.hmR7Ye23C68tS1LgHgYgT7NBJczTad34kzcT4sqW3FY",
- "password": "1234567890"
- }`,
- []string{},
- },
- }
-
- for i, s := range scenarios {
- form := forms.NewRecordEmailChangeConfirm(testApp, authCollection)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Errorf("(%d) Failed to load form data: %v", i, loadErr)
- continue
- }
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(r *models.Record) error {
- interceptorCalls++
- return next(r)
- }
- }
-
- record, err := form.Submit(interceptor)
-
- // check interceptor calls
- expectInterceptorCalls := 1
- if len(s.expectedErrors) > 0 {
- expectInterceptorCalls = 0
- }
- if interceptorCalls != expectInterceptorCalls {
- t.Errorf("[%d] Expected interceptor to be called %d, got %d", i, expectInterceptorCalls, interceptorCalls)
- }
-
- // parse errors
- errs, ok := err.(validation.Errors)
- if !ok && err != nil {
- t.Errorf("(%d) Failed to parse errors %v", i, err)
- continue
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("(%d) Expected error keys %v, got %v", i, s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("(%d) Missing expected error key %q in %v", i, k, errs)
- }
- }
-
- if len(errs) > 0 {
- continue
- }
-
- claims, _ := security.ParseUnverifiedJWT(form.Token)
- newEmail, _ := claims["newEmail"].(string)
-
- // check whether the user was updated
- // ---
- if record.Email() != newEmail {
- t.Errorf("(%d) Expected record email %q, got %q", i, newEmail, record.Email())
- }
-
- if !record.Verified() {
- t.Errorf("(%d) Expected record to be verified, got false", i)
- }
-
- // shouldn't validate second time due to refreshed record token
- if err := form.Validate(); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- }
-}
-
-func TestRecordEmailChangeConfirmInterceptors(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- authRecord, err := testApp.Dao().FindAuthRecordByEmail("users", "test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordEmailChangeConfirm(testApp, authCollection)
- form.Token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiY29sbGVjdGlvbklkIjoiX3BiX3VzZXJzX2F1dGhfIiwiZW1haWwiOiJ0ZXN0QGV4YW1wbGUuY29tIiwibmV3RW1haWwiOiJ0ZXN0X25ld0BleGFtcGxlLmNvbSIsImV4cCI6MjIwODk4NTI2MX0.hmR7Ye23C68tS1LgHgYgT7NBJczTad34kzcT4sqW3FY"
- form.Password = "1234567890"
- interceptorEmail := authRecord.Email()
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptor1Called = true
- return next(record)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptorEmail = record.Email()
- interceptor2Called = true
- return testErr
- }
- }
-
- _, submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorEmail == authRecord.Email() {
- t.Fatalf("Expected the form model to be filled before calling the interceptors")
- }
-}
diff --git a/forms/record_email_change_request.go b/forms/record_email_change_request.go
deleted file mode 100644
index f849290a..00000000
--- a/forms/record_email_change_request.go
+++ /dev/null
@@ -1,75 +0,0 @@
-package forms
-
-import (
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/mails"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
-)
-
-// RecordEmailChangeRequest is an auth record email change request form.
-type RecordEmailChangeRequest struct {
- app core.App
- dao *daos.Dao
- record *models.Record
-
- NewEmail string `form:"newEmail" json:"newEmail"`
-}
-
-// NewRecordEmailChangeRequest creates a new [RecordEmailChangeRequest] form
-// initialized with from the provided [core.App] and [models.Record] instances.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewRecordEmailChangeRequest(app core.App, record *models.Record) *RecordEmailChangeRequest {
- return &RecordEmailChangeRequest{
- app: app,
- dao: app.Dao(),
- record: record,
- }
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *RecordEmailChangeRequest) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *RecordEmailChangeRequest) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(
- &form.NewEmail,
- validation.Required,
- validation.Length(1, 255),
- is.EmailFormat,
- validation.By(form.checkUniqueEmail),
- ),
- )
-}
-
-func (form *RecordEmailChangeRequest) checkUniqueEmail(value any) error {
- v, _ := value.(string)
-
- if !form.dao.IsRecordValueUnique(form.record.Collection().Id, schema.FieldNameEmail, v) {
- return validation.NewError("validation_record_email_invalid", "User email already exists or it is invalid.")
- }
-
- return nil
-}
-
-// Submit validates and sends the change email request.
-//
-// You can optionally provide a list of InterceptorFunc to
-// further modify the form behavior before persisting it.
-func (form *RecordEmailChangeRequest) Submit(interceptors ...InterceptorFunc[*models.Record]) error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- return runInterceptors(form.record, func(m *models.Record) error {
- return mails.SendRecordChangeEmail(form.app, m, form.NewEmail)
- }, interceptors...)
-}
diff --git a/forms/record_email_change_request_test.go b/forms/record_email_change_request_test.go
deleted file mode 100644
index c6e8e9d3..00000000
--- a/forms/record_email_change_request_test.go
+++ /dev/null
@@ -1,153 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "errors"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestRecordEmailChangeRequestValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- user, err := testApp.Dao().FindAuthRecordByEmail("users", "test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- jsonData string
- expectedErrors []string
- }{
- // empty payload
- {"{}", []string{"newEmail"}},
- // empty data
- {
- `{"newEmail": ""}`,
- []string{"newEmail"},
- },
- // invalid email
- {
- `{"newEmail": "invalid"}`,
- []string{"newEmail"},
- },
- // existing email token
- {
- `{"newEmail": "test2@example.com"}`,
- []string{"newEmail"},
- },
- // valid new email
- {
- `{"newEmail": "test_new@example.com"}`,
- []string{},
- },
- }
-
- for i, s := range scenarios {
- testApp.TestMailer.TotalSend = 0 // reset
- form := forms.NewRecordEmailChangeRequest(testApp, user)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Errorf("(%d) Failed to load form data: %v", i, loadErr)
- continue
- }
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(r *models.Record) error {
- interceptorCalls++
- return next(r)
- }
- }
-
- err := form.Submit(interceptor)
-
- // check interceptor calls
- expectInterceptorCalls := 1
- if len(s.expectedErrors) > 0 {
- expectInterceptorCalls = 0
- }
- if interceptorCalls != expectInterceptorCalls {
- t.Errorf("[%d] Expected interceptor to be called %d, got %d", i, expectInterceptorCalls, interceptorCalls)
- }
-
- // parse errors
- errs, ok := err.(validation.Errors)
- if !ok && err != nil {
- t.Errorf("(%d) Failed to parse errors %v", i, err)
- continue
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("(%d) Expected error keys %v, got %v", i, s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("(%d) Missing expected error key %q in %v", i, k, errs)
- }
- }
-
- expectedMails := 1
- if len(s.expectedErrors) > 0 {
- expectedMails = 0
- }
- if testApp.TestMailer.TotalSend != expectedMails {
- t.Errorf("(%d) Expected %d mail(s) to be sent, got %d", i, expectedMails, testApp.TestMailer.TotalSend)
- }
- }
-}
-
-func TestRecordEmailChangeRequestInterceptors(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authRecord, err := testApp.Dao().FindAuthRecordByEmail("users", "test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordEmailChangeRequest(testApp, authRecord)
- form.NewEmail = "test_new@example.com"
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptor1Called = true
- return next(record)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptor2Called = true
- return testErr
- }
- }
-
- submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-}
diff --git a/forms/record_oauth2_login.go b/forms/record_oauth2_login.go
deleted file mode 100644
index 6747e1ba..00000000
--- a/forms/record_oauth2_login.go
+++ /dev/null
@@ -1,294 +0,0 @@
-package forms
-
-import (
- "context"
- "errors"
- "fmt"
- "time"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/auth"
- "github.com/pocketbase/pocketbase/tools/security"
- "golang.org/x/oauth2"
-)
-
-// RecordOAuth2LoginData defines the OA
-type RecordOAuth2LoginData struct {
- ExternalAuth *models.ExternalAuth
- Record *models.Record
- OAuth2User *auth.AuthUser
- ProviderClient auth.Provider
-}
-
-// BeforeOAuth2RecordCreateFunc defines a callback function that will
-// be called before OAuth2 new Record creation.
-type BeforeOAuth2RecordCreateFunc func(createForm *RecordUpsert, authRecord *models.Record, authUser *auth.AuthUser) error
-
-// RecordOAuth2Login is an auth record OAuth2 login form.
-type RecordOAuth2Login struct {
- app core.App
- dao *daos.Dao
- collection *models.Collection
-
- beforeOAuth2RecordCreateFunc BeforeOAuth2RecordCreateFunc
-
- // Optional auth record that will be used if no external
- // auth relation is found (if it is from the same collection)
- loggedAuthRecord *models.Record
-
- // The name of the OAuth2 client provider (eg. "google")
- Provider string `form:"provider" json:"provider"`
-
- // The authorization code returned from the initial request.
- Code string `form:"code" json:"code"`
-
- // The optional PKCE code verifier as part of the code_challenge sent with the initial request.
- CodeVerifier string `form:"codeVerifier" json:"codeVerifier"`
-
- // The redirect url sent with the initial request.
- RedirectUrl string `form:"redirectUrl" json:"redirectUrl"`
-
- // Additional data that will be used for creating a new auth record
- // if an existing OAuth2 account doesn't exist.
- CreateData map[string]any `form:"createData" json:"createData"`
-}
-
-// NewRecordOAuth2Login creates a new [RecordOAuth2Login] form with
-// initialized with from the provided [core.App] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewRecordOAuth2Login(app core.App, collection *models.Collection, optAuthRecord *models.Record) *RecordOAuth2Login {
- form := &RecordOAuth2Login{
- app: app,
- dao: app.Dao(),
- collection: collection,
- loggedAuthRecord: optAuthRecord,
- }
-
- return form
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *RecordOAuth2Login) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// SetBeforeNewRecordCreateFunc sets a before OAuth2 record create callback handler.
-func (form *RecordOAuth2Login) SetBeforeNewRecordCreateFunc(f BeforeOAuth2RecordCreateFunc) {
- form.beforeOAuth2RecordCreateFunc = f
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *RecordOAuth2Login) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(&form.Provider, validation.Required, validation.By(form.checkProviderName)),
- validation.Field(&form.Code, validation.Required),
- validation.Field(&form.RedirectUrl, validation.Required),
- )
-}
-
-func (form *RecordOAuth2Login) checkProviderName(value any) error {
- name, _ := value.(string)
-
- config, ok := form.app.Settings().NamedAuthProviderConfigs()[name]
- if !ok || !config.Enabled {
- return validation.NewError("validation_invalid_provider", fmt.Sprintf("%q is missing or is not enabled.", name))
- }
-
- return nil
-}
-
-// Submit validates and submits the form.
-//
-// If an auth record doesn't exist, it will make an attempt to create it
-// based on the fetched OAuth2 profile data via a local [RecordUpsert] form.
-// You can intercept/modify the Record create form with [form.SetBeforeNewRecordCreateFunc()].
-//
-// You can also optionally provide a list of InterceptorFunc to
-// further modify the form behavior before persisting it.
-//
-// On success returns the authorized record model and the fetched provider's data.
-func (form *RecordOAuth2Login) Submit(
- interceptors ...InterceptorFunc[*RecordOAuth2LoginData],
-) (*models.Record, *auth.AuthUser, error) {
- if err := form.Validate(); err != nil {
- return nil, nil, err
- }
-
- if !form.collection.AuthOptions().AllowOAuth2Auth {
- return nil, nil, errors.New("OAuth2 authentication is not allowed for the auth collection.")
- }
-
- provider, err := auth.NewProviderByName(form.Provider)
- if err != nil {
- return nil, nil, err
- }
-
- ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
- defer cancel()
-
- provider.SetContext(ctx)
-
- // load provider configuration
- providerConfig := form.app.Settings().NamedAuthProviderConfigs()[form.Provider]
- if err := providerConfig.SetupProvider(provider); err != nil {
- return nil, nil, err
- }
-
- provider.SetRedirectUrl(form.RedirectUrl)
-
- var opts []oauth2.AuthCodeOption
-
- if provider.PKCE() {
- opts = append(opts, oauth2.SetAuthURLParam("code_verifier", form.CodeVerifier))
- }
-
- // fetch token
- token, err := provider.FetchToken(form.Code, opts...)
- if err != nil {
- return nil, nil, err
- }
-
- // fetch external auth user
- authUser, err := provider.FetchAuthUser(token)
- if err != nil {
- return nil, nil, err
- }
-
- var authRecord *models.Record
-
- // check for existing relation with the auth record
- rel, _ := form.dao.FindFirstExternalAuthByExpr(dbx.HashExp{
- "collectionId": form.collection.Id,
- "provider": form.Provider,
- "providerId": authUser.Id,
- })
- switch {
- case rel != nil:
- authRecord, err = form.dao.FindRecordById(form.collection.Id, rel.RecordId)
- if err != nil {
- return nil, authUser, err
- }
- case form.loggedAuthRecord != nil && form.loggedAuthRecord.Collection().Id == form.collection.Id:
- // fallback to the logged auth record (if any)
- authRecord = form.loggedAuthRecord
- case authUser.Email != "":
- // look for an existing auth record by the external auth record's email
- authRecord, _ = form.dao.FindAuthRecordByEmail(form.collection.Id, authUser.Email)
- }
-
- interceptorData := &RecordOAuth2LoginData{
- ExternalAuth: rel,
- Record: authRecord,
- OAuth2User: authUser,
- ProviderClient: provider,
- }
-
- interceptorsErr := runInterceptors(interceptorData, func(newData *RecordOAuth2LoginData) error {
- return form.submit(newData)
- }, interceptors...)
-
- if interceptorsErr != nil {
- return nil, interceptorData.OAuth2User, interceptorsErr
- }
-
- return interceptorData.Record, interceptorData.OAuth2User, nil
-}
-
-func (form *RecordOAuth2Login) submit(data *RecordOAuth2LoginData) error {
- return form.dao.RunInTransaction(func(txDao *daos.Dao) error {
- if data.Record == nil {
- data.Record = models.NewRecord(form.collection)
- data.Record.RefreshId()
- data.Record.MarkAsNew()
- createForm := NewRecordUpsert(form.app, data.Record)
- createForm.SetFullManageAccess(true)
- createForm.SetDao(txDao)
- if data.OAuth2User.Username != "" &&
- len(data.OAuth2User.Username) >= 3 &&
- len(data.OAuth2User.Username) <= 150 &&
- usernameRegex.MatchString(data.OAuth2User.Username) {
- createForm.Username = form.dao.SuggestUniqueAuthRecordUsername(
- form.collection.Id,
- data.OAuth2User.Username,
- )
- }
-
- // load custom data
- createForm.LoadData(form.CreateData)
-
- // load the OAuth2 user data
- createForm.Email = data.OAuth2User.Email
- createForm.Verified = true // mark as verified as long as it matches the OAuth2 data (even if the email is empty)
-
- // generate a random password if not explicitly set
- if createForm.Password == "" {
- createForm.Password = security.RandomString(30)
- createForm.PasswordConfirm = createForm.Password
- }
-
- if form.beforeOAuth2RecordCreateFunc != nil {
- if err := form.beforeOAuth2RecordCreateFunc(createForm, data.Record, data.OAuth2User); err != nil {
- return err
- }
- }
-
- // create the new auth record
- if err := createForm.Submit(); err != nil {
- return err
- }
- } else {
- isLoggedAuthRecord := form.loggedAuthRecord != nil &&
- form.loggedAuthRecord.Id == data.Record.Id &&
- form.loggedAuthRecord.Collection().Id == data.Record.Collection().Id
-
- // set random password for users with unverified email
- // (this is in case a malicious actor has registered via password using the user email)
- if !isLoggedAuthRecord && data.Record.Email() != "" && !data.Record.Verified() {
- data.Record.SetPassword(security.RandomString(30))
- if err := txDao.SaveRecord(data.Record); err != nil {
- return err
- }
- }
-
- // update the existing auth record empty email if the data.OAuth2User has one
- // (this is in case previously the auth record was created
- // with an OAuth2 provider that didn't return an email address)
- if data.Record.Email() == "" && data.OAuth2User.Email != "" {
- data.Record.SetEmail(data.OAuth2User.Email)
- if err := txDao.SaveRecord(data.Record); err != nil {
- return err
- }
- }
-
- // update the existing auth record verified state
- // (only if the auth record doesn't have an email or the auth record email match with the one in data.OAuth2User)
- if !data.Record.Verified() && (data.Record.Email() == "" || data.Record.Email() == data.OAuth2User.Email) {
- data.Record.SetVerified(true)
- if err := txDao.SaveRecord(data.Record); err != nil {
- return err
- }
- }
- }
-
- // create ExternalAuth relation if missing
- if data.ExternalAuth == nil {
- data.ExternalAuth = &models.ExternalAuth{
- CollectionId: data.Record.Collection().Id,
- RecordId: data.Record.Id,
- Provider: form.Provider,
- ProviderId: data.OAuth2User.Id,
- }
- if err := txDao.SaveExternalAuth(data.ExternalAuth); err != nil {
- return err
- }
- }
-
- return nil
- })
-}
diff --git a/forms/record_oauth2_login_test.go b/forms/record_oauth2_login_test.go
deleted file mode 100644
index 5a4c0744..00000000
--- a/forms/record_oauth2_login_test.go
+++ /dev/null
@@ -1,98 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestUserOauth2LoginValidate(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- testName string
- collectionName string
- jsonData string
- expectedErrors []string
- }{
- {
- "empty payload",
- "users",
- "{}",
- []string{"provider", "code", "redirectUrl"},
- },
- {
- "empty data",
- "users",
- `{"provider":"","code":"","codeVerifier":"","redirectUrl":""}`,
- []string{"provider", "code", "redirectUrl"},
- },
- {
- "missing provider",
- "users",
- `{"provider":"missing","code":"123","codeVerifier":"123","redirectUrl":"https://example.com"}`,
- []string{"provider"},
- },
- {
- "disabled provider",
- "users",
- `{"provider":"github","code":"123","codeVerifier":"123","redirectUrl":"https://example.com"}`,
- []string{"provider"},
- },
- {
- "enabled provider",
- "users",
- `{"provider":"gitlab","code":"123","codeVerifier":"123","redirectUrl":"https://example.com"}`,
- []string{},
- },
- {
- "[#3689] any redirectUrl value",
- "users",
- `{"provider":"gitlab","code":"123","codeVerifier":"123","redirectUrl":"something"}`,
- []string{},
- },
- }
-
- for _, s := range scenarios {
- authCollection, _ := app.Dao().FindCollectionByNameOrId(s.collectionName)
- if authCollection == nil {
- t.Errorf("[%s] Failed to fetch auth collection", s.testName)
- }
-
- form := forms.NewRecordOAuth2Login(app, authCollection, nil)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Errorf("[%s] Failed to load form data: %v", s.testName, loadErr)
- continue
- }
-
- err := form.Validate()
-
- // parse errors
- errs, ok := err.(validation.Errors)
- if !ok && err != nil {
- t.Errorf("[%s] Failed to parse errors %v", s.testName, err)
- continue
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("[%s] Expected error keys %v, got %v", s.testName, s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("[%s] Missing expected error key %q in %v", s.testName, k, errs)
- }
- }
- }
-}
-
-// @todo consider mocking a Oauth2 provider to test Submit
diff --git a/forms/record_password_login.go b/forms/record_password_login.go
deleted file mode 100644
index 85f8caae..00000000
--- a/forms/record_password_login.go
+++ /dev/null
@@ -1,95 +0,0 @@
-package forms
-
-import (
- "database/sql"
- "errors"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
-)
-
-// RecordPasswordLogin is record username/email + password login form.
-type RecordPasswordLogin struct {
- app core.App
- dao *daos.Dao
- collection *models.Collection
-
- Identity string `form:"identity" json:"identity"`
- Password string `form:"password" json:"password"`
-}
-
-// NewRecordPasswordLogin creates a new [RecordPasswordLogin] form initialized
-// with from the provided [core.App] and [models.Collection] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewRecordPasswordLogin(app core.App, collection *models.Collection) *RecordPasswordLogin {
- return &RecordPasswordLogin{
- app: app,
- dao: app.Dao(),
- collection: collection,
- }
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *RecordPasswordLogin) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *RecordPasswordLogin) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(&form.Identity, validation.Required, validation.Length(1, 255)),
- validation.Field(&form.Password, validation.Required, validation.Length(1, 255)),
- )
-}
-
-// Submit validates and submits the form.
-// On success returns the authorized record model.
-//
-// You can optionally provide a list of InterceptorFunc to
-// further modify the form behavior before persisting it.
-func (form *RecordPasswordLogin) Submit(interceptors ...InterceptorFunc[*models.Record]) (*models.Record, error) {
- if err := form.Validate(); err != nil {
- return nil, err
- }
-
- authOptions := form.collection.AuthOptions()
-
- var authRecord *models.Record
- var fetchErr error
-
- isEmail := is.EmailFormat.Validate(form.Identity) == nil
-
- if isEmail {
- if authOptions.AllowEmailAuth {
- authRecord, fetchErr = form.dao.FindAuthRecordByEmail(form.collection.Id, form.Identity)
- }
- } else if authOptions.AllowUsernameAuth {
- authRecord, fetchErr = form.dao.FindAuthRecordByUsername(form.collection.Id, form.Identity)
- }
-
- // ignore not found errors to allow custom fetch implementations
- if fetchErr != nil && !errors.Is(fetchErr, sql.ErrNoRows) {
- return nil, fetchErr
- }
-
- interceptorsErr := runInterceptors(authRecord, func(m *models.Record) error {
- authRecord = m
-
- if authRecord == nil || !authRecord.ValidatePassword(form.Password) {
- return errors.New("Invalid login credentials.")
- }
-
- return nil
- }, interceptors...)
-
- if interceptorsErr != nil {
- return nil, interceptorsErr
- }
-
- return authRecord, nil
-}
diff --git a/forms/record_password_login_test.go b/forms/record_password_login_test.go
deleted file mode 100644
index 3892dc48..00000000
--- a/forms/record_password_login_test.go
+++ /dev/null
@@ -1,186 +0,0 @@
-package forms_test
-
-import (
- "errors"
- "testing"
-
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestRecordPasswordLoginValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- scenarios := []struct {
- testName string
- collectionName string
- identity string
- password string
- expectError bool
- }{
- {
- "empty data",
- "users",
- "",
- "",
- true,
- },
-
- // username
- {
- "existing username + wrong password",
- "users",
- "users75657",
- "invalid",
- true,
- },
- {
- "missing username + valid password",
- "users",
- "clients57772", // not in the "users" collection
- "1234567890",
- true,
- },
- {
- "existing username + valid password but in restricted username auth collection",
- "clients",
- "clients57772",
- "1234567890",
- true,
- },
- {
- "existing username + valid password but in restricted username and email auth collection",
- "nologin",
- "test_username",
- "1234567890",
- true,
- },
- {
- "existing username + valid password",
- "users",
- "users75657",
- "1234567890",
- false,
- },
-
- // email
- {
- "existing email + wrong password",
- "users",
- "test@example.com",
- "invalid",
- true,
- },
- {
- "missing email + valid password",
- "users",
- "test_missing@example.com",
- "1234567890",
- true,
- },
- {
- "existing username + valid password but in restricted username auth collection",
- "clients",
- "test@example.com",
- "1234567890",
- false,
- },
- {
- "existing username + valid password but in restricted username and email auth collection",
- "nologin",
- "test@example.com",
- "1234567890",
- true,
- },
- {
- "existing email + valid password",
- "users",
- "test@example.com",
- "1234567890",
- false,
- },
- }
-
- for _, s := range scenarios {
- authCollection, err := testApp.Dao().FindCollectionByNameOrId(s.collectionName)
- if err != nil {
- t.Errorf("[%s] Failed to fetch auth collection: %v", s.testName, err)
- }
-
- form := forms.NewRecordPasswordLogin(testApp, authCollection)
- form.Identity = s.identity
- form.Password = s.password
-
- record, err := form.Submit()
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("[%s] Expected hasErr to be %v, got %v (%v)", s.testName, s.expectError, hasErr, err)
- continue
- }
-
- if hasErr {
- continue
- }
-
- if record.Email() != s.identity && record.Username() != s.identity {
- t.Errorf("[%s] Expected record with identity %q, got \n%v", s.testName, s.identity, record)
- }
- }
-}
-
-func TestRecordPasswordLoginInterceptors(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordPasswordLogin(testApp, authCollection)
- form.Identity = "test@example.com"
- form.Password = "123456"
- var interceptorRecord *models.Record
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptor1Called = true
- return next(record)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptorRecord = record
- interceptor2Called = true
- return testErr
- }
- }
-
- _, submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorRecord == nil || interceptorRecord.Email() != form.Identity {
- t.Fatalf("Expected auth Record model with email %s, got %v", form.Identity, interceptorRecord)
- }
-}
diff --git a/forms/record_password_reset_confirm.go b/forms/record_password_reset_confirm.go
deleted file mode 100644
index 370322cd..00000000
--- a/forms/record_password_reset_confirm.go
+++ /dev/null
@@ -1,118 +0,0 @@
-package forms
-
-import (
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/forms/validators"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/security"
- "github.com/spf13/cast"
-)
-
-// RecordPasswordResetConfirm is an auth record password reset confirmation form.
-type RecordPasswordResetConfirm struct {
- app core.App
- collection *models.Collection
- dao *daos.Dao
-
- Token string `form:"token" json:"token"`
- Password string `form:"password" json:"password"`
- PasswordConfirm string `form:"passwordConfirm" json:"passwordConfirm"`
-}
-
-// NewRecordPasswordResetConfirm creates a new [RecordPasswordResetConfirm]
-// form initialized with from the provided [core.App] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewRecordPasswordResetConfirm(app core.App, collection *models.Collection) *RecordPasswordResetConfirm {
- return &RecordPasswordResetConfirm{
- app: app,
- dao: app.Dao(),
- collection: collection,
- }
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *RecordPasswordResetConfirm) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *RecordPasswordResetConfirm) Validate() error {
- minPasswordLength := form.collection.AuthOptions().MinPasswordLength
-
- return validation.ValidateStruct(form,
- validation.Field(&form.Token, validation.Required, validation.By(form.checkToken)),
- validation.Field(&form.Password, validation.Required, validation.Length(minPasswordLength, 100)),
- validation.Field(&form.PasswordConfirm, validation.Required, validation.By(validators.Compare(form.Password))),
- )
-}
-
-func (form *RecordPasswordResetConfirm) checkToken(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil // nothing to check
- }
-
- record, err := form.dao.FindAuthRecordByToken(
- v,
- form.app.Settings().RecordPasswordResetToken.Secret,
- )
- if err != nil || record == nil {
- return validation.NewError("validation_invalid_token", "Invalid or expired token.")
- }
-
- if record.Collection().Id != form.collection.Id {
- return validation.NewError("validation_token_collection_mismatch", "The provided token is for different auth collection.")
- }
-
- return nil
-}
-
-// Submit validates and submits the form.
-// On success returns the updated auth record associated to `form.Token`.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *RecordPasswordResetConfirm) Submit(interceptors ...InterceptorFunc[*models.Record]) (*models.Record, error) {
- if err := form.Validate(); err != nil {
- return nil, err
- }
-
- authRecord, err := form.dao.FindAuthRecordByToken(
- form.Token,
- form.app.Settings().RecordPasswordResetToken.Secret,
- )
- if err != nil {
- return nil, err
- }
-
- if err := authRecord.SetPassword(form.Password); err != nil {
- return nil, err
- }
-
- if !authRecord.Verified() {
- payload, err := security.ParseUnverifiedJWT(form.Token)
- if err != nil {
- return nil, err
- }
-
- // mark as verified if the email hasn't changed
- if authRecord.Email() == cast.ToString(payload["email"]) {
- authRecord.SetVerified(true)
- }
- }
-
- interceptorsErr := runInterceptors(authRecord, func(m *models.Record) error {
- authRecord = m
- return form.dao.SaveRecord(authRecord)
- }, interceptors...)
-
- if interceptorsErr != nil {
- return nil, interceptorsErr
- }
-
- return authRecord, nil
-}
diff --git a/forms/record_password_reset_confirm_test.go b/forms/record_password_reset_confirm_test.go
deleted file mode 100644
index 18fb7a6d..00000000
--- a/forms/record_password_reset_confirm_test.go
+++ /dev/null
@@ -1,196 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "errors"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-func TestRecordPasswordResetConfirmValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- jsonData string
- expectedErrors []string
- }{
- // empty data (Validate call check)
- {
- `{}`,
- []string{"token", "password", "passwordConfirm"},
- },
- // expired token
- {
- `{
- "token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImNvbGxlY3Rpb25JZCI6Il9wYl91c2Vyc19hdXRoXyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiZXhwIjoxNjQwOTkxNjYxfQ.TayHoXkOTM0w8InkBEb86npMJEaf6YVUrxrRmMgFjeY",
- "password":"12345678",
- "passwordConfirm":"12345678"
- }`,
- []string{"token"},
- },
- // valid token but invalid passwords lengths
- {
- `{
- "token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImNvbGxlY3Rpb25JZCI6Il9wYl91c2Vyc19hdXRoXyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiZXhwIjoyMjA4OTg1MjYxfQ.R_4FOSUHIuJQ5Crl3PpIPCXMsoHzuTaNlccpXg_3FOg",
- "password":"1234567",
- "passwordConfirm":"1234567"
- }`,
- []string{"password"},
- },
- // valid token but mismatched passwordConfirm
- {
- `{
- "token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImNvbGxlY3Rpb25JZCI6Il9wYl91c2Vyc19hdXRoXyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiZXhwIjoyMjA4OTg1MjYxfQ.R_4FOSUHIuJQ5Crl3PpIPCXMsoHzuTaNlccpXg_3FOg",
- "password":"12345678",
- "passwordConfirm":"12345679"
- }`,
- []string{"passwordConfirm"},
- },
- // valid token and password
- {
- `{
- "token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImNvbGxlY3Rpb25JZCI6Il9wYl91c2Vyc19hdXRoXyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiZXhwIjoyMjA4OTg1MjYxfQ.R_4FOSUHIuJQ5Crl3PpIPCXMsoHzuTaNlccpXg_3FOg",
- "password":"12345678",
- "passwordConfirm":"12345678"
- }`,
- []string{},
- },
- }
-
- for i, s := range scenarios {
- form := forms.NewRecordPasswordResetConfirm(testApp, authCollection)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Errorf("(%d) Failed to load form data: %v", i, loadErr)
- continue
- }
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(r *models.Record) error {
- interceptorCalls++
- return next(r)
- }
- }
-
- record, submitErr := form.Submit(interceptor)
-
- // parse errors
- errs, ok := submitErr.(validation.Errors)
- if !ok && submitErr != nil {
- t.Errorf("(%d) Failed to parse errors %v", i, submitErr)
- continue
- }
-
- // check interceptor calls
- expectInterceptorCalls := 1
- if len(s.expectedErrors) > 0 {
- expectInterceptorCalls = 0
- }
- if interceptorCalls != expectInterceptorCalls {
- t.Errorf("[%d] Expected interceptor to be called %d, got %d", i, expectInterceptorCalls, interceptorCalls)
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("(%d) Expected error keys %v, got %v", i, s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("(%d) Missing expected error key %q in %v", i, k, errs)
- }
- }
-
- if len(errs) > 0 || len(s.expectedErrors) > 0 {
- continue
- }
-
- claims, _ := security.ParseUnverifiedJWT(form.Token)
- tokenRecordId := claims["id"]
-
- if record.Id != tokenRecordId {
- t.Errorf("(%d) Expected record with id %s, got %v", i, tokenRecordId, record)
- }
-
- if !record.LastResetSentAt().IsZero() {
- t.Errorf("(%d) Expected record.LastResetSentAt to be empty, got %v", i, record.LastResetSentAt())
- }
-
- if !record.ValidatePassword(form.Password) {
- t.Errorf("(%d) Expected the record password to have been updated to %q", i, form.Password)
- }
- }
-}
-
-func TestRecordPasswordResetConfirmInterceptors(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- authRecord, err := testApp.Dao().FindAuthRecordByEmail("users", "test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordPasswordResetConfirm(testApp, authCollection)
- form.Token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImNvbGxlY3Rpb25JZCI6Il9wYl91c2Vyc19hdXRoXyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiZXhwIjoyMjA4OTg1MjYxfQ.R_4FOSUHIuJQ5Crl3PpIPCXMsoHzuTaNlccpXg_3FOg"
- form.Password = "1234567890"
- form.PasswordConfirm = "1234567890"
- interceptorTokenKey := authRecord.TokenKey()
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptor1Called = true
- return next(record)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptorTokenKey = record.TokenKey()
- interceptor2Called = true
- return testErr
- }
- }
-
- _, submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorTokenKey == authRecord.TokenKey() {
- t.Fatalf("Expected the form model to be filled before calling the interceptors")
- }
-}
diff --git a/forms/record_password_reset_request.go b/forms/record_password_reset_request.go
deleted file mode 100644
index 0abda397..00000000
--- a/forms/record_password_reset_request.go
+++ /dev/null
@@ -1,92 +0,0 @@
-package forms
-
-import (
- "errors"
- "fmt"
- "time"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/mails"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-// RecordPasswordResetRequest is an auth record reset password request form.
-type RecordPasswordResetRequest struct {
- app core.App
- dao *daos.Dao
- collection *models.Collection
- resendThreshold float64 // in seconds
-
- Email string `form:"email" json:"email"`
-}
-
-// NewRecordPasswordResetRequest creates a new [RecordPasswordResetRequest]
-// form initialized with from the provided [core.App] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewRecordPasswordResetRequest(app core.App, collection *models.Collection) *RecordPasswordResetRequest {
- return &RecordPasswordResetRequest{
- app: app,
- dao: app.Dao(),
- collection: collection,
- resendThreshold: 120, // 2 min
- }
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *RecordPasswordResetRequest) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-//
-// This method doesn't check whether auth record with `form.Email` exists (this is done on Submit).
-func (form *RecordPasswordResetRequest) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(
- &form.Email,
- validation.Required,
- validation.Length(1, 255),
- is.EmailFormat,
- ),
- )
-}
-
-// Submit validates and submits the form.
-// On success, sends a password reset email to the `form.Email` auth record.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *RecordPasswordResetRequest) Submit(interceptors ...InterceptorFunc[*models.Record]) error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- authRecord, err := form.dao.FindAuthRecordByEmail(form.collection.Id, form.Email)
- if err != nil {
- return fmt.Errorf("Failed to fetch %s record with email %s: %w", form.collection.Id, form.Email, err)
- }
-
- now := time.Now().UTC()
- lastResetSentAt := authRecord.LastResetSentAt().Time()
- if now.Sub(lastResetSentAt).Seconds() < form.resendThreshold {
- return errors.New("You've already requested a password reset.")
- }
-
- return runInterceptors(authRecord, func(m *models.Record) error {
- if err := mails.SendRecordPasswordReset(form.app, m); err != nil {
- return err
- }
-
- // update last sent timestamp
- m.Set(schema.FieldNameLastResetSentAt, types.NowDateTime())
-
- return form.dao.SaveRecord(m)
- }, interceptors...)
-}
diff --git a/forms/record_password_reset_request_test.go b/forms/record_password_reset_request_test.go
deleted file mode 100644
index 2dc52052..00000000
--- a/forms/record_password_reset_request_test.go
+++ /dev/null
@@ -1,174 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "errors"
- "testing"
- "time"
-
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestRecordPasswordResetRequestSubmit(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- jsonData string
- expectError bool
- }{
- // empty field (Validate call check)
- {
- `{"email":""}`,
- true,
- },
- // invalid email field (Validate call check)
- {
- `{"email":"invalid"}`,
- true,
- },
- // nonexisting user
- {
- `{"email":"missing@example.com"}`,
- true,
- },
- // existing user
- {
- `{"email":"test@example.com"}`,
- false,
- },
- // existing user - reached send threshod
- {
- `{"email":"test@example.com"}`,
- true,
- },
- }
-
- now := types.NowDateTime()
- time.Sleep(1 * time.Millisecond)
-
- for i, s := range scenarios {
- testApp.TestMailer.TotalSend = 0 // reset
- form := forms.NewRecordPasswordResetRequest(testApp, authCollection)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Errorf("(%d) Failed to load form data: %v", i, loadErr)
- continue
- }
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(r *models.Record) error {
- interceptorCalls++
- return next(r)
- }
- }
-
- err := form.Submit(interceptor)
-
- // check interceptor calls
- expectInterceptorCalls := 1
- if s.expectError {
- expectInterceptorCalls = 0
- }
- if interceptorCalls != expectInterceptorCalls {
- t.Errorf("[%d] Expected interceptor to be called %d, got %d", i, expectInterceptorCalls, interceptorCalls)
- }
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
-
- expectedMails := 1
- if s.expectError {
- expectedMails = 0
- }
- if testApp.TestMailer.TotalSend != expectedMails {
- t.Errorf("(%d) Expected %d mail(s) to be sent, got %d", i, expectedMails, testApp.TestMailer.TotalSend)
- }
-
- if s.expectError {
- continue
- }
-
- // check whether LastResetSentAt was updated
- user, err := testApp.Dao().FindAuthRecordByEmail(authCollection.Id, form.Email)
- if err != nil {
- t.Errorf("(%d) Expected user with email %q to exist, got nil", i, form.Email)
- continue
- }
-
- if user.LastResetSentAt().Time().Sub(now.Time()) < 0 {
- t.Errorf("(%d) Expected LastResetSentAt to be after %v, got %v", i, now, user.LastResetSentAt())
- }
- }
-}
-
-func TestRecordPasswordResetRequestInterceptors(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- authRecord, err := testApp.Dao().FindAuthRecordByEmail("users", "test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordPasswordResetRequest(testApp, authCollection)
- form.Email = authRecord.Email()
- interceptorLastResetSentAt := authRecord.LastResetSentAt()
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptor1Called = true
- return next(record)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptorLastResetSentAt = record.LastResetSentAt()
- interceptor2Called = true
- return testErr
- }
- }
-
- submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorLastResetSentAt.String() != authRecord.LastResetSentAt().String() {
- t.Fatalf("Expected the form model to NOT be filled before calling the interceptors")
- }
-}
diff --git a/forms/record_upsert.go b/forms/record_upsert.go
index cdfe972b..72373b9e 100644
--- a/forms/record_upsert.go
+++ b/forms/record_upsert.go
@@ -1,935 +1,289 @@
package forms
import (
- "encoding/json"
+ "context"
"errors"
"fmt"
- "log/slog"
- "net/http"
- "regexp"
- "strings"
+ "slices"
validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
"github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/forms/validators"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/filesystem"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/rest"
+ "github.com/pocketbase/pocketbase/core/validators"
"github.com/pocketbase/pocketbase/tools/security"
"github.com/spf13/cast"
)
-// username value regex pattern
-var usernameRegex = regexp.MustCompile(`^[\w][\w\.\-]*$`)
+const (
+ accessLevelDefault = iota
+ accessLevelManager
+ accessLevelSuperuser
+)
-// RecordUpsert is a [models.Record] upsert (create/update) form.
type RecordUpsert struct {
- app core.App
- dao *daos.Dao
- manageAccess bool
- record *models.Record
+ ctx context.Context
+ app core.App
+ record *core.Record
+ accessLevel int
- filesToUpload map[string][]*filesystem.File
- filesToDelete []string // names list
-
- // base model fields
- Id string `json:"id"`
-
- // auth collection fields
- // ---
- Username string `json:"username"`
- Email string `json:"email"`
- EmailVisibility bool `json:"emailVisibility"`
- Verified bool `json:"verified"`
- Password string `json:"password"`
- PasswordConfirm string `json:"passwordConfirm"`
- OldPassword string `json:"oldPassword"`
- // ---
-
- data map[string]any
+ // extra password fields
+ Password string `form:"password" json:"password"`
+ PasswordConfirm string `form:"passwordConfirm" json:"passwordConfirm"`
+ OldPassword string `form:"oldPassword" json:"oldPassword"`
}
-// NewRecordUpsert creates a new [RecordUpsert] form with initializer
-// config created from the provided [core.App] and [models.Record] instances
-// (for create you could pass a pointer to an empty Record - models.NewRecord(collection)).
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewRecordUpsert(app core.App, record *models.Record) *RecordUpsert {
+// NewRecordUpsert creates a new [RecordUpsert] form from the provided [core.App] and [core.Record] instances
+// (for create you could pass a pointer to an empty Record - core.NewRecord(collection)).
+func NewRecordUpsert(app core.App, record *core.Record) *RecordUpsert {
form := &RecordUpsert{
- app: app,
- dao: app.Dao(),
- record: record,
- filesToDelete: []string{},
- filesToUpload: map[string][]*filesystem.File{},
+ ctx: context.Background(),
+ app: app,
+ record: record,
}
- form.loadFormDefaults()
-
return form
}
-// Data returns the loaded form's data.
-func (form *RecordUpsert) Data() map[string]any {
- return form.data
+// SetContext assigns ctx as the context of the current form.
+func (form *RecordUpsert) SetContext(ctx context.Context) {
+ form.ctx = ctx
}
-// SetFullManageAccess sets the manageAccess bool flag of the current
-// form to enable/disable directly changing some system record fields
-// (often used with auth collection records).
-func (form *RecordUpsert) SetFullManageAccess(fullManageAccess bool) {
- form.manageAccess = fullManageAccess
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *RecordUpsert) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-func (form *RecordUpsert) loadFormDefaults() {
- form.Id = form.record.Id
-
- if form.record.Collection().IsAuth() {
- form.Username = form.record.Username()
- form.Email = form.record.Email()
- form.EmailVisibility = form.record.EmailVisibility()
- form.Verified = form.record.Verified()
- }
-
- form.data = map[string]any{}
- for _, field := range form.record.Collection().Schema.Fields() {
- form.data[field.Name] = form.record.Get(field.Name)
- }
-}
-
-func (form *RecordUpsert) getContentType(r *http.Request) string {
- t := r.Header.Get("Content-Type")
- for i, c := range t {
- if c == ' ' || c == ';' {
- return t[:i]
- }
- }
- return t
-}
-
-func (form *RecordUpsert) extractRequestData(
- r *http.Request,
- keyPrefix string,
-) (map[string]any, map[string][]*filesystem.File, error) {
- switch form.getContentType(r) {
- case "application/json":
- return form.extractJsonData(r, keyPrefix)
- case "multipart/form-data":
- return form.extractMultipartFormData(r, keyPrefix)
- default:
- return nil, nil, errors.New("unsupported request content-type")
- }
-}
-
-func (form *RecordUpsert) extractJsonData(
- r *http.Request,
- keyPrefix string,
-) (map[string]any, map[string][]*filesystem.File, error) {
- data := map[string]any{}
-
- err := rest.CopyJsonBody(r, &data)
-
- if keyPrefix != "" {
- parts := strings.Split(keyPrefix, ".")
- for _, part := range parts {
- if data[part] == nil {
- break
- }
- if v, ok := data[part].(map[string]any); ok {
- data = v
- }
- }
- }
-
- return data, nil, err
-}
-
-func (form *RecordUpsert) extractMultipartFormData(
- r *http.Request,
- keyPrefix string,
-) (map[string]any, map[string][]*filesystem.File, error) {
- // parse form data (if not already)
- if err := r.ParseMultipartForm(rest.DefaultMaxMemory); err != nil {
- return nil, nil, err
- }
-
- data := map[string]any{}
- filesToUpload := map[string][]*filesystem.File{}
- arraybleFieldTypes := schema.ArraybleFieldTypes()
-
- for fullKey, values := range r.PostForm {
- key := fullKey
- if keyPrefix != "" {
- key = strings.TrimPrefix(key, keyPrefix+".")
- }
-
- if len(values) == 0 {
- data[key] = nil
- continue
- }
-
- // special case for multipart json encoded fields
- if key == rest.MultipartJsonKey {
- for _, v := range values {
- if err := json.Unmarshal([]byte(v), &data); err != nil {
- form.app.Logger().Debug("Failed to decode @json value into the data map", "error", err, "value", v)
- }
- }
- continue
- }
-
- field := form.record.Collection().Schema.GetFieldByName(key)
- if field != nil && list.ExistInSlice(field.Type, arraybleFieldTypes) {
- data[key] = values
- } else {
- data[key] = values[0]
- }
- }
-
- // load uploaded files (if any)
- for _, field := range form.record.Collection().Schema.Fields() {
- if field.Type != schema.FieldTypeFile {
- continue // not a file field
- }
-
- key := field.Name
- fullKey := key
- if keyPrefix != "" {
- fullKey = keyPrefix + "." + key
- }
-
- files, err := rest.FindUploadedFiles(r, fullKey)
- if err != nil || len(files) == 0 {
- if err != nil && err != http.ErrMissingFile {
- form.app.Logger().Debug(
- "Uploaded file error",
- slog.String("key", fullKey),
- slog.String("error", err.Error()),
- )
- }
-
- // skip invalid or missing file(s)
- continue
- }
-
- filesToUpload[key] = append(filesToUpload[key], files...)
- }
-
- return data, filesToUpload, nil
-}
-
-// LoadRequest extracts the json or multipart/form-data request data
-// and lods it into the form.
+// SetApp replaces the current form app instance.
//
-// File upload is supported only via multipart/form-data.
-func (form *RecordUpsert) LoadRequest(r *http.Request, keyPrefix string) error {
- requestData, uploadedFiles, err := form.extractRequestData(r, keyPrefix)
- if err != nil {
- return err
- }
-
- if err := form.LoadData(requestData); err != nil {
- return err
- }
-
- for key, files := range uploadedFiles {
- form.AddFiles(key, files...)
- }
-
- return nil
+// This could be used, for example, if at a later stage before submission
+// you want to switch from a regular to a transactional app instance.
+func (form *RecordUpsert) SetApp(app core.App) {
+ form.app = app
}
-// FilesToUpload returns the parsed request files ready for upload.
-func (form *RecordUpsert) FilesToUpload() map[string][]*filesystem.File {
- return form.filesToUpload
+// SetRecord replaces the current form record instance.
+func (form *RecordUpsert) SetRecord(record *core.Record) {
+ form.record = record
}
-// FilesToUpload returns the parsed request filenames ready to be deleted.
-func (form *RecordUpsert) FilesToDelete() []string {
- return form.filesToDelete
+// ResetAccess resets the form access level to the accessLevelDefault.
+func (form *RecordUpsert) ResetAccess() {
+ form.accessLevel = accessLevelDefault
}
-// AddFiles adds the provided file(s) to the specified file field.
-//
-// If the file field is a SINGLE-value file field (aka. "Max Select = 1"),
-// then the newly added file will REPLACE the existing one.
-// In this case if you pass more than 1 files only the first one will be assigned.
-//
-// If the file field is a MULTI-value file field (aka. "Max Select > 1"),
-// then the newly added file(s) will be APPENDED to the existing one(s).
-//
-// Example
-//
-// f1, _ := filesystem.NewFileFromPath("/path/to/file1.txt")
-// f2, _ := filesystem.NewFileFromPath("/path/to/file2.txt")
-// form.AddFiles("documents", f1, f2)
-func (form *RecordUpsert) AddFiles(key string, files ...*filesystem.File) error {
- field := form.record.Collection().Schema.GetFieldByName(key)
- if field == nil || field.Type != schema.FieldTypeFile {
- return errors.New("invalid field key")
- }
-
- options, ok := field.Options.(*schema.FileOptions)
- if !ok {
- return errors.New("failed to initilize field options")
- }
-
- if len(files) == 0 {
- return nil // nothing to upload
- }
-
- if form.filesToUpload == nil {
- form.filesToUpload = map[string][]*filesystem.File{}
- }
-
- oldNames := list.ToUniqueStringSlice(form.data[key])
-
- if options.MaxSelect == 1 {
- // mark previous file(s) for deletion before replacing
- if len(oldNames) > 0 {
- form.filesToDelete = list.ToUniqueStringSlice(append(form.filesToDelete, oldNames...))
- }
-
- // replace
- form.filesToUpload[key] = []*filesystem.File{files[0]}
- form.data[key] = field.PrepareValue(files[0].Name)
- } else {
- // append
- form.filesToUpload[key] = append(form.filesToUpload[key], files...)
- for _, f := range files {
- oldNames = append(oldNames, f.Name)
- }
- form.data[key] = field.PrepareValue(oldNames)
- }
-
- return nil
+// GrantManagerAccess updates the form access level to "manager" allowing
+// directly changing some system record fields (often used with auth collection records).
+func (form *RecordUpsert) GrantManagerAccess() {
+ form.accessLevel = accessLevelManager
}
-// RemoveFiles removes a single or multiple file from the specified file field.
-//
-// NB! If filesToDelete is not set it will remove all existing files
-// assigned to the file field (including those assigned with AddFiles)!
-//
-// Example
-//
-// // mark only only 2 files for removal
-// form.RemoveFiles("documents", "file1_aw4bdrvws6.txt", "file2_xwbs36bafv.txt")
-//
-// // mark all "documents" files for removal
-// form.RemoveFiles("documents")
-func (form *RecordUpsert) RemoveFiles(key string, toDelete ...string) error {
- field := form.record.Collection().Schema.GetFieldByName(key)
- if field == nil || field.Type != schema.FieldTypeFile {
- return errors.New("invalid field key")
- }
-
- existing := list.ToUniqueStringSlice(form.data[key])
-
- // mark all files for deletion
- if len(toDelete) == 0 {
- toDelete = make([]string, len(existing))
- copy(toDelete, existing)
- }
-
- // check for existing files
- for i := len(existing) - 1; i >= 0; i-- {
- if list.ExistInSlice(existing[i], toDelete) {
- form.filesToDelete = append(form.filesToDelete, existing[i])
- existing = append(existing[:i], existing[i+1:]...)
- }
- }
-
- // check for newly uploaded files
- for i := len(form.filesToUpload[key]) - 1; i >= 0; i-- {
- f := form.filesToUpload[key][i]
- if list.ExistInSlice(f.Name, toDelete) {
- form.filesToUpload[key] = append(form.filesToUpload[key][:i], form.filesToUpload[key][i+1:]...)
- }
- }
-
- form.data[key] = field.PrepareValue(existing)
-
- return nil
+// GrantSuperuserAccess updates the form access level to "superuser" allowing
+// directly changing all system record fields, including those marked as "Hidden".
+func (form *RecordUpsert) GrantSuperuserAccess() {
+ form.accessLevel = accessLevelSuperuser
}
-// LoadData loads and normalizes the provided regular record data fields into the form.
-func (form *RecordUpsert) LoadData(requestData map[string]any) error {
- // load base system fields
- if v, ok := requestData[schema.FieldNameId]; ok {
- form.Id = cast.ToString(v)
- }
+// HasManageAccess reports whether the form has "manager" or "superuser" level access.
+func (form *RecordUpsert) HasManageAccess() bool {
+ return form.accessLevel == accessLevelManager || form.accessLevel == accessLevelSuperuser
+}
- // load auth system fields
- if form.record.Collection().IsAuth() {
- if v, ok := requestData[schema.FieldNameUsername]; ok {
- form.Username = cast.ToString(v)
- }
- if v, ok := requestData[schema.FieldNameEmail]; ok {
- form.Email = cast.ToString(v)
- }
- if v, ok := requestData[schema.FieldNameEmailVisibility]; ok {
- form.EmailVisibility = cast.ToBool(v)
- }
- if v, ok := requestData[schema.FieldNameVerified]; ok {
- form.Verified = cast.ToBool(v)
- }
- if v, ok := requestData["password"]; ok {
+// Load loads the provided data into the form and the related record.
+func (form *RecordUpsert) Load(data map[string]any) {
+ excludeFields := []string{core.FieldNameExpand}
+
+ isAuth := form.record.Collection().IsAuth()
+
+ // load the special auth form fields
+ if isAuth {
+ if v, ok := data["password"]; ok {
form.Password = cast.ToString(v)
}
- if v, ok := requestData["passwordConfirm"]; ok {
+ if v, ok := data["passwordConfirm"]; ok {
form.PasswordConfirm = cast.ToString(v)
}
- if v, ok := requestData["oldPassword"]; ok {
+ if v, ok := data["oldPassword"]; ok {
form.OldPassword = cast.ToString(v)
}
+
+ excludeFields = append(excludeFields, "passwordConfirm", "oldPassword") // skip non-schema password fields
}
- // replace modifiers (if any)
- requestData = form.record.ReplaceModifers(requestData)
-
- // create a shallow copy of form.data
- var extendedData = make(map[string]any, len(form.data))
- for k, v := range form.data {
- extendedData[k] = v
- }
-
- // extend form.data with the request data
- rawData, err := json.Marshal(requestData)
- if err != nil {
- return err
- }
- if err := json.Unmarshal(rawData, &extendedData); err != nil {
- return err
- }
-
- for _, field := range form.record.Collection().Schema.Fields() {
- key := field.Name
- value := field.PrepareValue(extendedData[key])
-
- if field.Type != schema.FieldTypeFile {
- form.data[key] = value
+ for k, v := range data {
+ if slices.Contains(excludeFields, k) {
continue
}
- // -----------------------------------------------------------
- // Delete previously uploaded file(s)
- // -----------------------------------------------------------
+ // set only known collection fields
+ field := form.record.SetIfFieldExists(k, v)
- oldNames := form.record.GetStringSlice(key)
- submittedNames := list.ToUniqueStringSlice(value)
-
- // ensure that all submitted names are existing to prevent accidental files deletions
- if len(submittedNames) > len(oldNames) || len(list.SubtractSlice(submittedNames, oldNames)) != 0 {
- return validation.Errors{
- key: validation.NewError(
- "validation_unknown_filenames",
- "The field contains unknown filenames.",
- ),
- }
- }
-
- // if empty value was set, mark all previously uploaded files for deletion
- // otherwise check for "deleted" (aka. unsubmitted) file names
- if len(submittedNames) == 0 && len(oldNames) > 0 {
- form.RemoveFiles(key)
- } else if len(oldNames) > 0 {
- toDelete := []string{}
-
- for _, name := range oldNames {
- // submitted as a modifier or a new array
- if !list.ExistInSlice(name, submittedNames) {
- toDelete = append(toDelete, name)
- continue
- }
- }
-
- if len(toDelete) > 0 {
- form.RemoveFiles(key, toDelete...)
- }
- }
-
- // allow file key reasignments for file names sorting
- // (only if all submitted values already exists)
- if len(submittedNames) > 0 && len(list.SubtractSlice(submittedNames, oldNames)) == 0 {
- form.data[key] = submittedNames
+ // restore the original value if it is a hidden field (with the exception of the auth "password")
+ //
+ // note: this is an extra measure against loading hidden fields,
+ // but it is usually not needed by the default route handlers since
+ // they additionally filter the data before calling Load
+ if form.accessLevel != accessLevelSuperuser && field != nil && field.GetHidden() && (!isAuth || field.GetName() != core.FieldNamePassword) {
+ form.record.SetRaw(field.GetName(), form.record.Original().GetRaw(field.GetName()))
}
}
-
- return nil
}
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *RecordUpsert) Validate() error {
- // base form fields validator
- baseFieldsRules := []*validation.FieldRules{
- validation.Field(
- &form.Id,
- validation.When(
- form.record.IsNew(),
- validation.Length(models.DefaultIdLength, models.DefaultIdLength),
- validation.Match(idRegex),
- validation.By(validators.UniqueId(form.dao, form.record.TableName())),
- ).Else(validation.In(form.record.Id)),
- ),
+func (form *RecordUpsert) validateFormFields() error {
+ isAuth := form.record.Collection().IsAuth()
+ if !isAuth {
+ return nil
}
- // auth fields validators
- if form.record.Collection().IsAuth() {
- baseFieldsRules = append(baseFieldsRules,
- validation.Field(
- &form.Username,
- // require only on update, because on create we fallback to auto generated username
- validation.When(!form.record.IsNew(), validation.Required),
- validation.Length(3, 150),
- validation.Match(usernameRegex),
- validation.By(form.checkUniqueUsername),
- ),
- validation.Field(
- &form.Email,
- validation.When(
- form.record.Collection().AuthOptions().RequireEmail,
- validation.Required,
- ),
- // don't allow direct email change (or unset) if the form doesn't have manage access permissions
+ isNew := form.record.IsNew()
+
+ original := form.record.Original()
+
+ validateData := map[string]any{
+ "email": form.record.Email(),
+ "verified": form.record.Verified(),
+ "password": form.Password,
+ "passwordConfirm": form.PasswordConfirm,
+ "oldPassword": form.OldPassword,
+ }
+
+ return validation.Validate(validateData,
+ validation.Map(
+ validation.Key(
+ "email",
+ // don't allow direct email updates if the form doesn't have manage access permissions
// (aka. allow only admin or authorized auth models to directly update the field)
validation.When(
- !form.record.IsNew() && !form.manageAccess,
- validation.In(form.record.Email()),
+ !isNew && !form.HasManageAccess(),
+ validation.By(validators.Equal(original.Email())),
),
- validation.Length(1, 255),
- is.EmailFormat,
- validation.By(form.checkEmailDomain),
- validation.By(form.checkUniqueEmail),
),
- validation.Field(
- &form.Verified,
+ validation.Key(
+ "verified",
// don't allow changing verified if the form doesn't have manage access permissions
// (aka. allow only admin or authorized auth models to directly change the field)
validation.When(
- !form.manageAccess,
- validation.In(form.record.Verified()),
+ !form.HasManageAccess(),
+ validation.By(validators.Equal(original.Verified())),
),
),
- validation.Field(
- &form.Password,
+ validation.Key(
+ "password",
validation.When(
- (form.record.IsNew() || form.PasswordConfirm != "" || form.OldPassword != ""),
+ (isNew || form.PasswordConfirm != "" || form.OldPassword != ""),
validation.Required,
),
- validation.Length(form.record.Collection().AuthOptions().MinPasswordLength, 72),
),
- validation.Field(
- &form.PasswordConfirm,
+ validation.Key(
+ "passwordConfirm",
validation.When(
- (form.record.IsNew() || form.Password != "" || form.OldPassword != ""),
+ (isNew || form.Password != "" || form.OldPassword != ""),
validation.Required,
),
- validation.By(validators.Compare(form.Password)),
+ validation.By(validators.Equal(form.Password)),
),
- validation.Field(
- &form.OldPassword,
+ validation.Key(
+ "oldPassword",
// require old password only on update when:
- // - form.manageAccess is not set
+ // - form.HasManageAccess() is not satisfied
// - changing the existing password
validation.When(
- !form.record.IsNew() && !form.manageAccess && (form.Password != "" || form.PasswordConfirm != ""),
+ !isNew && !form.HasManageAccess() && (form.Password != "" || form.PasswordConfirm != ""),
validation.Required,
validation.By(form.checkOldPassword),
),
),
- )
- }
-
- if err := validation.ValidateStruct(form, baseFieldsRules...); err != nil {
- return err
- }
-
- // record data validator
- return validators.NewRecordDataValidator(
- form.dao,
- form.record,
- form.filesToUpload,
- ).Validate(form.data)
-}
-
-func (form *RecordUpsert) checkUniqueUsername(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil
- }
-
- isUnique := form.dao.IsRecordValueUnique(
- form.record.Collection().Id,
- schema.FieldNameUsername,
- v,
- form.record.Id,
+ ),
)
- if !isUnique {
- return validation.NewError("validation_invalid_username", "The username is invalid or already in use.")
- }
-
- return nil
-}
-
-func (form *RecordUpsert) checkUniqueEmail(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil
- }
-
- isUnique := form.dao.IsRecordValueUnique(
- form.record.Collection().Id,
- schema.FieldNameEmail,
- v,
- form.record.Id,
- )
- if !isUnique {
- return validation.NewError("validation_invalid_email", "The email is invalid or already in use.")
- }
-
- return nil
-}
-
-func (form *RecordUpsert) checkEmailDomain(value any) error {
- val, _ := value.(string)
- if val == "" {
- return nil // nothing to check
- }
-
- domain := val[strings.LastIndex(val, "@")+1:]
- only := form.record.Collection().AuthOptions().OnlyEmailDomains
- except := form.record.Collection().AuthOptions().ExceptEmailDomains
-
- // only domains check
- if len(only) > 0 && !list.ExistInSlice(domain, only) {
- return validation.NewError("validation_email_domain_not_allowed", "Email domain is not allowed.")
- }
-
- // except domains check
- if len(except) > 0 && list.ExistInSlice(domain, except) {
- return validation.NewError("validation_email_domain_not_allowed", "Email domain is not allowed.")
- }
-
- return nil
}
func (form *RecordUpsert) checkOldPassword(value any) error {
v, _ := value.(string)
- if v == "" {
+ if v == "" || form.record.IsNew() {
return nil // nothing to check
}
- if !form.record.ValidatePassword(v) {
+ if !form.record.Original().ValidatePassword(v) {
return validation.NewError("validation_invalid_old_password", "Missing or invalid old password.")
}
return nil
}
-func (form *RecordUpsert) ValidateAndFill() error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- isNew := form.record.IsNew()
-
- // custom insertion id can be set only on create
- if isNew && form.Id != "" {
- form.record.SetId(form.Id)
- form.record.MarkAsNew()
- }
-
- // set auth fields
- if form.record.Collection().IsAuth() {
- // generate a default username during create (if missing)
- if form.record.IsNew() && form.Username == "" {
- baseUsername := form.record.Collection().Name + security.RandomStringWithAlphabet(5, "123456789")
- form.Username = form.dao.SuggestUniqueAuthRecordUsername(form.record.Collection().Id, baseUsername)
- }
-
- if form.Username != "" {
- if err := form.record.SetUsername(form.Username); err != nil {
- return err
- }
- }
-
- if isNew || form.manageAccess {
- if err := form.record.SetEmail(form.Email); err != nil {
- return err
- }
- }
-
- if err := form.record.SetEmailVisibility(form.EmailVisibility); err != nil {
- return err
- }
-
- if form.manageAccess {
- if err := form.record.SetVerified(form.Verified); err != nil {
- return err
- }
- }
-
- if form.Password != "" && form.Password == form.PasswordConfirm {
- if err := form.record.SetPassword(form.Password); err != nil {
- return err
- }
- }
- }
-
- // bulk load the remaining form data
- form.record.Load(form.data)
-
- return nil
-}
-
-// DrySubmit performs a form submit within a transaction and reverts it.
-// For actual record persistence, check the `form.Submit()` method.
+// @todo consider removing and executing the Create API rule without dummy insert.
//
-// This method doesn't handle file uploads/deletes or trigger any app events!
-func (form *RecordUpsert) DrySubmit(callback func(txDao *daos.Dao) error) error {
+// DrySubmit performs a temp form submit within a transaction and reverts it at the end.
+// For actual record persistence, check the [RecordUpsert.Submit()] method.
+//
+// This method doesn't perform validations, handle file uploads/deletes or trigger app save events!
+func (form *RecordUpsert) DrySubmit(callback func(txApp core.App, drySavedRecord *core.Record) error) error {
isNew := form.record.IsNew()
- if err := form.ValidateAndFill(); err != nil {
- return err
+ clone := form.record.Clone()
+
+ // set an id if it doesn't have one already
+ // (the value doesn't matter; it is used only during the manual delete/update rollback)
+ if clone.IsNew() && clone.Id == "" {
+ clone.Id = "_temp_" + security.PseudorandomString(15)
}
- var dryDao *daos.Dao
- if form.dao.ConcurrentDB() == form.dao.NonconcurrentDB() {
- // it is already in a transaction and therefore use the app concurrent db pool
- // to prevent "transaction has already been committed or rolled back" error
- dryDao = daos.New(form.app.Dao().ConcurrentDB())
- } else {
- // otherwise use the form noncurrent dao db pool
- dryDao = daos.New(form.dao.NonconcurrentDB())
+ app := form.app.UnsafeWithoutHooks()
+
+ _, isTransactional := app.DB().(*dbx.Tx)
+ if !isTransactional {
+ return app.RunInTransaction(func(txApp core.App) error {
+ tx, ok := txApp.DB().(*dbx.Tx)
+ if !ok {
+ return errors.New("failed to get transaction db")
+ }
+ defer tx.Rollback()
+
+ if err := txApp.SaveNoValidateWithContext(form.ctx, clone); err != nil {
+ return validators.NormalizeUniqueIndexError(err, clone.Collection().Name, clone.Collection().Fields.FieldNames())
+ }
+
+ if callback != nil {
+ return callback(txApp, clone)
+ }
+
+ return nil
+ })
}
- return dryDao.RunInTransaction(func(txDao *daos.Dao) error {
- tx, ok := txDao.DB().(*dbx.Tx)
- if !ok {
- return errors.New("failed to get transaction db")
- }
- defer tx.Rollback()
+ // already in a transaction
+ // (manual rollback to avoid starting another transaction)
+ // ---------------------------------------------------------------
+ err := app.SaveNoValidateWithContext(form.ctx, clone)
+ if err != nil {
+ return validators.NormalizeUniqueIndexError(err, clone.Collection().Name, clone.Collection().Fields.FieldNames())
+ }
- if err := txDao.SaveRecord(form.record); err != nil {
- return form.prepareError(err)
- }
-
- // restore record isNew state
+ manualRollback := func() error {
if isNew {
- form.record.MarkAsNew()
- }
-
- if callback != nil {
- return callback(txDao)
- }
-
- return nil
- })
-}
-
-// Submit validates the form and upserts the form Record model.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *RecordUpsert) Submit(interceptors ...InterceptorFunc[*models.Record]) error {
- if err := form.ValidateAndFill(); err != nil {
- return err
- }
-
- return runInterceptors(form.record, func(record *models.Record) error {
- form.record = record
-
- if !form.record.HasId() {
- form.record.RefreshId()
- form.record.MarkAsNew()
- }
-
- dao := form.dao.Clone()
-
- // upload new files (if any)
- //
- // note: executed after the default BeforeCreateFunc and BeforeUpdateFunc hook actions
- // to allow uploading AFTER the before app model hooks (eg. in case of an id change)
- // but BEFORE the actual record db persistence
- // ---
- dao.BeforeCreateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- newAction := func() error {
- if m.TableName() == form.record.TableName() && m.GetId() == form.record.GetId() {
- if err := form.processFilesToUpload(); err != nil {
- return err
- }
- }
-
- return action()
+ err = app.DeleteWithContext(form.ctx, clone)
+ if err != nil {
+ return fmt.Errorf("failed to rollback dry submit created record: %w", err)
}
-
- if form.dao.BeforeCreateFunc != nil {
- return form.dao.BeforeCreateFunc(eventDao, m, newAction)
- }
-
- return newAction()
- }
-
- dao.BeforeUpdateFunc = func(eventDao *daos.Dao, m models.Model, action func() error) error {
- newAction := func() error {
- if m.TableName() == form.record.TableName() && m.GetId() == form.record.GetId() {
- if err := form.processFilesToUpload(); err != nil {
- return err
- }
- }
-
- return action()
- }
-
- if form.dao.BeforeUpdateFunc != nil {
- return form.dao.BeforeUpdateFunc(eventDao, m, newAction)
- }
-
- return newAction()
- }
- // ---
-
- // persist the record model
- if err := dao.SaveRecord(form.record); err != nil {
- return form.prepareError(err)
- }
-
- // delete old files (if any)
- //
- // for now fail silently to avoid reupload when `form.Submit()`
- // is called manually (aka. not from an api request)...
- if err := form.processFilesToDelete(); err != nil {
- form.app.Logger().Debug(
- "Failed to delete old files",
- slog.String("error", err.Error()),
- )
- }
-
- return nil
- }, interceptors...)
-}
-
-func (form *RecordUpsert) processFilesToUpload() error {
- if len(form.filesToUpload) == 0 {
- return nil // no parsed file fields
- }
-
- if !form.record.HasId() {
- return errors.New("the record doesn't have an id")
- }
-
- fs, err := form.app.NewFilesystem()
- if err != nil {
- return err
- }
- defer fs.Close()
-
- var uploadErrors []error // list of upload errors
- var uploaded []string // list of uploaded file paths
-
- for fieldKey := range form.filesToUpload {
- for i, file := range form.filesToUpload[fieldKey] {
- path := form.record.BaseFilesPath() + "/" + file.Name
- if err := fs.UploadFile(file, path); err == nil {
- // keep track of the already uploaded file
- uploaded = append(uploaded, path)
- } else {
- // store the upload error
- uploadErrors = append(uploadErrors, fmt.Errorf("file %d: %v", i, err))
- }
- }
- }
-
- if len(uploadErrors) > 0 {
- // cleanup - try to delete the successfully uploaded files (if any)
- form.deleteFilesByNamesList(uploaded)
-
- return fmt.Errorf("failed to upload all files: %v", uploadErrors)
- }
-
- return nil
-}
-
-func (form *RecordUpsert) processFilesToDelete() (err error) {
- form.filesToDelete, err = form.deleteFilesByNamesList(form.filesToDelete)
- return
-}
-
-// deleteFiles deletes a list of record files by their names.
-// Returns the failed/remaining files.
-func (form *RecordUpsert) deleteFilesByNamesList(filenames []string) ([]string, error) {
- if len(filenames) == 0 {
- return filenames, nil // nothing to delete
- }
-
- if !form.record.HasId() {
- return filenames, errors.New("the record doesn't have an id")
- }
-
- fs, err := form.app.NewFilesystem()
- if err != nil {
- return filenames, err
- }
- defer fs.Close()
-
- var deleteErrors []error
-
- for i := len(filenames) - 1; i >= 0; i-- {
- filename := filenames[i]
- path := form.record.BaseFilesPath() + "/" + filename
-
- if err := fs.Delete(path); err == nil {
- // remove the deleted file from the list
- filenames = append(filenames[:i], filenames[i+1:]...)
-
- // try to delete the related file thumbs (if any)
- fs.DeletePrefix(form.record.BaseFilesPath() + "/thumbs_" + filename + "/")
} else {
- // store the delete error
- deleteErrors = append(deleteErrors, fmt.Errorf("file %d: %v", i, err))
- }
- }
-
- if len(deleteErrors) > 0 {
- return filenames, fmt.Errorf("failed to delete all files: %v", deleteErrors)
- }
-
- return filenames, nil
-}
-
-// prepareError parses the provided error and tries to return
-// user-friendly validation error(s).
-func (form *RecordUpsert) prepareError(err error) error {
- msg := strings.ToLower(err.Error())
-
- validationErrs := validation.Errors{}
-
- // check for unique constraint failure
- if strings.Contains(msg, "unique constraint failed") {
- msg = strings.ReplaceAll(strings.TrimSpace(msg), ",", " ")
-
- c := form.record.Collection()
- for _, f := range c.Schema.Fields() {
- // blank space to unify multi-columns lookup
- if strings.Contains(msg+" ", strings.ToLower(c.Name+"."+f.Name)) {
- validationErrs[f.Name] = validation.NewError("validation_not_unique", "Value must be unique")
+ clone.Load(clone.Original().FieldsData())
+ err = app.SaveNoValidateWithContext(form.ctx, clone)
+ if err != nil {
+ return fmt.Errorf("failed to rollback dry submit updated record: %w", err)
}
}
+
+ return nil
}
- if len(validationErrs) > 0 {
- return validationErrs
+ if callback != nil {
+ return errors.Join(callback(app, clone), manualRollback())
}
- return err
+ return manualRollback()
+}
+
+// Submit validates the form specific validations and attempts to save the form record.
+func (form *RecordUpsert) Submit() error {
+ err := form.validateFormFields()
+ if err != nil {
+ return err
+ }
+
+ // run record validations and persist in db
+ return form.app.SaveWithContext(form.ctx, form.record)
}
diff --git a/forms/record_upsert_test.go b/forms/record_upsert_test.go
index 6a730f81..bd88629e 100644
--- a/forms/record_upsert_test.go
+++ b/forms/record_upsert_test.go
@@ -4,1224 +4,894 @@ import (
"bytes"
"encoding/json"
"errors"
- "fmt"
- "net/http"
- "net/http/httptest"
+ "maps"
"os"
"path/filepath"
"strings"
"testing"
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/labstack/echo/v5"
"github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
"github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
"github.com/pocketbase/pocketbase/tests"
"github.com/pocketbase/pocketbase/tools/filesystem"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/rest"
- "github.com/pocketbase/pocketbase/tools/types"
)
-func hasRecordFile(app core.App, record *models.Record, filename string) bool {
- fs, _ := app.NewFilesystem()
- defer fs.Close()
+func TestRecordUpsertLoad(t *testing.T) {
+ t.Parallel()
- fileKey := filepath.Join(
- record.Collection().Id,
- record.Id,
- filename,
- )
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
- exists, _ := fs.Exists(fileKey)
-
- return exists
-}
-
-func TestNewRecordUpsert(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, _ := app.Dao().FindCollectionByNameOrId("demo2")
- record := models.NewRecord(collection)
- record.Set("title", "test_value")
-
- form := forms.NewRecordUpsert(app, record)
-
- val := form.Data()["title"]
- if val != "test_value" {
- t.Errorf("Expected record data to be loaded, got %v", form.Data())
- }
-}
-
-func TestRecordUpsertLoadRequestUnsupported(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- record, err := app.Dao().FindRecordById("demo2", "0yxhwia2amd8gec")
+ demo1Col, err := testApp.FindCollectionByNameOrId("demo1")
if err != nil {
t.Fatal(err)
}
- testData := "title=test123"
-
- form := forms.NewRecordUpsert(app, record)
- req := httptest.NewRequest(http.MethodGet, "/", strings.NewReader(testData))
- req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationForm)
-
- if err := form.LoadRequest(req, ""); err == nil {
- t.Fatal("Expected LoadRequest to fail, got nil")
- }
-}
-
-func TestRecordUpsertLoadRequestJson(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- record, err := app.Dao().FindRecordById("demo1", "84nmscqy84lsi1t")
+ usersCol, err := testApp.FindCollectionByNameOrId("users")
if err != nil {
t.Fatal(err)
}
- testData := map[string]any{
- "a": map[string]any{
- "b": map[string]any{
- "id": "test_id",
- "text": "test123",
- "unknown": "test456",
- // file fields unset/delete
- "file_one": nil,
- "file_many.0": "", // delete by index
- "file_many-": []string{"test_MaWC6mWyrP.txt", "test_tC1Yc87DfC.txt"}, // multiple delete with modifier
- "file_many.300_WlbFWSGmW9.png": nil, // delete by filename
- "file_many.2": "test.png", // should be ignored
- },
- },
- }
-
- form := forms.NewRecordUpsert(app, record)
- jsonBody, _ := json.Marshal(testData)
- req := httptest.NewRequest(http.MethodGet, "/", bytes.NewReader(jsonBody))
- req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON)
- loadErr := form.LoadRequest(req, "a.b")
- if loadErr != nil {
- t.Fatal(loadErr)
- }
-
- if form.Id != "test_id" {
- t.Fatalf("Expect id field to be %q, got %q", "test_id", form.Id)
- }
-
- if v, ok := form.Data()["text"]; !ok || v != "test123" {
- t.Fatalf("Expect title field to be %q, got %q", "test123", v)
- }
-
- if v, ok := form.Data()["unknown"]; ok {
- t.Fatalf("Didn't expect unknown field to be set, got %v", v)
- }
-
- fileOne, ok := form.Data()["file_one"]
- if !ok {
- t.Fatal("Expect file_one field to be set")
- }
- if fileOne != "" {
- t.Fatalf("Expect file_one field to be empty string, got %v", fileOne)
- }
-
- fileMany, ok := form.Data()["file_many"]
- if !ok || fileMany == nil {
- t.Fatal("Expect file_many field to be set")
- }
- manyfilesRemains := len(list.ToUniqueStringSlice(fileMany))
- if manyfilesRemains != 1 {
- t.Fatalf("Expect only 1 file_many to remain, got \n%v", fileMany)
- }
-}
-
-func TestRecordUpsertLoadRequestMultipart(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- record, err := app.Dao().FindRecordById("demo1", "84nmscqy84lsi1t")
- if err != nil {
- t.Fatal(err)
- }
-
- formData, mp, err := tests.MockMultipartData(map[string]string{
- "a.b.id": "test_id",
- "a.b.text": "test123",
- "a.b.unknown": "test456",
- "a.b." + rest.MultipartJsonKey: `{"json":["a","b"],"email":"test3@example.com"}`,
- // file fields unset/delete
- "a.b.file_one-": "test_d61b33QdDU.txt", // delete with modifier
- "a.b.file_many.0": "", // delete by index
- "a.b.file_many-": "test_tC1Yc87DfC.txt", // delete with modifier
- "a.b.file_many.300_WlbFWSGmW9.png": "", // delete by filename
- "a.b.file_many.2": "test.png", // should be ignored
- }, "a.b.file_many")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordUpsert(app, record)
- req := httptest.NewRequest(http.MethodGet, "/", formData)
- req.Header.Set(echo.HeaderContentType, mp.FormDataContentType())
- loadErr := form.LoadRequest(req, "a.b")
- if loadErr != nil {
- t.Fatal(loadErr)
- }
-
- if form.Id != "test_id" {
- t.Fatalf("Expect id field to be %q, got %q", "test_id", form.Id)
- }
-
- if v, ok := form.Data()["text"]; !ok || v != "test123" {
- t.Fatalf("Expect text field to be %q, got %q", "test123", v)
- }
-
- if v, ok := form.Data()["unknown"]; ok {
- t.Fatalf("Didn't expect unknown field to be set, got %v", v)
- }
-
- if v, ok := form.Data()["email"]; !ok || v != "test3@example.com" {
- t.Fatalf("Expect email field to be %q, got %q", "test3@example.com", v)
- }
-
- rawJsonValue, ok := form.Data()["json"].(types.JsonRaw)
- if !ok {
- t.Fatal("Expect json field to be set")
- }
- expectedJsonValue := `["a","b"]`
- if rawJsonValue.String() != expectedJsonValue {
- t.Fatalf("Expect json field %v, got %v", expectedJsonValue, rawJsonValue)
- }
-
- fileOne, ok := form.Data()["file_one"]
- if !ok {
- t.Fatal("Expect file_one field to be set")
- }
- if fileOne != "" {
- t.Fatalf("Expect file_one field to be empty string, got %v", fileOne)
- }
-
- fileMany, ok := form.Data()["file_many"]
- if !ok || fileMany == nil {
- t.Fatal("Expect file_many field to be set")
- }
- manyfilesRemains := len(list.ToUniqueStringSlice(fileMany))
- expectedRemains := 3 // 5 old; 3 deleted and 1 new uploaded
- if manyfilesRemains != expectedRemains {
- t.Fatalf("Expect file_many to be %d, got %d (%v)", expectedRemains, manyfilesRemains, fileMany)
- }
-}
-
-func TestRecordUpsertLoadData(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- record, err := app.Dao().FindRecordById("demo2", "llvuca81nly1qls")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordUpsert(app, record)
-
- loadErr := form.LoadData(map[string]any{
- "title": "test_new",
- "active": true,
- })
- if loadErr != nil {
- t.Fatal(loadErr)
- }
-
- if v, ok := form.Data()["title"]; !ok || v != "test_new" {
- t.Fatalf("Expect title field to be %v, got %v", "test_new", v)
- }
-
- if v, ok := form.Data()["active"]; !ok || v != true {
- t.Fatalf("Expect active field to be %v, got %v", true, v)
- }
-}
-
-func TestRecordUpsertDrySubmitFailure(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, _ := app.Dao().FindCollectionByNameOrId("demo1")
- recordBefore, err := app.Dao().FindRecordById(collection.Id, "al1h9ijdeojtsjy")
- if err != nil {
- t.Fatal(err)
- }
-
- formData, mp, err := tests.MockMultipartData(map[string]string{
- "title": "abc",
- "rel_one": "missing",
- })
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordUpsert(app, recordBefore)
- req := httptest.NewRequest(http.MethodGet, "/", formData)
- req.Header.Set(echo.HeaderContentType, mp.FormDataContentType())
- form.LoadRequest(req, "")
-
- callbackCalls := 0
-
- // ensure that validate is triggered
- // ---
- result := form.DrySubmit(func(txDao *daos.Dao) error {
- callbackCalls++
- return nil
- })
- if result == nil {
- t.Fatal("Expected error, got nil")
- }
- if callbackCalls != 0 {
- t.Fatalf("Expected callbackCalls to be 0, got %d", callbackCalls)
- }
-
- // ensure that the record changes weren't persisted
- // ---
- recordAfter, err := app.Dao().FindRecordById(collection.Id, recordBefore.Id)
- if err != nil {
- t.Fatal(err)
- }
-
- if recordAfter.GetString("title") == "abc" {
- t.Fatalf("Expected record.title to be %v, got %v", recordAfter.GetString("title"), "abc")
- }
-
- if recordAfter.GetString("rel_one") == "missing" {
- t.Fatalf("Expected record.rel_one to be %s, got %s", recordBefore.GetString("rel_one"), "missing")
- }
-}
-
-func TestRecordUpsertDrySubmitSuccess(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, _ := app.Dao().FindCollectionByNameOrId("demo1")
- recordBefore, err := app.Dao().FindRecordById(collection.Id, "84nmscqy84lsi1t")
- if err != nil {
- t.Fatal(err)
- }
-
- formData, mp, err := tests.MockMultipartData(map[string]string{
- "title": "dry_test",
- "file_one": "",
- }, "file_many")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordUpsert(app, recordBefore)
- req := httptest.NewRequest(http.MethodGet, "/", formData)
- req.Header.Set(echo.HeaderContentType, mp.FormDataContentType())
- form.LoadRequest(req, "")
-
- callbackCalls := 0
-
- result := form.DrySubmit(func(txDao *daos.Dao) error {
- callbackCalls++
- return nil
- })
- if result != nil {
- t.Fatalf("Expected nil, got error %v", result)
- }
-
- // ensure callback was called
- if callbackCalls != 1 {
- t.Fatalf("Expected callbackCalls to be 1, got %d", callbackCalls)
- }
-
- // ensure that the record changes weren't persisted
- recordAfter, err := app.Dao().FindRecordById(collection.Id, recordBefore.Id)
- if err != nil {
- t.Fatal(err)
- }
- if recordAfter.GetString("title") == "dry_test" {
- t.Fatalf("Expected record.title to be %v, got %v", recordAfter.GetString("title"), "dry_test")
- }
- if recordAfter.GetString("file_one") == "" {
- t.Fatal("Expected record.file_one to not be changed, got empty string")
- }
-
- // file wasn't removed
- if !hasRecordFile(app, recordAfter, recordAfter.GetString("file_one")) {
- t.Fatal("file_one file should not have been deleted")
- }
-}
-
-func TestRecordUpsertDrySubmitWithNestedTx(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, _ := app.Dao().FindCollectionByNameOrId("demo1")
- recordBefore, err := app.Dao().FindRecordById(collection.Id, "84nmscqy84lsi1t")
- if err != nil {
- t.Fatal(err)
- }
-
- formData, mp, err := tests.MockMultipartData(map[string]string{
- "title": "dry_test",
- })
- if err != nil {
- t.Fatal(err)
- }
-
- txErr := app.Dao().RunInTransaction(func(txDao *daos.Dao) error {
- form := forms.NewRecordUpsert(app, recordBefore)
- form.SetDao(txDao)
- req := httptest.NewRequest(http.MethodGet, "/", formData)
- req.Header.Set(echo.HeaderContentType, mp.FormDataContentType())
- form.LoadRequest(req, "")
-
- callbackCalls := 0
-
- result := form.DrySubmit(func(innerTxDao *daos.Dao) error {
- callbackCalls++
- return nil
- })
- if result != nil {
- t.Fatalf("Expected nil, got error %v", result)
- }
-
- // ensure callback was called
- if callbackCalls != 1 {
- t.Fatalf("Expected callbackCalls to be 1, got %d", callbackCalls)
- }
-
- // ensure that the original txDao can still be used after the DrySubmit rollback
- if _, err := txDao.FindRecordById(collection.Id, recordBefore.Id); err != nil {
- t.Fatalf("Expected the dry submit rollback to not affect the outer tx context, got %v", err)
- }
-
- // ensure that the record changes weren't persisted
- recordAfter, err := app.Dao().FindRecordById(collection.Id, recordBefore.Id)
- if err != nil {
- t.Fatal(err)
- }
- if recordAfter.GetString("title") == "dry_test" {
- t.Fatalf("Expected record.title to be %v, got %v", recordBefore.GetString("title"), "dry_test")
- }
-
- return nil
- })
- if txErr != nil {
- t.Fatalf("Nested transactions failure: %v", txErr)
- }
-}
-
-func TestRecordUpsertSubmitFailure(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, err := app.Dao().FindCollectionByNameOrId("demo1")
- if err != nil {
- t.Fatal(err)
- }
-
- recordBefore, err := app.Dao().FindRecordById(collection.Id, "84nmscqy84lsi1t")
- if err != nil {
- t.Fatal(err)
- }
-
- formData, mp, err := tests.MockMultipartData(map[string]string{
- "text": "abc",
- "bool": "false",
- "select_one": "invalid",
- "file_many": "invalid",
- "email": "invalid",
- }, "file_one")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordUpsert(app, recordBefore)
- req := httptest.NewRequest(http.MethodGet, "/", formData)
- req.Header.Set(echo.HeaderContentType, mp.FormDataContentType())
- form.LoadRequest(req, "")
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(r *models.Record) error {
- interceptorCalls++
- return next(r)
- }
- }
-
- // ensure that validate is triggered
- // ---
- result := form.Submit(interceptor)
- if result == nil {
- t.Fatal("Expected error, got nil")
- }
-
- // check interceptor calls
- // ---
- if interceptorCalls != 0 {
- t.Fatalf("Expected interceptor to be called 0 times, got %d", interceptorCalls)
- }
-
- // ensure that the record changes weren't persisted
- // ---
- recordAfter, err := app.Dao().FindRecordById(collection.Id, recordBefore.Id)
- if err != nil {
- t.Fatal(err)
- }
-
- if v := recordAfter.Get("text"); v == "abc" {
- t.Fatalf("Expected record.text not to change, got %v", v)
- }
- if v := recordAfter.Get("bool"); v == false {
- t.Fatalf("Expected record.bool not to change, got %v", v)
- }
- if v := recordAfter.Get("select_one"); v == "invalid" {
- t.Fatalf("Expected record.select_one not to change, got %v", v)
- }
- if v := recordAfter.Get("email"); v == "invalid" {
- t.Fatalf("Expected record.email not to change, got %v", v)
- }
- if v := recordAfter.GetStringSlice("file_many"); len(v) != 5 {
- t.Fatalf("Expected record.file_many not to change, got %v", v)
- }
-
- // ensure the files weren't removed
- for _, f := range recordAfter.GetStringSlice("file_many") {
- if !hasRecordFile(app, recordAfter, f) {
- t.Fatal("file_many file should not have been deleted")
- }
- }
-}
-
-func TestRecordUpsertSubmitSuccess(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, _ := app.Dao().FindCollectionByNameOrId("demo1")
- recordBefore, err := app.Dao().FindRecordById(collection.Id, "84nmscqy84lsi1t")
- if err != nil {
- t.Fatal(err)
- }
-
- formData, mp, err := tests.MockMultipartData(map[string]string{
- "text": "test_save",
- "bool": "true",
- "select_one": "optionA",
- "file_one": "",
- }, "file_many.1", "file_many") // replace + new file
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordUpsert(app, recordBefore)
- req := httptest.NewRequest(http.MethodGet, "/", formData)
- req.Header.Set(echo.HeaderContentType, mp.FormDataContentType())
- form.LoadRequest(req, "")
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(r *models.Record) error {
- interceptorCalls++
- return next(r)
- }
- }
-
- result := form.Submit(interceptor)
- if result != nil {
- t.Fatalf("Expected nil, got error %v", result)
- }
-
- // check interceptor calls
- // ---
- if interceptorCalls != 1 {
- t.Fatalf("Expected interceptor to be called 1 time, got %d", interceptorCalls)
- }
-
- // ensure that the record changes were persisted
- // ---
- recordAfter, err := app.Dao().FindRecordById(collection.Id, recordBefore.Id)
- if err != nil {
- t.Fatal(err)
- }
-
- if v := recordAfter.GetString("text"); v != "test_save" {
- t.Fatalf("Expected record.text to be %v, got %v", v, "test_save")
- }
-
- if hasRecordFile(app, recordAfter, recordAfter.GetString("file_one")) {
- t.Fatal("Expected record.file_one to be deleted")
- }
-
- fileMany := (recordAfter.GetStringSlice("file_many"))
- if len(fileMany) != 6 { // 1 replace + 1 new
- t.Fatalf("Expected 6 record.file_many, got %d (%v)", len(fileMany), fileMany)
- }
- for _, f := range fileMany {
- if !hasRecordFile(app, recordAfter, f) {
- t.Fatalf("Expected file %q to exist", f)
- }
- }
-}
-
-func TestRecordUpsertSubmitInterceptors(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, _ := app.Dao().FindCollectionByNameOrId("demo3")
- record, err := app.Dao().FindRecordById(collection.Id, "mk5fmymtx4wsprk")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordUpsert(app, record)
- form.Data()["title"] = "test_new"
-
- testErr := errors.New("test_error")
- interceptorRecordTitle := ""
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(r *models.Record) error {
- interceptor1Called = true
- return next(r)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(r *models.Record) error {
- interceptorRecordTitle = record.GetString("title") // to check if the record was filled
- interceptor2Called = true
- return testErr
- }
- }
-
- submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorRecordTitle != form.Data()["title"].(string) {
- t.Fatalf("Expected the form model to be filled before calling the interceptors")
- }
-}
-
-func TestRecordUpsertWithCustomId(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, err := app.Dao().FindCollectionByNameOrId("demo3")
- if err != nil {
- t.Fatal(err)
- }
-
- existingRecord, err := app.Dao().FindRecordById(collection.Id, "mk5fmymtx4wsprk")
+ file, err := filesystem.NewFileFromBytes([]byte("test"), "test.txt")
if err != nil {
t.Fatal(err)
}
scenarios := []struct {
- name string
- data map[string]string
- record *models.Record
- expectError bool
+ name string
+ data map[string]any
+ record *core.Record
+ managerAccessLevel bool
+ superuserAccessLevel bool
+ expected []string
+ notExpected []string
}{
{
- "empty data",
- map[string]string{},
- models.NewRecord(collection),
- false,
- },
- {
- "empty id",
- map[string]string{"id": ""},
- models.NewRecord(collection),
- false,
- },
- {
- "id < 15 chars",
- map[string]string{"id": "a23"},
- models.NewRecord(collection),
- true,
- },
- {
- "id > 15 chars",
- map[string]string{"id": "a234567890123456"},
- models.NewRecord(collection),
- true,
- },
- {
- "id = 15 chars (invalid chars)",
- map[string]string{"id": "a@3456789012345"},
- models.NewRecord(collection),
- true,
- },
- {
- "id = 15 chars (valid chars)",
- map[string]string{"id": "a23456789012345"},
- models.NewRecord(collection),
- false,
- },
- {
- "changing the id of an existing record",
- map[string]string{"id": "b23456789012345"},
- existingRecord,
- true,
- },
- {
- "using the same existing record id",
- map[string]string{"id": existingRecord.Id},
- existingRecord,
- false,
- },
- {
- "skipping the id for existing record",
- map[string]string{},
- existingRecord,
- false,
- },
- }
-
- for _, scenario := range scenarios {
- formData, mp, err := tests.MockMultipartData(scenario.data)
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordUpsert(app, scenario.record)
- req := httptest.NewRequest(http.MethodGet, "/", formData)
- req.Header.Set(echo.HeaderContentType, mp.FormDataContentType())
- form.LoadRequest(req, "")
-
- dryErr := form.DrySubmit(nil)
- hasDryErr := dryErr != nil
-
- submitErr := form.Submit()
- hasSubmitErr := submitErr != nil
-
- if hasDryErr != hasSubmitErr {
- t.Errorf("[%s] Expected hasDryErr and hasSubmitErr to have the same value, got %v vs %v", scenario.name, hasDryErr, hasSubmitErr)
- }
-
- if hasSubmitErr != scenario.expectError {
- t.Errorf("[%s] Expected hasSubmitErr to be %v, got %v (%v)", scenario.name, scenario.expectError, hasSubmitErr, submitErr)
- }
-
- if id, ok := scenario.data["id"]; ok && id != "" && !hasSubmitErr {
- _, err := app.Dao().FindRecordById(collection.Id, id)
- if err != nil {
- t.Errorf("[%s] Expected to find record with id %s, got %v", scenario.name, id, err)
- }
- }
- }
-}
-
-func TestRecordUpsertAuthRecord(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- name string
- existingId string
- data map[string]any
- manageAccess bool
- expectError bool
- }{
- {
- "empty create data",
- "",
- map[string]any{},
- false,
- true,
- },
- {
- "empty update data",
- "4q1xlclmfloku33",
- map[string]any{},
- false,
- false,
- },
- {
- "minimum valid create data",
- "",
- map[string]any{
- "password": "12345678",
- "passwordConfirm": "12345678",
+ name: "base collection record",
+ data: map[string]any{
+ "text": "test_text",
+ "custom": "123", // should be ignored
+ "number": "456", // should be normalized by the setter
+ "select_many+": []string{"optionB", "optionC"}, // test modifier fields
+ "created": "2022-01:01 10:00:00.000Z", // should be ignored
+ // ignore special auth fields
+ "oldPassword": "123",
+ "password": "456",
+ "passwordConfirm": "789",
+ },
+ record: core.NewRecord(demo1Col),
+ expected: []string{
+ `"text":"test_text"`,
+ `"number":456`,
+ `"select_many":["optionB","optionC"]`,
+ `"password":""`,
+ `"oldPassword":""`,
+ `"passwordConfirm":""`,
+ `"created":""`,
+ `"updated":""`,
+ `"json":null`,
+ },
+ notExpected: []string{
+ `"custom"`,
+ `"select_many-"`,
+ `"select_many+"`,
},
- false,
- false,
},
{
- "create with all allowed auth fields",
- "",
- map[string]any{
- "username": "test_new-a.b",
- "email": "test_new@example.com",
- "emailVisibility": true,
- "password": "12345678",
- "passwordConfirm": "12345678",
+ name: "auth collection record",
+ data: map[string]any{
+ "email": "test@example.com",
+ // special auth fields
+ "oldPassword": "123",
+ "password": "456",
+ "passwordConfirm": "789",
},
- false,
- false,
- },
-
- // username
- {
- "invalid username characters",
- "",
- map[string]any{
- "username": "test abc!@#",
- "password": "12345678",
- "passwordConfirm": "12345678",
+ record: core.NewRecord(usersCol),
+ expected: []string{
+ `"email":"test@example.com"`,
+ `"oldPassword":"123"`,
+ `"password":"456"`,
+ `"passwordConfirm":"789"`,
},
- false,
- true,
},
{
- "invalid username length (less than 3)",
- "",
- map[string]any{
- "username": "ab",
- "password": "12345678",
- "passwordConfirm": "12345678",
+ name: "hidden fields (manager)",
+ data: map[string]any{
+ "email": "test@example.com",
+ "tokenKey": "abc", // should be ignored
+ // special auth fields
+ "password": "456",
+ "oldPassword": "123",
+ "passwordConfirm": "789",
+ },
+ managerAccessLevel: true,
+ record: core.NewRecord(usersCol),
+ expected: []string{
+ `"email":"test@example.com"`,
+ `"tokenKey":""`,
+ `"password":"456"`,
+ `"oldPassword":"123"`,
+ `"passwordConfirm":"789"`,
},
- false,
- true,
},
{
- "invalid username length (more than 150)",
- "",
- map[string]any{
- "username": strings.Repeat("a", 151),
- "password": "12345678",
- "passwordConfirm": "12345678",
+ name: "hidden fields (superuser)",
+ data: map[string]any{
+ "email": "test@example.com",
+ "tokenKey": "abc",
+ // special auth fields
+ "password": "456",
+ "oldPassword": "123",
+ "passwordConfirm": "789",
},
- false,
- true,
- },
-
- // verified
- {
- "try to set verified without managed access",
- "",
- map[string]any{
- "verified": true,
- "password": "12345678",
- "passwordConfirm": "12345678",
+ superuserAccessLevel: true,
+ record: core.NewRecord(usersCol),
+ expected: []string{
+ `"email":"test@example.com"`,
+ `"tokenKey":"abc"`,
+ `"password":"456"`,
+ `"oldPassword":"123"`,
+ `"passwordConfirm":"789"`,
},
- false,
- true,
},
{
- "try to update verified without managed access",
- "4q1xlclmfloku33",
- map[string]any{
- "verified": true,
+ name: "with file field",
+ data: map[string]any{
+ "file_one": file,
+ "url": file, // should be ignored for non-file fields
},
- false,
- true,
- },
- {
- "set verified with managed access",
- "",
- map[string]any{
- "verified": true,
- "password": "12345678",
- "passwordConfirm": "12345678",
+ record: core.NewRecord(demo1Col),
+ expected: []string{
+ `"file_one":{`,
+ `"originalName":"test.txt"`,
+ `"url":""`,
},
- true,
- false,
- },
- {
- "update verified with managed access",
- "4q1xlclmfloku33",
- map[string]any{
- "verified": true,
- },
- true,
- false,
- },
-
- // email
- {
- "try to update email without managed access",
- "4q1xlclmfloku33",
- map[string]any{
- "email": "test_update@example.com",
- },
- false,
- true,
- },
- {
- "update email with managed access",
- "4q1xlclmfloku33",
- map[string]any{
- "email": "test_update@example.com",
- },
- true,
- false,
- },
-
- // password
- {
- "trigger the password validations if only oldPassword is set",
- "4q1xlclmfloku33",
- map[string]any{
- "oldPassword": "1234567890",
- },
- false,
- true,
- },
- {
- "trigger the password validations if only passwordConfirm is set",
- "4q1xlclmfloku33",
- map[string]any{
- "passwordConfirm": "1234567890",
- },
- false,
- true,
- },
- {
- "try to update password without managed access",
- "4q1xlclmfloku33",
- map[string]any{
- "password": "1234567890",
- "passwordConfirm": "1234567890",
- },
- false,
- true,
- },
- {
- "update password without managed access but with oldPassword",
- "4q1xlclmfloku33",
- map[string]any{
- "oldPassword": "1234567890",
- "password": "1234567890",
- "passwordConfirm": "1234567890",
- },
- false,
- false,
- },
- {
- "update email with managed access (without oldPassword)",
- "4q1xlclmfloku33",
- map[string]any{
- "password": "1234567890",
- "passwordConfirm": "1234567890",
- },
- true,
- false,
},
}
for _, s := range scenarios {
- collection, err := app.Dao().FindCollectionByNameOrId("users")
+ t.Run(s.name, func(t *testing.T) {
+ form := forms.NewRecordUpsert(testApp, s.record)
+
+ if s.managerAccessLevel {
+ form.GrantManagerAccess()
+ }
+
+ if s.superuserAccessLevel {
+ form.GrantSuperuserAccess()
+ }
+
+ // ensure that the form access level was updated
+ if !form.HasManageAccess() && (s.superuserAccessLevel || s.managerAccessLevel) {
+ t.Fatalf("Expected the form to have manage access level (manager or superuser)")
+ }
+
+ form.Load(s.data)
+
+ loaded := map[string]any{
+ "oldPassword": form.OldPassword,
+ "password": form.Password,
+ "passwordConfirm": form.PasswordConfirm,
+ }
+ maps.Copy(loaded, s.record.FieldsData())
+ maps.Copy(loaded, s.record.CustomData())
+
+ raw, err := json.Marshal(loaded)
+ if err != nil {
+ t.Fatalf("Failed to serialize data: %v", err)
+ }
+
+ rawStr := string(raw)
+
+ for _, str := range s.expected {
+ if !strings.Contains(rawStr, str) {
+ t.Fatalf("Couldn't find %q in \n%v", str, rawStr)
+ }
+ }
+
+ for _, str := range s.notExpected {
+ if strings.Contains(rawStr, str) {
+ t.Fatalf("Didn't expect %q in \n%v", str, rawStr)
+ }
+ }
+ })
+ }
+}
+
+func TestRecordUpsertDrySubmitFailure(t *testing.T) {
+ runTest := func(t *testing.T, testApp core.App) {
+ col, err := testApp.FindCollectionByNameOrId("demo1")
if err != nil {
t.Fatal(err)
}
- record := models.NewRecord(collection)
- if s.existingId != "" {
- var err error
- record, err = app.Dao().FindRecordById(collection.Id, s.existingId)
- if err != nil {
- t.Errorf("[%s] Failed to fetch auth record with id %s", s.name, s.existingId)
- continue
- }
+ originalId := "imy661ixudk5izi"
+
+ record, err := testApp.FindRecordById(col, originalId)
+ if err != nil {
+ t.Fatal(err)
}
- form := forms.NewRecordUpsert(app, record)
- form.SetFullManageAccess(s.manageAccess)
- if err := form.LoadData(s.data); err != nil {
- t.Errorf("[%s] Failed to load form data", s.name)
- continue
+ oldRaw, err := json.Marshal(record)
+ if err != nil {
+ t.Fatal(err)
}
- submitErr := form.Submit()
-
- hasErr := submitErr != nil
- if hasErr != s.expectError {
- t.Errorf("[%s] Expected hasErr %v, got %v (%v)", s.name, s.expectError, hasErr, submitErr)
+ file, err := filesystem.NewFileFromBytes([]byte("test"), "test.txt")
+ if err != nil {
+ t.Fatal(err)
}
- if !hasErr && record.Username() == "" {
- t.Errorf("[%s] Expected username to be set, got empty string: \n%v", s.name, record)
+ form := forms.NewRecordUpsert(testApp, record)
+ form.Load(map[string]any{
+ "text": "test_update",
+ "file_one": file,
+ "select_one": "!invalid", // should be allowed even if invalid since validations are not executed
+ })
+
+ calls := ""
+ testApp.OnRecordValidate(col.Name).BindFunc(func(e *core.RecordEvent) error {
+ calls += "a" // shouldn't be called
+ return e.Next()
+ })
+
+ result := form.DrySubmit(func(txApp core.App, drySavedRecord *core.Record) error {
+ calls += "b"
+ return errors.New("error...")
+ })
+
+ if result == nil {
+ t.Fatal("Expected DrySubmit error, got nil")
}
+
+ if calls != "b" {
+ t.Fatalf("Expected calls %q, got %q", "b", calls)
+ }
+
+ // refresh the record to ensure that the changes weren't persisted
+ record, err = testApp.FindRecordById(col, originalId)
+ if err != nil {
+ t.Fatalf("Expected record with the original id %q to exist, got error: %v", originalId, err)
+ }
+
+ newRaw, err := json.Marshal(record)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if !bytes.Equal(oldRaw, newRaw) {
+ t.Fatalf("Expected record\n%s\ngot\n%s", oldRaw, newRaw)
+ }
+
+ testFilesCount(t, testApp, record, 0)
}
+
+ t.Run("without parent transaction", func(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ runTest(t, testApp)
+ })
+
+ t.Run("with parent transaction", func(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ testApp.RunInTransaction(func(txApp core.App) error {
+ runTest(t, txApp)
+ return nil
+ })
+ })
}
-func TestRecordUpsertUniqueValidator(t *testing.T) {
+func TestRecordUpsertDrySubmitCreateSuccess(t *testing.T) {
+ runTest := func(t *testing.T, testApp core.App) {
+ col, err := testApp.FindCollectionByNameOrId("demo1")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ record := core.NewRecord(col)
+
+ file, err := filesystem.NewFileFromBytes([]byte("test"), "test.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ form := forms.NewRecordUpsert(testApp, record)
+ form.Load(map[string]any{
+ "id": "test",
+ "text": "test_update",
+ "file_one": file,
+ "select_one": "!invalid", // should be allowed even if invalid since validations are not executed
+ })
+
+ calls := ""
+ testApp.OnRecordValidate(col.Name).BindFunc(func(e *core.RecordEvent) error {
+ calls += "a" // shouldn't be called
+ return e.Next()
+ })
+
+ result := form.DrySubmit(func(txApp core.App, drySavedRecord *core.Record) error {
+ calls += "b"
+ return nil
+ })
+
+ if result != nil {
+ t.Fatalf("Expected DrySubmit success, got error: %v", result)
+ }
+
+ if calls != "b" {
+ t.Fatalf("Expected calls %q, got %q", "b", calls)
+ }
+
+ // refresh the record to ensure that the changes weren't persisted
+ _, err = testApp.FindRecordById(col, record.Id)
+ if err == nil {
+ t.Fatal("Expected the created record to be deleted")
+ }
+
+ testFilesCount(t, testApp, record, 0)
+ }
+
+ t.Run("without parent transaction", func(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ runTest(t, testApp)
+ })
+
+ t.Run("with parent transaction", func(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ testApp.RunInTransaction(func(txApp core.App) error {
+ runTest(t, txApp)
+ return nil
+ })
+ })
+}
+
+func TestRecordUpsertDrySubmitUpdateSuccess(t *testing.T) {
+ runTest := func(t *testing.T, testApp core.App) {
+ col, err := testApp.FindCollectionByNameOrId("demo1")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ record, err := testApp.FindRecordById(col, "imy661ixudk5izi")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ oldRaw, err := json.Marshal(record)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ file, err := filesystem.NewFileFromBytes([]byte("test"), "test.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ form := forms.NewRecordUpsert(testApp, record)
+ form.Load(map[string]any{
+ "text": "test_update",
+ "file_one": file,
+ })
+
+ calls := ""
+ testApp.OnRecordValidate(col.Name).BindFunc(func(e *core.RecordEvent) error {
+ calls += "a" // shouldn't be called
+ return e.Next()
+ })
+
+ result := form.DrySubmit(func(txApp core.App, drySavedRecord *core.Record) error {
+ calls += "b"
+ return nil
+ })
+
+ if result != nil {
+ t.Fatalf("Expected DrySubmit success, got error: %v", result)
+ }
+
+ if calls != "b" {
+ t.Fatalf("Expected calls %q, got %q", "b", calls)
+ }
+
+ // refresh the record to ensure that the changes weren't persisted
+ record, err = testApp.FindRecordById(col, record.Id)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ newRaw, err := json.Marshal(record)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if !bytes.Equal(oldRaw, newRaw) {
+ t.Fatalf("Expected record\n%s\ngot\n%s", oldRaw, newRaw)
+ }
+
+ testFilesCount(t, testApp, record, 0)
+ }
+
+ t.Run("without parent transaction", func(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ runTest(t, testApp)
+ })
+
+ t.Run("with parent transaction", func(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ testApp.RunInTransaction(func(txApp core.App) error {
+ runTest(t, txApp)
+ return nil
+ })
+ })
+}
+
+func TestRecordUpsertSubmitValidations(t *testing.T) {
+ t.Parallel()
+
app, _ := tests.NewTestApp()
defer app.Cleanup()
- // create a dummy collection
- collection := &models.Collection{
- Name: "test",
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Type: "text",
- Name: "fieldA",
- },
- &schema.SchemaField{
- Type: "text",
- Name: "fieldB",
- },
- &schema.SchemaField{
- Type: "text",
- Name: "fieldC",
- },
- ),
- Indexes: types.JsonArray[string]{
- // the field case shouldn't matter
- "create unique index unique_single_idx on test (fielda)",
- "create unique index unique_combined_idx on test (fieldb, FIELDC)",
- },
- }
- if err := app.Dao().SaveCollection(collection); err != nil {
+ demo2Col, err := app.FindCollectionByNameOrId("demo2")
+ if err != nil {
t.Fatal(err)
}
- dummyRecord := models.NewRecord(collection)
- dummyRecord.Set("fieldA", "a")
- dummyRecord.Set("fieldB", "b")
- dummyRecord.Set("fieldC", "c")
- if err := app.Dao().SaveRecord(dummyRecord); err != nil {
+ demo2Rec, err := app.FindRecordById(demo2Col, "llvuca81nly1qls")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ usersCol, err := app.FindCollectionByNameOrId("users")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ userRec, err := app.FindRecordById(usersCol, "4q1xlclmfloku33")
+ if err != nil {
t.Fatal(err)
}
scenarios := []struct {
name string
+ record *core.Record
data map[string]any
+ managerAccess bool
expectedErrors []string
}{
+ // base
{
- "duplicated unique value",
- map[string]any{
- "fieldA": "a",
- },
- []string{"fieldA"},
+ name: "new base collection record with empty data",
+ record: core.NewRecord(demo2Col),
+ data: map[string]any{},
+ expectedErrors: []string{"title"},
},
{
- "duplicated combined unique value",
- map[string]any{
- "fieldB": "b",
- "fieldC": "c",
+ name: "new base collection record with invalid data",
+ record: core.NewRecord(demo2Col),
+ data: map[string]any{
+ "title": "",
+ // should be ignored
+ "custom": "abc",
+ "oldPassword": "123",
+ "password": "456",
+ "passwordConfirm": "789",
},
- []string{"fieldB", "fieldC"},
+ expectedErrors: []string{"title"},
},
{
- "non-duplicated unique value",
- map[string]any{
- "fieldA": "a2",
+ name: "new base collection record with valid data",
+ record: core.NewRecord(demo2Col),
+ data: map[string]any{
+ "title": "abc",
+ // should be ignored
+ "custom": "abc",
+ "oldPassword": "123",
+ "password": "456",
+ "passwordConfirm": "789",
},
- nil,
+ expectedErrors: []string{},
},
{
- "non-duplicated combined unique value",
- map[string]any{
- "fieldB": "b",
- "fieldC": "d",
+ name: "existing base collection record with empty data",
+ record: demo2Rec,
+ data: map[string]any{},
+ expectedErrors: []string{},
+ },
+ {
+ name: "existing base collection record with invalid data",
+ record: demo2Rec,
+ data: map[string]any{
+ "title": "",
},
- nil,
+ expectedErrors: []string{"title"},
+ },
+ {
+ name: "existing base collection record with valid data",
+ record: demo2Rec,
+ data: map[string]any{
+ "title": "abc",
+ },
+ expectedErrors: []string{},
+ },
+
+ // auth
+ {
+ name: "new auth collection record with empty data",
+ record: core.NewRecord(usersCol),
+ data: map[string]any{},
+ expectedErrors: []string{"password", "passwordConfirm"},
+ },
+ {
+ name: "new auth collection record with invalid record and invalid form data (without manager access)",
+ record: core.NewRecord(usersCol),
+ data: map[string]any{
+ "verified": true,
+ "emailVisibility": true,
+ "email": "test@example.com",
+ "password": "456",
+ "passwordConfirm": "789",
+ "username": "!invalid",
+ // should be ignored (custom or hidden fields)
+ "tokenKey": strings.Repeat("a", 2),
+ "custom": "abc",
+ "oldPassword": "123",
+ },
+ // fail the form validator
+ expectedErrors: []string{"verified", "passwordConfirm"},
+ },
+ {
+ name: "new auth collection record with invalid record and valid form data (without manager access)",
+ record: core.NewRecord(usersCol),
+ data: map[string]any{
+ "verified": false,
+ "emailVisibility": true,
+ "email": "test@example.com",
+ "password": "456",
+ "passwordConfirm": "456",
+ "username": "!invalid",
+ // should be ignored (custom or hidden fields)
+ "tokenKey": strings.Repeat("a", 2),
+ "custom": "abc",
+ "oldPassword": "123",
+ },
+ // fail the record fields validator
+ expectedErrors: []string{"password", "username"},
+ },
+ {
+ name: "new auth collection record with invalid record and invalid form data (with manager access)",
+ record: core.NewRecord(usersCol),
+ managerAccess: true,
+ data: map[string]any{
+ "verified": true,
+ "emailVisibility": true,
+ "email": "test@example.com",
+ "password": "456",
+ "passwordConfirm": "789",
+ "username": "!invalid",
+ // should be ignored (custom or hidden fields)
+ "tokenKey": strings.Repeat("a", 2),
+ "custom": "abc",
+ "oldPassword": "123",
+ },
+ // fail the form validator
+ expectedErrors: []string{"passwordConfirm"},
+ },
+ {
+ name: "new auth collection record with invalid record and valid form data (with manager access)",
+ record: core.NewRecord(usersCol),
+ managerAccess: true,
+ data: map[string]any{
+ "verified": true,
+ "emailVisibility": true,
+ "email": "test@example.com",
+ "password": "456",
+ "passwordConfirm": "456",
+ "username": "!invalid",
+ // should be ignored (custom or hidden fields)
+ "tokenKey": strings.Repeat("a", 2),
+ "custom": "abc",
+ "oldPassword": "123",
+ },
+ // fail the record fields validator
+ expectedErrors: []string{"password", "username"},
+ },
+ {
+ name: "new auth collection record with valid data",
+ record: core.NewRecord(usersCol),
+ data: map[string]any{
+ "emailVisibility": true,
+ "email": "test_new@example.com",
+ "password": "1234567890",
+ "passwordConfirm": "1234567890",
+ // should be ignored (custom or hidden fields)
+ "tokenKey": strings.Repeat("a", 2),
+ "custom": "abc",
+ "oldPassword": "123",
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "new auth collection record with valid data and duplicated email",
+ record: core.NewRecord(usersCol),
+ data: map[string]any{
+ "email": "test@example.com",
+ "password": "1234567890",
+ "passwordConfirm": "1234567890",
+ // should be ignored (custom or hidden fields)
+ "tokenKey": strings.Repeat("a", 2),
+ "custom": "abc",
+ "oldPassword": "123",
+ },
+ // fail the unique db validator
+ expectedErrors: []string{"email"},
+ },
+ {
+ name: "existing auth collection record with empty data",
+ record: userRec,
+ data: map[string]any{},
+ expectedErrors: []string{},
+ },
+ {
+ name: "existing auth collection record with invalid record data and invalid form data (without manager access)",
+ record: userRec,
+ data: map[string]any{
+ "verified": true,
+ "email": "test_new@example.com", // not allowed to change
+ "oldPassword": "123",
+ "password": "456",
+ "passwordConfirm": "789",
+ "username": "!invalid",
+ // should be ignored (custom or hidden fields)
+ "tokenKey": strings.Repeat("a", 2),
+ "custom": "abc",
+ },
+ // fail form validator
+ expectedErrors: []string{"verified", "email", "oldPassword", "passwordConfirm"},
+ },
+ {
+ name: "existing auth collection record with invalid record data and valid form data (without manager access)",
+ record: userRec,
+ data: map[string]any{
+ "oldPassword": "1234567890",
+ "password": "12345678901",
+ "passwordConfirm": "12345678901",
+ "username": "!invalid",
+ // should be ignored (custom or hidden fields)
+ "tokenKey": strings.Repeat("a", 2),
+ "custom": "abc",
+ },
+ // fail record fields validator
+ expectedErrors: []string{"username"},
+ },
+ {
+ name: "existing auth collection record with invalid record data and invalid form data (with manager access)",
+ record: userRec,
+ managerAccess: true,
+ data: map[string]any{
+ "verified": true,
+ "email": "test_new@example.com",
+ "oldPassword": "123", // should be ignored
+ "password": "456",
+ "passwordConfirm": "789",
+ "username": "!invalid",
+ // should be ignored (custom or hidden fields)
+ "tokenKey": strings.Repeat("a", 2),
+ "custom": "abc",
+ },
+ // fail form validator
+ expectedErrors: []string{"passwordConfirm"},
+ },
+ {
+ name: "existing auth collection record with invalid record data and valid form data (with manager access)",
+ record: userRec,
+ managerAccess: true,
+ data: map[string]any{
+ "verified": true,
+ "email": "test_new@example.com",
+ "oldPassword": "1234567890",
+ "password": "12345678901",
+ "passwordConfirm": "12345678901",
+ "username": "!invalid",
+ // should be ignored (custom or hidden fields)
+ "tokenKey": strings.Repeat("a", 2),
+ "custom": "abc",
+ },
+ // fail record fields validator
+ expectedErrors: []string{"username"},
+ },
+ {
+ name: "existing auth collection record with base valid data",
+ record: userRec,
+ data: map[string]any{
+ "name": "test",
+ },
+ expectedErrors: []string{},
+ },
+ {
+ name: "existing auth collection record with valid password and invalid oldPassword data",
+ record: userRec,
+ data: map[string]any{
+ "name": "test",
+ "oldPassword": "invalid",
+ "password": "1234567890",
+ "passwordConfirm": "1234567890",
+ },
+ expectedErrors: []string{"oldPassword"},
+ },
+ {
+ name: "existing auth collection record with valid password data",
+ record: userRec,
+ data: map[string]any{
+ "name": "test",
+ "oldPassword": "1234567890",
+ "password": "0987654321",
+ "passwordConfirm": "0987654321",
+ },
+ expectedErrors: []string{},
},
}
for _, s := range scenarios {
- record := models.NewRecord(collection)
+ t.Run(s.name, func(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
- form := forms.NewRecordUpsert(app, record)
- if err := form.LoadData(s.data); err != nil {
- t.Errorf("[%s] Failed to load form data", s.name)
- continue
- }
-
- result := form.Submit()
-
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Errorf("[%s] Failed to parse errors %v", s.name, result)
- continue
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("[%s] Expected error keys %v, got %v", s.name, s.expectedErrors, errs)
- continue
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("[%s] Missing expected error key %q in %v", s.name, k, errs)
- continue
+ form := forms.NewRecordUpsert(testApp, s.record.Original())
+ if s.managerAccess {
+ form.GrantManagerAccess()
}
- }
+ form.Load(s.data)
+
+ result := form.Submit()
+
+ tests.TestValidationErrors(t, result, s.expectedErrors)
+ })
}
}
-func TestRecordUpsertAddAndRemoveFiles(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
+func TestRecordUpsertSubmitFailure(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
- recordBefore, err := app.Dao().FindRecordById("demo1", "84nmscqy84lsi1t")
+ col, err := testApp.FindCollectionByNameOrId("demo1")
if err != nil {
t.Fatal(err)
}
- // create test temp files
- tempDir := filepath.Join(app.DataDir(), "temp")
- if err := os.MkdirAll(app.DataDir(), os.ModePerm); err != nil {
- t.Fatal(err)
- }
- defer os.RemoveAll(tempDir)
- tmpFile, _ := os.CreateTemp(os.TempDir(), "tmpfile1-*.txt")
- tmpFile.Close()
-
- form := forms.NewRecordUpsert(app, recordBefore)
-
- f1, err := filesystem.NewFileFromPath(tmpFile.Name())
+ record, err := testApp.FindRecordById(col, "imy661ixudk5izi")
if err != nil {
t.Fatal(err)
}
- f2, err := filesystem.NewFileFromPath(tmpFile.Name())
+ file, err := filesystem.NewFileFromBytes([]byte("test"), "test.txt")
if err != nil {
t.Fatal(err)
}
- f3, err := filesystem.NewFileFromPath(tmpFile.Name())
+ form := forms.NewRecordUpsert(testApp, record)
+ form.Load(map[string]any{
+ "text": "test_update",
+ "file_one": file,
+ "select_one": "invalid",
+ })
+
+ validateCalls := 0
+ testApp.OnRecordValidate(col.Name).BindFunc(func(e *core.RecordEvent) error {
+ validateCalls++
+ return e.Next()
+ })
+
+ result := form.Submit()
+
+ if result == nil {
+ t.Fatal("Expected Submit error, got nil")
+ }
+
+ if validateCalls != 1 {
+ t.Fatalf("Expected validateCalls %d, got %d", 1, validateCalls)
+ }
+
+ // refresh the record to ensure that the changes weren't persisted
+ record, err = testApp.FindRecordById(col, record.Id)
if err != nil {
t.Fatal(err)
}
- removed0 := "test_d61b33QdDU.txt" // replaced
- removed1 := "300_WlbFWSGmW9.png"
- removed2 := "logo_vcfJJG5TAh.svg"
-
- form.AddFiles("file_one", f1) // should replace the existin file
-
- form.AddFiles("file_many", f2, f3) // should append
-
- form.RemoveFiles("file_many", removed1, removed2) // should remove
-
- filesToUpload := form.FilesToUpload()
- if v, ok := filesToUpload["file_one"]; !ok || len(v) != 1 {
- t.Fatalf("Expected filesToUpload[file_one] to have exactly 1 file, got %v", v)
- }
- if v, ok := filesToUpload["file_many"]; !ok || len(v) != 2 {
- t.Fatalf("Expected filesToUpload[file_many] to have exactly 2 file, got %v", v)
+ if v := record.GetString("text"); v == "test_update" {
+ t.Fatalf("Expected record.text to remain the same, got %q", v)
}
- filesToDelete := form.FilesToDelete()
- if len(filesToDelete) != 3 {
- t.Fatalf("Expected exactly 2 file to delete, got %v", filesToDelete)
- }
- for _, f := range []string{removed0, removed1, removed2} {
- if !list.ExistInSlice(f, filesToDelete) {
- t.Fatalf("Missing file %q from filesToDelete %v", f, filesToDelete)
- }
+ if v := record.GetString("select_one"); v != "" {
+ t.Fatalf("Expected record.select_one to remain the same, got %q", v)
}
- if err := form.Submit(); err != nil {
- t.Fatalf("Failed to submit the RecordUpsert form, got %v", err)
+ if v := record.GetString("file_one"); v != "" {
+ t.Fatalf("Expected record.file_one to remain the same, got %q", v)
}
- recordAfter, err := app.Dao().FindRecordById("demo1", "84nmscqy84lsi1t")
- if err != nil {
- t.Fatal(err)
- }
-
- // ensure files deletion
- if hasRecordFile(app, recordAfter, removed0) {
- t.Fatalf("Expected the old file_one file to be deleted")
- }
- if hasRecordFile(app, recordAfter, removed1) {
- t.Fatalf("Expected %s to be deleted", removed1)
- }
- if hasRecordFile(app, recordAfter, removed2) {
- t.Fatalf("Expected %s to be deleted", removed2)
- }
-
- fileOne := recordAfter.GetStringSlice("file_one")
- if len(fileOne) == 0 {
- t.Fatalf("Expected new file_one file to be uploaded")
- }
-
- fileMany := recordAfter.GetStringSlice("file_many")
- if len(fileMany) != 5 {
- t.Fatalf("Expected file_many to be 5, got %v", fileMany)
- }
+ testFilesCount(t, testApp, record, 0)
}
-func TestRecordUpsertUploadFailure(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
+func TestRecordUpsertSubmitSuccess(t *testing.T) {
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
- collection, err := app.Dao().FindCollectionByNameOrId("demo3")
+ col, err := testApp.FindCollectionByNameOrId("demo1")
if err != nil {
t.Fatal(err)
}
- testDaos := []*daos.Dao{
- app.Dao(), // with hooks
- daos.New(app.Dao().DB()), // without hooks
+ record, err := testApp.FindRecordById(col, "imy661ixudk5izi")
+ if err != nil {
+ t.Fatal(err)
}
- for i, dao := range testDaos {
- // create with invalid file
- {
- prefix := fmt.Sprintf("%d-create", i)
+ file, err := filesystem.NewFileFromBytes([]byte("test"), "test.txt")
+ if err != nil {
+ t.Fatal(err)
+ }
- new := models.NewRecord(collection)
- new.Id = "123456789012341"
+ form := forms.NewRecordUpsert(testApp, record)
+ form.Load(map[string]any{
+ "text": "test_update",
+ "file_one": file,
+ "select_one": "optionC",
+ })
- form := forms.NewRecordUpsert(app, new)
- form.SetDao(dao)
- form.LoadData(map[string]any{"title": "new_test"})
- form.AddFiles("files", &filesystem.File{Reader: &filesystem.PathReader{Path: "/tmp/__missing__"}})
+ validateCalls := 0
+ testApp.OnRecordValidate(col.Name).BindFunc(func(e *core.RecordEvent) error {
+ validateCalls++
+ return e.Next()
+ })
- if err := form.Submit(); err == nil {
- t.Fatalf("[%s] Expected error, got nil", prefix)
- }
+ result := form.Submit()
- if r, err := app.Dao().FindRecordById(collection.Id, new.Id); err == nil {
- t.Fatalf("[%s] Expected the inserted record to be deleted, found \n%v", prefix, r.PublicExport())
- }
- }
+ if result != nil {
+ t.Fatalf("Expected Submit success, got error: %v", result)
+ }
- // update with invalid file
- {
- prefix := fmt.Sprintf("%d-update", i)
+ if validateCalls != 1 {
+ t.Fatalf("Expected validateCalls %d, got %d", 1, validateCalls)
+ }
- record, err := app.Dao().FindRecordById(collection.Id, "1tmknxy2868d869")
- if err != nil {
- t.Fatal(err)
- }
+ // refresh the record to ensure that the changes were persisted
+ record, err = testApp.FindRecordById(col, record.Id)
+ if err != nil {
+ t.Fatal(err)
+ }
- form := forms.NewRecordUpsert(app, record)
- form.SetDao(dao)
- form.LoadData(map[string]any{"title": "update_test"})
- form.AddFiles("files", &filesystem.File{Reader: &filesystem.PathReader{Path: "/tmp/__missing__"}})
+ if v := record.GetString("text"); v != "test_update" {
+ t.Fatalf("Expected record.text %q, got %q", "test_update", v)
+ }
- if err := form.Submit(); err == nil {
- t.Fatalf("[%s] Expected error, got nil", prefix)
- }
+ if v := record.GetString("select_one"); v != "optionC" {
+ t.Fatalf("Expected record.select_one %q, got %q", "optionC", v)
+ }
- if r, _ := app.Dao().FindRecordById(collection.Id, record.Id); r == nil || r.GetString("title") == "update_test" {
- t.Fatalf("[%s] Expected the record changes to be reverted, got \n%v", prefix, r.PublicExport())
- }
- }
+ if v := record.GetString("file_one"); v != file.Name {
+ t.Fatalf("Expected record.file_one %q, got %q", file.Name, v)
+ }
+
+ testFilesCount(t, testApp, record, 2) // the file + attrs
+}
+
+// -------------------------------------------------------------------
+
+func testFilesCount(t *testing.T, app core.App, record *core.Record, count int) {
+ storageDir := filepath.Join(app.DataDir(), "storage", record.Collection().Id, record.Id)
+
+ entries, _ := os.ReadDir(storageDir)
+ if len(entries) != count {
+ t.Errorf("Expected %d entries, got %d\n%v", count, len(entries), entries)
}
}
diff --git a/forms/record_verification_confirm.go b/forms/record_verification_confirm.go
deleted file mode 100644
index 2d0f7ad5..00000000
--- a/forms/record_verification_confirm.go
+++ /dev/null
@@ -1,116 +0,0 @@
-package forms
-
-import (
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/security"
- "github.com/spf13/cast"
-)
-
-// RecordVerificationConfirm is an auth record email verification confirmation form.
-type RecordVerificationConfirm struct {
- app core.App
- collection *models.Collection
- dao *daos.Dao
-
- Token string `form:"token" json:"token"`
-}
-
-// NewRecordVerificationConfirm creates a new [RecordVerificationConfirm]
-// form initialized with from the provided [core.App] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewRecordVerificationConfirm(app core.App, collection *models.Collection) *RecordVerificationConfirm {
- return &RecordVerificationConfirm{
- app: app,
- dao: app.Dao(),
- collection: collection,
- }
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *RecordVerificationConfirm) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *RecordVerificationConfirm) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(&form.Token, validation.Required, validation.By(form.checkToken)),
- )
-}
-
-func (form *RecordVerificationConfirm) checkToken(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil // nothing to check
- }
-
- claims, _ := security.ParseUnverifiedJWT(v)
- email := cast.ToString(claims["email"])
- if email == "" {
- return validation.NewError("validation_invalid_token_claims", "Missing email token claim.")
- }
-
- record, err := form.dao.FindAuthRecordByToken(
- v,
- form.app.Settings().RecordVerificationToken.Secret,
- )
- if err != nil || record == nil {
- return validation.NewError("validation_invalid_token", "Invalid or expired token.")
- }
-
- if record.Collection().Id != form.collection.Id {
- return validation.NewError("validation_token_collection_mismatch", "The provided token is for different auth collection.")
- }
-
- if record.Email() != email {
- return validation.NewError("validation_token_email_mismatch", "The record email doesn't match with the requested token claims.")
- }
-
- return nil
-}
-
-// Submit validates and submits the form.
-// On success returns the verified auth record associated to `form.Token`.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *RecordVerificationConfirm) Submit(interceptors ...InterceptorFunc[*models.Record]) (*models.Record, error) {
- if err := form.Validate(); err != nil {
- return nil, err
- }
-
- record, err := form.dao.FindAuthRecordByToken(
- form.Token,
- form.app.Settings().RecordVerificationToken.Secret,
- )
- if err != nil {
- return nil, err
- }
-
- wasVerified := record.Verified()
-
- if !wasVerified {
- record.SetVerified(true)
- }
-
- interceptorsErr := runInterceptors(record, func(m *models.Record) error {
- record = m
-
- if wasVerified {
- return nil // already verified
- }
-
- return form.dao.SaveRecord(m)
- }, interceptors...)
-
- if interceptorsErr != nil {
- return nil, interceptorsErr
- }
-
- return record, nil
-}
diff --git a/forms/record_verification_confirm_test.go b/forms/record_verification_confirm_test.go
deleted file mode 100644
index ba4dcf38..00000000
--- a/forms/record_verification_confirm_test.go
+++ /dev/null
@@ -1,156 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "errors"
- "testing"
-
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-func TestRecordVerificationConfirmValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- jsonData string
- expectError bool
- }{
- // empty data (Validate call check)
- {
- `{}`,
- true,
- },
- // expired token (Validate call check)
- {
- `{"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImNvbGxlY3Rpb25JZCI6Il9wYl91c2Vyc19hdXRoXyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiZXhwIjoxNjQwOTkxNjYxfQ.Avbt9IP8sBisVz_2AGrlxLDvangVq4PhL2zqQVYLKlE"}`,
- true,
- },
- // valid token (already verified record)
- {
- `{"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6Im9hcDY0MGNvdDR5cnUycyIsImVtYWlsIjoidGVzdDJAZXhhbXBsZS5jb20iLCJjb2xsZWN0aW9uSWQiOiJfcGJfdXNlcnNfYXV0aF8iLCJ0eXBlIjoiYXV0aFJlY29yZCIsImV4cCI6MjIwODk4NTI2MX0.PsOABmYUzGbd088g8iIBL4-pf7DUZm0W5Ju6lL5JVRg"}`,
- false,
- },
- // valid token (unverified record)
- {
- `{"token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImNvbGxlY3Rpb25JZCI6Il9wYl91c2Vyc19hdXRoXyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiZXhwIjoyMjA4OTg1MjYxfQ.hL16TVmStHFdHLc4a860bRqJ3sFfzjv0_NRNzwsvsrc"}`,
- false,
- },
- }
-
- for i, s := range scenarios {
- form := forms.NewRecordVerificationConfirm(testApp, authCollection)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Errorf("(%d) Failed to load form data: %v", i, loadErr)
- continue
- }
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(r *models.Record) error {
- interceptorCalls++
- return next(r)
- }
- }
-
- record, err := form.Submit(interceptor)
-
- // check interceptor calls
- expectInterceptorCalls := 1
- if s.expectError {
- expectInterceptorCalls = 0
- }
- if interceptorCalls != expectInterceptorCalls {
- t.Errorf("[%d] Expected interceptor to be called %d, got %d", i, expectInterceptorCalls, interceptorCalls)
- }
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
-
- if hasErr {
- continue
- }
-
- claims, _ := security.ParseUnverifiedJWT(form.Token)
- tokenRecordId := claims["id"]
-
- if record.Id != tokenRecordId {
- t.Errorf("(%d) Expected record.Id %q, got %q", i, tokenRecordId, record.Id)
- }
-
- if !record.Verified() {
- t.Errorf("(%d) Expected record.Verified() to be true, got false", i)
- }
- }
-}
-
-func TestRecordVerificationConfirmInterceptors(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- authRecord, err := testApp.Dao().FindAuthRecordByEmail("users", "test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordVerificationConfirm(testApp, authCollection)
- form.Token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjRxMXhsY2xtZmxva3UzMyIsImVtYWlsIjoidGVzdEBleGFtcGxlLmNvbSIsImNvbGxlY3Rpb25JZCI6Il9wYl91c2Vyc19hdXRoXyIsInR5cGUiOiJhdXRoUmVjb3JkIiwiZXhwIjoyMjA4OTg1MjYxfQ.hL16TVmStHFdHLc4a860bRqJ3sFfzjv0_NRNzwsvsrc"
- interceptorVerified := authRecord.Verified()
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptor1Called = true
- return next(record)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptorVerified = record.Verified()
- interceptor2Called = true
- return testErr
- }
- }
-
- _, submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorVerified == authRecord.Verified() {
- t.Fatalf("Expected the form model to be filled before calling the interceptors")
- }
-}
diff --git a/forms/record_verification_request.go b/forms/record_verification_request.go
deleted file mode 100644
index 09b46799..00000000
--- a/forms/record_verification_request.go
+++ /dev/null
@@ -1,101 +0,0 @@
-package forms
-
-import (
- "errors"
- "time"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/mails"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-// RecordVerificationRequest is an auth record email verification request form.
-type RecordVerificationRequest struct {
- app core.App
- collection *models.Collection
- dao *daos.Dao
- resendThreshold float64 // in seconds
-
- Email string `form:"email" json:"email"`
-}
-
-// NewRecordVerificationRequest creates a new [RecordVerificationRequest]
-// form initialized with from the provided [core.App] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewRecordVerificationRequest(app core.App, collection *models.Collection) *RecordVerificationRequest {
- return &RecordVerificationRequest{
- app: app,
- dao: app.Dao(),
- collection: collection,
- resendThreshold: 120, // 2 min
- }
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *RecordVerificationRequest) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-//
-// // This method doesn't verify that auth record with `form.Email` exists (this is done on Submit).
-func (form *RecordVerificationRequest) Validate() error {
- return validation.ValidateStruct(form,
- validation.Field(
- &form.Email,
- validation.Required,
- validation.Length(1, 255),
- is.EmailFormat,
- ),
- )
-}
-
-// Submit validates and sends a verification request email
-// to the `form.Email` auth record.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *RecordVerificationRequest) Submit(interceptors ...InterceptorFunc[*models.Record]) error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- record, err := form.dao.FindFirstRecordByData(
- form.collection.Id,
- schema.FieldNameEmail,
- form.Email,
- )
- if err != nil {
- return err
- }
-
- if !record.Verified() {
- now := time.Now().UTC()
- lastVerificationSentAt := record.LastVerificationSentAt().Time()
- if (now.Sub(lastVerificationSentAt)).Seconds() < form.resendThreshold {
- return errors.New("A verification email was already sent.")
- }
- }
-
- return runInterceptors(record, func(m *models.Record) error {
- if m.Verified() {
- return nil // already verified
- }
-
- if err := mails.SendRecordVerification(form.app, m); err != nil {
- return err
- }
-
- // update last sent timestamp
- m.SetLastVerificationSentAt(types.NowDateTime())
-
- return form.dao.SaveRecord(m)
- }, interceptors...)
-}
diff --git a/forms/record_verification_request_test.go b/forms/record_verification_request_test.go
deleted file mode 100644
index 03b48372..00000000
--- a/forms/record_verification_request_test.go
+++ /dev/null
@@ -1,192 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "errors"
- "testing"
- "time"
-
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestRecordVerificationRequestSubmit(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("clients")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []struct {
- jsonData string
- expectError bool
- expectMail bool
- }{
- // empty field (Validate call check)
- {
- `{"email":""}`,
- true,
- false,
- },
- // invalid email field (Validate call check)
- {
- `{"email":"invalid"}`,
- true,
- false,
- },
- // nonexisting user
- {
- `{"email":"missing@example.com"}`,
- true,
- false,
- },
- // existing user (already verified)
- {
- `{"email":"test@example.com"}`,
- false,
- false,
- },
- // existing user (already verified) - repeating request to test threshod skip
- {
- `{"email":"test@example.com"}`,
- false,
- false,
- },
- // existing user (unverified)
- {
- `{"email":"test2@example.com"}`,
- false,
- true,
- },
- // existing user (inverified) - reached send threshod
- {
- `{"email":"test2@example.com"}`,
- true,
- false,
- },
- }
-
- now := types.NowDateTime()
- time.Sleep(1 * time.Millisecond)
-
- for i, s := range scenarios {
- testApp.TestMailer.TotalSend = 0 // reset
- form := forms.NewRecordVerificationRequest(testApp, authCollection)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Errorf("[%d] Failed to load form data: %v", i, loadErr)
- continue
- }
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(r *models.Record) error {
- interceptorCalls++
- return next(r)
- }
- }
-
- err := form.Submit(interceptor)
-
- // check interceptor calls
- expectInterceptorCalls := 1
- if s.expectError {
- expectInterceptorCalls = 0
- }
- if interceptorCalls != expectInterceptorCalls {
- t.Errorf("[%d] Expected interceptor to be called %d, got %d", i, expectInterceptorCalls, interceptorCalls)
- }
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("[%d] Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
-
- expectedMails := 0
- if s.expectMail {
- expectedMails = 1
- }
- if testApp.TestMailer.TotalSend != expectedMails {
- t.Errorf("[%d] Expected %d mail(s) to be sent, got %d", i, expectedMails, testApp.TestMailer.TotalSend)
- }
-
- if s.expectError {
- continue
- }
-
- user, err := testApp.Dao().FindAuthRecordByEmail(authCollection.Id, form.Email)
- if err != nil {
- t.Errorf("[%d] Expected user with email %q to exist, got nil", i, form.Email)
- continue
- }
-
- // check whether LastVerificationSentAt was updated
- if !user.Verified() && user.LastVerificationSentAt().Time().Sub(now.Time()) < 0 {
- t.Errorf("[%d] Expected LastVerificationSentAt to be after %v, got %v", i, now, user.LastVerificationSentAt())
- }
- }
-}
-
-func TestRecordVerificationRequestInterceptors(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- authCollection, err := testApp.Dao().FindCollectionByNameOrId("users")
- if err != nil {
- t.Fatal(err)
- }
-
- authRecord, err := testApp.Dao().FindAuthRecordByEmail("users", "test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- form := forms.NewRecordVerificationRequest(testApp, authCollection)
- form.Email = authRecord.Email()
- interceptorLastVerificationSentAt := authRecord.LastVerificationSentAt()
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptor1Called = true
- return next(record)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*models.Record]) forms.InterceptorNextFunc[*models.Record] {
- return func(record *models.Record) error {
- interceptorLastVerificationSentAt = record.LastVerificationSentAt()
- interceptor2Called = true
- return testErr
- }
- }
-
- submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-
- if interceptorLastVerificationSentAt.String() != authRecord.LastVerificationSentAt().String() {
- t.Fatalf("Expected the form model to NOT be filled before calling the interceptors")
- }
-}
diff --git a/forms/settings_upsert.go b/forms/settings_upsert.go
deleted file mode 100644
index 9d14705d..00000000
--- a/forms/settings_upsert.go
+++ /dev/null
@@ -1,90 +0,0 @@
-package forms
-
-import (
- "os"
- "time"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/settings"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-// SettingsUpsert is a [settings.Settings] upsert (create/update) form.
-type SettingsUpsert struct {
- *settings.Settings
-
- app core.App
- dao *daos.Dao
-}
-
-// NewSettingsUpsert creates a new [SettingsUpsert] form with initializer
-// config created from the provided [core.App] instance.
-//
-// If you want to submit the form as part of a transaction,
-// you can change the default Dao via [SetDao()].
-func NewSettingsUpsert(app core.App) *SettingsUpsert {
- form := &SettingsUpsert{
- app: app,
- dao: app.Dao(),
- }
-
- // load the application settings into the form
- form.Settings, _ = app.Settings().Clone()
-
- return form
-}
-
-// SetDao replaces the default form Dao instance with the provided one.
-func (form *SettingsUpsert) SetDao(dao *daos.Dao) {
- form.dao = dao
-}
-
-// Validate makes the form validatable by implementing [validation.Validatable] interface.
-func (form *SettingsUpsert) Validate() error {
- return form.Settings.Validate()
-}
-
-// Submit validates the form and upserts the loaded settings.
-//
-// On success the app settings will be refreshed with the form ones.
-//
-// You can optionally provide a list of InterceptorFunc to further
-// modify the form behavior before persisting it.
-func (form *SettingsUpsert) Submit(interceptors ...InterceptorFunc[*settings.Settings]) error {
- if err := form.Validate(); err != nil {
- return err
- }
-
- return runInterceptors(form.Settings, func(s *settings.Settings) error {
- form.Settings = s
-
- // persists settings change
- encryptionKey := os.Getenv(form.app.EncryptionEnv())
- if err := form.dao.SaveSettings(form.Settings, encryptionKey); err != nil {
- return err
- }
-
- // reload app settings
- if err := form.app.RefreshSettings(); err != nil {
- return err
- }
-
- // try to clear old logs not matching the new settings
- createdBefore := time.Now().AddDate(0, 0, -1*form.Settings.Logs.MaxDays).UTC().Format(types.DefaultDateLayout)
- expr := dbx.NewExp("[[created]] <= {:date} OR [[level]] < {:level}", dbx.Params{
- "date": createdBefore,
- "level": form.Settings.Logs.MinLevel,
- })
- form.app.LogsDao().NonconcurrentDB().Delete((&models.Log{}).TableName(), expr).Execute()
-
- // no logs are allowed -> try to reclaim preserved disk space after the previous delete operation
- if form.Settings.Logs.MaxDays == 0 {
- form.app.LogsDao().Vacuum()
- }
-
- return nil
- }, interceptors...)
-}
diff --git a/forms/settings_upsert_test.go b/forms/settings_upsert_test.go
deleted file mode 100644
index fee6fbcf..00000000
--- a/forms/settings_upsert_test.go
+++ /dev/null
@@ -1,172 +0,0 @@
-package forms_test
-
-import (
- "encoding/json"
- "errors"
- "os"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/forms"
- "github.com/pocketbase/pocketbase/models/settings"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-func TestNewSettingsUpsert(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- app.Settings().Meta.AppName = "name_update"
-
- form := forms.NewSettingsUpsert(app)
-
- formSettings, _ := json.Marshal(form.Settings)
- appSettings, _ := json.Marshal(app.Settings())
-
- if string(formSettings) != string(appSettings) {
- t.Errorf("Expected settings \n%s, got \n%s", string(appSettings), string(formSettings))
- }
-}
-
-func TestSettingsUpsertValidateAndSubmit(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- jsonData string
- encryption bool
- expectedErrors []string
- }{
- // empty (plain)
- {"{}", false, nil},
- // empty (encrypt)
- {"{}", true, nil},
- // failure - invalid data
- {
- `{"meta": {"appName": ""}, "logs": {"maxDays": -1}}`,
- false,
- []string{"meta", "logs"},
- },
- // success - valid data (plain)
- {
- `{"meta": {"appName": "test"}, "logs": {"maxDays": 0}}`,
- false,
- nil,
- },
- // success - valid data (encrypt)
- {
- `{"meta": {"appName": "test"}, "logs": {"maxDays": 7}}`,
- true,
- nil,
- },
- }
-
- for i, s := range scenarios {
- if s.encryption {
- os.Setenv(app.EncryptionEnv(), security.RandomString(32))
- } else {
- os.Unsetenv(app.EncryptionEnv())
- }
-
- form := forms.NewSettingsUpsert(app)
-
- // load data
- loadErr := json.Unmarshal([]byte(s.jsonData), form)
- if loadErr != nil {
- t.Errorf("(%d) Failed to load form data: %v", i, loadErr)
- continue
- }
-
- interceptorCalls := 0
- interceptor := func(next forms.InterceptorNextFunc[*settings.Settings]) forms.InterceptorNextFunc[*settings.Settings] {
- return func(s *settings.Settings) error {
- interceptorCalls++
- return next(s)
- }
- }
-
- // parse errors
- result := form.Submit(interceptor)
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Errorf("(%d) Failed to parse errors %v", i, result)
- continue
- }
-
- // check interceptor calls
- expectInterceptorCall := 1
- if len(s.expectedErrors) > 0 {
- expectInterceptorCall = 0
- }
- if interceptorCalls != expectInterceptorCall {
- t.Errorf("(%d) Expected interceptor to be called %d, got %d", i, expectInterceptorCall, interceptorCalls)
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("(%d) Expected error keys %v, got %v", i, s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("(%d) Missing expected error key %q in %v", i, k, errs)
- }
- }
-
- if len(s.expectedErrors) > 0 {
- continue
- }
-
- formSettings, _ := json.Marshal(form.Settings)
- appSettings, _ := json.Marshal(app.Settings())
-
- if string(formSettings) != string(appSettings) {
- t.Errorf("Expected app settings \n%s, got \n%s", string(appSettings), string(formSettings))
- }
- }
-}
-
-func TestSettingsUpsertSubmitInterceptors(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- form := forms.NewSettingsUpsert(app)
- form.Meta.AppName = "test_new"
-
- testErr := errors.New("test_error")
-
- interceptor1Called := false
- interceptor1 := func(next forms.InterceptorNextFunc[*settings.Settings]) forms.InterceptorNextFunc[*settings.Settings] {
- return func(s *settings.Settings) error {
- interceptor1Called = true
- return next(s)
- }
- }
-
- interceptor2Called := false
- interceptor2 := func(next forms.InterceptorNextFunc[*settings.Settings]) forms.InterceptorNextFunc[*settings.Settings] {
- return func(s *settings.Settings) error {
- interceptor2Called = true
- return testErr
- }
- }
-
- submitErr := form.Submit(interceptor1, interceptor2)
- if submitErr != testErr {
- t.Fatalf("Expected submitError %v, got %v", testErr, submitErr)
- }
-
- if !interceptor1Called {
- t.Fatalf("Expected interceptor1 to be called")
- }
-
- if !interceptor2Called {
- t.Fatalf("Expected interceptor2 to be called")
- }
-}
diff --git a/forms/test_email_send.go b/forms/test_email_send.go
index 5dd902e4..5c1d51e7 100644
--- a/forms/test_email_send.go
+++ b/forms/test_email_send.go
@@ -1,26 +1,29 @@
package forms
import (
+ "errors"
+
validation "github.com/go-ozzo/ozzo-validation/v4"
"github.com/go-ozzo/ozzo-validation/v4/is"
"github.com/pocketbase/pocketbase/core"
"github.com/pocketbase/pocketbase/mails"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
)
const (
- templateVerification = "verification"
- templatePasswordReset = "password-reset"
- templateEmailChange = "email-change"
+ TestTemplateVerification = "verification"
+ TestTemplatePasswordReset = "password-reset"
+ TestTemplateEmailChange = "email-change"
+ TestTemplateOTP = "otp"
+ TestTemplateAuthAlert = "login-alert"
)
// TestEmailSend is a email template test request form.
type TestEmailSend struct {
app core.App
- Template string `form:"template" json:"template"`
- Email string `form:"email" json:"email"`
+ Email string `form:"email" json:"email"`
+ Template string `form:"template" json:"template"`
+ Collection string `form:"collection" json:"collection"` // optional, falls back to _superusers
}
// NewTestEmailSend creates and initializes new TestEmailSend form.
@@ -31,6 +34,11 @@ func NewTestEmailSend(app core.App) *TestEmailSend {
// Validate makes the form validatable by implementing [validation.Validatable] interface.
func (form *TestEmailSend) Validate() error {
return validation.ValidateStruct(form,
+ validation.Field(
+ &form.Collection,
+ validation.Length(1, 255),
+ validation.By(form.checkAuthCollection),
+ ),
validation.Field(
&form.Email,
validation.Required,
@@ -40,38 +48,69 @@ func (form *TestEmailSend) Validate() error {
validation.Field(
&form.Template,
validation.Required,
- validation.In(templateVerification, templatePasswordReset, templateEmailChange),
+ validation.In(
+ TestTemplateVerification,
+ TestTemplatePasswordReset,
+ TestTemplateEmailChange,
+ TestTemplateOTP,
+ TestTemplateAuthAlert,
+ ),
),
)
}
+func (form *TestEmailSend) checkAuthCollection(value any) error {
+ v, _ := value.(string)
+ if v == "" {
+ return nil // nothing to check
+ }
+
+ c, _ := form.app.FindCollectionByNameOrId(v)
+ if c == nil || !c.IsAuth() {
+ return validation.NewError("validation_invalid_auth_collection", "Must be a valid auth collection id or name.")
+ }
+
+ return nil
+}
+
// Submit validates and sends a test email to the form.Email address.
func (form *TestEmailSend) Submit() error {
if err := form.Validate(); err != nil {
return err
}
- // create a test auth record
- collection := &models.Collection{
- BaseModel: models.BaseModel{Id: "__pb_test_collection_id__"},
- Name: "__pb_test_collection_name__",
- Type: models.CollectionTypeAuth,
+ collectionIdOrName := form.Collection
+ if collectionIdOrName == "" {
+ collectionIdOrName = core.CollectionNameSuperusers
}
- record := models.NewRecord(collection)
- record.Id = "__pb_test_id__"
- record.Set(schema.FieldNameUsername, "pb_test")
- record.Set(schema.FieldNameEmail, form.Email)
+ collection, err := form.app.FindCollectionByNameOrId(collectionIdOrName)
+ if err != nil {
+ return err
+ }
+
+ record := core.NewRecord(collection)
+ for _, field := range collection.Fields {
+ if field.GetHidden() {
+ continue
+ }
+ record.Set(field.GetName(), "__pb_test_"+field.GetName()+"__")
+ }
record.RefreshTokenKey()
+ record.SetEmail(form.Email)
switch form.Template {
- case templateVerification:
+ case TestTemplateVerification:
return mails.SendRecordVerification(form.app, record)
- case templatePasswordReset:
+ case TestTemplatePasswordReset:
return mails.SendRecordPasswordReset(form.app, record)
- case templateEmailChange:
+ case TestTemplateEmailChange:
return mails.SendRecordChangeEmail(form.app, record, form.Email)
+ case TestTemplateOTP:
+ return mails.SendRecordOTP(form.app, record, "OTP_ID", "123456")
+ case TestTemplateAuthAlert:
+ return mails.SendRecordAuthAlert(form.app, record)
+ default:
+ return errors.New("unknown template " + form.Template)
}
-
- return nil
}
diff --git a/forms/test_email_send_test.go b/forms/test_email_send_test.go
index 4bae5ee3..0d58595b 100644
--- a/forms/test_email_send_test.go
+++ b/forms/test_email_send_test.go
@@ -1,6 +1,7 @@
package forms_test
import (
+ "fmt"
"strings"
"testing"
@@ -15,43 +16,46 @@ func TestEmailSendValidateAndSubmit(t *testing.T) {
scenarios := []struct {
template string
email string
+ collection string
expectedErrors []string
}{
- {"", "", []string{"template", "email"}},
- {"invalid", "test@example.com", []string{"template"}},
- {"verification", "invalid", []string{"email"}},
- {"verification", "test@example.com", nil},
- {"password-reset", "test@example.com", nil},
- {"email-change", "test@example.com", nil},
+ {"", "", "", []string{"template", "email"}},
+ {"invalid", "test@example.com", "", []string{"template"}},
+ {forms.TestTemplateVerification, "invalid", "", []string{"email"}},
+ {forms.TestTemplateVerification, "test@example.com", "invalid", []string{"collection"}},
+ {forms.TestTemplateVerification, "test@example.com", "demo1", []string{"collection"}},
+ {forms.TestTemplateVerification, "test@example.com", "users", nil},
+ {forms.TestTemplatePasswordReset, "test@example.com", "", nil},
+ {forms.TestTemplateEmailChange, "test@example.com", "", nil},
+ {forms.TestTemplateOTP, "test@example.com", "", nil},
+ {forms.TestTemplateAuthAlert, "test@example.com", "", nil},
}
for i, s := range scenarios {
- func() {
+ t.Run(fmt.Sprintf("%d_%s", i, s.template), func(t *testing.T) {
app, _ := tests.NewTestApp()
defer app.Cleanup()
form := forms.NewTestEmailSend(app)
form.Email = s.email
form.Template = s.template
+ form.Collection = s.collection
result := form.Submit()
// parse errors
errs, ok := result.(validation.Errors)
if !ok && result != nil {
- t.Errorf("(%d) Failed to parse errors %v", i, result)
- return
+ t.Fatalf("Failed to parse errors %v", result)
}
// check errors
if len(errs) > len(s.expectedErrors) {
- t.Errorf("(%d) Expected error keys %v, got %v", i, s.expectedErrors, errs)
- return
+ t.Fatalf("Expected error keys %v, got %v", s.expectedErrors, errs)
}
for _, k := range s.expectedErrors {
if _, ok := errs[k]; !ok {
- t.Errorf("(%d) Missing expected error key %q in %v", i, k, errs)
- return
+ t.Fatalf("Missing expected error key %q in %v", k, errs)
}
}
@@ -60,24 +64,33 @@ func TestEmailSendValidateAndSubmit(t *testing.T) {
expectedEmails = 0
}
- if app.TestMailer.TotalSend != expectedEmails {
- t.Errorf("(%d) Expected %d email(s) to be sent, got %d", i, expectedEmails, app.TestMailer.TotalSend)
+ if app.TestMailer.TotalSend() != expectedEmails {
+ t.Fatalf("Expected %d email(s) to be sent, got %d", expectedEmails, app.TestMailer.TotalSend())
}
if len(s.expectedErrors) > 0 {
return
}
- expectedContent := "Verify"
- if s.template == "password-reset" {
+ var expectedContent string
+ switch s.template {
+ case forms.TestTemplatePasswordReset:
expectedContent = "Reset password"
- } else if s.template == "email-change" {
+ case forms.TestTemplateEmailChange:
expectedContent = "Confirm new email"
+ case forms.TestTemplateVerification:
+ expectedContent = "Verify"
+ case forms.TestTemplateOTP:
+ expectedContent = "one-time password"
+ case forms.TestTemplateAuthAlert:
+ expectedContent = "from a new location"
+ default:
+ expectedContent = "__UNKNOWN_TEMPLATE__"
}
- if !strings.Contains(app.TestMailer.LastMessage.HTML, expectedContent) {
- t.Errorf("(%d) Expected the email to contains %s, got \n%v", i, expectedContent, app.TestMailer.LastMessage.HTML)
+ if !strings.Contains(app.TestMailer.LastMessage().HTML, expectedContent) {
+ t.Errorf("Expected the email to contains %q, got\n%v", expectedContent, app.TestMailer.LastMessage().HTML)
}
- }()
+ })
}
}
diff --git a/forms/test_s3_filesystem.go b/forms/test_s3_filesystem.go
index c2e26e59..c39c59ed 100644
--- a/forms/test_s3_filesystem.go
+++ b/forms/test_s3_filesystem.go
@@ -6,7 +6,6 @@ import (
validation "github.com/go-ozzo/ozzo-validation/v4"
"github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/models/settings"
"github.com/pocketbase/pocketbase/tools/filesystem"
"github.com/pocketbase/pocketbase/tools/security"
)
@@ -46,7 +45,7 @@ func (form *TestS3Filesystem) Submit() error {
return err
}
- var s3Config settings.S3Config
+ var s3Config core.S3Config
if form.Filesystem == s3FilesystemBackups {
s3Config = form.app.Settings().Backups.S3
diff --git a/forms/test_s3_filesystem_test.go b/forms/test_s3_filesystem_test.go
index 71453705..391cef7d 100644
--- a/forms/test_s3_filesystem_test.go
+++ b/forms/test_s3_filesystem_test.go
@@ -11,9 +11,6 @@ import (
func TestS3FilesystemValidate(t *testing.T) {
t.Parallel()
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
scenarios := []struct {
name string
filesystem string
@@ -42,28 +39,31 @@ func TestS3FilesystemValidate(t *testing.T) {
}
for _, s := range scenarios {
- form := forms.NewTestS3Filesystem(app)
- form.Filesystem = s.filesystem
+ t.Run(s.name, func(t *testing.T) {
+ app, _ := tests.NewTestApp()
+ defer app.Cleanup()
- result := form.Validate()
+ form := forms.NewTestS3Filesystem(app)
+ form.Filesystem = s.filesystem
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Errorf("[%s] Failed to parse errors %v", s.name, result)
- continue
- }
+ result := form.Validate()
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("[%s] Expected error keys %v, got %v", s.name, s.expectedErrors, errs)
- continue
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("[%s] Missing expected error key %q in %v", s.name, k, errs)
+ // parse errors
+ errs, ok := result.(validation.Errors)
+ if !ok && result != nil {
+ t.Fatalf("Failed to parse errors %v", result)
}
- }
+
+ // check errors
+ if len(errs) > len(s.expectedErrors) {
+ t.Fatalf("Expected error keys %v, got %v", s.expectedErrors, errs)
+ }
+ for _, k := range s.expectedErrors {
+ if _, ok := errs[k]; !ok {
+ t.Fatalf("Missing expected error key %q in %v", k, errs)
+ }
+ }
+ })
}
}
diff --git a/forms/validators/file_test.go b/forms/validators/file_test.go
deleted file mode 100644
index 07b5ec98..00000000
--- a/forms/validators/file_test.go
+++ /dev/null
@@ -1,97 +0,0 @@
-package validators_test
-
-import (
- "net/http"
- "net/http/httptest"
- "testing"
-
- "github.com/pocketbase/pocketbase/forms/validators"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/filesystem"
- "github.com/pocketbase/pocketbase/tools/rest"
-)
-
-func TestUploadedFileSize(t *testing.T) {
- t.Parallel()
-
- data, mp, err := tests.MockMultipartData(nil, "test")
- if err != nil {
- t.Fatal(err)
- }
-
- req := httptest.NewRequest(http.MethodPost, "/", data)
- req.Header.Add("Content-Type", mp.FormDataContentType())
-
- files, err := rest.FindUploadedFiles(req, "test")
- if err != nil {
- t.Fatal(err)
- }
-
- if len(files) != 1 {
- t.Fatalf("Expected one test file, got %d", len(files))
- }
-
- scenarios := []struct {
- maxBytes int
- file *filesystem.File
- expectError bool
- }{
- {0, nil, false},
- {4, nil, false},
- {3, files[0], true}, // all test files have "test" as content
- {4, files[0], false},
- {5, files[0], false},
- }
-
- for i, s := range scenarios {
- err := validators.UploadedFileSize(s.maxBytes)(s.file)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
- }
-}
-
-func TestUploadedFileMimeType(t *testing.T) {
- t.Parallel()
-
- data, mp, err := tests.MockMultipartData(nil, "test")
- if err != nil {
- t.Fatal(err)
- }
-
- req := httptest.NewRequest(http.MethodPost, "/", data)
- req.Header.Add("Content-Type", mp.FormDataContentType())
-
- files, err := rest.FindUploadedFiles(req, "test")
- if err != nil {
- t.Fatal(err)
- }
-
- if len(files) != 1 {
- t.Fatalf("Expected one test file, got %d", len(files))
- }
-
- scenarios := []struct {
- types []string
- file *filesystem.File
- expectError bool
- }{
- {nil, nil, false},
- {[]string{"image/jpeg"}, nil, false},
- {[]string{}, files[0], true},
- {[]string{"image/jpeg"}, files[0], true},
- // test files are detected as "text/plain; charset=utf-8" content type
- {[]string{"image/jpeg", "text/plain; charset=utf-8"}, files[0], false},
- }
-
- for i, s := range scenarios {
- err := validators.UploadedFileMimeType(s.types)(s.file)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
- }
-}
diff --git a/forms/validators/model.go b/forms/validators/model.go
deleted file mode 100644
index 035c1239..00000000
--- a/forms/validators/model.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package validators
-
-import (
- "database/sql"
- "errors"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
-)
-
-// UniqueId checks whether the provided model id already exists.
-//
-// Example:
-//
-// validation.Field(&form.Id, validation.By(validators.UniqueId(form.dao, tableName)))
-func UniqueId(dao *daos.Dao, tableName string) validation.RuleFunc {
- return func(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil // nothing to check
- }
-
- var foundId string
-
- err := dao.DB().
- Select("id").
- From(tableName).
- Where(dbx.HashExp{"id": v}).
- Limit(1).
- Row(&foundId)
-
- if (err != nil && !errors.Is(err, sql.ErrNoRows)) || foundId != "" {
- return validation.NewError("validation_invalid_id", "The model id is invalid or already exists.")
- }
-
- return nil
- }
-}
diff --git a/forms/validators/model_test.go b/forms/validators/model_test.go
deleted file mode 100644
index f2759ebe..00000000
--- a/forms/validators/model_test.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package validators_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/forms/validators"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestUniqueId(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- scenarios := []struct {
- id string
- tableName string
- expectError bool
- }{
- {"", "", false},
- {"test", "", true},
- {"wsmn24bux7wo113", "_collections", true},
- {"test_unique_id", "unknown_table", true},
- {"test_unique_id", "_collections", false},
- }
-
- for i, s := range scenarios {
- err := validators.UniqueId(app.Dao(), s.tableName)(s.id)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
- }
-}
diff --git a/forms/validators/record_data.go b/forms/validators/record_data.go
deleted file mode 100644
index 9c4232f8..00000000
--- a/forms/validators/record_data.go
+++ /dev/null
@@ -1,393 +0,0 @@
-package validators
-
-import (
- "fmt"
- "net/url"
- "regexp"
- "strings"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/filesystem"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-var requiredErr = validation.NewError("validation_required", "Missing required value")
-
-// NewRecordDataValidator creates new [models.Record] data validator
-// using the provided record constraints and schema.
-//
-// Example:
-//
-// validator := NewRecordDataValidator(app.Dao(), record, nil)
-// err := validator.Validate(map[string]any{"test":123})
-func NewRecordDataValidator(
- dao *daos.Dao,
- record *models.Record,
- uploadedFiles map[string][]*filesystem.File,
-) *RecordDataValidator {
- return &RecordDataValidator{
- dao: dao,
- record: record,
- uploadedFiles: uploadedFiles,
- }
-}
-
-// RecordDataValidator defines a model.Record data validator
-// using the provided record constraints and schema.
-type RecordDataValidator struct {
- dao *daos.Dao
- record *models.Record
- uploadedFiles map[string][]*filesystem.File
-}
-
-// Validate validates the provided `data` by checking it against
-// the validator record constraints and schema.
-func (validator *RecordDataValidator) Validate(data map[string]any) error {
- keyedSchema := validator.record.Collection().Schema.AsMap()
- if len(keyedSchema) == 0 {
- return nil // no fields to check
- }
-
- if len(data) == 0 {
- return validation.NewError("validation_empty_data", "No data to validate")
- }
-
- errs := validation.Errors{}
-
- // check for unknown fields
- for key := range data {
- if _, ok := keyedSchema[key]; !ok {
- errs[key] = validation.NewError("validation_unknown_field", "Unknown field")
- }
- }
- if len(errs) > 0 {
- return errs
- }
-
- for key, field := range keyedSchema {
- // normalize value to emulate the same behavior
- // when fetching or persisting the record model
- value := field.PrepareValue(data[key])
-
- // check required constraint
- if field.Required && validation.Required.Validate(value) != nil {
- errs[key] = requiredErr
- continue
- }
-
- // validate field value by its field type
- if err := validator.checkFieldValue(field, value); err != nil {
- errs[key] = err
- continue
- }
- }
-
- if len(errs) == 0 {
- return nil
- }
-
- return errs
-}
-
-func (validator *RecordDataValidator) checkFieldValue(field *schema.SchemaField, value any) error {
- switch field.Type {
- case schema.FieldTypeText:
- return validator.checkTextValue(field, value)
- case schema.FieldTypeNumber:
- return validator.checkNumberValue(field, value)
- case schema.FieldTypeBool:
- return validator.checkBoolValue(field, value)
- case schema.FieldTypeEmail:
- return validator.checkEmailValue(field, value)
- case schema.FieldTypeUrl:
- return validator.checkUrlValue(field, value)
- case schema.FieldTypeEditor:
- return validator.checkEditorValue(field, value)
- case schema.FieldTypeDate:
- return validator.checkDateValue(field, value)
- case schema.FieldTypeSelect:
- return validator.checkSelectValue(field, value)
- case schema.FieldTypeJson:
- return validator.checkJsonValue(field, value)
- case schema.FieldTypeFile:
- return validator.checkFileValue(field, value)
- case schema.FieldTypeRelation:
- return validator.checkRelationValue(field, value)
- }
-
- return nil
-}
-
-func (validator *RecordDataValidator) checkTextValue(field *schema.SchemaField, value any) error {
- val, _ := value.(string)
- if val == "" {
- return nil // nothing to check (skip zero-defaults)
- }
-
- options, _ := field.Options.(*schema.TextOptions)
-
- // note: casted to []rune to count multi-byte chars as one
- length := len([]rune(val))
-
- if options.Min != nil && length < *options.Min {
- return validation.NewError("validation_min_text_constraint", fmt.Sprintf("Must be at least %d character(s)", *options.Min))
- }
-
- if options.Max != nil && length > *options.Max {
- return validation.NewError("validation_max_text_constraint", fmt.Sprintf("Must be less than %d character(s)", *options.Max))
- }
-
- if options.Pattern != "" {
- match, _ := regexp.MatchString(options.Pattern, val)
- if !match {
- return validation.NewError("validation_invalid_format", "Invalid value format")
- }
- }
-
- return nil
-}
-
-func (validator *RecordDataValidator) checkNumberValue(field *schema.SchemaField, value any) error {
- val, _ := value.(float64)
- if val == 0 {
- return nil // nothing to check (skip zero-defaults)
- }
-
- options, _ := field.Options.(*schema.NumberOptions)
-
- if options.NoDecimal && val != float64(int64(val)) {
- return validation.NewError("validation_no_decimal_constraint", "Decimal numbers are not allowed")
- }
-
- if options.Min != nil && val < *options.Min {
- return validation.NewError("validation_min_number_constraint", fmt.Sprintf("Must be larger than %f", *options.Min))
- }
-
- if options.Max != nil && val > *options.Max {
- return validation.NewError("validation_max_number_constraint", fmt.Sprintf("Must be less than %f", *options.Max))
- }
-
- return nil
-}
-
-func (validator *RecordDataValidator) checkBoolValue(field *schema.SchemaField, value any) error {
- return nil
-}
-
-func (validator *RecordDataValidator) checkEmailValue(field *schema.SchemaField, value any) error {
- val, _ := value.(string)
- if val == "" {
- return nil // nothing to check
- }
-
- if is.EmailFormat.Validate(val) != nil {
- return validation.NewError("validation_invalid_email", "Must be a valid email")
- }
-
- options, _ := field.Options.(*schema.EmailOptions)
- domain := val[strings.LastIndex(val, "@")+1:]
-
- // only domains check
- if len(options.OnlyDomains) > 0 && !list.ExistInSlice(domain, options.OnlyDomains) {
- return validation.NewError("validation_email_domain_not_allowed", "Email domain is not allowed")
- }
-
- // except domains check
- if len(options.ExceptDomains) > 0 && list.ExistInSlice(domain, options.ExceptDomains) {
- return validation.NewError("validation_email_domain_not_allowed", "Email domain is not allowed")
- }
-
- return nil
-}
-
-func (validator *RecordDataValidator) checkUrlValue(field *schema.SchemaField, value any) error {
- val, _ := value.(string)
- if val == "" {
- return nil // nothing to check
- }
-
- if is.URL.Validate(val) != nil {
- return validation.NewError("validation_invalid_url", "Must be a valid url")
- }
-
- options, _ := field.Options.(*schema.UrlOptions)
-
- // extract host/domain
- u, _ := url.Parse(val)
- host := u.Host
-
- // only domains check
- if len(options.OnlyDomains) > 0 && !list.ExistInSlice(host, options.OnlyDomains) {
- return validation.NewError("validation_url_domain_not_allowed", "Url domain is not allowed")
- }
-
- // except domains check
- if len(options.ExceptDomains) > 0 && list.ExistInSlice(host, options.ExceptDomains) {
- return validation.NewError("validation_url_domain_not_allowed", "Url domain is not allowed")
- }
-
- return nil
-}
-
-func (validator *RecordDataValidator) checkEditorValue(field *schema.SchemaField, value any) error {
- return nil
-}
-
-func (validator *RecordDataValidator) checkDateValue(field *schema.SchemaField, value any) error {
- val, _ := value.(types.DateTime)
- if val.IsZero() {
- if field.Required {
- return requiredErr
- }
- return nil // nothing to check
- }
-
- options, _ := field.Options.(*schema.DateOptions)
-
- if !options.Min.IsZero() {
- if err := validation.Min(options.Min.Time()).Validate(val.Time()); err != nil {
- return err
- }
- }
-
- if !options.Max.IsZero() {
- if err := validation.Max(options.Max.Time()).Validate(val.Time()); err != nil {
- return err
- }
- }
-
- return nil
-}
-
-func (validator *RecordDataValidator) checkSelectValue(field *schema.SchemaField, value any) error {
- normalizedVal := list.ToUniqueStringSlice(value)
- if len(normalizedVal) == 0 {
- if field.Required {
- return requiredErr
- }
- return nil // nothing to check
- }
-
- options, _ := field.Options.(*schema.SelectOptions)
-
- // check max selected items
- if len(normalizedVal) > options.MaxSelect {
- return validation.NewError("validation_too_many_values", fmt.Sprintf("Select no more than %d", options.MaxSelect))
- }
-
- // check against the allowed values
- for _, val := range normalizedVal {
- if !list.ExistInSlice(val, options.Values) {
- return validation.NewError("validation_invalid_value", "Invalid value "+val)
- }
- }
-
- return nil
-}
-
-var emptyJsonValues = []string{
- "null", `""`, "[]", "{}",
-}
-
-func (validator *RecordDataValidator) checkJsonValue(field *schema.SchemaField, value any) error {
- if is.JSON.Validate(value) != nil {
- return validation.NewError("validation_invalid_json", "Must be a valid json value")
- }
-
- raw, _ := types.ParseJsonRaw(value)
-
- options, _ := field.Options.(*schema.JsonOptions)
-
- if len(raw) > options.MaxSize {
- return validation.NewError("validation_json_size_limit", fmt.Sprintf("The maximum allowed JSON size is %v bytes", options.MaxSize))
- }
-
- rawStr := strings.TrimSpace(raw.String())
- if field.Required && list.ExistInSlice(rawStr, emptyJsonValues) {
- return requiredErr
- }
-
- return nil
-}
-
-func (validator *RecordDataValidator) checkFileValue(field *schema.SchemaField, value any) error {
- names := list.ToUniqueStringSlice(value)
- if len(names) == 0 && field.Required {
- return requiredErr
- }
-
- options, _ := field.Options.(*schema.FileOptions)
-
- if len(names) > options.MaxSelect {
- return validation.NewError("validation_too_many_values", fmt.Sprintf("Select no more than %d", options.MaxSelect))
- }
-
- // extract the uploaded files
- files := make([]*filesystem.File, 0, len(validator.uploadedFiles[field.Name]))
- for _, file := range validator.uploadedFiles[field.Name] {
- if list.ExistInSlice(file.Name, names) {
- files = append(files, file)
- }
- }
-
- for _, file := range files {
- // check size
- if err := UploadedFileSize(options.MaxSize)(file); err != nil {
- return err
- }
-
- // check type
- if len(options.MimeTypes) > 0 {
- if err := UploadedFileMimeType(options.MimeTypes)(file); err != nil {
- return err
- }
- }
- }
-
- return nil
-}
-
-func (validator *RecordDataValidator) checkRelationValue(field *schema.SchemaField, value any) error {
- ids := list.ToUniqueStringSlice(value)
- if len(ids) == 0 {
- if field.Required {
- return requiredErr
- }
- return nil // nothing to check
- }
-
- options, _ := field.Options.(*schema.RelationOptions)
-
- if options.MinSelect != nil && len(ids) < *options.MinSelect {
- return validation.NewError("validation_not_enough_values", fmt.Sprintf("Select at least %d", *options.MinSelect))
- }
-
- if options.MaxSelect != nil && len(ids) > *options.MaxSelect {
- return validation.NewError("validation_too_many_values", fmt.Sprintf("Select no more than %d", *options.MaxSelect))
- }
-
- // check if the related records exist
- // ---
- relCollection, err := validator.dao.FindCollectionByNameOrId(options.CollectionId)
- if err != nil {
- return validation.NewError("validation_missing_rel_collection", "Relation connection is missing or cannot be accessed")
- }
-
- var total int
- validator.dao.RecordQuery(relCollection).
- Select("count(*)").
- AndWhere(dbx.In("id", list.ToInterfaceSlice(ids)...)).
- Row(&total)
- if total != len(ids) {
- return validation.NewError("validation_missing_rel_records", "Failed to find all relation records with the provided ids")
- }
- // ---
-
- return nil
-}
diff --git a/forms/validators/record_data_test.go b/forms/validators/record_data_test.go
deleted file mode 100644
index 9dc58692..00000000
--- a/forms/validators/record_data_test.go
+++ /dev/null
@@ -1,1322 +0,0 @@
-package validators_test
-
-import (
- "fmt"
- "net/http"
- "net/http/httptest"
- "strings"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/forms/validators"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tests"
- "github.com/pocketbase/pocketbase/tools/filesystem"
- "github.com/pocketbase/pocketbase/tools/rest"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-type testDataFieldScenario struct {
- name string
- data map[string]any
- files map[string][]*filesystem.File
- expectedErrors []string
-}
-
-func TestRecordDataValidatorEmptyAndUnknown(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- collection, _ := app.Dao().FindCollectionByNameOrId("demo2")
- record := models.NewRecord(collection)
- validator := validators.NewRecordDataValidator(app.Dao(), record, nil)
-
- emptyErr := validator.Validate(map[string]any{})
- if emptyErr == nil {
- t.Fatal("Expected error for empty data, got nil")
- }
-
- unknownErr := validator.Validate(map[string]any{"unknown": 123})
- if unknownErr == nil {
- t.Fatal("Expected error for unknown data, got nil")
- }
-}
-
-func TestRecordDataValidatorValidateText(t *testing.T) {
- t.Parallel()
-
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // create new test collection
- collection := &models.Collection{}
- collection.Name = "validate_test"
- min := 3
- max := 10
- pattern := `^\w+$`
- collection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Required: true,
- Type: schema.FieldTypeText,
- Options: &schema.TextOptions{
- Pattern: pattern,
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Unique: true,
- Type: schema.FieldTypeText,
- Options: &schema.TextOptions{
- Min: &min,
- Max: &max,
- },
- },
- )
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
-
- // create dummy record (used for the unique check)
- dummy := models.NewRecord(collection)
- dummy.Set("field1", "test")
- dummy.Set("field2", "test")
- dummy.Set("field3", "test")
- if err := app.Dao().SaveRecord(dummy); err != nil {
- t.Fatal(err)
- }
-
- scenarios := []testDataFieldScenario{
- {
- "(text) check required constraint",
- map[string]any{
- "field1": nil,
- "field2": nil,
- "field3": nil,
- },
- nil,
- []string{"field2"},
- },
- {
- "(text) check min constraint",
- map[string]any{
- "field1": "test",
- "field2": "test",
- "field3": strings.Repeat("a", min-1),
- },
- nil,
- []string{"field3"},
- },
- {
- "(text) check min constraint with multi-bytes char",
- map[string]any{
- "field1": "test",
- "field2": "test",
- "field3": "𝌆", // 4 bytes should be counted as 1 char
- },
- nil,
- []string{"field3"},
- },
- {
- "(text) check max constraint",
- map[string]any{
- "field1": "test",
- "field2": "test",
- "field3": strings.Repeat("a", max+1),
- },
- nil,
- []string{"field3"},
- },
- {
- "(text) check max constraint with multi-bytes chars",
- map[string]any{
- "field1": "test",
- "field2": "test",
- "field3": strings.Repeat("𝌆", max), // shouldn't exceed the max limit even though max*4bytes chars are used
- },
- nil,
- []string{},
- },
- {
- "(text) check pattern constraint",
- map[string]any{
- "field1": nil,
- "field2": "test!",
- "field3": "test",
- },
- nil,
- []string{"field2"},
- },
- {
- "(text) valid data (only required)",
- map[string]any{
- "field2": "test",
- },
- nil,
- []string{},
- },
- {
- "(text) valid data (all)",
- map[string]any{
- "field1": "test",
- "field2": 12345, // test value cast
- "field3": "test2",
- },
- nil,
- []string{},
- },
- }
-
- checkValidatorErrors(t, app.Dao(), models.NewRecord(collection), scenarios)
-}
-
-func TestRecordDataValidatorValidateNumber(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // create new test collection
- collection := &models.Collection{}
- collection.Name = "validate_test"
- min := 2.0
- max := 150.0
- collection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeNumber,
- },
- &schema.SchemaField{
- Name: "field2",
- Required: true,
- Type: schema.FieldTypeNumber,
- },
- &schema.SchemaField{
- Name: "field3",
- Unique: true,
- Type: schema.FieldTypeNumber,
- Options: &schema.NumberOptions{
- Min: &min,
- Max: &max,
- },
- },
- &schema.SchemaField{
- Name: "field4",
- Type: schema.FieldTypeNumber,
- Options: &schema.NumberOptions{
- NoDecimal: true,
- },
- },
- )
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
-
- // create dummy record (used for the unique check)
- dummy := models.NewRecord(collection)
- dummy.Set("field1", 123)
- dummy.Set("field2", 123)
- dummy.Set("field3", 123)
- if err := app.Dao().SaveRecord(dummy); err != nil {
- t.Fatal(err)
- }
-
- scenarios := []testDataFieldScenario{
- {
- "(number) check required constraint",
- map[string]any{
- "field1": nil,
- "field2": nil,
- "field3": nil,
- "field4": nil,
- },
- nil,
- []string{"field2"},
- },
- {
- "(number) check required constraint + casting",
- map[string]any{
- "field1": "invalid",
- "field2": "invalid",
- "field3": "invalid",
- "field4": "invalid",
- },
- nil,
- []string{"field2"},
- },
- {
- "(number) check min constraint",
- map[string]any{
- "field1": 0.5,
- "field2": 1,
- "field3": min - 0.5,
- },
- nil,
- []string{"field3"},
- },
- {
- "(number) check min with zero-default",
- map[string]any{
- "field2": 1,
- "field3": 0,
- },
- nil,
- []string{},
- },
- {
- "(number) check max constraint",
- map[string]any{
- "field1": nil,
- "field2": max,
- "field3": max + 0.5,
- },
- nil,
- []string{"field3"},
- },
- {
- "(number) check NoDecimal",
- map[string]any{
- "field2": 1,
- "field4": 456.789,
- },
- nil,
- []string{"field4"},
- },
- {
- "(number) valid data (only required)",
- map[string]any{
- "field2": 1,
- },
- nil,
- []string{},
- },
- {
- "(number) valid data (all)",
- map[string]any{
- "field1": nil,
- "field2": 123, // test value cast
- "field3": max,
- "field4": 456,
- },
- nil,
- []string{},
- },
- }
-
- checkValidatorErrors(t, app.Dao(), models.NewRecord(collection), scenarios)
-}
-
-func TestRecordDataValidatorValidateBool(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // create new test collection
- collection := &models.Collection{}
- collection.Name = "validate_test"
- collection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeBool,
- },
- &schema.SchemaField{
- Name: "field2",
- Required: true,
- Type: schema.FieldTypeBool,
- },
- &schema.SchemaField{
- Name: "field3",
- Unique: true,
- Type: schema.FieldTypeBool,
- Options: &schema.BoolOptions{},
- },
- )
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
-
- // create dummy record (used for the unique check)
- dummy := models.NewRecord(collection)
- dummy.Set("field1", false)
- dummy.Set("field2", true)
- dummy.Set("field3", true)
- if err := app.Dao().SaveRecord(dummy); err != nil {
- t.Fatal(err)
- }
-
- scenarios := []testDataFieldScenario{
- {
- "(bool) check required constraint",
- map[string]any{
- "field1": nil,
- "field2": nil,
- "field3": nil,
- },
- nil,
- []string{"field2"},
- },
- {
- "(bool) check required constraint + casting",
- map[string]any{
- "field1": "invalid",
- "field2": "invalid",
- "field3": "invalid",
- },
- nil,
- []string{"field2"},
- },
- {
- "(bool) valid data (only required)",
- map[string]any{
- "field2": 1,
- },
- nil,
- []string{},
- },
- {
- "(bool) valid data (all)",
- map[string]any{
- "field1": false,
- "field2": true,
- "field3": false,
- },
- nil,
- []string{},
- },
- }
-
- checkValidatorErrors(t, app.Dao(), models.NewRecord(collection), scenarios)
-}
-
-func TestRecordDataValidatorValidateEmail(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // create new test collection
- collection := &models.Collection{}
- collection.Name = "validate_test"
- collection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeEmail,
- },
- &schema.SchemaField{
- Name: "field2",
- Required: true,
- Type: schema.FieldTypeEmail,
- Options: &schema.EmailOptions{
- ExceptDomains: []string{"example.com"},
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Unique: true,
- Type: schema.FieldTypeEmail,
- Options: &schema.EmailOptions{
- OnlyDomains: []string{"example.com"},
- },
- },
- )
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
-
- // create dummy record (used for the unique check)
- dummy := models.NewRecord(collection)
- dummy.Set("field1", "test@demo.com")
- dummy.Set("field2", "test@test.com")
- dummy.Set("field3", "test@example.com")
- if err := app.Dao().SaveRecord(dummy); err != nil {
- t.Fatal(err)
- }
-
- scenarios := []testDataFieldScenario{
- {
- "(email) check required constraint",
- map[string]any{
- "field1": nil,
- "field2": nil,
- "field3": nil,
- },
- nil,
- []string{"field2"},
- },
- {
- "(email) check email format validator",
- map[string]any{
- "field1": "test",
- "field2": "test.com",
- "field3": 123,
- },
- nil,
- []string{"field1", "field2", "field3"},
- },
- {
- "(email) check ExceptDomains constraint",
- map[string]any{
- "field1": "test@example.com",
- "field2": "test@example.com",
- "field3": "test2@example.com",
- },
- nil,
- []string{"field2"},
- },
- {
- "(email) check OnlyDomains constraint",
- map[string]any{
- "field1": "test@test.com",
- "field2": "test@test.com",
- "field3": "test@test.com",
- },
- nil,
- []string{"field3"},
- },
- {
- "(email) valid data (only required)",
- map[string]any{
- "field2": "test@test.com",
- },
- nil,
- []string{},
- },
- {
- "(email) valid data (all)",
- map[string]any{
- "field1": "123@example.com",
- "field2": "test@test.com",
- "field3": "test2@example.com",
- },
- nil,
- []string{},
- },
- }
-
- checkValidatorErrors(t, app.Dao(), models.NewRecord(collection), scenarios)
-}
-
-func TestRecordDataValidatorValidateUrl(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // create new test collection
- collection := &models.Collection{}
- collection.Name = "validate_test"
- collection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeUrl,
- },
- &schema.SchemaField{
- Name: "field2",
- Required: true,
- Type: schema.FieldTypeUrl,
- Options: &schema.UrlOptions{
- ExceptDomains: []string{"example.com"},
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Unique: true,
- Type: schema.FieldTypeUrl,
- Options: &schema.UrlOptions{
- OnlyDomains: []string{"example.com"},
- },
- },
- )
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
-
- // create dummy record (used for the unique check)
- dummy := models.NewRecord(collection)
- dummy.Set("field1", "http://demo.com")
- dummy.Set("field2", "http://test.com")
- dummy.Set("field3", "http://example.com")
- if err := app.Dao().SaveRecord(dummy); err != nil {
- t.Fatal(err)
- }
-
- scenarios := []testDataFieldScenario{
- {
- "(url) check required constraint",
- map[string]any{
- "field1": nil,
- "field2": nil,
- "field3": nil,
- },
- nil,
- []string{"field2"},
- },
- {
- "(url) check url format validator",
- map[string]any{
- "field1": "/abc",
- "field2": "test.com", // valid
- "field3": "test@example.com",
- },
- nil,
- []string{"field1", "field3"},
- },
- {
- "(url) check ExceptDomains constraint",
- map[string]any{
- "field1": "http://example.com",
- "field2": "http://example.com",
- "field3": "https://example.com",
- },
- nil,
- []string{"field2"},
- },
- {
- "(url) check OnlyDomains constraint",
- map[string]any{
- "field1": "http://test.com/abc",
- "field2": "http://test.com/abc",
- "field3": "http://test.com/abc",
- },
- nil,
- []string{"field3"},
- },
- {
- "(url) check subdomains constraint",
- map[string]any{
- "field1": "http://test.test.com",
- "field2": "http://test.example.com",
- "field3": "http://test.example.com",
- },
- nil,
- []string{"field3"},
- },
- {
- "(url) valid data (only required)",
- map[string]any{
- "field2": "http://sub.test.com/abc",
- },
- nil,
- []string{},
- },
- {
- "(url) valid data (all)",
- map[string]any{
- "field1": "http://example.com/123",
- "field2": "http://test.com/",
- "field3": "http://example.com/test2",
- },
- nil,
- []string{},
- },
- }
-
- checkValidatorErrors(t, app.Dao(), models.NewRecord(collection), scenarios)
-}
-
-func TestRecordDataValidatorValidateDate(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // create new test collection
- collection := &models.Collection{}
- collection.Name = "validate_test"
- min, _ := types.ParseDateTime("2022-01-01 01:01:01.123")
- max, _ := types.ParseDateTime("2030-01-01 01:01:01")
- collection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeDate,
- },
- &schema.SchemaField{
- Name: "field2",
- Required: true,
- Type: schema.FieldTypeDate,
- Options: &schema.DateOptions{
- Min: min,
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Unique: true,
- Type: schema.FieldTypeDate,
- Options: &schema.DateOptions{
- Max: max,
- },
- },
- )
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
-
- // create dummy record (used for the unique check)
- dummy := models.NewRecord(collection)
- dummy.Set("field1", "2022-01-01 01:01:01")
- dummy.Set("field2", "2029-01-01 01:01:01.123")
- dummy.Set("field3", "2029-01-01 01:01:01.123")
- if err := app.Dao().SaveRecord(dummy); err != nil {
- t.Fatal(err)
- }
-
- scenarios := []testDataFieldScenario{
- {
- "(date) check required constraint",
- map[string]any{
- "field1": nil,
- "field2": nil,
- "field3": nil,
- },
- nil,
- []string{"field2"},
- },
- {
- "(date) check required constraint + cast",
- map[string]any{
- "field1": "invalid",
- "field2": "invalid",
- "field3": "invalid",
- },
- nil,
- []string{"field2"},
- },
- {
- "(date) check required constraint + zero datetime",
- map[string]any{
- "field1": "January 1, year 1, 00:00:00 UTC",
- "field2": "0001-01-01 00:00:00",
- "field3": "0001-01-01 00:00:00 +0000 UTC",
- },
- nil,
- []string{"field2"},
- },
- {
- "(date) check min date constraint",
- map[string]any{
- "field1": "2021-01-01 01:01:01",
- "field2": "2021-01-01 01:01:01",
- "field3": "2021-01-01 01:01:01",
- },
- nil,
- []string{"field2"},
- },
- {
- "(date) check max date constraint",
- map[string]any{
- "field1": "2030-02-01 01:01:01",
- "field2": "2030-02-01 01:01:01",
- "field3": "2030-02-01 01:01:01",
- },
- nil,
- []string{"field3"},
- },
- {
- "(date) valid data (only required)",
- map[string]any{
- "field2": "2029-01-01 01:01:01",
- },
- nil,
- []string{},
- },
- {
- "(date) valid data (all)",
- map[string]any{
- "field1": "2029-01-01 01:01:01.000",
- "field2": "2029-01-01 01:01:01",
- "field3": "2029-01-01 01:01:01.456",
- },
- nil,
- []string{},
- },
- }
-
- checkValidatorErrors(t, app.Dao(), models.NewRecord(collection), scenarios)
-}
-
-func TestRecordDataValidatorValidateSelect(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // create new test collection
- collection := &models.Collection{}
- collection.Name = "validate_test"
- collection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{
- Values: []string{"1", "a", "b", "c"},
- MaxSelect: 1,
- },
- },
- &schema.SchemaField{
- Name: "field2",
- Required: true,
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{
- Values: []string{"a", "b", "c"},
- MaxSelect: 2,
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Unique: true,
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{
- Values: []string{"a", "b", "c"},
- MaxSelect: 99,
- },
- },
- )
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
-
- // create dummy record (used for the unique check)
- dummy := models.NewRecord(collection)
- dummy.Set("field1", "a")
- dummy.Set("field2", []string{"a", "b"})
- dummy.Set("field3", []string{"a", "b", "c"})
- if err := app.Dao().SaveRecord(dummy); err != nil {
- t.Fatal(err)
- }
-
- scenarios := []testDataFieldScenario{
- {
- "(select) check required constraint",
- map[string]any{
- "field1": nil,
- "field2": nil,
- "field3": nil,
- },
- nil,
- []string{"field2"},
- },
- {
- "(select) check required constraint - empty values",
- map[string]any{
- "field1": "",
- "field2": "",
- "field3": "",
- },
- nil,
- []string{"field2"},
- },
- {
- "(select) check required constraint - multiple select cast",
- map[string]any{
- "field1": "a",
- "field2": "a",
- "field3": "a",
- },
- nil,
- []string{},
- },
- {
- "(select) check Values constraint",
- map[string]any{
- "field1": 1,
- "field2": "d",
- "field3": 123,
- },
- nil,
- []string{"field2", "field3"},
- },
- {
- "(select) check MaxSelect constraint",
- map[string]any{
- "field1": []string{"a", "b"}, // this will be normalized to a single string value
- "field2": []string{"a", "b", "c"},
- "field3": []string{"a", "b", "b", "b"}, // repeating values will be merged
- },
- nil,
- []string{"field2"},
- },
- {
- "(select) valid data - only required fields",
- map[string]any{
- "field2": []string{"a", "b"},
- },
- nil,
- []string{},
- },
- {
- "(select) valid data - all fields with normalizations",
- map[string]any{
- "field1": "a",
- "field2": []string{"a", "b", "b"}, // will be collapsed
- "field3": "b", // will be normalzied to slice
- },
- nil,
- []string{},
- },
- }
-
- checkValidatorErrors(t, app.Dao(), models.NewRecord(collection), scenarios)
-}
-
-func TestRecordDataValidatorValidateJson(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // create new test collection
- collection := &models.Collection{}
- collection.Name = "validate_test"
- collection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeJson,
- Options: &schema.JsonOptions{
- MaxSize: 10,
- },
- },
- &schema.SchemaField{
- Name: "field2",
- Required: true,
- Type: schema.FieldTypeJson,
- Options: &schema.JsonOptions{
- MaxSize: 9999,
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Unique: true,
- Type: schema.FieldTypeJson,
- Options: &schema.JsonOptions{
- MaxSize: 9999,
- },
- },
- )
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
-
- // create dummy record (used for the unique check)
- dummy := models.NewRecord(collection)
- dummy.Set("field1", `{"test":123}`)
- dummy.Set("field2", `{"test":123}`)
- dummy.Set("field3", `{"test":123}`)
- if err := app.Dao().SaveRecord(dummy); err != nil {
- t.Fatal(err)
- }
-
- scenarios := []testDataFieldScenario{
- {
- "(json) check required constraint - nil",
- map[string]any{
- "field1": nil,
- "field2": nil,
- "field3": nil,
- },
- nil,
- []string{"field2"},
- },
- {
- "(json) check required constraint - zero string",
- map[string]any{
- "field1": "",
- "field2": "",
- "field3": "",
- },
- nil,
- []string{"field2"},
- },
- {
- "(json) check required constraint - zero number",
- map[string]any{
- "field1": 0,
- "field2": 0,
- "field3": 0,
- },
- nil,
- []string{},
- },
- {
- "(json) check required constraint - zero slice",
- map[string]any{
- "field1": []string{},
- "field2": []string{},
- "field3": []string{},
- },
- nil,
- []string{"field2"},
- },
- {
- "(json) check required constraint - zero map",
- map[string]any{
- "field1": map[string]string{},
- "field2": map[string]string{},
- "field3": map[string]string{},
- },
- nil,
- []string{"field2"},
- },
- {
- "(json) check MaxSize constraint",
- map[string]any{
- "field1": `"123456789"`, // max 10bytes
- "field2": 123,
- },
- nil,
- []string{"field1"},
- },
- {
- "(json) check json text invalid obj, array and number normalizations",
- map[string]any{
- "field1": `[1 2 3]`,
- "field2": `{a: 123}`,
- "field3": `123.456 abc`,
- },
- nil,
- []string{},
- },
- {
- "(json) check json text reserved literals normalizations",
- map[string]any{
- "field1": `true`,
- "field2": `false`,
- "field3": `null`,
- },
- nil,
- []string{},
- },
- {
- "(json) valid data - only required fields",
- map[string]any{
- "field2": `{"test":123}`,
- },
- nil,
- []string{},
- },
- {
- "(json) valid data - all fields with normalizations",
- map[string]any{
- "field1": `"12345678"`,
- "field2": 123,
- "field3": []string{"a", "b", "c"},
- },
- nil,
- []string{},
- },
- }
-
- checkValidatorErrors(t, app.Dao(), models.NewRecord(collection), scenarios)
-}
-
-func TestRecordDataValidatorValidateFile(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- // create new test collection
- collection := &models.Collection{}
- collection.Name = "validate_test"
- collection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{
- MaxSelect: 1,
- MaxSize: 3,
- },
- },
- &schema.SchemaField{
- Name: "field2",
- Required: true,
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{
- MaxSelect: 2,
- MaxSize: 10,
- MimeTypes: []string{"image/jpeg", "text/plain; charset=utf-8"},
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{
- MaxSelect: 3,
- MaxSize: 10,
- MimeTypes: []string{"image/jpeg"},
- },
- },
- )
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
-
- // stub uploaded files
- data, mp, err := tests.MockMultipartData(nil, "test", "test", "test", "test", "test")
- if err != nil {
- t.Fatal(err)
- }
- req := httptest.NewRequest(http.MethodPost, "/", data)
- req.Header.Add("Content-Type", mp.FormDataContentType())
- testFiles, err := rest.FindUploadedFiles(req, "test")
- if err != nil {
- t.Fatal(err)
- }
-
- scenarios := []testDataFieldScenario{
- {
- "check required constraint - nil",
- map[string]any{
- "field1": nil,
- "field2": nil,
- "field3": nil,
- },
- nil,
- []string{"field2"},
- },
- {
- "check MaxSelect constraint",
- map[string]any{
- "field1": "test1",
- "field2": []string{"test1", testFiles[0].Name, testFiles[3].Name},
- "field3": []string{"test1", "test2", "test3", "test4"},
- },
- map[string][]*filesystem.File{
- "field2": {testFiles[0], testFiles[3]},
- },
- []string{"field2", "field3"},
- },
- {
- "check MaxSize constraint",
- map[string]any{
- "field1": testFiles[0].Name,
- "field2": []string{"test1", testFiles[0].Name},
- "field3": []string{"test1", "test2", "test3"},
- },
- map[string][]*filesystem.File{
- "field1": {testFiles[0]},
- "field2": {testFiles[0]},
- },
- []string{"field1"},
- },
- {
- "check MimeTypes constraint",
- map[string]any{
- "field1": "test1",
- "field2": []string{"test1", testFiles[0].Name},
- "field3": []string{testFiles[1].Name, testFiles[2].Name},
- },
- map[string][]*filesystem.File{
- "field2": {testFiles[0], testFiles[1], testFiles[2]},
- "field3": {testFiles[1], testFiles[2]},
- },
- []string{"field3"},
- },
- {
- "valid data - no new files (just file ids)",
- map[string]any{
- "field1": "test1",
- "field2": []string{"test1", "test2"},
- "field3": []string{"test1", "test2", "test3"},
- },
- nil,
- []string{},
- },
- {
- "valid data - just new files",
- map[string]any{
- "field1": nil,
- "field2": []string{testFiles[0].Name, testFiles[1].Name},
- "field3": nil,
- },
- map[string][]*filesystem.File{
- "field2": {testFiles[0], testFiles[1]},
- },
- []string{},
- },
- {
- "valid data - mixed existing and new files",
- map[string]any{
- "field1": "test1",
- "field2": []string{"test1", testFiles[0].Name},
- "field3": "test1", // will be casted
- },
- map[string][]*filesystem.File{
- "field2": {testFiles[0], testFiles[1], testFiles[2]},
- },
- []string{},
- },
- }
-
- checkValidatorErrors(t, app.Dao(), models.NewRecord(collection), scenarios)
-}
-
-func TestRecordDataValidatorValidateRelation(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- demo, _ := app.Dao().FindCollectionByNameOrId("demo3")
-
- // demo3 rel ids
- relId1 := "mk5fmymtx4wsprk"
- relId2 := "7nwo8tuiatetxdm"
- relId3 := "lcl9d87w22ml6jy"
- relId4 := "1tmknxy2868d869"
-
- // record rel ids from different collections
- diffRelId1 := "0yxhwia2amd8gec"
- diffRelId2 := "llvuca81nly1qls"
-
- // create new test collection
- collection := &models.Collection{}
- collection.Name = "validate_test"
- collection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{
- MaxSelect: types.Pointer(1),
- CollectionId: demo.Id,
- },
- },
- &schema.SchemaField{
- Name: "field2",
- Required: true,
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{
- MaxSelect: types.Pointer(2),
- CollectionId: demo.Id,
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Unique: true,
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{
- MinSelect: types.Pointer(2),
- CollectionId: demo.Id,
- },
- },
- &schema.SchemaField{
- Name: "field4",
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{
- MaxSelect: types.Pointer(3),
- CollectionId: "", // missing or non-existing collection id
- },
- },
- )
- if err := app.Dao().SaveCollection(collection); err != nil {
- t.Fatal(err)
- }
-
- // create dummy record (used for the unique check)
- dummy := models.NewRecord(collection)
- dummy.Set("field1", relId1)
- dummy.Set("field2", []string{relId1, relId2})
- dummy.Set("field3", []string{relId1, relId2, relId3})
- if err := app.Dao().SaveRecord(dummy); err != nil {
- t.Fatal(err)
- }
-
- scenarios := []testDataFieldScenario{
- {
- "check required constraint - nil",
- map[string]any{
- "field1": nil,
- "field2": nil,
- "field3": nil,
- },
- nil,
- []string{"field2"},
- },
- {
- "check required constraint - zero id",
- map[string]any{
- "field1": "",
- "field2": "",
- "field3": "",
- },
- nil,
- []string{"field2"},
- },
- {
- "check min constraint",
- map[string]any{
- "field2": relId2,
- "field3": []string{relId1},
- },
- nil,
- []string{"field3"},
- },
- {
- "check nonexisting collection id",
- map[string]any{
- "field2": relId1,
- "field4": relId1,
- },
- nil,
- []string{"field4"},
- },
- {
- "check MaxSelect constraint",
- map[string]any{
- "field1": []string{relId1, relId2}, // will be normalized to relId1 only
- "field2": []string{relId1, relId2, relId3},
- "field3": []string{relId1, relId2, relId3, relId4},
- },
- nil,
- []string{"field2"},
- },
- {
- "check with ids from different collections",
- map[string]any{
- "field1": diffRelId1,
- "field2": []string{relId2, diffRelId1},
- "field3": []string{diffRelId1, diffRelId2},
- },
- nil,
- []string{"field1", "field2", "field3"},
- },
- {
- "valid data - only required fields",
- map[string]any{
- "field2": []string{relId1, relId2},
- },
- nil,
- []string{},
- },
- {
- "valid data - all fields with normalization",
- map[string]any{
- "field1": []string{relId1, relId2},
- "field2": relId2,
- "field3": []string{relId3, relId2, relId1}, // unique is not triggered because the order is different
- },
- nil,
- []string{},
- },
- }
-
- checkValidatorErrors(t, app.Dao(), models.NewRecord(collection), scenarios)
-}
-
-func checkValidatorErrors(t *testing.T, dao *daos.Dao, record *models.Record, scenarios []testDataFieldScenario) {
- for i, s := range scenarios {
- prefix := s.name
- if prefix == "" {
- prefix = fmt.Sprintf("%d", i)
- }
-
- t.Run(prefix, func(t *testing.T) {
- validator := validators.NewRecordDataValidator(dao, record, s.files)
- result := validator.Validate(s.data)
-
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Fatalf("Failed to parse errors %v", result)
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Fatalf("Expected error keys %v, got %v", s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Fatalf("Missing expected error key %q in %v", k, errs)
- }
- }
- })
- }
-}
diff --git a/forms/validators/string.go b/forms/validators/string.go
deleted file mode 100644
index 00eb997d..00000000
--- a/forms/validators/string.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package validators
-
-import (
- validation "github.com/go-ozzo/ozzo-validation/v4"
-)
-
-// Compare checks whether the validated value matches another string.
-//
-// Example:
-//
-// validation.Field(&form.PasswordConfirm, validation.By(validators.Compare(form.Password)))
-func Compare(valueToCompare string) validation.RuleFunc {
- return func(value any) error {
- v, _ := value.(string)
-
- if v != valueToCompare {
- return validation.NewError("validation_values_mismatch", "Values don't match.")
- }
-
- return nil
- }
-}
diff --git a/forms/validators/string_test.go b/forms/validators/string_test.go
deleted file mode 100644
index b7ea2ff3..00000000
--- a/forms/validators/string_test.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package validators_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/forms/validators"
-)
-
-func TestCompare(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- valA string
- valB string
- expectError bool
- }{
- {"", "", false},
- {"", "456", true},
- {"123", "", true},
- {"123", "456", true},
- {"123", "123", false},
- }
-
- for i, s := range scenarios {
- err := validators.Compare(s.valA)(s.valB)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr to be %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
- }
-}
diff --git a/forms/validators/validators.go b/forms/validators/validators.go
deleted file mode 100644
index ec8c2177..00000000
--- a/forms/validators/validators.go
+++ /dev/null
@@ -1,2 +0,0 @@
-// Package validators implements custom shared PocketBase validators.
-package validators
diff --git a/go.mod b/go.mod
index 6e8f1883..03c45208 100644
--- a/go.mod
+++ b/go.mod
@@ -1,60 +1,58 @@
module github.com/pocketbase/pocketbase
-go 1.22
+go 1.23
require (
github.com/AlecAivazis/survey/v2 v2.3.7
- github.com/aws/aws-sdk-go-v2 v1.30.4
- github.com/aws/aws-sdk-go-v2/config v1.27.31
- github.com/aws/aws-sdk-go-v2/credentials v1.17.30
- github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.15
- github.com/aws/aws-sdk-go-v2/service/s3 v1.60.1
+ github.com/aws/aws-sdk-go-v2 v1.30.5
+ github.com/aws/aws-sdk-go-v2/config v1.27.33
+ github.com/aws/aws-sdk-go-v2/credentials v1.17.32
+ github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.18
+ github.com/aws/aws-sdk-go-v2/service/s3 v1.61.2
github.com/aws/smithy-go v1.20.4
github.com/disintegration/imaging v1.6.2
github.com/domodwyer/mailyak/v3 v3.6.2
- github.com/dop251/goja v0.0.0-20240822155948-fa6d1ed5e4b6
- github.com/dop251/goja_nodejs v0.0.0-20240728170619-29b559befffc
+ github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d
+ github.com/dop251/goja_nodejs v0.0.0-20231122114759-e84d9a924c5c
github.com/fatih/color v1.17.0
github.com/fsnotify/fsnotify v1.7.0
github.com/gabriel-vasile/mimetype v1.4.5
github.com/ganigeorgiev/fexpr v0.4.1
github.com/go-ozzo/ozzo-validation/v4 v4.3.0
- github.com/goccy/go-json v0.10.3
github.com/golang-jwt/jwt/v4 v4.5.0
- github.com/labstack/echo/v5 v5.0.0-20230722203903-ec5b858dab61
- github.com/mattn/go-sqlite3 v1.14.22
+ github.com/mattn/go-sqlite3 v1.14.23
github.com/pocketbase/dbx v1.10.1
github.com/pocketbase/tygoja v0.0.0-20240113091827-17918475d342
github.com/spf13/cast v1.7.0
github.com/spf13/cobra v1.8.1
gocloud.dev v0.39.0
- golang.org/x/crypto v0.26.0
- golang.org/x/net v0.28.0
- golang.org/x/oauth2 v0.22.0
+ golang.org/x/crypto v0.27.0
+ golang.org/x/net v0.29.0
+ golang.org/x/oauth2 v0.23.0
golang.org/x/sync v0.8.0
- modernc.org/sqlite v1.32.0
+ modernc.org/sqlite v1.33.1
)
require (
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.4 // indirect
- github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.12 // indirect
- github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.16 // indirect
- github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.16 // indirect
+ github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.13 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.17 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.17 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 // indirect
- github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.16 // indirect
+ github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.17 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.4 // indirect
- github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.18 // indirect
- github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.18 // indirect
- github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.16 // indirect
- github.com/aws/aws-sdk-go-v2/service/sso v1.22.5 // indirect
- github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.5 // indirect
- github.com/aws/aws-sdk-go-v2/service/sts v1.30.5 // indirect
- github.com/dlclark/regexp2 v1.11.4 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.19 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.19 // indirect
+ github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.17 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sso v1.22.7 // indirect
+ github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.7 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sts v1.30.7 // indirect
+ github.com/dlclark/regexp2 v1.10.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
- github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect
+ github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
- github.com/google/pprof v0.0.0-20240827171923-fa2c70bbbfe5 // indirect
+ github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/gax-go/v2 v2.13.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
@@ -66,23 +64,21 @@ require (
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/spf13/pflag v1.0.5 // indirect
- github.com/valyala/bytebufferpool v1.0.0 // indirect
- github.com/valyala/fasttemplate v1.2.2 // indirect
+ github.com/stretchr/testify v1.8.2 // indirect
go.opencensus.io v0.24.0 // indirect
- golang.org/x/image v0.19.0 // indirect
+ golang.org/x/image v0.20.0 // indirect
golang.org/x/mod v0.19.0 // indirect
- golang.org/x/sys v0.24.0 // indirect
- golang.org/x/term v0.23.0 // indirect
- golang.org/x/text v0.17.0 // indirect
- golang.org/x/time v0.6.0 // indirect
+ golang.org/x/sys v0.25.0 // indirect
+ golang.org/x/term v0.24.0 // indirect
+ golang.org/x/text v0.18.0 // indirect
golang.org/x/tools v0.23.0 // indirect
- golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9 // indirect
- google.golang.org/api v0.194.0 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20240827150818-7e3bb234dfed // indirect
- google.golang.org/grpc v1.65.0 // indirect
+ golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
+ google.golang.org/api v0.197.0 // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect
+ google.golang.org/grpc v1.66.2 // indirect
google.golang.org/protobuf v1.34.2 // indirect
modernc.org/gc/v3 v3.0.0-20240801135723-a856999a2e4a // indirect
- modernc.org/libc v1.55.3 // indirect
+ modernc.org/libc v1.60.1 // indirect
modernc.org/mathutil v1.6.0 // indirect
modernc.org/memory v1.8.0 // indirect
modernc.org/strutil v1.2.0 // indirect
diff --git a/go.sum b/go.sum
index 3e6c803c..0aebb678 100644
--- a/go.sum
+++ b/go.sum
@@ -1,8 +1,8 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.115.1 h1:Jo0SM9cQnSkYfp44+v+NQXHpcHqlnRJk2qxh6yvxxxQ=
cloud.google.com/go v0.115.1/go.mod h1:DuujITeaufu3gL68/lOFIirVNJwQeyf5UXyi+Wbgknc=
-cloud.google.com/go/auth v0.9.1 h1:+pMtLEV2k0AXKvs/tGZojuj6QaioxfUjOpMsG5Gtx+w=
-cloud.google.com/go/auth v0.9.1/go.mod h1:Sw8ocT5mhhXxFklyhT12Eiy0ed6tTrPMCJjSI8KhYLk=
+cloud.google.com/go/auth v0.9.3 h1:VOEUIAADkkLtyfr3BLa3R8Ed/j6w1jTBmARx+wb5w5U=
+cloud.google.com/go/auth v0.9.3/go.mod h1:7z6VY+7h3KUdRov5F1i8NDP5ZzWKYmEPO842BgCsmTk=
cloud.google.com/go/auth/oauth2adapt v0.2.4 h1:0GWE/FUsXhf6C+jAkWgYm7X9tK8cuEIfy19DBn6B6bY=
cloud.google.com/go/auth/oauth2adapt v0.2.4/go.mod h1:jC/jOpwFP6JBxhB3P5Rr0a9HLMC/Pe3eaL4NmdvqPtc=
cloud.google.com/go/compute/metadata v0.5.0 h1:Zr0eK8JbFv6+Wi4ilXAR8FJ3wyNdpxHKJNPos6LTZOY=
@@ -16,8 +16,6 @@ filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4
github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ=
github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
-github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2 h1:+vx7roKuyA63nhn5WAunQHLTznkw5W8b1Xc0dNjp83s=
github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w=
github.com/asaskevich/govalidator v0.0.0-20200108200545-475eaeb16496/go.mod h1:oGkLhpf+kjZl6xBf758TQhh5XrAeiJv/7FRz/2spLIg=
@@ -25,48 +23,52 @@ github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3d
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/aws/aws-sdk-go v1.55.5 h1:KKUZBfBoyqy5d3swXyiC7Q76ic40rYcbqH7qjh59kzU=
github.com/aws/aws-sdk-go v1.55.5/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU=
-github.com/aws/aws-sdk-go-v2 v1.30.4 h1:frhcagrVNrzmT95RJImMHgabt99vkXGslubDaDagTk8=
-github.com/aws/aws-sdk-go-v2 v1.30.4/go.mod h1:CT+ZPWXbYrci8chcARI3OmI/qgd+f6WtuLOoaIA8PR0=
+github.com/aws/aws-sdk-go-v2 v1.30.5 h1:mWSRTwQAb0aLE17dSzztCVJWI9+cRMgqebndjwDyK0g=
+github.com/aws/aws-sdk-go-v2 v1.30.5/go.mod h1:CT+ZPWXbYrci8chcARI3OmI/qgd+f6WtuLOoaIA8PR0=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.4 h1:70PVAiL15/aBMh5LThwgXdSQorVr91L127ttckI9QQU=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.4/go.mod h1:/MQxMqci8tlqDH+pjmoLu1i0tbWCUP1hhyMRuFxpQCw=
-github.com/aws/aws-sdk-go-v2/config v1.27.31 h1:kxBoRsjhT3pq0cKthgj6RU6bXTm/2SgdoUMyrVw0rAI=
-github.com/aws/aws-sdk-go-v2/config v1.27.31/go.mod h1:z04nZdSWFPaDwK3DdJOG2r+scLQzMYuJeW0CujEm9FM=
-github.com/aws/aws-sdk-go-v2/credentials v1.17.30 h1:aau/oYFtibVovr2rDt8FHlU17BTicFEMAi29V1U+L5Q=
-github.com/aws/aws-sdk-go-v2/credentials v1.17.30/go.mod h1:BPJ/yXV92ZVq6G8uYvbU0gSl8q94UB63nMT5ctNO38g=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.12 h1:yjwoSyDZF8Jth+mUk5lSPJCkMC0lMy6FaCD51jm6ayE=
-github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.12/go.mod h1:fuR57fAgMk7ot3WcNQfb6rSEn+SUffl7ri+aa8uKysI=
-github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.15 h1:ijB7hr56MngOiELJe0C5aQRaBQ11LveNgWFyG02AUto=
-github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.15/go.mod h1:0QEmQSSWMVfiAk93l1/ayR9DQ9+jwni7gHS2NARZXB0=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.16 h1:TNyt/+X43KJ9IJJMjKfa3bNTiZbUP7DeCxfbTROESwY=
-github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.16/go.mod h1:2DwJF39FlNAUiX5pAc0UNeiz16lK2t7IaFcm0LFHEgc=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.16 h1:jYfy8UPmd+6kJW5YhY0L1/KftReOGxI/4NtVSTh9O/I=
-github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.16/go.mod h1:7ZfEPZxkW42Afq4uQB8H2E2e6ebh6mXTueEpYzjCzcs=
+github.com/aws/aws-sdk-go-v2/config v1.27.33 h1:Nof9o/MsmH4oa0s2q9a0k7tMz5x/Yj5k06lDODWz3BU=
+github.com/aws/aws-sdk-go-v2/config v1.27.33/go.mod h1:kEqdYzRb8dd8Sy2pOdEbExTTF5v7ozEXX0McgPE7xks=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.32 h1:7Cxhp/BnT2RcGy4VisJ9miUPecY+lyE9I8JvcZofn9I=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.32/go.mod h1:P5/QMF3/DCHbXGEGkdbilXHsyTBX5D3HSwcrSc9p20I=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.13 h1:pfQ2sqNpMVK6xz2RbqLEL0GH87JOwSxPV2rzm8Zsb74=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.13/go.mod h1:NG7RXPUlqfsCLLFfi0+IpKN4sCB9D9fw/qTaSB+xRoU=
+github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.18 h1:9DIp7vhmOPmueCDwpXa45bEbLHHTt1kcxChdTJWWxvI=
+github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.18/go.mod h1:aJv/Fwz8r56ozwYFRC4bzoeL1L17GYQYemfblOBux1M=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.17 h1:pI7Bzt0BJtYA0N/JEC6B8fJ4RBrEMi1LBrkMdFYNSnQ=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.17/go.mod h1:Dh5zzJYMtxfIjYW+/evjQ8uj2OyR/ve2KROHGHlSFqE=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.17 h1:Mqr/V5gvrhA2gvgnF42Zh5iMiQNcOYthFYwCyrnuWlc=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.17/go.mod h1:aLJpZlCmjE+V+KtN1q1uyZkfnUWpQGpbsn89XPKyzfU=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvKgqdiXoTxAF4HQcQ=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc=
-github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.16 h1:mimdLQkIX1zr8GIPY1ZtALdBQGxcASiBd2MOp8m/dMc=
-github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.16/go.mod h1:YHk6owoSwrIsok+cAH9PENCOGoH5PU2EllX4vLtSrsY=
+github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.17 h1:Roo69qTpfu8OlJ2Tb7pAYVuF0CpuUMB0IYWwYP/4DZM=
+github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.17/go.mod h1:NcWPxQzGM1USQggaTVwz6VpqMZPX1CvDJLDh6jnOCa4=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.4 h1:KypMCbLPPHEmf9DgMGw51jMj77VfGPAN2Kv4cfhlfgI=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.4/go.mod h1:Vz1JQXliGcQktFTN/LN6uGppAIRoLBR2bMvIMP0gOjc=
-github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.18 h1:GckUnpm4EJOAio1c8o25a+b3lVfwVzC9gnSBqiiNmZM=
-github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.18/go.mod h1:Br6+bxfG33Dk3ynmkhsW2Z/t9D4+lRqdLDNCKi85w0U=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.18 h1:tJ5RnkHCiSH0jyd6gROjlJtNwov0eGYNz8s8nFcR0jQ=
-github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.18/go.mod h1:++NHzT+nAF7ZPrHPsA+ENvsXkOO8wEu+C6RXltAG4/c=
-github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.16 h1:jg16PhLPUiHIj8zYIW6bqzeQSuHVEiWnGA0Brz5Xv2I=
-github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.16/go.mod h1:Uyk1zE1VVdsHSU7096h/rwnXDzOzYQVl+FNPhPw7ShY=
-github.com/aws/aws-sdk-go-v2/service/s3 v1.60.1 h1:mx2ucgtv+MWzJesJY9Ig/8AFHgoE5FwLXwUVgW/FGdI=
-github.com/aws/aws-sdk-go-v2/service/s3 v1.60.1/go.mod h1:BSPI0EfnYUuNHPS0uqIo5VrRwzie+Fp+YhQOUs16sKI=
-github.com/aws/aws-sdk-go-v2/service/sso v1.22.5 h1:zCsFCKvbj25i7p1u94imVoO447I/sFv8qq+lGJhRN0c=
-github.com/aws/aws-sdk-go-v2/service/sso v1.22.5/go.mod h1:ZeDX1SnKsVlejeuz41GiajjZpRSWR7/42q/EyA/QEiM=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.5 h1:SKvPgvdvmiTWoi0GAJ7AsJfOz3ngVkD/ERbs5pUnHNI=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.5/go.mod h1:20sz31hv/WsPa3HhU3hfrIet2kxM4Pe0r20eBZ20Tac=
-github.com/aws/aws-sdk-go-v2/service/sts v1.30.5 h1:OMsEmCyz2i89XwRwPouAJvhj81wINh+4UK+k/0Yo/q8=
-github.com/aws/aws-sdk-go-v2/service/sts v1.30.5/go.mod h1:vmSqFK+BVIwVpDAGZB3CoCXHzurt4qBE8lf+I/kRTh0=
+github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.19 h1:FLMkfEiRjhgeDTCjjLoc3URo/TBkgeQbocA78lfkzSI=
+github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.19/go.mod h1:Vx+GucNSsdhaxs3aZIKfSUjKVGsxN25nX2SRcdhuw08=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.19 h1:rfprUlsdzgl7ZL2KlXiUAoJnI/VxfHCvDFr2QDFj6u4=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.19/go.mod h1:SCWkEdRq8/7EK60NcvvQ6NXKuTcchAD4ROAsC37VEZE=
+github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.17 h1:u+EfGmksnJc/x5tq3A+OD7LrMbSSR/5TrKLvkdy/fhY=
+github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.17/go.mod h1:VaMx6302JHax2vHJWgRo+5n9zvbacs3bLU/23DNQrTY=
+github.com/aws/aws-sdk-go-v2/service/s3 v1.61.2 h1:Kp6PWAlXwP1UvIflkIP6MFZYBNDCa4mFCGtxrpICVOg=
+github.com/aws/aws-sdk-go-v2/service/s3 v1.61.2/go.mod h1:5FmD/Dqq57gP+XwaUnd5WFPipAuzrf0HmupX27Gvjvc=
+github.com/aws/aws-sdk-go-v2/service/sso v1.22.7 h1:pIaGg+08llrP7Q5aiz9ICWbY8cqhTkyy+0SHvfzQpTc=
+github.com/aws/aws-sdk-go-v2/service/sso v1.22.7/go.mod h1:eEygMHnTKH/3kNp9Jr1n3PdejuSNcgwLe1dWgQtO0VQ=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.7 h1:/Cfdu0XV3mONYKaOt1Gr0k1KvQzkzPyiKUdlWJqy+J4=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.26.7/go.mod h1:bCbAxKDqNvkHxRaIMnyVPXPo+OaPRwvmgzMxbz1VKSA=
+github.com/aws/aws-sdk-go-v2/service/sts v1.30.7 h1:NKTa1eqZYw8tiHSRGpP0VtTdub/8KNk8sDkNPFaOKDE=
+github.com/aws/aws-sdk-go-v2/service/sts v1.30.7/go.mod h1:NXi1dIAGteSaRLqYgarlhP/Ij0cFT+qmCwiJqWh/U5o=
github.com/aws/smithy-go v1.20.4 h1:2HK1zBdPgRbjFOHlfeQZfpC4r72MOb9bZkiFwggKO+4=
github.com/aws/smithy-go v1.20.4/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/chzyer/logex v1.2.0/go.mod h1:9+9sk7u7pGNWYMkh0hdiL++6OeibzJccyQU4p4MedaY=
+github.com/chzyer/readline v1.5.0/go.mod h1:x22KAscuvRqlLoK9CsoYsmxoXZMMFVyOl86cAH8qUic=
+github.com/chzyer/test v0.0.0-20210722231415-061457976a23/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/creack/pty v1.1.17 h1:QeVUsEDNrLBW4tMgZHvxy18sKtr6VI492kBhUfhDJNI=
github.com/creack/pty v1.1.17/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -74,14 +76,19 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
-github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo=
-github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
+github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
+github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
+github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0=
+github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/domodwyer/mailyak/v3 v3.6.2 h1:x3tGMsyFhTCaxp6ycgR0FE/bu5QiNp+hetUuCOBXMn8=
github.com/domodwyer/mailyak/v3 v3.6.2/go.mod h1:lOm/u9CyCVWHeaAmHIdF4RiKVxKUT/H5XX10lIKAL6c=
-github.com/dop251/goja v0.0.0-20240822155948-fa6d1ed5e4b6 h1:0x8Sh2rKCTVUQnRTJFIwtRWAp91VMsnATQEsMAg14kM=
-github.com/dop251/goja v0.0.0-20240822155948-fa6d1ed5e4b6/go.mod h1:MxLav0peU43GgvwVgNbLAj1s/bSGboKkhuULvq/7hx4=
-github.com/dop251/goja_nodejs v0.0.0-20240728170619-29b559befffc h1:MKYt39yZJi0Z9xEeRmDX2L4ocE0ETKcHKw6MVL3R+co=
-github.com/dop251/goja_nodejs v0.0.0-20240728170619-29b559befffc/go.mod h1:VULptt4Q/fNzQUJlqY/GP3qHyU7ZH46mFkBZe0ZTokU=
+github.com/dop251/goja v0.0.0-20211022113120-dc8c55024d06/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk=
+github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d h1:wi6jN5LVt/ljaBG4ue79Ekzb12QfJ52L9Q98tl8SWhw=
+github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d/go.mod h1:QMWlm50DNe14hD7t24KEqZuUdC9sOTy8W6XbCU1mlw4=
+github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y=
+github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM=
+github.com/dop251/goja_nodejs v0.0.0-20231122114759-e84d9a924c5c h1:hLoodLRD4KLWIH8eyAQCLcH8EqIrjac7fCkp/fHnvuQ=
+github.com/dop251/goja_nodejs v0.0.0-20231122114759-e84d9a924c5c/go.mod h1:bhGPmCgCCTSRfiMYWjpS46IDo9EUZXlsuUaPXSWGbv0=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -106,13 +113,11 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-ozzo/ozzo-validation/v4 v4.3.0 h1:byhDUpfEwjsVQb1vBunvIjh2BHQ9ead57VkAEY4V+Es=
github.com/go-ozzo/ozzo-validation/v4 v4.3.0/go.mod h1:2NKgrcHl3z6cJs+3Oo940FPRiTzuqKbvfrL2RxCj6Ew=
-github.com/go-sourcemap/sourcemap v2.1.4+incompatible h1:a+iTbH5auLKxaNwQFg0B+TCYl6lbukKPc7b5x0n1s6Q=
-github.com/go-sourcemap/sourcemap v2.1.4+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
+github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
+github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y=
github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
-github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
-github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
@@ -140,8 +145,9 @@ github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
-github.com/google/pprof v0.0.0-20240827171923-fa2c70bbbfe5 h1:5iH8iuqE5apketRbSFBy+X1V0o+l+8NF1avt4HWl7cA=
-github.com/google/pprof v0.0.0-20240827171923-fa2c70bbbfe5/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
+github.com/google/pprof v0.0.0-20230207041349-798e818bf904/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg=
+github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd h1:gbpYu9NMq8jhDVbvlGkMFWCjLFlqqEZjEmObmhUy6Vo=
+github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM=
github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -149,26 +155,30 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/wire v0.6.0 h1:HBkoIh4BdSxoyo9PveV8giw7ZsaBOvzWKfcg/6MrVwI=
github.com/google/wire v0.6.0/go.mod h1:F4QhpQ9EDIdJ1Mbop/NZBRB+5yrR6qg3BnctaoUk6NA=
-github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
-github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
+github.com/googleapis/enterprise-certificate-proxy v0.3.4 h1:XYIDZApgAnrN1c855gTgghdIA6Stxb52D5RnLI1SLyw=
+github.com/googleapis/enterprise-certificate-proxy v0.3.4/go.mod h1:YKe7cfqYXjKGpGvmSg28/fFvhNzinZQm8DGnaburhGA=
github.com/googleapis/gax-go/v2 v2.13.0 h1:yitjD5f7jQHhyDsnhKEBU52NdvvdSeGzlAnDPT0hH1s=
github.com/googleapis/gax-go/v2 v2.13.0/go.mod h1:Z/fvTZXF8/uw7Xu5GuslPw+bplx6SS338j1Is2S+B7A=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec h1:qv2VnGeEQHchGaZ/u7lxST/RaJw+cv273q79D81Xbog=
github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68=
+github.com/ianlancetaylor/demangle v0.0.0-20220319035150-800ac71e25c2/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-github.com/labstack/echo/v5 v5.0.0-20230722203903-ec5b858dab61 h1:FwuzbVh87iLiUQj1+uQUsuw9x5t9m5n5g7rG7o4svW4=
-github.com/labstack/echo/v5 v5.0.0-20230722203903-ec5b858dab61/go.mod h1:paQfF1YtHe+GrGg5fOgjsjoCX/UKDr9bc1DoWpZfns8=
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
@@ -176,8 +186,8 @@ github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
-github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
+github.com/mattn/go-sqlite3 v1.14.23 h1:gbShiuAP1W5j9UOksQ06aiiqPMxYecovVGwmTxWtuw0=
+github.com/mattn/go-sqlite3 v1.14.23/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
@@ -192,6 +202,7 @@ github.com/pocketbase/tygoja v0.0.0-20240113091827-17918475d342/go.mod h1:dOJ+pC
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
+github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
@@ -211,34 +222,30 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
-github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
-github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
-github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo=
-github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
-go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.53.0 h1:9G6E0TXzGFVfTnawRzrPl83iHOAV7L8NJiR8RSGYV1g=
-go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.53.0/go.mod h1:azvtTADFQJA8mX80jIH/akaE7h+dbm/sVuaHqN13w74=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 h1:4K4tsIXefpVJtvA/8srF4V4y0akAoPHkIslgAkjixJA=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0/go.mod h1:jjdQuTGVsXV4vSs+CJ2qYDeDPf9yIJV23qlIzBm73Vg=
-go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo=
-go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4=
-go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q=
-go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s=
-go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g=
-go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 h1:r6I7RJCN86bpD/FQwedZ0vSixDpwuWREjW9oRMsmqDc=
+go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0/go.mod h1:B9yO6b04uB80CzjedvewuqDhxJxi11s7/GtiGa8bAjI=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8=
+go.opentelemetry.io/otel v1.29.0 h1:PdomN/Al4q/lN6iBJEN3AwPvUiHPMlt93c8bqTG5Llw=
+go.opentelemetry.io/otel v1.29.0/go.mod h1:N/WtXPs1CNCUEx+Agz5uouwCba+i+bJGFicT8SR4NP8=
+go.opentelemetry.io/otel/metric v1.29.0 h1:vPf/HFWTNkPu1aYeIsc98l4ktOQaL6LeSoeV2g+8YLc=
+go.opentelemetry.io/otel/metric v1.29.0/go.mod h1:auu/QWieFVWx+DmQOUMgj0F8LHWdgalxXqvp7BII/W8=
+go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt39JTi4=
+go.opentelemetry.io/otel/trace v1.29.0/go.mod h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ=
gocloud.dev v0.39.0 h1:EYABYGhAalPUaMrbSKOr5lejxoxvXj99nE8XFtsDgds=
gocloud.dev v0.39.0/go.mod h1:drz+VyYNBvrMTW0KZiBAYEdl8lbNZx+OQ7oQvdrFmSQ=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw=
-golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54=
+golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A=
+golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
-golang.org/x/image v0.19.0 h1:D9FX4QWkLfkeqaC62SonffIIuYdOk/UE2XKUBgRIBIQ=
-golang.org/x/image v0.19.0/go.mod h1:y0zrRqlQRWQ5PXaYCOMLTW2fpsxZ8Qh9I/ohnInJEys=
+golang.org/x/image v0.20.0 h1:7cVCUjQwfL18gyBJOmYvptfSHS8Fb3YUDtfLIZ7Nbpw=
+golang.org/x/image v0.20.0/go.mod h1:0a88To4CYVBAHp5FXJm8o7QbUl37Vd85ply1vyD8auM=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
@@ -255,11 +262,11 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
-golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
+golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo=
+golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA=
-golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs=
+golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -273,23 +280,26 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg=
-golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
+golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.23.0 h1:F6D4vR+EHoL9/sWAWgAR1H2DcHr4PareCbAaCo1RpuU=
-golang.org/x/term v0.23.0/go.mod h1:DgV24QBUrK6jhZXl+20l6UWznPlwAHm1Q1mGHtydmSk=
+golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM=
+golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
-golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
+golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -303,29 +313,29 @@ golang.org/x/tools v0.23.0 h1:SGsXPZ+2l4JsgaCKkx+FQ9YZ5XEtA1GZYuoDjenLjvg=
golang.org/x/tools v0.23.0/go.mod h1:pnu6ufv6vQkll6szChhK3C3L/ruaIv5eBeztNG8wtsI=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9 h1:LLhsEBxRTBLuKlQxFBYUOU8xyFgXv6cOTp2HASDlsDk=
-golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
-google.golang.org/api v0.194.0 h1:dztZKG9HgtIpbI35FhfuSNR/zmaMVdxNlntHj1sIS4s=
-google.golang.org/api v0.194.0/go.mod h1:AgvUFdojGANh3vI+P7EVnxj3AISHllxGCJSFmggmnd0=
+golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY=
+golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
+google.golang.org/api v0.197.0 h1:x6CwqQLsFiA5JKAiGyGBjc2bNtHtLddhJCE2IKuhhcQ=
+google.golang.org/api v0.197.0/go.mod h1:AuOuo20GoQ331nq7DquGHlU6d+2wN2fZ8O0ta60nRNw=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
-google.golang.org/genproto v0.0.0-20240814211410-ddb44dafa142 h1:oLiyxGgE+rt22duwci1+TG7bg2/L1LQsXwfjPlmuJA0=
-google.golang.org/genproto v0.0.0-20240814211410-ddb44dafa142/go.mod h1:G11eXq53iI5Q+kyNOmCvnzBaxEA2Q/Ik5Tj7nqBE8j4=
+google.golang.org/genproto v0.0.0-20240903143218-8af14fe29dc1 h1:BulPr26Jqjnd4eYDVe+YvyR7Yc2vJGkO5/0UxD0/jZU=
+google.golang.org/genproto v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:hL97c3SYopEHblzpxRL4lSs523++l8DYxGM1FQiYmb4=
google.golang.org/genproto/googleapis/api v0.0.0-20240812133136-8ffd90a71988 h1:+/tmTy5zAieooKIXfzDm9KiA3Bv6JBwriRN9LY+yayk=
google.golang.org/genproto/googleapis/api v0.0.0-20240812133136-8ffd90a71988/go.mod h1:4+X6GvPs+25wZKbQq9qyAXrwIRExv7w0Ea6MgZLZiDM=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240827150818-7e3bb234dfed h1:J6izYgfBXAI3xTKLgxzTmUltdYaLsuBxFCgDHWJ/eXg=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240827150818-7e3bb234dfed/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
-google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc=
-google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ=
+google.golang.org/grpc v1.66.2 h1:3QdXkuq3Bkh7w+ywLdLvM56cmGvQHUMZpiCzt6Rqaoo=
+google.golang.org/grpc v1.66.2/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@@ -338,6 +348,9 @@ google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlba
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
@@ -348,16 +361,16 @@ honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ=
modernc.org/cc/v4 v4.21.4/go.mod h1:HM7VJTZbUCR3rV8EYBi9wxnJ0ZBRiGE5OeGXNA0IsLQ=
-modernc.org/ccgo/v4 v4.19.2 h1:lwQZgvboKD0jBwdaeVCTouxhxAyN6iawF3STraAal8Y=
-modernc.org/ccgo/v4 v4.19.2/go.mod h1:ysS3mxiMV38XGRTTcgo0DQTeTmAO4oCmJl1nX9VFI3s=
+modernc.org/ccgo/v4 v4.21.0 h1:kKPI3dF7RIag8YcToh5ZwDcVMIv6VGa0ED5cvh0LMW4=
+modernc.org/ccgo/v4 v4.21.0/go.mod h1:h6kt6H/A2+ew/3MW/p6KEoQmrq/i3pr0J/SiwiaF/g0=
modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE=
modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ=
-modernc.org/gc/v2 v2.4.1 h1:9cNzOqPyMJBvrUipmynX0ZohMhcxPtMccYgGOJdOiBw=
-modernc.org/gc/v2 v2.4.1/go.mod h1:wzN5dK1AzVGoH6XOzc3YZ+ey/jPgYHLuVckd62P0GYU=
+modernc.org/gc/v2 v2.5.0 h1:bJ9ChznK1L1mUtAQtxi0wi5AtAs5jQuw4PrPHO5pb6M=
+modernc.org/gc/v2 v2.5.0/go.mod h1:wzN5dK1AzVGoH6XOzc3YZ+ey/jPgYHLuVckd62P0GYU=
modernc.org/gc/v3 v3.0.0-20240801135723-a856999a2e4a h1:CfbpOLEo2IwNzJdMvE8aiRbPMxoTpgAJeyePh0SmO8M=
modernc.org/gc/v3 v3.0.0-20240801135723-a856999a2e4a/go.mod h1:Qz0X07sNOR1jWYCrJMEnbW/X55x206Q7Vt4mz6/wHp4=
-modernc.org/libc v1.55.3 h1:AzcW1mhlPNrRtjS5sS+eW2ISCgSOLLNyFzRh/V3Qj/U=
-modernc.org/libc v1.55.3/go.mod h1:qFXepLhz+JjFThQ4kzwzOjA/y/artDeg+pcYnY+Q83w=
+modernc.org/libc v1.60.1 h1:at373l8IFRTkJIkAU85BIuUoBM4T1b51ds0E1ovPG2s=
+modernc.org/libc v1.60.1/go.mod h1:xJuobKuNxKH3RUatS7GjR+suWj+5c2K7bi4m/S5arOY=
modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
modernc.org/memory v1.8.0 h1:IqGTL6eFMaDZZhEWwcREgeMXYwmW83LYW8cROZYkg+E=
@@ -366,8 +379,8 @@ modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4=
modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
modernc.org/sortutil v1.2.0 h1:jQiD3PfS2REGJNzNCMMaLSp/wdMNieTbKX920Cqdgqc=
modernc.org/sortutil v1.2.0/go.mod h1:TKU2s7kJMf1AE84OoiGppNHJwvB753OYfNl2WRb++Ss=
-modernc.org/sqlite v1.32.0 h1:6BM4uGza7bWypsw4fdLRsLxut6bHe4c58VeqjRgST8s=
-modernc.org/sqlite v1.32.0/go.mod h1:UqoylwmTb9F+IqXERT8bW9zzOWN8qwAIcLdzeBZs4hA=
+modernc.org/sqlite v1.33.1 h1:trb6Z3YYoeM9eDL1O8do81kP+0ejv+YzgyFo+Gwy0nM=
+modernc.org/sqlite v1.33.1/go.mod h1:pXV2xHxhzXZsgT/RtTFAPY6JJDEvOTcTdwADQCCWD4k=
modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
diff --git a/golangci.yml b/golangci.yml
index 6ba99fff..9b8db42e 100644
--- a/golangci.yml
+++ b/golangci.yml
@@ -1,14 +1,13 @@
run:
- go: 1.21
+ go: 1.23
concurrency: 4
timeout: 10m
linters:
disable-all: true
enable:
+ - asasalint
- asciicheck
- - depguard
- - exportloopref
- gofmt
- goimports
- gomodguard
@@ -20,6 +19,8 @@ linters:
- nakedret
- nolintlint
- prealloc
+ - prealloc
+ - reassign
- staticcheck
- typecheck
- unconvert
diff --git a/mails/admin.go b/mails/admin.go
deleted file mode 100644
index e4f484a2..00000000
--- a/mails/admin.go
+++ /dev/null
@@ -1,76 +0,0 @@
-package mails
-
-import (
- "fmt"
- "net/mail"
-
- "github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/mails/templates"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tokens"
- "github.com/pocketbase/pocketbase/tools/mailer"
- "github.com/pocketbase/pocketbase/tools/rest"
-)
-
-// SendAdminPasswordReset sends a password reset request email to the specified admin.
-func SendAdminPasswordReset(app core.App, admin *models.Admin) error {
- token, tokenErr := tokens.NewAdminResetPasswordToken(app, admin)
- if tokenErr != nil {
- return tokenErr
- }
-
- actionUrl, urlErr := rest.NormalizeUrl(fmt.Sprintf(
- "%s/_/#/confirm-password-reset/%s",
- app.Settings().Meta.AppUrl,
- token,
- ))
- if urlErr != nil {
- return urlErr
- }
-
- params := struct {
- AppName string
- AppUrl string
- Admin *models.Admin
- Token string
- ActionUrl string
- }{
- AppName: app.Settings().Meta.AppName,
- AppUrl: app.Settings().Meta.AppUrl,
- Admin: admin,
- Token: token,
- ActionUrl: actionUrl,
- }
-
- mailClient := app.NewMailClient()
-
- // resolve body template
- body, renderErr := resolveTemplateContent(params, templates.Layout, templates.AdminPasswordResetBody)
- if renderErr != nil {
- return renderErr
- }
-
- message := &mailer.Message{
- From: mail.Address{
- Name: app.Settings().Meta.SenderName,
- Address: app.Settings().Meta.SenderAddress,
- },
- To: []mail.Address{{Address: admin.Email}},
- Subject: "Reset admin password",
- HTML: body,
- }
-
- event := new(core.MailerAdminEvent)
- event.MailClient = mailClient
- event.Message = message
- event.Admin = admin
- event.Meta = map[string]any{"token": token}
-
- return app.OnMailerBeforeAdminResetPasswordSend().Trigger(event, func(e *core.MailerAdminEvent) error {
- if err := e.MailClient.Send(e.Message); err != nil {
- return err
- }
-
- return app.OnMailerAfterAdminResetPasswordSend().Trigger(e)
- })
-}
diff --git a/mails/admin_test.go b/mails/admin_test.go
deleted file mode 100644
index 1bfdb3de..00000000
--- a/mails/admin_test.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package mails_test
-
-import (
- "strings"
- "testing"
-
- "github.com/pocketbase/pocketbase/mails"
- "github.com/pocketbase/pocketbase/tests"
-)
-
-func TestSendAdminPasswordReset(t *testing.T) {
- t.Parallel()
-
- testApp, _ := tests.NewTestApp()
- defer testApp.Cleanup()
-
- // ensure that action url normalization will be applied
- testApp.Settings().Meta.AppUrl = "http://localhost:8090////"
-
- admin, _ := testApp.Dao().FindAdminByEmail("test@example.com")
-
- err := mails.SendAdminPasswordReset(testApp, admin)
- if err != nil {
- t.Fatal(err)
- }
-
- if testApp.TestMailer.TotalSend != 1 {
- t.Fatalf("Expected one email to be sent, got %d", testApp.TestMailer.TotalSend)
- }
-
- expectedParts := []string{
- "http://localhost:8090/_/#/confirm-password-reset/eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.",
- }
- for _, part := range expectedParts {
- if !strings.Contains(testApp.TestMailer.LastMessage.HTML, part) {
- t.Fatalf("Couldn't find %s \nin\n %s", part, testApp.TestMailer.LastMessage.HTML)
- }
- }
-}
diff --git a/mails/record.go b/mails/record.go
index dfeca5c9..8a59cb67 100644
--- a/mails/record.go
+++ b/mails/record.go
@@ -6,59 +6,14 @@ import (
"github.com/pocketbase/pocketbase/core"
"github.com/pocketbase/pocketbase/mails/templates"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/settings"
- "github.com/pocketbase/pocketbase/tokens"
"github.com/pocketbase/pocketbase/tools/mailer"
)
-// @todo remove after the refactoring
-//
-// SendRecordPasswordLoginAlert sends a OAuth2 password login alert to the specified auth record.
-func SendRecordPasswordLoginAlert(app core.App, authRecord *models.Record, providerNames ...string) error {
- params := struct {
- AppName string
- AppUrl string
- Record *models.Record
- ProviderNames []string
- }{
- AppName: app.Settings().Meta.AppName,
- AppUrl: app.Settings().Meta.AppUrl,
- Record: authRecord,
- ProviderNames: providerNames,
- }
-
+// SendRecordAuthAlert sends a new device login alert to the specified auth record.
+func SendRecordAuthAlert(app core.App, authRecord *core.Record) error {
mailClient := app.NewMailClient()
- // resolve body template
- body, renderErr := resolveTemplateContent(params, templates.Layout, templates.PasswordLoginAlertBody)
- if renderErr != nil {
- return renderErr
- }
-
- message := &mailer.Message{
- From: mail.Address{
- Name: app.Settings().Meta.SenderName,
- Address: app.Settings().Meta.SenderAddress,
- },
- To: []mail.Address{{Address: authRecord.Email()}},
- Subject: "Password login alert",
- HTML: body,
- }
-
- return mailClient.Send(message)
-}
-
-// SendRecordPasswordReset sends a password reset request email to the specified user.
-func SendRecordPasswordReset(app core.App, authRecord *models.Record) error {
- token, tokenErr := tokens.NewRecordResetPasswordToken(app, authRecord)
- if tokenErr != nil {
- return tokenErr
- }
-
- mailClient := app.NewMailClient()
-
- subject, body, err := resolveEmailTemplate(app, token, app.Settings().Meta.ResetPasswordTemplate)
+ subject, body, err := resolveEmailTemplate(app, authRecord, authRecord.Collection().AuthAlert.EmailTemplate, nil)
if err != nil {
return err
}
@@ -74,31 +29,24 @@ func SendRecordPasswordReset(app core.App, authRecord *models.Record) error {
}
event := new(core.MailerRecordEvent)
- event.MailClient = mailClient
+ event.App = app
+ event.Mailer = mailClient
event.Message = message
- event.Collection = authRecord.Collection()
event.Record = authRecord
- event.Meta = map[string]any{"token": token}
- return app.OnMailerBeforeRecordResetPasswordSend().Trigger(event, func(e *core.MailerRecordEvent) error {
- if err := e.MailClient.Send(e.Message); err != nil {
- return err
- }
-
- return app.OnMailerAfterRecordResetPasswordSend().Trigger(e)
+ return app.OnMailerRecordAuthAlertSend().Trigger(event, func(e *core.MailerRecordEvent) error {
+ return e.Mailer.Send(e.Message)
})
}
-// SendRecordVerification sends a verification request email to the specified user.
-func SendRecordVerification(app core.App, authRecord *models.Record) error {
- token, tokenErr := tokens.NewRecordVerifyToken(app, authRecord)
- if tokenErr != nil {
- return tokenErr
- }
-
+// SendRecordOTP sends OTP email to the specified auth record.
+func SendRecordOTP(app core.App, authRecord *core.Record, otpId string, pass string) error {
mailClient := app.NewMailClient()
- subject, body, err := resolveEmailTemplate(app, token, app.Settings().Meta.VerificationTemplate)
+ subject, body, err := resolveEmailTemplate(app, authRecord, authRecord.Collection().OTP.EmailTemplate, map[string]any{
+ core.EmailPlaceholderOTPId: otpId,
+ core.EmailPlaceholderOTP: pass,
+ })
if err != nil {
return err
}
@@ -114,31 +62,108 @@ func SendRecordVerification(app core.App, authRecord *models.Record) error {
}
event := new(core.MailerRecordEvent)
- event.MailClient = mailClient
+ event.App = app
+ event.Mailer = mailClient
event.Message = message
- event.Collection = authRecord.Collection()
event.Record = authRecord
- event.Meta = map[string]any{"token": token}
+ event.Meta = map[string]any{
+ "otpId": otpId,
+ "password": pass,
+ }
- return app.OnMailerBeforeRecordVerificationSend().Trigger(event, func(e *core.MailerRecordEvent) error {
- if err := e.MailClient.Send(e.Message); err != nil {
- return err
- }
-
- return app.OnMailerAfterRecordVerificationSend().Trigger(e)
+ return app.OnMailerRecordOTPSend().Trigger(event, func(e *core.MailerRecordEvent) error {
+ return e.Mailer.Send(e.Message)
})
}
-// SendRecordChangeEmail sends a change email confirmation email to the specified user.
-func SendRecordChangeEmail(app core.App, record *models.Record, newEmail string) error {
- token, tokenErr := tokens.NewRecordChangeEmailToken(app, record, newEmail)
+// SendRecordPasswordReset sends a password reset request email to the specified auth record.
+func SendRecordPasswordReset(app core.App, authRecord *core.Record) error {
+ token, tokenErr := authRecord.NewPasswordResetToken()
if tokenErr != nil {
return tokenErr
}
mailClient := app.NewMailClient()
- subject, body, err := resolveEmailTemplate(app, token, app.Settings().Meta.ConfirmEmailChangeTemplate)
+ subject, body, err := resolveEmailTemplate(app, authRecord, authRecord.Collection().ResetPasswordTemplate, map[string]any{
+ core.EmailPlaceholderToken: token,
+ })
+ if err != nil {
+ return err
+ }
+
+ message := &mailer.Message{
+ From: mail.Address{
+ Name: app.Settings().Meta.SenderName,
+ Address: app.Settings().Meta.SenderAddress,
+ },
+ To: []mail.Address{{Address: authRecord.Email()}},
+ Subject: subject,
+ HTML: body,
+ }
+
+ event := new(core.MailerRecordEvent)
+ event.App = app
+ event.Mailer = mailClient
+ event.Message = message
+ event.Record = authRecord
+ event.Meta = map[string]any{"token": token}
+
+ return app.OnMailerRecordPasswordResetSend().Trigger(event, func(e *core.MailerRecordEvent) error {
+ return e.Mailer.Send(e.Message)
+ })
+}
+
+// SendRecordVerification sends a verification request email to the specified auth record.
+func SendRecordVerification(app core.App, authRecord *core.Record) error {
+ token, tokenErr := authRecord.NewVerificationToken()
+ if tokenErr != nil {
+ return tokenErr
+ }
+
+ mailClient := app.NewMailClient()
+
+ subject, body, err := resolveEmailTemplate(app, authRecord, authRecord.Collection().VerificationTemplate, map[string]any{
+ core.EmailPlaceholderToken: token,
+ })
+ if err != nil {
+ return err
+ }
+
+ message := &mailer.Message{
+ From: mail.Address{
+ Name: app.Settings().Meta.SenderName,
+ Address: app.Settings().Meta.SenderAddress,
+ },
+ To: []mail.Address{{Address: authRecord.Email()}},
+ Subject: subject,
+ HTML: body,
+ }
+
+ event := new(core.MailerRecordEvent)
+ event.App = app
+ event.Mailer = mailClient
+ event.Message = message
+ event.Record = authRecord
+ event.Meta = map[string]any{"token": token}
+
+ return app.OnMailerRecordVerificationSend().Trigger(event, func(e *core.MailerRecordEvent) error {
+ return e.Mailer.Send(e.Message)
+ })
+}
+
+// SendRecordChangeEmail sends a change email confirmation email to the specified auth record.
+func SendRecordChangeEmail(app core.App, authRecord *core.Record, newEmail string) error {
+ token, tokenErr := authRecord.NewEmailChangeToken(newEmail)
+ if tokenErr != nil {
+ return tokenErr
+ }
+
+ mailClient := app.NewMailClient()
+
+ subject, body, err := resolveEmailTemplate(app, authRecord, authRecord.Collection().ConfirmEmailChangeTemplate, map[string]any{
+ core.EmailPlaceholderToken: token,
+ })
if err != nil {
return err
}
@@ -154,42 +179,59 @@ func SendRecordChangeEmail(app core.App, record *models.Record, newEmail string)
}
event := new(core.MailerRecordEvent)
- event.MailClient = mailClient
+ event.App = app
+ event.Mailer = mailClient
event.Message = message
- event.Collection = record.Collection()
- event.Record = record
+ event.Record = authRecord
event.Meta = map[string]any{
"token": token,
"newEmail": newEmail,
}
- return app.OnMailerBeforeRecordChangeEmailSend().Trigger(event, func(e *core.MailerRecordEvent) error {
- if err := e.MailClient.Send(e.Message); err != nil {
- return err
- }
-
- return app.OnMailerAfterRecordChangeEmailSend().Trigger(e)
+ return app.OnMailerRecordEmailChangeSend().Trigger(event, func(e *core.MailerRecordEvent) error {
+ return e.Mailer.Send(e.Message)
})
}
func resolveEmailTemplate(
app core.App,
- token string,
- emailTemplate settings.EmailTemplate,
+ authRecord *core.Record,
+ emailTemplate core.EmailTemplate,
+ placeholders map[string]any,
) (subject string, body string, err error) {
- subject, rawBody, _ := emailTemplate.Resolve(
- app.Settings().Meta.AppName,
- app.Settings().Meta.AppUrl,
- token,
- )
-
- params := struct {
- HtmlContent template.HTML
- }{
- HtmlContent: template.HTML(rawBody),
+ if placeholders == nil {
+ placeholders = map[string]any{}
}
- body, err = resolveTemplateContent(params, templates.Layout, templates.HtmlBody)
+ // register default system placeholders
+ if _, ok := placeholders[core.EmailPlaceholderAppName]; !ok {
+ placeholders[core.EmailPlaceholderAppName] = app.Settings().Meta.AppName
+ }
+ if _, ok := placeholders[core.EmailPlaceholderAppURL]; !ok {
+ placeholders[core.EmailPlaceholderAppURL] = app.Settings().Meta.AppURL
+ }
+
+ // register default auth record placeholders
+ for _, field := range authRecord.Collection().Fields {
+ if field.GetHidden() {
+ continue
+ }
+
+ fieldPlaceholder := "{RECORD:" + field.GetName() + "}"
+ if _, ok := placeholders[fieldPlaceholder]; !ok {
+ placeholders[fieldPlaceholder] = authRecord.Get(field.GetName())
+ }
+ }
+
+ subject, rawBody := emailTemplate.Resolve(placeholders)
+
+ params := struct {
+ HTMLContent template.HTML
+ }{
+ HTMLContent: template.HTML(rawBody),
+ }
+
+ body, err = resolveTemplateContent(params, templates.Layout, templates.HTMLBody)
if err != nil {
return "", "", err
}
diff --git a/mails/record_test.go b/mails/record_test.go
index fa8840e8..1917493f 100644
--- a/mails/record_test.go
+++ b/mails/record_test.go
@@ -8,31 +8,32 @@ import (
"github.com/pocketbase/pocketbase/tests"
)
-func TestSendRecordPasswordLoginAlert(t *testing.T) {
+func TestSendRecordAuthAlert(t *testing.T) {
t.Parallel()
testApp, _ := tests.NewTestApp()
defer testApp.Cleanup()
- // ensure that action url normalization will be applied
- testApp.Settings().Meta.AppUrl = "http://localhost:8090////"
+ user, _ := testApp.FindFirstRecordByData("users", "email", "test@example.com")
- user, _ := testApp.Dao().FindFirstRecordByData("users", "email", "test@example.com")
-
- err := mails.SendRecordPasswordLoginAlert(testApp, user, "test1", "test2")
+ err := mails.SendRecordAuthAlert(testApp, user)
if err != nil {
t.Fatal(err)
}
- if testApp.TestMailer.TotalSend != 1 {
- t.Fatalf("Expected one email to be sent, got %d", testApp.TestMailer.TotalSend)
+ if testApp.TestMailer.TotalSend() != 1 {
+ t.Fatalf("Expected one email to be sent, got %d", testApp.TestMailer.TotalSend())
}
- expectedParts := []string{"using a password", "OAuth2", "test1", "test2", "auth linked"}
-
+ expectedParts := []string{
+ user.GetString("name") + "{RECORD:tokenKey}", // public and private record placeholder checks
+ "login to your " + testApp.Settings().Meta.AppName + " account from a new location",
+ "If this was you",
+ "If this wasn't you",
+ }
for _, part := range expectedParts {
- if !strings.Contains(testApp.TestMailer.LastMessage.HTML, part) {
- t.Fatalf("Couldn't find %s\n in\n %s", part, testApp.TestMailer.LastMessage.HTML)
+ if !strings.Contains(testApp.TestMailer.LastMessage().HTML, part) {
+ t.Fatalf("Couldn't find %s \nin\n %s", part, testApp.TestMailer.LastMessage().HTML)
}
}
}
@@ -43,26 +44,24 @@ func TestSendRecordPasswordReset(t *testing.T) {
testApp, _ := tests.NewTestApp()
defer testApp.Cleanup()
- // ensure that action url normalization will be applied
- testApp.Settings().Meta.AppUrl = "http://localhost:8090////"
-
- user, _ := testApp.Dao().FindFirstRecordByData("users", "email", "test@example.com")
+ user, _ := testApp.FindFirstRecordByData("users", "email", "test@example.com")
err := mails.SendRecordPasswordReset(testApp, user)
if err != nil {
t.Fatal(err)
}
- if testApp.TestMailer.TotalSend != 1 {
- t.Fatalf("Expected one email to be sent, got %d", testApp.TestMailer.TotalSend)
+ if testApp.TestMailer.TotalSend() != 1 {
+ t.Fatalf("Expected one email to be sent, got %d", testApp.TestMailer.TotalSend())
}
expectedParts := []string{
+ user.GetString("name") + "{RECORD:tokenKey}", // resolved public "name" placeholder and unresolved hidden "tokenKey" placeholder
"http://localhost:8090/_/#/auth/confirm-password-reset/eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.",
}
for _, part := range expectedParts {
- if !strings.Contains(testApp.TestMailer.LastMessage.HTML, part) {
- t.Fatalf("Couldn't find %s \nin\n %s", part, testApp.TestMailer.LastMessage.HTML)
+ if !strings.Contains(testApp.TestMailer.LastMessage().HTML, part) {
+ t.Fatalf("Couldn't find %s \nin\n %s", part, testApp.TestMailer.LastMessage().HTML)
}
}
}
@@ -73,23 +72,24 @@ func TestSendRecordVerification(t *testing.T) {
testApp, _ := tests.NewTestApp()
defer testApp.Cleanup()
- user, _ := testApp.Dao().FindFirstRecordByData("users", "email", "test@example.com")
+ user, _ := testApp.FindFirstRecordByData("users", "email", "test@example.com")
err := mails.SendRecordVerification(testApp, user)
if err != nil {
t.Fatal(err)
}
- if testApp.TestMailer.TotalSend != 1 {
- t.Fatalf("Expected one email to be sent, got %d", testApp.TestMailer.TotalSend)
+ if testApp.TestMailer.TotalSend() != 1 {
+ t.Fatalf("Expected one email to be sent, got %d", testApp.TestMailer.TotalSend())
}
expectedParts := []string{
+ user.GetString("name") + "{RECORD:tokenKey}", // resolved public "name" placeholder and unresolved hidden "tokenKey" placeholder
"http://localhost:8090/_/#/auth/confirm-verification/eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.",
}
for _, part := range expectedParts {
- if !strings.Contains(testApp.TestMailer.LastMessage.HTML, part) {
- t.Fatalf("Couldn't find %s \nin\n %s", part, testApp.TestMailer.LastMessage.HTML)
+ if !strings.Contains(testApp.TestMailer.LastMessage().HTML, part) {
+ t.Fatalf("Couldn't find %s \nin\n %s", part, testApp.TestMailer.LastMessage().HTML)
}
}
}
@@ -100,23 +100,53 @@ func TestSendRecordChangeEmail(t *testing.T) {
testApp, _ := tests.NewTestApp()
defer testApp.Cleanup()
- user, _ := testApp.Dao().FindFirstRecordByData("users", "email", "test@example.com")
+ user, _ := testApp.FindFirstRecordByData("users", "email", "test@example.com")
err := mails.SendRecordChangeEmail(testApp, user, "new_test@example.com")
if err != nil {
t.Fatal(err)
}
- if testApp.TestMailer.TotalSend != 1 {
- t.Fatalf("Expected one email to be sent, got %d", testApp.TestMailer.TotalSend)
+ if testApp.TestMailer.TotalSend() != 1 {
+ t.Fatalf("Expected one email to be sent, got %d", testApp.TestMailer.TotalSend())
}
expectedParts := []string{
+ user.GetString("name") + "{RECORD:tokenKey}", // resolved public "name" placeholder and unresolved hidden "tokenKey" placeholder
"http://localhost:8090/_/#/auth/confirm-email-change/eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.",
}
for _, part := range expectedParts {
- if !strings.Contains(testApp.TestMailer.LastMessage.HTML, part) {
- t.Fatalf("Couldn't find %s \nin\n %s", part, testApp.TestMailer.LastMessage.HTML)
+ if !strings.Contains(testApp.TestMailer.LastMessage().HTML, part) {
+ t.Fatalf("Couldn't find %s \nin\n %s", part, testApp.TestMailer.LastMessage().HTML)
+ }
+ }
+}
+
+func TestSendRecordOTP(t *testing.T) {
+ t.Parallel()
+
+ testApp, _ := tests.NewTestApp()
+ defer testApp.Cleanup()
+
+ user, _ := testApp.FindFirstRecordByData("users", "email", "test@example.com")
+
+ err := mails.SendRecordOTP(testApp, user, "test_otp_id", "test_otp_code")
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ if testApp.TestMailer.TotalSend() != 1 {
+ t.Fatalf("Expected one email to be sent, got %d", testApp.TestMailer.TotalSend())
+ }
+
+ expectedParts := []string{
+ user.GetString("name") + "{RECORD:tokenKey}", // resolved public "name" placeholder and unresolved hidden "tokenKey" placeholder
+ "one-time password",
+ "test_otp_code",
+ }
+ for _, part := range expectedParts {
+ if !strings.Contains(testApp.TestMailer.LastMessage().HTML, part) {
+ t.Fatalf("Couldn't find %s \nin\n %s", part, testApp.TestMailer.LastMessage().HTML)
}
}
}
diff --git a/mails/templates/admin_password_reset.go b/mails/templates/admin_password_reset.go
deleted file mode 100644
index f6207c3f..00000000
--- a/mails/templates/admin_password_reset.go
+++ /dev/null
@@ -1,21 +0,0 @@
-package templates
-
-// Available variables:
-//
-// ```
-// Admin *models.Admin
-// AppName string
-// AppUrl string
-// Token string
-// ActionUrl string
-// ```
-const AdminPasswordResetBody = `
-{{define "content"}}
- Hello,
- Follow this link to reset your admin password for {{.AppName}}.
-
- Reset password
-
- If you did not request to reset your password, please ignore this email and the link will expire on its own.
-{{end}}
-`
diff --git a/mails/templates/html_content.go b/mails/templates/html_content.go
index cb412751..34d58457 100644
--- a/mails/templates/html_content.go
+++ b/mails/templates/html_content.go
@@ -3,6 +3,6 @@ package templates
// Available variables:
//
// ```
-// HtmlContent template.HTML
+// HTMLContent template.HTML
// ```
-const HtmlBody = `{{define "content"}}{{.HtmlContent}}{{end}}`
+const HTMLBody = `{{define "content"}}{{.HTMLContent}}{{end}}`
diff --git a/mails/templates/layout.go b/mails/templates/layout.go
index 21e0b939..7a43ad92 100644
--- a/mails/templates/layout.go
+++ b/mails/templates/layout.go
@@ -53,7 +53,7 @@ const Layout = `
.btn {
display: inline-block;
vertical-align: top;
- border: 1px solid #e5e5e5;
+ border: 0;
cursor: pointer;
color: #fff !important;
background: #16161a !important;
diff --git a/mails/templates/password_login_alert.go b/mails/templates/password_login_alert.go
deleted file mode 100644
index 8ffd1299..00000000
--- a/mails/templates/password_login_alert.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package templates
-
-// Available variables:
-//
-// ```
-// Record *models.Record
-// AppName string
-// AppUrl string
-// ProviderNames []string
-// ```
-const PasswordLoginAlertBody = `
-{{define "content"}}
- Hello,
-
- Just to let you know that someone has logged in to your {{.AppName}} account using a password while you already have
- OAuth2
- {{range $index, $provider := .ProviderNames }}
- {{if $index}}|{{end}}
- {{ $provider }}
- {{ end }}
- auth linked.
-
- If you have recently signed in with a password, you may disregard this email.
- If you don't recognize the above action, you should immediately change your {{.AppName}} account password.
-
- Thanks,
- {{.AppName}} team
-
-{{end}}
-`
diff --git a/migrations/1640988000_init.go b/migrations/1640988000_init.go
index 48d71a77..5535a71b 100644
--- a/migrations/1640988000_init.go
+++ b/migrations/1640988000_init.go
@@ -1,26 +1,19 @@
-// Package migrations contains the system PocketBase DB migrations.
package migrations
import (
+ "fmt"
"path/filepath"
"runtime"
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/models/settings"
- "github.com/pocketbase/pocketbase/tools/migrate"
+ "github.com/pocketbase/pocketbase/core"
"github.com/pocketbase/pocketbase/tools/types"
)
-var AppMigrations migrate.MigrationsList
-
// Register is a short alias for `AppMigrations.Register()`
// that is usually used in external/user defined migrations.
func Register(
- up func(db dbx.Builder) error,
- down func(db dbx.Builder) error,
+ up func(app core.App) error,
+ down func(app core.App) error,
optFilename ...string,
) {
var optFiles []string
@@ -30,29 +23,28 @@ func Register(
_, path, _, _ := runtime.Caller(1)
optFiles = append(optFiles, filepath.Base(path))
}
- AppMigrations.Register(up, down, optFiles...)
+ core.AppMigrations.Register(up, down, optFiles...)
}
func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- _, tablesErr := db.NewQuery(`
- CREATE TABLE {{_admins}} (
- [[id]] TEXT PRIMARY KEY NOT NULL,
- [[avatar]] INTEGER DEFAULT 0 NOT NULL,
- [[email]] TEXT UNIQUE NOT NULL,
- [[tokenKey]] TEXT UNIQUE NOT NULL,
- [[passwordHash]] TEXT NOT NULL,
- [[lastResetSentAt]] TEXT DEFAULT "" NOT NULL,
- [[created]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
- [[updated]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
- );
+ core.SystemMigrations.Register(func(txApp core.App) error {
+ if err := createLogsTable(txApp); err != nil {
+ return fmt.Errorf("_logs error: %w", err)
+ }
+ if err := createParamsTable(txApp); err != nil {
+ return fmt.Errorf("_params exec error: %w", err)
+ }
+
+ // -----------------------------------------------------------
+
+ _, execerr := txApp.DB().NewQuery(`
CREATE TABLE {{_collections}} (
- [[id]] TEXT PRIMARY KEY NOT NULL,
+ [[id]] TEXT PRIMARY KEY DEFAULT ('r'||lower(hex(randomblob(7)))) NOT NULL,
[[system]] BOOLEAN DEFAULT FALSE NOT NULL,
[[type]] TEXT DEFAULT "base" NOT NULL,
[[name]] TEXT UNIQUE NOT NULL,
- [[schema]] JSON DEFAULT "[]" NOT NULL,
+ [[fields]] JSON DEFAULT "[]" NOT NULL,
[[indexes]] JSON DEFAULT "[]" NOT NULL,
[[listRule]] TEXT DEFAULT NULL,
[[viewRule]] TEXT DEFAULT NULL,
@@ -63,104 +55,54 @@ func init() {
[[created]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
[[updated]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
-
- CREATE TABLE {{_params}} (
- [[id]] TEXT PRIMARY KEY NOT NULL,
- [[key]] TEXT UNIQUE NOT NULL,
- [[value]] JSON DEFAULT NULL,
- [[created]] TEXT DEFAULT "" NOT NULL,
- [[updated]] TEXT DEFAULT "" NOT NULL
- );
-
- CREATE TABLE {{_externalAuths}} (
- [[id]] TEXT PRIMARY KEY NOT NULL,
- [[collectionId]] TEXT NOT NULL,
- [[recordId]] TEXT NOT NULL,
- [[provider]] TEXT NOT NULL,
- [[providerId]] TEXT NOT NULL,
- [[created]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
- [[updated]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
- ---
- FOREIGN KEY ([[collectionId]]) REFERENCES {{_collections}} ([[id]]) ON UPDATE CASCADE ON DELETE CASCADE
- );
-
- CREATE UNIQUE INDEX _externalAuths_record_provider_idx on {{_externalAuths}} ([[collectionId]], [[recordId]], [[provider]]);
- CREATE UNIQUE INDEX _externalAuths_collection_provider_idx on {{_externalAuths}} ([[collectionId]], [[provider]], [[providerId]]);
`).Execute()
- if tablesErr != nil {
- return tablesErr
+ if execerr != nil {
+ return fmt.Errorf("_collections exec error: %w", execerr)
}
- dao := daos.New(db)
+ if err := createMFAsCollection(txApp); err != nil {
+ return fmt.Errorf("_mfas error: %w", err)
+ }
- // inserts default settings
- // -----------------------------------------------------------
- defaultSettings := settings.New()
- if err := dao.SaveSettings(defaultSettings); err != nil {
+ if err := createOTPsCollection(txApp); err != nil {
+ return fmt.Errorf("_otps error: %w", err)
+ }
+
+ if err := createExternalAuthsCollection(txApp); err != nil {
+ return fmt.Errorf("_externalAuths error: %w", err)
+ }
+
+ if err := createAuthOriginsCollection(txApp); err != nil {
+ return fmt.Errorf("_authOrigins error: %w", err)
+ }
+
+ if err := createSuperusersCollection(txApp); err != nil {
+ return fmt.Errorf("_superusers error: %w", err)
+ }
+
+ if err := createUsersCollection(txApp); err != nil {
+ return fmt.Errorf("users error: %w", err)
+ }
+
+ return nil
+ }, func(txApp core.App) error {
+ _, err := txApp.AuxDB().DropTable("_logs").Execute()
+ if err != nil {
return err
}
- // inserts the system users collection
- // -----------------------------------------------------------
- usersCollection := &models.Collection{}
- usersCollection.MarkAsNew()
- usersCollection.Id = "_pb_users_auth_"
- usersCollection.Name = "users"
- usersCollection.Type = models.CollectionTypeAuth
- usersCollection.ListRule = types.Pointer("id = @request.auth.id")
- usersCollection.ViewRule = types.Pointer("id = @request.auth.id")
- usersCollection.CreateRule = types.Pointer("")
- usersCollection.UpdateRule = types.Pointer("id = @request.auth.id")
- usersCollection.DeleteRule = types.Pointer("id = @request.auth.id")
-
- // set auth options
- usersCollection.SetOptions(models.CollectionAuthOptions{
- ManageRule: nil,
- AllowOAuth2Auth: true,
- AllowUsernameAuth: true,
- AllowEmailAuth: true,
- MinPasswordLength: 8,
- RequireEmail: false,
- })
-
- // set optional default fields
- usersCollection.Schema = schema.NewSchema(
- &schema.SchemaField{
- Id: "users_name",
- Type: schema.FieldTypeText,
- Name: "name",
- Options: &schema.TextOptions{},
- },
- &schema.SchemaField{
- Id: "users_avatar",
- Type: schema.FieldTypeFile,
- Name: "avatar",
- Options: &schema.FileOptions{
- MaxSelect: 1,
- MaxSize: 5242880,
- MimeTypes: []string{
- "image/jpeg",
- "image/png",
- "image/svg+xml",
- "image/gif",
- "image/webp",
- },
- },
- },
- )
-
- return dao.SaveCollection(usersCollection)
- }, func(db dbx.Builder) error {
tables := []string{
"users",
- "_externalAuths",
+ core.CollectionNameSuperusers,
+ core.CollectionNameMFAs,
+ core.CollectionNameOTPs,
+ core.CollectionNameAuthOrigins,
"_params",
"_collections",
- "_admins",
}
for _, name := range tables {
- if _, err := db.DropTable(name).Execute(); err != nil {
+ if _, err := txApp.DB().DropTable(name).Execute(); err != nil {
return err
}
}
@@ -168,3 +110,252 @@ func init() {
return nil
})
}
+
+// createParamsTable creates the system "_params" table in the main database.
+//
+// The {{...}} and [[...]] placeholders are dbx quoting for table and column
+// identifiers. The id column defaults to "r" followed by 14 random hex
+// characters; created/updated default to the insertion datetime (the
+// trailing "Z" marks the stored value as UTC).
+func createParamsTable(txApp core.App) error {
+ _, execErr := txApp.DB().NewQuery(`
+ CREATE TABLE {{_params}} (
+ [[id]] TEXT PRIMARY KEY DEFAULT ('r'||lower(hex(randomblob(7)))) NOT NULL,
+ [[value]] JSON DEFAULT NULL,
+ [[created]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
+ [[updated]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
+ );
+ `).Execute()
+
+ return execErr
+}
+
+// createLogsTable creates the "_logs" table in the auxiliary database
+// (note: AuxDB, not the main DB) together with its lookup indexes on
+// level, message and the created value truncated to the hour.
+func createLogsTable(txApp core.App) error {
+ _, execErr := txApp.AuxDB().NewQuery(`
+ CREATE TABLE {{_logs}} (
+ [[id]] TEXT PRIMARY KEY DEFAULT ('r'||lower(hex(randomblob(7)))) NOT NULL,
+ [[level]] INTEGER DEFAULT 0 NOT NULL,
+ [[message]] TEXT DEFAULT "" NOT NULL,
+ [[data]] JSON DEFAULT "{}" NOT NULL,
+ [[created]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
+ );
+
+ CREATE INDEX idx_logs_level on {{_logs}} ([[level]]);
+ CREATE INDEX idx_logs_message on {{_logs}} ([[message]]);
+ CREATE INDEX idx_logs_created_hour on {{_logs}} (strftime('%Y-%m-%d %H:00:00', [[created]]));
+ `).Execute()
+
+ return execErr
+}
+
+// createMFAsCollection creates the system MFAs collection.
+//
+// List/view/delete are restricted to the auth record referenced by the
+// record itself (create/update API rules are left unset).
+func createMFAsCollection(txApp core.App) error {
+ col := core.NewBaseCollection(core.CollectionNameMFAs)
+ col.System = true
+
+ // allow access only to the owner auth record
+ ownerRule := "@request.auth.id != '' && recordRef = @request.auth.id && collectionRef = @request.auth.collectionId"
+ col.ListRule = types.Pointer(ownerRule)
+ col.ViewRule = types.Pointer(ownerRule)
+ col.DeleteRule = types.Pointer(ownerRule)
+
+ // reference to the owner auth collection and record
+ col.Fields.Add(&core.TextField{
+ Name: "collectionRef",
+ System: true,
+ Required: true,
+ })
+ col.Fields.Add(&core.TextField{
+ Name: "recordRef",
+ System: true,
+ Required: true,
+ })
+ // identifier of the used auth method
+ col.Fields.Add(&core.TextField{
+ Name: "method",
+ System: true,
+ Required: true,
+ })
+ col.Fields.Add(&core.AutodateField{
+ Name: "created",
+ System: true,
+ OnCreate: true,
+ })
+ col.Fields.Add(&core.AutodateField{
+ Name: "updated",
+ System: true,
+ OnCreate: true,
+ OnUpdate: true,
+ })
+ // index on the owner pair for faster lookups
+ col.AddIndex("idx_mfas_collectionRef_recordRef", false, "collectionRef,recordRef", "")
+
+ return txApp.Save(col)
+}
+
+// createOTPsCollection creates the system OTPs collection.
+//
+// List/view/delete are restricted to the auth record referenced by the
+// record itself (create/update API rules are left unset).
+func createOTPsCollection(txApp core.App) error {
+ col := core.NewBaseCollection(core.CollectionNameOTPs)
+ col.System = true
+
+ // allow access only to the owner auth record
+ ownerRule := "@request.auth.id != '' && recordRef = @request.auth.id && collectionRef = @request.auth.collectionId"
+ col.ListRule = types.Pointer(ownerRule)
+ col.ViewRule = types.Pointer(ownerRule)
+ col.DeleteRule = types.Pointer(ownerRule)
+
+ // reference to the owner auth collection and record
+ col.Fields.Add(&core.TextField{
+ Name: "collectionRef",
+ System: true,
+ Required: true,
+ })
+ col.Fields.Add(&core.TextField{
+ Name: "recordRef",
+ System: true,
+ Required: true,
+ })
+ // the hidden one-time password hash
+ col.Fields.Add(&core.PasswordField{
+ Name: "password",
+ System: true,
+ Hidden: true,
+ Required: true,
+ Cost: 8, // low bcrypt cost for better performance and because it is not critical
+ })
+ col.Fields.Add(&core.AutodateField{
+ Name: "created",
+ System: true,
+ OnCreate: true,
+ })
+ col.Fields.Add(&core.AutodateField{
+ Name: "updated",
+ System: true,
+ OnCreate: true,
+ OnUpdate: true,
+ })
+ // index on the owner pair for faster lookups
+ col.AddIndex("idx_otps_collectionRef_recordRef", false, "collectionRef, recordRef", "")
+
+ return txApp.Save(col)
+}
+
+// createAuthOriginsCollection creates the system auth origins collection.
+//
+// Each record pairs an auth record with a device/client "fingerprint";
+// the pair is enforced unique via the idx_authOrigins_unique_pairs index.
+// List/view/delete are restricted to the owner auth record.
+func createAuthOriginsCollection(txApp core.App) error {
+ col := core.NewBaseCollection(core.CollectionNameAuthOrigins)
+ col.System = true
+
+ // allow access only to the owner auth record
+ ownerRule := "@request.auth.id != '' && recordRef = @request.auth.id && collectionRef = @request.auth.collectionId"
+ col.ListRule = types.Pointer(ownerRule)
+ col.ViewRule = types.Pointer(ownerRule)
+ col.DeleteRule = types.Pointer(ownerRule)
+
+ // reference to the owner auth collection and record
+ col.Fields.Add(&core.TextField{
+ Name: "collectionRef",
+ System: true,
+ Required: true,
+ })
+ col.Fields.Add(&core.TextField{
+ Name: "recordRef",
+ System: true,
+ Required: true,
+ })
+ col.Fields.Add(&core.TextField{
+ Name: "fingerprint",
+ System: true,
+ Required: true,
+ })
+ col.Fields.Add(&core.AutodateField{
+ Name: "created",
+ System: true,
+ OnCreate: true,
+ })
+ col.Fields.Add(&core.AutodateField{
+ Name: "updated",
+ System: true,
+ OnCreate: true,
+ OnUpdate: true,
+ })
+ // unique (owner, fingerprint) pairs
+ col.AddIndex("idx_authOrigins_unique_pairs", true, "collectionRef, recordRef, fingerprint", "")
+
+ return txApp.Save(col)
+}
+
+// createExternalAuthsCollection creates the system external auths (OAuth2)
+// collection linking an auth record to an external provider identity.
+//
+// Uniqueness is enforced both per record-provider pair and per
+// collection-provider-providerId triple. List/view/delete are restricted
+// to the owner auth record.
+func createExternalAuthsCollection(txApp core.App) error {
+ col := core.NewBaseCollection(core.CollectionNameExternalAuths)
+ col.System = true
+
+ // allow access only to the owner auth record
+ ownerRule := "@request.auth.id != '' && recordRef = @request.auth.id && collectionRef = @request.auth.collectionId"
+ col.ListRule = types.Pointer(ownerRule)
+ col.ViewRule = types.Pointer(ownerRule)
+ col.DeleteRule = types.Pointer(ownerRule)
+
+ // reference to the owner auth collection and record
+ col.Fields.Add(&core.TextField{
+ Name: "collectionRef",
+ System: true,
+ Required: true,
+ })
+ col.Fields.Add(&core.TextField{
+ Name: "recordRef",
+ System: true,
+ Required: true,
+ })
+ // external provider name and the user id within that provider
+ col.Fields.Add(&core.TextField{
+ Name: "provider",
+ System: true,
+ Required: true,
+ })
+ col.Fields.Add(&core.TextField{
+ Name: "providerId",
+ System: true,
+ Required: true,
+ })
+ col.Fields.Add(&core.AutodateField{
+ Name: "created",
+ System: true,
+ OnCreate: true,
+ })
+ col.Fields.Add(&core.AutodateField{
+ Name: "updated",
+ System: true,
+ OnCreate: true,
+ OnUpdate: true,
+ })
+ col.AddIndex("idx_externalAuths_record_provider", true, "collectionRef, recordRef, provider", "")
+ col.AddIndex("idx_externalAuths_collection_provider", true, "collectionRef, provider, providerId", "")
+
+ return txApp.Save(col)
+}
+
+// createSuperusersCollection creates the system superusers auth collection
+// (the v0.23 replacement of the old "_admins" table).
+func createSuperusersCollection(txApp core.App) error {
+ superusers := core.NewAuthCollection(core.CollectionNameSuperusers)
+ superusers.System = true
+ superusers.Fields.Add(&core.EmailField{
+ Name: "email",
+ System: true,
+ Required: true,
+ })
+ superusers.Fields.Add(&core.AutodateField{
+ Name: "created",
+ System: true,
+ OnCreate: true,
+ })
+ superusers.Fields.Add(&core.AutodateField{
+ Name: "updated",
+ System: true,
+ OnCreate: true,
+ OnUpdate: true,
+ })
+ // shorter auth token lifespan than regular auth collections
+ superusers.AuthToken.Duration = 86400 // 1 day
+
+ return txApp.Save(superusers)
+}
+
+// createUsersCollection creates the default (non-system) "users" auth
+// collection with optional name and avatar fields.
+func createUsersCollection(txApp core.App) error {
+ users := core.NewAuthCollection("users")
+ users.Fields.Add(&core.TextField{
+ Name: "name",
+ Max: 255,
+ })
+ users.Fields.Add(&core.FileField{
+ Name: "avatar",
+ MaxSelect: 1,
+ MimeTypes: []string{"image/jpeg", "image/png", "image/svg+xml", "image/gif", "image/webp"},
+ })
+ users.Fields.Add(&core.AutodateField{
+ Name: "created",
+ OnCreate: true,
+ })
+ users.Fields.Add(&core.AutodateField{
+ Name: "updated",
+ OnCreate: true,
+ OnUpdate: true,
+ })
+ // map the OAuth2 provider profile name/avatar into the above fields
+ users.OAuth2.MappedFields.Name = "name"
+ users.OAuth2.MappedFields.AvatarURL = "avatar"
+
+ return txApp.Save(users)
+}
diff --git a/migrations/1673167670_multi_match_migrate.go b/migrations/1673167670_multi_match_migrate.go
deleted file mode 100644
index 936df7c0..00000000
--- a/migrations/1673167670_multi_match_migrate.go
+++ /dev/null
@@ -1,215 +0,0 @@
-package migrations
-
-import (
- "regexp"
- "strings"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
-)
-
-// This migration replaces for backward compatibility the default operators
-// (=, !=, >, etc.) with their any/opt equivalent (?=, ?=, ?>, etc.)
-// in any muli-rel expression collection rule.
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- dao := daos.New(db)
-
- exprRegex := regexp.MustCompile(`([\@\'\"\w\.]+)\s*(=|!=|~|!~|>|>=|<|<=)\s*([\@\'\"\w\.]+)`)
-
- collections := []*models.Collection{}
- if err := dao.CollectionQuery().All(&collections); err != nil {
- return err
- }
-
- findCollection := func(nameOrId string) *models.Collection {
- for _, c := range collections {
- if c.Id == nameOrId || c.Name == nameOrId {
- return c
- }
- }
-
- return nil
- }
-
- var isMultiRelLiteral func(mainCollection *models.Collection, literal string) bool
- isMultiRelLiteral = func(mainCollection *models.Collection, literal string) bool {
- if strings.HasPrefix(literal, "@collection.") {
- return true
- }
-
- if strings.HasPrefix(literal, `"`) ||
- strings.HasPrefix(literal, `'`) ||
- strings.HasPrefix(literal, "@request.method") ||
- strings.HasPrefix(literal, "@request.data") ||
- strings.HasPrefix(literal, "@request.query") {
- return false
- }
-
- parts := strings.Split(literal, ".")
- if len(parts) <= 1 {
- return false
- }
-
- if strings.HasPrefix(literal, "@request.auth") && len(parts) >= 4 {
- // check each auth collection
- for _, c := range collections {
- if c.IsAuth() && isMultiRelLiteral(c, strings.Join(parts[2:], ".")) {
- return true
- }
- }
-
- return false
- }
-
- activeCollection := mainCollection
-
- for i, p := range parts {
- f := activeCollection.Schema.GetFieldByName(p)
- if f == nil || f.Type != schema.FieldTypeRelation {
- return false // not a relation field
- }
-
- // is multi-relation and not the last prop
- opt, ok := f.Options.(*schema.RelationOptions)
- if ok && (opt.MaxSelect == nil || *opt.MaxSelect != 1) && i != len(parts)-1 {
- return true
- }
-
- activeCollection = findCollection(opt.CollectionId)
- if activeCollection == nil {
- return false
- }
- }
-
- return false
- }
-
- // replace all multi-match operators to their any/opt equivalent, eg. "=" => "?="
- migrateRule := func(collection *models.Collection, rule *string) (*string, error) {
- if rule == nil || *rule == "" {
- return rule, nil
- }
-
- newRule := *rule
- parts := exprRegex.FindAllStringSubmatch(newRule, -1)
-
- for _, p := range parts {
- if isMultiRelLiteral(collection, p[1]) || isMultiRelLiteral(collection, p[3]) {
- newRule = strings.ReplaceAll(newRule, p[0], p[1]+" ?"+p[2]+" "+p[3])
- }
- }
-
- return &newRule, nil
- }
-
- var ruleErr error
- for _, c := range collections {
- c.ListRule, ruleErr = migrateRule(c, c.ListRule)
- if ruleErr != nil {
- return ruleErr
- }
-
- c.ViewRule, ruleErr = migrateRule(c, c.ViewRule)
- if ruleErr != nil {
- return ruleErr
- }
-
- c.CreateRule, ruleErr = migrateRule(c, c.CreateRule)
- if ruleErr != nil {
- return ruleErr
- }
-
- c.UpdateRule, ruleErr = migrateRule(c, c.UpdateRule)
- if ruleErr != nil {
- return ruleErr
- }
-
- c.DeleteRule, ruleErr = migrateRule(c, c.DeleteRule)
- if ruleErr != nil {
- return ruleErr
- }
-
- if c.IsAuth() {
- opt := c.AuthOptions()
- opt.ManageRule, ruleErr = migrateRule(c, opt.ManageRule)
- if ruleErr != nil {
- return ruleErr
- }
- c.SetOptions(opt)
- }
-
- if err := dao.Save(c); err != nil {
- return err
- }
- }
-
- return nil
- }, func(db dbx.Builder) error {
- dao := daos.New(db)
-
- collections := []*models.Collection{}
- if err := dao.CollectionQuery().All(&collections); err != nil {
- return err
- }
-
- anyOpRegex := regexp.MustCompile(`\?(=|!=|~|!~|>|>=|<|<=)`)
-
- // replace any/opt operators to their old versions, eg. "?=" => "="
- revertRule := func(rule *string) (*string, error) {
- if rule == nil || *rule == "" {
- return rule, nil
- }
-
- newRule := *rule
- newRule = anyOpRegex.ReplaceAllString(newRule, "${1}")
-
- return &newRule, nil
- }
-
- var ruleErr error
- for _, c := range collections {
- c.ListRule, ruleErr = revertRule(c.ListRule)
- if ruleErr != nil {
- return ruleErr
- }
-
- c.ViewRule, ruleErr = revertRule(c.ViewRule)
- if ruleErr != nil {
- return ruleErr
- }
-
- c.CreateRule, ruleErr = revertRule(c.CreateRule)
- if ruleErr != nil {
- return ruleErr
- }
-
- c.UpdateRule, ruleErr = revertRule(c.UpdateRule)
- if ruleErr != nil {
- return ruleErr
- }
-
- c.DeleteRule, ruleErr = revertRule(c.DeleteRule)
- if ruleErr != nil {
- return ruleErr
- }
-
- if c.IsAuth() {
- opt := c.AuthOptions()
- opt.ManageRule, ruleErr = revertRule(opt.ManageRule)
- if ruleErr != nil {
- return ruleErr
- }
- c.SetOptions(opt)
- }
-
- if err := dao.Save(c); err != nil {
- return err
- }
- }
-
- return nil
- })
-}
diff --git a/migrations/1677152688_rename_authentik_to_oidc.go b/migrations/1677152688_rename_authentik_to_oidc.go
deleted file mode 100644
index b0b0d748..00000000
--- a/migrations/1677152688_rename_authentik_to_oidc.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package migrations
-
-import (
- "github.com/pocketbase/dbx"
-)
-
-// This migration replaces the "authentikAuth" setting with "oidc".
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- _, err := db.NewQuery(`
- UPDATE {{_params}}
- SET [[value]] = replace([[value]], '"authentikAuth":', '"oidcAuth":')
- WHERE [[key]] = 'settings'
- `).Execute()
-
- return err
- }, func(db dbx.Builder) error {
- _, err := db.NewQuery(`
- UPDATE {{_params}}
- SET [[value]] = replace([[value]], '"oidcAuth":', '"authentikAuth":')
- WHERE [[key]] = 'settings'
- `).Execute()
-
- return err
- })
-}
diff --git a/migrations/1679943780_normalize_single_multiple_values.go b/migrations/1679943780_normalize_single_multiple_values.go
deleted file mode 100644
index bc82bc4f..00000000
--- a/migrations/1679943780_normalize_single_multiple_values.go
+++ /dev/null
@@ -1,108 +0,0 @@
-package migrations
-
-import (
- "fmt"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
-)
-
-// Normalizes old single and multiple values of MultiValuer fields (file, select, relation).
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- return normalizeMultivaluerFields(db)
- }, func(db dbx.Builder) error {
- return nil
- })
-}
-
-func normalizeMultivaluerFields(db dbx.Builder) error {
- dao := daos.New(db)
-
- collections := []*models.Collection{}
- if err := dao.CollectionQuery().All(&collections); err != nil {
- return err
- }
-
- for _, c := range collections {
- if c.IsView() {
- // skip view collections
- continue
- }
-
- for _, f := range c.Schema.Fields() {
- opt, ok := f.Options.(schema.MultiValuer)
- if !ok {
- continue
- }
-
- var updateQuery *dbx.Query
-
- if opt.IsMultiple() {
- updateQuery = dao.DB().NewQuery(fmt.Sprintf(
- `UPDATE {{%s}} set [[%s]] = (
- CASE
- WHEN COALESCE([[%s]], '') = ''
- THEN '[]'
- ELSE (
- CASE
- WHEN json_valid([[%s]]) AND json_type([[%s]]) == 'array'
- THEN [[%s]]
- ELSE json_array([[%s]])
- END
- )
- END
- )`,
- c.Name,
- f.Name,
- f.Name,
- f.Name,
- f.Name,
- f.Name,
- f.Name,
- ))
- } else {
- updateQuery = dao.DB().NewQuery(fmt.Sprintf(
- `UPDATE {{%s}} set [[%s]] = (
- CASE
- WHEN COALESCE([[%s]], '[]') = '[]'
- THEN ''
- ELSE (
- CASE
- WHEN json_valid([[%s]]) AND json_type([[%s]]) == 'array'
- THEN COALESCE(json_extract([[%s]], '$[#-1]'), '')
- ELSE [[%s]]
- END
- )
- END
- )`,
- c.Name,
- f.Name,
- f.Name,
- f.Name,
- f.Name,
- f.Name,
- f.Name,
- ))
- }
-
- if _, err := updateQuery.Execute(); err != nil {
- return err
- }
- }
- }
-
- // trigger view query update after the records normalization
- // (ignore save error in case of invalid query to allow users to change it from the UI)
- for _, c := range collections {
- if !c.IsView() {
- continue
- }
-
- dao.SaveCollection(c)
- }
-
- return nil
-}
diff --git a/migrations/1679943781_add_indexes_column.go b/migrations/1679943781_add_indexes_column.go
deleted file mode 100644
index 96b1039b..00000000
--- a/migrations/1679943781_add_indexes_column.go
+++ /dev/null
@@ -1,141 +0,0 @@
-package migrations
-
-import (
- "fmt"
- "strings"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/dbutils"
- "github.com/pocketbase/pocketbase/tools/list"
-)
-
-// Adds _collections indexes column (if not already).
-//
-// Note: This migration will be deleted once schema.SchemaField.Unuique is removed.
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- dao := daos.New(db)
-
- // cleanup failed remaining/"dangling" temp views to prevent
- // errors during the indexes upsert
- // ---
- tempViews := []string{}
- viewsErr := db.Select("name").
- From("sqlite_schema").
- AndWhere(dbx.HashExp{"type": "view"}).
- AndWhere(dbx.NewExp(`[[name]] LIKE '\_temp\_%' ESCAPE '\'`)).
- Column(&tempViews)
- if viewsErr != nil {
- return viewsErr
- }
- for _, name := range tempViews {
- if err := dao.DeleteView(name); err != nil {
- return err
- }
- }
- // ---
-
- cols, err := dao.TableColumns("_collections")
- if err != nil {
- return err
- }
-
- var hasIndexesColumn bool
- for _, col := range cols {
- if col == "indexes" {
- // already existing (probably via the init migration)
- hasIndexesColumn = true
- break
- }
- }
-
- if !hasIndexesColumn {
- if _, err := db.AddColumn("_collections", "indexes", `JSON DEFAULT "[]" NOT NULL`).Execute(); err != nil {
- return err
- }
- }
-
- collections := []*models.Collection{}
- if err := dao.CollectionQuery().AndWhere(dbx.NewExp("type != 'view'")).All(&collections); err != nil {
- return err
- }
-
- type indexInfo struct {
- Sql string `db:"sql"`
- IndexName string `db:"name"`
- TableName string `db:"tbl_name"`
- }
-
- indexesQuery := db.NewQuery(`SELECT * FROM sqlite_master WHERE type = "index" and sql is not null`)
- rawIndexes := []indexInfo{}
- if err := indexesQuery.All(&rawIndexes); err != nil {
- return err
- }
-
- indexesByTableName := map[string][]indexInfo{}
- for _, idx := range rawIndexes {
- indexesByTableName[idx.TableName] = append(indexesByTableName[idx.TableName], idx)
- }
-
- for _, c := range collections {
- c.Indexes = nil // reset
-
- excludeIndexes := []string{
- "_" + c.Id + "_email_idx",
- "_" + c.Id + "_username_idx",
- "_" + c.Id + "_tokenKey_idx",
- }
-
- // convert custom indexes into the related collections
- for _, idx := range indexesByTableName[c.Name] {
- if strings.Contains(idx.IndexName, "sqlite_autoindex_") ||
- list.ExistInSlice(idx.IndexName, excludeIndexes) {
- continue
- }
-
- // drop old index (it will be recreated with the collection)
- if _, err := db.DropIndex(idx.TableName, idx.IndexName).Execute(); err != nil {
- return err
- }
-
- c.Indexes = append(c.Indexes, idx.Sql)
- }
-
- // convert unique fields to indexes
- FieldsLoop:
- for _, f := range c.Schema.Fields() {
- if !f.Unique {
- continue
- }
-
- for _, idx := range indexesByTableName[c.Name] {
- parsed := dbutils.ParseIndex(idx.Sql)
- if parsed.Unique && len(parsed.Columns) == 1 && strings.EqualFold(parsed.Columns[0].Name, f.Name) {
- continue FieldsLoop // already added
- }
- }
-
- c.Indexes = append(c.Indexes, fmt.Sprintf(
- `CREATE UNIQUE INDEX "idx_unique_%s" on "%s" ("%s")`,
- f.Id,
- c.Name,
- f.Name,
- ))
- }
-
- if len(c.Indexes) > 0 {
- if err := dao.SaveCollection(c); err != nil {
- return err
- }
- }
- }
-
- return nil
- }, func(db dbx.Builder) error {
- _, err := db.DropColumn("_collections", "indexes").Execute()
-
- return err
- })
-}
diff --git a/migrations/1685164450_check_fk.go b/migrations/1685164450_check_fk.go
deleted file mode 100644
index ecc0ff7d..00000000
--- a/migrations/1685164450_check_fk.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package migrations
-
-import (
- "github.com/pocketbase/dbx"
-)
-
-// Cleanup dangling deleted collections references
-// (see https://github.com/pocketbase/pocketbase/discussions/2570).
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- _, err := db.NewQuery(`
- DELETE FROM {{_externalAuths}}
- WHERE [[collectionId]] NOT IN (SELECT [[id]] FROM {{_collections}})
- `).Execute()
-
- return err
- }, func(db dbx.Builder) error {
- return nil
- })
-}
diff --git a/migrations/1689579878_renormalize_single_multiple_values.go b/migrations/1689579878_renormalize_single_multiple_values.go
deleted file mode 100644
index 51b62b94..00000000
--- a/migrations/1689579878_renormalize_single_multiple_values.go
+++ /dev/null
@@ -1,15 +0,0 @@
-package migrations
-
-import (
- "github.com/pocketbase/dbx"
-)
-
-// Renormalizes old single and multiple values of MultiValuer fields (file, select, relation)
-// (see https://github.com/pocketbase/pocketbase/issues/2930).
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- return normalizeMultivaluerFields(db)
- }, func(db dbx.Builder) error {
- return nil
- })
-}
diff --git a/migrations/1690319366_reset_null_values.go b/migrations/1690319366_reset_null_values.go
deleted file mode 100644
index 306ab3a4..00000000
--- a/migrations/1690319366_reset_null_values.go
+++ /dev/null
@@ -1,58 +0,0 @@
-package migrations
-
-import (
- "fmt"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
-)
-
-// Reset all previously inserted NULL values to the fields zero-default.
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- dao := daos.New(db)
-
- collections := []*models.Collection{}
- if err := dao.CollectionQuery().All(&collections); err != nil {
- return err
- }
-
- for _, collection := range collections {
- if collection.IsView() {
- continue
- }
-
- for _, f := range collection.Schema.Fields() {
- defaultVal := "''"
-
- switch f.Type {
- case schema.FieldTypeJson:
- continue
- case schema.FieldTypeBool:
- defaultVal = "FALSE"
- case schema.FieldTypeNumber:
- defaultVal = "0"
- default:
- if opt, ok := f.Options.(schema.MultiValuer); ok && opt.IsMultiple() {
- defaultVal = "'[]'"
- }
- }
-
- _, err := db.NewQuery(fmt.Sprintf(
- "UPDATE {{%s}} SET [[%s]] = %s WHERE [[%s]] IS NULL",
- collection.Name,
- f.Name,
- defaultVal,
- f.Name,
- )).Execute()
- if err != nil {
- return err
- }
- }
- }
-
- return nil
- }, nil)
-}
diff --git a/migrations/1690454337_transform_relations_to_views.go b/migrations/1690454337_transform_relations_to_views.go
deleted file mode 100644
index e158dd3f..00000000
--- a/migrations/1690454337_transform_relations_to_views.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package migrations
-
-import (
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
-)
-
-// Transform the relation fields to views from non-view collections to json or text fields
-// (see https://github.com/pocketbase/pocketbase/issues/3000).
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- dao := daos.New(db)
-
- views, err := dao.FindCollectionsByType(models.CollectionTypeView)
- if err != nil {
- return err
- }
-
- for _, view := range views {
- refs, err := dao.FindCollectionReferences(view)
- if err != nil {
- return nil
- }
-
- for collection, fields := range refs {
- if collection.IsView() {
- continue // view-view relations are allowed
- }
-
- for _, f := range fields {
- opt, ok := f.Options.(schema.MultiValuer)
- if !ok {
- continue
- }
-
- if opt.IsMultiple() {
- f.Type = schema.FieldTypeJson
- f.Options = &schema.JsonOptions{}
- } else {
- f.Type = schema.FieldTypeText
- f.Options = &schema.TextOptions{}
- }
-
- // replace the existing field
- // (this usually is not necessary since it is a pointer,
- // but it is better to be explicit in case FindCollectionReferences changes)
- collection.Schema.AddField(f)
- }
-
- // "raw" save without records table sync
- if err := dao.Save(collection); err != nil {
- return err
- }
- }
- }
-
- return nil
- }, nil)
-}
diff --git a/migrations/1691747913_resave_views.go b/migrations/1691747913_resave_views.go
deleted file mode 100644
index 3d684609..00000000
--- a/migrations/1691747913_resave_views.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package migrations
-
-import (
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
-)
-
-// Resave all view collections to ensure that the proper id normalization is applied.
-// (see https://github.com/pocketbase/pocketbase/issues/3110)
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- dao := daos.New(db)
-
- collections, err := dao.FindCollectionsByType(models.CollectionTypeView)
- if err != nil {
- return nil
- }
-
- for _, collection := range collections {
- // ignore errors to allow users to adjust
- // the view queries after app start
- dao.SaveCollection(collection)
- }
-
- return nil
- }, nil)
-}
diff --git a/migrations/1692609521_copy_display_fields.go b/migrations/1692609521_copy_display_fields.go
deleted file mode 100644
index b2a20f59..00000000
--- a/migrations/1692609521_copy_display_fields.go
+++ /dev/null
@@ -1,62 +0,0 @@
-package migrations
-
-import (
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
-)
-
-// Copy the now deprecated RelationOptions.DisplayFields values from
-// all relation fields and register its value as Presentable under
-// the specific field in the related collection.
-//
-// If there is more than one relation to a single collection with explicitly
-// set DisplayFields only one of the configuration will be copied.
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- dao := daos.New(db)
-
- collections := []*models.Collection{}
- if err := dao.CollectionQuery().All(&collections); err != nil {
- return err
- }
-
- indexedCollections := make(map[string]*models.Collection, len(collections))
- for _, collection := range collections {
- indexedCollections[collection.Id] = collection
- }
-
- for _, collection := range indexedCollections {
- for _, f := range collection.Schema.Fields() {
- if f.Type != schema.FieldTypeRelation {
- continue
- }
-
- options, ok := f.Options.(*schema.RelationOptions)
- if !ok || len(options.DisplayFields) == 0 {
- continue
- }
-
- relCollection, ok := indexedCollections[options.CollectionId]
- if !ok {
- continue
- }
-
- for _, name := range options.DisplayFields {
- relField := relCollection.Schema.GetFieldByName(name)
- if relField != nil {
- relField.Presentable = true
- }
- }
-
- // only raw model save
- if err := dao.Save(relCollection); err != nil {
- return err
- }
- }
- }
-
- return nil
- }, nil)
-}
diff --git a/migrations/1701496825_allow_single_oauth2_provider_in_multiple_auth_collections.go b/migrations/1701496825_allow_single_oauth2_provider_in_multiple_auth_collections.go
deleted file mode 100644
index 55265f97..00000000
--- a/migrations/1701496825_allow_single_oauth2_provider_in_multiple_auth_collections.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package migrations
-
-import (
- "github.com/pocketbase/dbx"
-)
-
-// Fixes the unique _externalAuths constraint for old installations
-// to allow a single OAuth2 provider to be registered for different auth collections.
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- _, createErr := db.NewQuery("CREATE UNIQUE INDEX IF NOT EXISTS _externalAuths_collection_provider_idx on {{_externalAuths}} ([[collectionId]], [[provider]], [[providerId]])").Execute()
- if createErr != nil {
- return createErr
- }
-
- _, dropErr := db.NewQuery("DROP INDEX IF EXISTS _externalAuths_provider_providerId_idx").Execute()
- if dropErr != nil {
- return dropErr
- }
-
- return nil
- }, nil)
-}
diff --git a/migrations/1702134272_set_default_json_max_size.go b/migrations/1702134272_set_default_json_max_size.go
deleted file mode 100644
index c523e8ab..00000000
--- a/migrations/1702134272_set_default_json_max_size.go
+++ /dev/null
@@ -1,51 +0,0 @@
-package migrations
-
-import (
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
-)
-
-// Update all collections with json fields to have a default MaxSize json field option.
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- dao := daos.New(db)
-
- // note: update even the view collections to prevent
- // unnecessary change detections during the automigrate
- collections := []*models.Collection{}
- if err := dao.CollectionQuery().All(&collections); err != nil {
- return err
- }
-
- for _, collection := range collections {
- var needSave bool
-
- for _, f := range collection.Schema.Fields() {
- if f.Type != schema.FieldTypeJson {
- continue
- }
-
- options, _ := f.Options.(*schema.JsonOptions)
- if options == nil {
- options = &schema.JsonOptions{}
- }
- options.MaxSize = 2000000 // 2mb
- f.Options = options
- needSave = true
- }
-
- if !needSave {
- continue
- }
-
- // save only the collection model without updating its records table
- if err := dao.Save(collection); err != nil {
- return err
- }
- }
-
- return nil
- }, nil)
-}
diff --git a/migrations/1717233556_v0.23_migrate.go b/migrations/1717233556_v0.23_migrate.go
new file mode 100644
index 00000000..ea693056
--- /dev/null
+++ b/migrations/1717233556_v0.23_migrate.go
@@ -0,0 +1,912 @@
+package migrations
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/pocketbase/dbx"
+ "github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/tools/security"
+ "github.com/pocketbase/pocketbase/tools/types"
+ "github.com/spf13/cast"
+ "golang.org/x/crypto/bcrypt"
+)
+
+// note: this migration will be deleted in future version
+
+func init() {
+ core.SystemMigrations.Register(func(txApp core.App) error {
+ // note: mfas and authOrigins tables are available only with v0.23
+ hasUpgraded := txApp.HasTable(core.CollectionNameMFAs) && txApp.HasTable(core.CollectionNameAuthOrigins)
+ if hasUpgraded {
+ return nil
+ }
+
+ oldSettings, err := loadOldSettings(txApp)
+ if err != nil {
+ return fmt.Errorf("failed to fetch old settings: %w", err)
+ }
+
+ if err = migrateOldCollections(txApp, oldSettings); err != nil {
+ return err
+ }
+
+ if err = migrateSuperusers(txApp, oldSettings); err != nil {
+ return fmt.Errorf("failed to migrate admins->superusers: %w", err)
+ }
+
+ if err = migrateSettings(txApp, oldSettings); err != nil {
+ return fmt.Errorf("failed to migrate settings: %w", err)
+ }
+
+ if err = migrateExternalAuths(txApp); err != nil {
+ return fmt.Errorf("failed to migrate externalAuths: %w", err)
+ }
+
+ if err = createMFAsCollection(txApp); err != nil {
+ return fmt.Errorf("failed to create mfas collection: %w", err)
+ }
+
+ if err = createOTPsCollection(txApp); err != nil {
+ return fmt.Errorf("failed to create otps collection: %w", err)
+ }
+
+ if err = createAuthOriginsCollection(txApp); err != nil {
+ return fmt.Errorf("failed to create authOrigins collection: %w", err)
+ }
+
+ if err = createLogsTable(txApp); err != nil {
+			return fmt.Errorf("failed to create logs table: %w", err)
+ }
+
+ if err = os.Remove(filepath.Join(txApp.DataDir(), "logs.db")); err != nil {
+ txApp.Logger().Warn("Failed to delete old logs.db file")
+ }
+
+ return nil
+ }, nil)
+}
+
+// -------------------------------------------------------------------
+
+func migrateSuperusers(txApp core.App, oldSettings *oldSettingsModel) error {
+ // create new superusers collection and table
+ err := createSuperusersCollection(txApp)
+ if err != nil {
+ return err
+ }
+
+ // update with the token options from the old settings
+ superusersCollection, err := txApp.FindCollectionByNameOrId(core.CollectionNameSuperusers)
+ if err != nil {
+ return err
+ }
+
+ superusersCollection.AuthToken.Secret = zeroFallback(
+ cast.ToString(getMapVal(oldSettings.Value, "adminAuthToken", "secret")),
+ superusersCollection.AuthToken.Secret,
+ )
+ superusersCollection.AuthToken.Duration = zeroFallback(
+ cast.ToInt64(getMapVal(oldSettings.Value, "adminAuthToken", "duration")),
+ superusersCollection.AuthToken.Duration,
+ )
+ superusersCollection.PasswordResetToken.Secret = zeroFallback(
+ cast.ToString(getMapVal(oldSettings.Value, "adminPasswordResetToken", "secret")),
+ superusersCollection.PasswordResetToken.Secret,
+ )
+ superusersCollection.PasswordResetToken.Duration = zeroFallback(
+ cast.ToInt64(getMapVal(oldSettings.Value, "adminPasswordResetToken", "duration")),
+ superusersCollection.PasswordResetToken.Duration,
+ )
+ superusersCollection.FileToken.Secret = zeroFallback(
+ cast.ToString(getMapVal(oldSettings.Value, "adminFileToken", "secret")),
+ superusersCollection.FileToken.Secret,
+ )
+ superusersCollection.FileToken.Duration = zeroFallback(
+ cast.ToInt64(getMapVal(oldSettings.Value, "adminFileToken", "duration")),
+ superusersCollection.FileToken.Duration,
+ )
+ if err = txApp.Save(superusersCollection); err != nil {
+ return fmt.Errorf("failed to migrate token configs: %w", err)
+ }
+
+ // copy old admins records into the new one
+ _, err = txApp.DB().NewQuery(`
+ INSERT INTO {{` + core.CollectionNameSuperusers + `}} ([[id]], [[verified]], [[email]], [[password]], [[tokenKey]], [[created]], [[updated]])
+ SELECT [[id]], true, [[email]], [[passwordHash]], [[tokenKey]], [[created]], [[updated]] FROM {{_admins}};
+ `).Execute()
+ if err != nil {
+ return err
+ }
+
+ // remove old admins table
+ _, err = txApp.DB().DropTable("_admins").Execute()
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// -------------------------------------------------------------------
+
+type oldSettingsModel struct {
+ Id string `db:"id" json:"id"`
+ Key string `db:"key" json:"key"`
+ RawValue types.JSONRaw `db:"value" json:"value"`
+ Value map[string]any `db:"-" json:"-"`
+}
+
+func loadOldSettings(txApp core.App) (*oldSettingsModel, error) {
+ oldSettings := &oldSettingsModel{Value: map[string]any{}}
+ err := txApp.DB().Select().From("_params").Where(dbx.HashExp{"key": "settings"}).One(oldSettings)
+ if err != nil {
+ return nil, err
+ }
+
+ // try without decrypt
+ plainDecodeErr := json.Unmarshal(oldSettings.RawValue, &oldSettings.Value)
+
+ // failed, try to decrypt
+ if plainDecodeErr != nil {
+ encryptionKey := os.Getenv(txApp.EncryptionEnv())
+
+ // load without decryption has failed and there is no encryption key to use for decrypt
+ if encryptionKey == "" {
+ return nil, fmt.Errorf("invalid settings db data or missing encryption key %q", txApp.EncryptionEnv())
+ }
+
+ // decrypt
+ decrypted, decryptErr := security.Decrypt(string(oldSettings.RawValue), encryptionKey)
+ if decryptErr != nil {
+ return nil, decryptErr
+ }
+
+ // decode again
+ decryptedDecodeErr := json.Unmarshal(decrypted, &oldSettings.Value)
+ if decryptedDecodeErr != nil {
+ return nil, decryptedDecodeErr
+ }
+ }
+
+ return oldSettings, nil
+}
+
+func migrateSettings(txApp core.App, oldSettings *oldSettingsModel) error {
+	// rename the old params table
+ _, err := txApp.DB().RenameTable("_params", "_params_old").Execute()
+ if err != nil {
+ return err
+ }
+
+ // create new params table
+ err = createParamsTable(txApp)
+ if err != nil {
+ return err
+ }
+
+ // migrate old settings
+ newSettings := txApp.Settings()
+ // ---
+ newSettings.Meta.AppName = cast.ToString(getMapVal(oldSettings.Value, "meta", "appName"))
+ newSettings.Meta.AppURL = strings.TrimSuffix(cast.ToString(getMapVal(oldSettings.Value, "meta", "appUrl")), "/")
+ newSettings.Meta.HideControls = cast.ToBool(getMapVal(oldSettings.Value, "meta", "hideControls"))
+ newSettings.Meta.SenderName = cast.ToString(getMapVal(oldSettings.Value, "meta", "senderName"))
+ newSettings.Meta.SenderAddress = cast.ToString(getMapVal(oldSettings.Value, "meta", "senderAddress"))
+ // ---
+ newSettings.Logs.MaxDays = cast.ToInt(getMapVal(oldSettings.Value, "logs", "maxDays"))
+ newSettings.Logs.MinLevel = cast.ToInt(getMapVal(oldSettings.Value, "logs", "minLevel"))
+ newSettings.Logs.LogIP = cast.ToBool(getMapVal(oldSettings.Value, "logs", "logIp"))
+ // ---
+ newSettings.SMTP.Enabled = cast.ToBool(getMapVal(oldSettings.Value, "smtp", "enabled"))
+ newSettings.SMTP.Port = cast.ToInt(getMapVal(oldSettings.Value, "smtp", "port"))
+ newSettings.SMTP.Host = cast.ToString(getMapVal(oldSettings.Value, "smtp", "host"))
+ newSettings.SMTP.Username = cast.ToString(getMapVal(oldSettings.Value, "smtp", "username"))
+ newSettings.SMTP.Password = cast.ToString(getMapVal(oldSettings.Value, "smtp", "password"))
+ newSettings.SMTP.AuthMethod = cast.ToString(getMapVal(oldSettings.Value, "smtp", "authMethod"))
+ newSettings.SMTP.TLS = cast.ToBool(getMapVal(oldSettings.Value, "smtp", "tls"))
+ newSettings.SMTP.LocalName = cast.ToString(getMapVal(oldSettings.Value, "smtp", "localName"))
+ // ---
+ newSettings.Backups.Cron = cast.ToString(getMapVal(oldSettings.Value, "backups", "cron"))
+ newSettings.Backups.CronMaxKeep = cast.ToInt(getMapVal(oldSettings.Value, "backups", "cronMaxKeep"))
+ newSettings.Backups.S3 = core.S3Config{
+ Enabled: cast.ToBool(getMapVal(oldSettings.Value, "backups", "s3", "enabled")),
+ Bucket: cast.ToString(getMapVal(oldSettings.Value, "backups", "s3", "bucket")),
+ Region: cast.ToString(getMapVal(oldSettings.Value, "backups", "s3", "region")),
+ Endpoint: cast.ToString(getMapVal(oldSettings.Value, "backups", "s3", "endpoint")),
+ AccessKey: cast.ToString(getMapVal(oldSettings.Value, "backups", "s3", "accessKey")),
+ Secret: cast.ToString(getMapVal(oldSettings.Value, "backups", "s3", "secret")),
+ ForcePathStyle: cast.ToBool(getMapVal(oldSettings.Value, "backups", "s3", "forcePathStyle")),
+ }
+ // ---
+ newSettings.S3 = core.S3Config{
+ Enabled: cast.ToBool(getMapVal(oldSettings.Value, "s3", "enabled")),
+ Bucket: cast.ToString(getMapVal(oldSettings.Value, "s3", "bucket")),
+ Region: cast.ToString(getMapVal(oldSettings.Value, "s3", "region")),
+ Endpoint: cast.ToString(getMapVal(oldSettings.Value, "s3", "endpoint")),
+ AccessKey: cast.ToString(getMapVal(oldSettings.Value, "s3", "accessKey")),
+ Secret: cast.ToString(getMapVal(oldSettings.Value, "s3", "secret")),
+ ForcePathStyle: cast.ToBool(getMapVal(oldSettings.Value, "s3", "forcePathStyle")),
+ }
+ // ---
+ err = txApp.Save(newSettings)
+ if err != nil {
+ return err
+ }
+
+ // remove old params table
+ _, err = txApp.DB().DropTable("_params_old").Execute()
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// -------------------------------------------------------------------
+
+func migrateExternalAuths(txApp core.App) error {
+	// rename the old externalAuths table
+ _, err := txApp.DB().RenameTable("_externalAuths", "_externalAuths_old").Execute()
+ if err != nil {
+ return err
+ }
+
+ // create new externalAuths collection and table
+ err = createExternalAuthsCollection(txApp)
+ if err != nil {
+ return err
+ }
+
+ // copy old externalAuths records into the new one
+ _, err = txApp.DB().NewQuery(`
+ INSERT INTO {{` + core.CollectionNameExternalAuths + `}} ([[id]], [[collectionRef]], [[recordRef]], [[provider]], [[providerId]], [[created]], [[updated]])
+ SELECT [[id]], [[collectionId]], [[recordId]], [[provider]], [[providerId]], [[created]], [[updated]] FROM {{_externalAuths_old}};
+ `).Execute()
+ if err != nil {
+ return err
+ }
+
+ // remove old externalAuths table
+ _, err = txApp.DB().DropTable("_externalAuths_old").Execute()
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// -------------------------------------------------------------------
+
+func migrateOldCollections(txApp core.App, oldSettings *oldSettingsModel) error {
+ oldCollections := []*OldCollectionModel{}
+ err := txApp.DB().Select().From("_collections").All(&oldCollections)
+ if err != nil {
+ return err
+ }
+
+ for _, c := range oldCollections {
+ dummyAuthCollection := core.NewAuthCollection("test")
+
+ options := c.Options
+ c.Options = types.JSONMap[any]{} // reset
+
+ // update rules
+ // ---
+ c.ListRule = migrateRule(c.ListRule)
+ c.ViewRule = migrateRule(c.ViewRule)
+ c.CreateRule = migrateRule(c.CreateRule)
+ c.UpdateRule = migrateRule(c.UpdateRule)
+ c.DeleteRule = migrateRule(c.DeleteRule)
+
+ // migrate fields
+ // ---
+ for i, field := range c.Schema {
+ switch cast.ToString(field["type"]) {
+ case "bool":
+ field = toBoolField(field)
+ case "number":
+ field = toNumberField(field)
+ case "text":
+ field = toTextField(field)
+ case "url":
+ field = toURLField(field)
+ case "email":
+ field = toEmailField(field)
+ case "editor":
+ field = toEditorField(field)
+ case "date":
+ field = toDateField(field)
+ case "select":
+ field = toSelectField(field)
+ case "json":
+ field = toJSONField(field)
+ case "relation":
+ field = toRelationField(field)
+ case "file":
+ field = toFileField(field)
+ }
+ c.Schema[i] = field
+ }
+
+ // type specific changes
+ switch c.Type {
+ case "auth":
+ // token configs
+ // ---
+ c.Options["authToken"] = map[string]any{
+ "secret": zeroFallback(cast.ToString(getMapVal(oldSettings.Value, "recordAuthToken", "secret")), dummyAuthCollection.AuthToken.Secret),
+ "duration": zeroFallback(cast.ToInt64(getMapVal(oldSettings.Value, "recordAuthToken", "duration")), dummyAuthCollection.AuthToken.Duration),
+ }
+ c.Options["passwordResetToken"] = map[string]any{
+ "secret": zeroFallback(cast.ToString(getMapVal(oldSettings.Value, "recordPasswordResetToken", "secret")), dummyAuthCollection.PasswordResetToken.Secret),
+ "duration": zeroFallback(cast.ToInt64(getMapVal(oldSettings.Value, "recordPasswordResetToken", "duration")), dummyAuthCollection.PasswordResetToken.Duration),
+ }
+ c.Options["emailChangeToken"] = map[string]any{
+ "secret": zeroFallback(cast.ToString(getMapVal(oldSettings.Value, "recordEmailChangeToken", "secret")), dummyAuthCollection.EmailChangeToken.Secret),
+ "duration": zeroFallback(cast.ToInt64(getMapVal(oldSettings.Value, "recordEmailChangeToken", "duration")), dummyAuthCollection.EmailChangeToken.Duration),
+ }
+ c.Options["verificationToken"] = map[string]any{
+ "secret": zeroFallback(cast.ToString(getMapVal(oldSettings.Value, "recordVerificationToken", "secret")), dummyAuthCollection.VerificationToken.Secret),
+ "duration": zeroFallback(cast.ToInt64(getMapVal(oldSettings.Value, "recordVerificationToken", "duration")), dummyAuthCollection.VerificationToken.Duration),
+ }
+ c.Options["fileToken"] = map[string]any{
+ "secret": zeroFallback(cast.ToString(getMapVal(oldSettings.Value, "recordFileToken", "secret")), dummyAuthCollection.FileToken.Secret),
+ "duration": zeroFallback(cast.ToInt64(getMapVal(oldSettings.Value, "recordFileToken", "duration")), dummyAuthCollection.FileToken.Duration),
+ }
+
+ onlyVerified := cast.ToBool(options["onlyVerified"])
+ if onlyVerified {
+ c.Options["authRule"] = "verified=true"
+ } else {
+ c.Options["authRule"] = ""
+ }
+
+ c.Options["manageRule"] = nil
+ if options["manageRule"] != nil {
+ manageRule := cast.ToString(options["manageRule"])
+ c.Options["manageRule"] = &manageRule
+ }
+
+ // passwordAuth
+ identityFields := []string{}
+ if cast.ToBool(options["allowEmailAuth"]) {
+ identityFields = append(identityFields, "email")
+ }
+ if cast.ToBool(options["allowUsernameAuth"]) {
+ identityFields = append(identityFields, "username")
+ }
+ c.Options["passwordAuth"] = map[string]any{
+ "enabled": len(identityFields) > 0,
+ "identityFields": identityFields,
+ }
+
+ // oauth2
+ // ---
+ oauth2Providers := []map[string]any{}
+ providerNames := []string{
+ "googleAuth",
+ "facebookAuth",
+ "githubAuth",
+ "gitlabAuth",
+ "discordAuth",
+ "twitterAuth",
+ "microsoftAuth",
+ "spotifyAuth",
+ "kakaoAuth",
+ "twitchAuth",
+ "stravaAuth",
+ "giteeAuth",
+ "livechatAuth",
+ "giteaAuth",
+ "oidcAuth",
+ "oidc2Auth",
+ "oidc3Auth",
+ "appleAuth",
+ "instagramAuth",
+ "vkAuth",
+ "yandexAuth",
+ "patreonAuth",
+ "mailcowAuth",
+ "bitbucketAuth",
+ "planningcenterAuth",
+ }
+ for _, name := range providerNames {
+ if !cast.ToBool(getMapVal(oldSettings.Value, name, "enabled")) {
+ continue
+ }
+ oauth2Providers = append(oauth2Providers, map[string]any{
+ "name": strings.TrimSuffix(name, "Auth"),
+ "clientId": cast.ToString(getMapVal(oldSettings.Value, name, "clientId")),
+ "clientSecret": cast.ToString(getMapVal(oldSettings.Value, name, "clientSecret")),
+ "authURL": cast.ToString(getMapVal(oldSettings.Value, name, "authUrl")),
+ "tokenURL": cast.ToString(getMapVal(oldSettings.Value, name, "tokenUrl")),
+ "userInfoURL": cast.ToString(getMapVal(oldSettings.Value, name, "userApiUrl")),
+ "displayName": cast.ToString(getMapVal(oldSettings.Value, name, "displayName")),
+ "pkce": getMapVal(oldSettings.Value, name, "pkce"),
+ })
+ }
+
+ c.Options["oauth2"] = map[string]any{
+ "enabled": cast.ToBool(options["allowOAuth2Auth"]) && len(oauth2Providers) > 0,
+ "providers": oauth2Providers,
+ "mappedFields": map[string]string{
+ "username": "username",
+ },
+ }
+
+ // default email templates
+ // ---
+ emailTemplates := map[string]core.EmailTemplate{
+ "verificationTemplate": dummyAuthCollection.VerificationTemplate,
+ "resetPasswordTemplate": dummyAuthCollection.ResetPasswordTemplate,
+ "confirmEmailChangeTemplate": dummyAuthCollection.ConfirmEmailChangeTemplate,
+ }
+ for name, fallback := range emailTemplates {
+ c.Options[name] = map[string]any{
+ "subject": zeroFallback(
+ cast.ToString(getMapVal(oldSettings.Value, "meta", name, "subject")),
+ fallback.Subject,
+ ),
+ "body": zeroFallback(
+ strings.ReplaceAll(
+ cast.ToString(getMapVal(oldSettings.Value, "meta", name, "body")),
+ "{ACTION_URL}",
+ cast.ToString(getMapVal(oldSettings.Value, "meta", name, "actionUrl")),
+ ),
+ fallback.Body,
+ ),
+ }
+ }
+
+ // mfa
+ // ---
+ c.Options["mfa"] = map[string]any{
+ "enabled": dummyAuthCollection.MFA.Enabled,
+ "duration": dummyAuthCollection.MFA.Duration,
+ "rule": dummyAuthCollection.MFA.Rule,
+ }
+
+ // otp
+ // ---
+ c.Options["otp"] = map[string]any{
+ "enabled": dummyAuthCollection.OTP.Enabled,
+ "duration": dummyAuthCollection.OTP.Duration,
+ "length": dummyAuthCollection.OTP.Length,
+ "emailTemplate": map[string]any{
+ "subject": dummyAuthCollection.OTP.EmailTemplate.Subject,
+ "body": dummyAuthCollection.OTP.EmailTemplate.Body,
+ },
+ }
+
+ // auth alerts
+ // ---
+ c.Options["authAlert"] = map[string]any{
+ "enabled": dummyAuthCollection.AuthAlert.Enabled,
+ "emailTemplate": map[string]any{
+ "subject": dummyAuthCollection.AuthAlert.EmailTemplate.Subject,
+ "body": dummyAuthCollection.AuthAlert.EmailTemplate.Body,
+ },
+ }
+
+ // add system field indexes
+ // ---
+ c.Indexes = append(types.JSONArray[string]{
+ fmt.Sprintf("CREATE UNIQUE INDEX `_%s_username_idx` ON `%s` (username COLLATE NOCASE)", c.Id, c.Name),
+ fmt.Sprintf("CREATE UNIQUE INDEX `_%s_email_idx` ON `%s` (email) WHERE email != ''", c.Id, c.Name),
+ fmt.Sprintf("CREATE UNIQUE INDEX `_%s_tokenKey_idx` ON `%s` (tokenKey)", c.Id, c.Name),
+ }, c.Indexes...)
+
+ // prepend the auth system fields
+ // ---
+ tokenKeyField := map[string]any{
+ "type": "text",
+ "id": "_pbf_auth_tokenKey_",
+ "name": "tokenKey",
+ "system": true,
+ "hidden": true,
+ "required": true,
+ "presentable": false,
+ "primaryKey": false,
+ "min": 30,
+ "max": 60,
+ "pattern": "",
+ "autogeneratePattern": "[a-zA-Z0-9_]{50}",
+ }
+ passwordField := map[string]any{
+ "type": "password",
+ "id": "_pbf_auth_password_",
+ "name": "password",
+ "presentable": false,
+ "system": true,
+ "hidden": true,
+ "required": true,
+ "pattern": "",
+ "min": cast.ToInt(options["minPasswordLength"]),
+ "cost": bcrypt.DefaultCost, // new default
+ }
+ emailField := map[string]any{
+ "type": "email",
+ "id": "_pbf_auth_email_",
+ "name": "email",
+ "system": true,
+ "hidden": false,
+ "presentable": false,
+ "required": cast.ToBool(options["requireEmail"]),
+ "exceptDomains": cast.ToStringSlice(options["exceptEmailDomains"]),
+ "onlyDomains": cast.ToStringSlice(options["onlyEmailDomains"]),
+ }
+ emailVisibilityField := map[string]any{
+ "type": "bool",
+ "id": "_pbf_auth_emailVisibility_",
+ "name": "emailVisibility",
+ "system": true,
+ "hidden": false,
+ "presentable": false,
+ "required": false,
+ }
+ verifiedField := map[string]any{
+ "type": "bool",
+ "id": "_pbf_auth_verified_",
+ "name": "verified",
+ "system": true,
+ "hidden": false,
+ "presentable": false,
+ "required": false,
+ }
+ usernameField := map[string]any{
+ "type": "text",
+ "id": "_pbf_auth_username_",
+ "name": "username",
+ "system": false,
+ "hidden": false,
+ "required": true,
+ "presentable": false,
+ "primaryKey": false,
+ "min": 3,
+ "max": 150,
+ "pattern": `^[\w][\w\.\-]*$`,
+ "autogeneratePattern": "users[0-9]{6}",
+ }
+ c.Schema = append(types.JSONArray[types.JSONMap[any]]{
+ passwordField,
+ tokenKeyField,
+ emailField,
+ emailVisibilityField,
+ verifiedField,
+ usernameField,
+ }, c.Schema...)
+
+			// rename passwordHash records table column to password
+ // ---
+ _, err = txApp.DB().RenameColumn(c.Name, "passwordHash", "password").Execute()
+ if err != nil {
+ return err
+ }
+
+ // delete unnecessary auth columns
+ dropColumns := []string{"lastResetSentAt", "lastVerificationSentAt", "lastAuthAlertSentAt"}
+ for _, drop := range dropColumns {
+ // ignore errors in case the columns don't exist
+ _, _ = txApp.DB().DropColumn(c.Name, drop).Execute()
+ }
+ case "view":
+ c.Options["viewQuery"] = cast.ToString(options["query"])
+ }
+
+ // prepend the id field
+ idField := map[string]any{
+ "type": "text",
+ "id": "_pbf_text_id_",
+ "name": "id",
+ "system": true,
+ "required": true,
+ "presentable": false,
+ "hidden": false,
+ "primaryKey": true,
+ "min": 15,
+ "max": 15,
+ "pattern": "^[a-z0-9]+$",
+ "autogeneratePattern": "[a-z0-9]{15}",
+ }
+ c.Schema = append(types.JSONArray[types.JSONMap[any]]{idField}, c.Schema...)
+
+ var addCreated, addUpdated bool
+
+ if c.Type == "view" {
+ // manually check if the view has created/updated columns
+ columns, _ := txApp.TableColumns(c.Name)
+ for _, c := range columns {
+ if strings.EqualFold(c, "created") {
+ addCreated = true
+ } else if strings.EqualFold(c, "updated") {
+ addUpdated = true
+ }
+ }
+ } else {
+ addCreated = true
+ addUpdated = true
+ }
+
+ if addCreated {
+ createdField := map[string]any{
+ "type": "autodate",
+ "id": "_pbf_autodate_created_",
+ "name": "created",
+ "system": false,
+ "presentable": false,
+ "hidden": false,
+ "onCreate": true,
+ "onUpdate": false,
+ }
+ c.Schema = append(c.Schema, createdField)
+ }
+
+ if addUpdated {
+ updatedField := map[string]any{
+ "type": "autodate",
+ "id": "_pbf_autodate_updated_",
+ "name": "updated",
+ "system": false,
+ "presentable": false,
+ "hidden": false,
+ "onCreate": true,
+ "onUpdate": true,
+ }
+ c.Schema = append(c.Schema, updatedField)
+ }
+
+ if err = txApp.DB().Model(c).Update(); err != nil {
+ return err
+ }
+ }
+
+ _, err = txApp.DB().RenameColumn("_collections", "schema", "fields").Execute()
+ if err != nil {
+ return err
+ }
+
+ // run collection validations
+ collections, err := txApp.FindAllCollections()
+ if err != nil {
+ return fmt.Errorf("failed to retrieve all collections: %w", err)
+ }
+ for _, c := range collections {
+ err = txApp.Validate(c)
+ if err != nil {
+ return fmt.Errorf("migrated collection %q validation failure: %w", c.Name, err)
+ }
+ }
+
+ return nil
+}
+
+type OldCollectionModel struct {
+ Id string `db:"id" json:"id"`
+ Created types.DateTime `db:"created" json:"created"`
+ Updated types.DateTime `db:"updated" json:"updated"`
+ Name string `db:"name" json:"name"`
+ Type string `db:"type" json:"type"`
+ System bool `db:"system" json:"system"`
+ Schema types.JSONArray[types.JSONMap[any]] `db:"schema" json:"schema"`
+ Indexes types.JSONArray[string] `db:"indexes" json:"indexes"`
+ ListRule *string `db:"listRule" json:"listRule"`
+ ViewRule *string `db:"viewRule" json:"viewRule"`
+ CreateRule *string `db:"createRule" json:"createRule"`
+ UpdateRule *string `db:"updateRule" json:"updateRule"`
+ DeleteRule *string `db:"deleteRule" json:"deleteRule"`
+ Options types.JSONMap[any] `db:"options" json:"options"`
+}
+
+func (c OldCollectionModel) TableName() string {
+ return "_collections"
+}
+
+func migrateRule(rule *string) *string {
+ if rule == nil {
+ return nil
+ }
+
+ str := strings.ReplaceAll(*rule, "@request.data", "@request.body")
+
+ return &str
+}
+
+func toBoolField(data map[string]any) map[string]any {
+ return map[string]any{
+ "type": "bool",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "required": cast.ToBool(data["required"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "hidden": false,
+ }
+}
+
+func toNumberField(data map[string]any) map[string]any {
+ return map[string]any{
+ "type": "number",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "required": cast.ToBool(data["required"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "hidden": false,
+ "onlyInt": cast.ToBool(getMapVal(data, "options", "noDecimal")),
+ "min": getMapVal(data, "options", "min"),
+ "max": getMapVal(data, "options", "max"),
+ }
+}
+
+func toTextField(data map[string]any) map[string]any {
+ return map[string]any{
+ "type": "text",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "primaryKey": cast.ToBool(data["primaryKey"]),
+ "hidden": cast.ToBool(data["hidden"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "required": cast.ToBool(data["required"]),
+ "min": cast.ToInt(getMapVal(data, "options", "min")),
+ "max": cast.ToInt(getMapVal(data, "options", "max")),
+ "pattern": cast.ToString(getMapVal(data, "options", "pattern")),
+ "autogeneratePattern": cast.ToString(getMapVal(data, "options", "autogeneratePattern")),
+ }
+}
+
+func toEmailField(data map[string]any) map[string]any {
+ return map[string]any{
+ "type": "email",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "required": cast.ToBool(data["required"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "hidden": false,
+ "exceptDomains": cast.ToStringSlice(getMapVal(data, "options", "exceptDomains")),
+ "onlyDomains": cast.ToStringSlice(getMapVal(data, "options", "onlyDomains")),
+ }
+}
+
+func toURLField(data map[string]any) map[string]any {
+ return map[string]any{
+ "type": "url",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "required": cast.ToBool(data["required"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "hidden": false,
+ "exceptDomains": cast.ToStringSlice(getMapVal(data, "options", "exceptDomains")),
+ "onlyDomains": cast.ToStringSlice(getMapVal(data, "options", "onlyDomains")),
+ }
+}
+
+func toEditorField(data map[string]any) map[string]any {
+ return map[string]any{
+ "type": "editor",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "required": cast.ToBool(data["required"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "hidden": false,
+ "convertURLs": cast.ToBool(getMapVal(data, "options", "convertUrls")),
+ }
+}
+
+func toDateField(data map[string]any) map[string]any {
+ return map[string]any{
+ "type": "date",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "required": cast.ToBool(data["required"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "hidden": false,
+ "min": cast.ToString(getMapVal(data, "options", "min")),
+ "max": cast.ToString(getMapVal(data, "options", "max")),
+ }
+}
+
+func toJSONField(data map[string]any) map[string]any {
+ return map[string]any{
+ "type": "json",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "required": cast.ToBool(data["required"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "hidden": false,
+ "maxSize": cast.ToInt64(getMapVal(data, "options", "maxSize")),
+ }
+}
+
+func toSelectField(data map[string]any) map[string]any {
+ return map[string]any{
+ "type": "select",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "required": cast.ToBool(data["required"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "hidden": false,
+ "values": cast.ToStringSlice(getMapVal(data, "options", "values")),
+ "maxSelect": cast.ToInt(getMapVal(data, "options", "maxSelect")),
+ }
+}
+
+func toRelationField(data map[string]any) map[string]any {
+ maxSelect := cast.ToInt(getMapVal(data, "options", "maxSelect"))
+ if maxSelect <= 0 {
+ maxSelect = 2147483647
+ }
+
+ return map[string]any{
+ "type": "relation",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "required": cast.ToBool(data["required"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "hidden": false,
+ "collectionId": cast.ToString(getMapVal(data, "options", "collectionId")),
+ "cascadeDelete": cast.ToBool(getMapVal(data, "options", "cascadeDelete")),
+ "minSelect": cast.ToInt(getMapVal(data, "options", "minSelect")),
+ "maxSelect": maxSelect,
+ }
+}
+
+func toFileField(data map[string]any) map[string]any {
+ return map[string]any{
+ "type": "file",
+ "id": cast.ToString(data["id"]),
+ "name": cast.ToString(data["name"]),
+ "system": cast.ToBool(data["system"]),
+ "required": cast.ToBool(data["required"]),
+ "presentable": cast.ToBool(data["presentable"]),
+ "hidden": false,
+ "maxSelect": cast.ToInt(getMapVal(data, "options", "maxSelect")),
+ "maxSize": cast.ToInt64(getMapVal(data, "options", "maxSize")),
+ "thumbs": cast.ToStringSlice(getMapVal(data, "options", "thumbs")),
+ "mimeTypes": cast.ToStringSlice(getMapVal(data, "options", "mimeTypes")),
+ "protected": cast.ToBool(getMapVal(data, "options", "protected")),
+ }
+}
+
+func getMapVal(m map[string]any, keys ...string) any {
+ if len(keys) == 0 {
+ return nil
+ }
+
+ result, ok := m[keys[0]]
+ if !ok {
+ return nil
+ }
+
+ // end key reached
+ if len(keys) == 1 {
+ return result
+ }
+
+ if m, ok = result.(map[string]any); !ok {
+ return nil
+ }
+
+ return getMapVal(m, keys[1:]...)
+}
+
+func zeroFallback[T comparable](v T, fallback T) T {
+ var zero T
+
+ if v == zero {
+ return fallback
+ }
+
+ return v
+}
diff --git a/migrations/1718706525_add_login_alert_column.go b/migrations/1718706525_add_login_alert_column.go
deleted file mode 100644
index 1ab71e69..00000000
--- a/migrations/1718706525_add_login_alert_column.go
+++ /dev/null
@@ -1,56 +0,0 @@
-package migrations
-
-import (
- "slices"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-// adds a "lastLoginAlertSentAt" column to all auth collection tables (if not already)
-func init() {
- AppMigrations.Register(func(db dbx.Builder) error {
- dao := daos.New(db)
-
- collections := []*models.Collection{}
- err := dao.CollectionQuery().AndWhere(dbx.HashExp{"type": models.CollectionTypeAuth}).All(&collections)
- if err != nil {
- return err
- }
-
- var needToResetTokens bool
-
- for _, c := range collections {
- columns, err := dao.TableColumns(c.Name)
- if err != nil {
- return err
- }
- if slices.Contains(columns, schema.FieldNameLastLoginAlertSentAt) {
- continue // already inserted
- }
-
- _, err = db.AddColumn(c.Name, schema.FieldNameLastLoginAlertSentAt, "TEXT DEFAULT '' NOT NULL").Execute()
- if err != nil {
- return err
- }
-
- opts := c.AuthOptions()
- if opts.AllowOAuth2Auth && (opts.AllowEmailAuth || opts.AllowUsernameAuth) {
- needToResetTokens = true
- }
- }
-
- settings, _ := dao.FindSettings()
- if needToResetTokens && settings != nil {
- settings.RecordAuthToken.Secret = security.RandomString(50)
- if err := dao.SaveSettings(settings); err != nil {
- return err
- }
- }
-
- return nil
- }, nil)
-}
diff --git a/migrations/logs/1640988000_init.go b/migrations/logs/1640988000_init.go
deleted file mode 100644
index a8d8ef2e..00000000
--- a/migrations/logs/1640988000_init.go
+++ /dev/null
@@ -1,38 +0,0 @@
-package logs
-
-import (
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/tools/migrate"
-)
-
-var LogsMigrations migrate.MigrationsList
-
-func init() {
- LogsMigrations.Register(func(db dbx.Builder) error {
- _, err := db.NewQuery(`
- CREATE TABLE {{_requests}} (
- [[id]] TEXT PRIMARY KEY NOT NULL,
- [[url]] TEXT DEFAULT "" NOT NULL,
- [[method]] TEXT DEFAULT "get" NOT NULL,
- [[status]] INTEGER DEFAULT 200 NOT NULL,
- [[auth]] TEXT DEFAULT "guest" NOT NULL,
- [[ip]] TEXT DEFAULT "127.0.0.1" NOT NULL,
- [[referer]] TEXT DEFAULT "" NOT NULL,
- [[userAgent]] TEXT DEFAULT "" NOT NULL,
- [[meta]] JSON DEFAULT "{}" NOT NULL,
- [[created]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
- [[updated]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
- );
-
- CREATE INDEX _request_status_idx on {{_requests}} ([[status]]);
- CREATE INDEX _request_auth_idx on {{_requests}} ([[auth]]);
- CREATE INDEX _request_ip_idx on {{_requests}} ([[ip]]);
- CREATE INDEX _request_created_hour_idx on {{_requests}} (strftime('%Y-%m-%d %H:00:00', [[created]]));
- `).Execute()
-
- return err
- }, func(db dbx.Builder) error {
- _, err := db.DropTable("_requests").Execute()
- return err
- })
-}
diff --git a/migrations/logs/1660821103_add_user_ip_column.go b/migrations/logs/1660821103_add_user_ip_column.go
deleted file mode 100644
index 1af099d5..00000000
--- a/migrations/logs/1660821103_add_user_ip_column.go
+++ /dev/null
@@ -1,57 +0,0 @@
-package logs
-
-import (
- "github.com/pocketbase/dbx"
-)
-
-func init() {
- LogsMigrations.Register(func(db dbx.Builder) error {
- // delete old index (don't check for error because of backward compatibility with old installations)
- db.DropIndex("_requests", "_request_ip_idx").Execute()
-
- // rename ip -> remoteIp
- if _, err := db.RenameColumn("_requests", "ip", "remoteIp").Execute(); err != nil {
- return err
- }
-
- // add new userIp column
- if _, err := db.AddColumn("_requests", "userIp", `TEXT DEFAULT "127.0.0.1" NOT NULL`).Execute(); err != nil {
- return err
- }
-
- // add new indexes
- if _, err := db.CreateIndex("_requests", "_request_remote_ip_idx", "remoteIp").Execute(); err != nil {
- return err
- }
- if _, err := db.CreateIndex("_requests", "_request_user_ip_idx", "userIp").Execute(); err != nil {
- return err
- }
-
- return nil
- }, func(db dbx.Builder) error {
- // delete new indexes
- if _, err := db.DropIndex("_requests", "_request_remote_ip_idx").Execute(); err != nil {
- return err
- }
- if _, err := db.DropIndex("_requests", "_request_user_ip_idx").Execute(); err != nil {
- return err
- }
-
- // drop userIp column
- if _, err := db.DropColumn("_requests", "userIp").Execute(); err != nil {
- return err
- }
-
- // restore original remoteIp column name
- if _, err := db.RenameColumn("_requests", "remoteIp", "ip").Execute(); err != nil {
- return err
- }
-
- // restore original index
- if _, err := db.CreateIndex("_requests", "_request_ip_idx", "ip").Execute(); err != nil {
- return err
- }
-
- return nil
- })
-}
diff --git a/migrations/logs/1677760279_uppsercase_method.go b/migrations/logs/1677760279_uppsercase_method.go
deleted file mode 100644
index cef96960..00000000
--- a/migrations/logs/1677760279_uppsercase_method.go
+++ /dev/null
@@ -1,18 +0,0 @@
-package logs
-
-import (
- "github.com/pocketbase/dbx"
-)
-
-// This migration normalizes the request logs method to UPPERCASE (eg. "get" => "GET").
-func init() {
- LogsMigrations.Register(func(db dbx.Builder) error {
- _, err := db.NewQuery("UPDATE {{_requests}} SET method=UPPER(method)").Execute()
-
- return err
- }, func(db dbx.Builder) error {
- _, err := db.NewQuery("UPDATE {{_requests}} SET method=LOWER(method)").Execute()
-
- return err
- })
-}
diff --git a/migrations/logs/1699187560_logs_generalization.go b/migrations/logs/1699187560_logs_generalization.go
deleted file mode 100644
index beb18333..00000000
--- a/migrations/logs/1699187560_logs_generalization.go
+++ /dev/null
@@ -1,57 +0,0 @@
-package logs
-
-import (
- "github.com/pocketbase/dbx"
-)
-
-func init() {
- LogsMigrations.Register(func(db dbx.Builder) error {
- if _, err := db.DropTable("_requests").Execute(); err != nil {
- return err
- }
-
- _, err := db.NewQuery(`
- CREATE TABLE {{_logs}} (
- [[id]] TEXT PRIMARY KEY DEFAULT ('r'||lower(hex(randomblob(7)))) NOT NULL,
- [[level]] INTEGER DEFAULT 0 NOT NULL,
- [[message]] TEXT DEFAULT "" NOT NULL,
- [[data]] JSON DEFAULT "{}" NOT NULL,
- [[created]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
- [[updated]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
- );
-
- CREATE INDEX _logs_level_idx on {{_logs}} ([[level]]);
- CREATE INDEX _logs_message_idx on {{_logs}} ([[message]]);
- CREATE INDEX _logs_created_hour_idx on {{_logs}} (strftime('%Y-%m-%d %H:00:00', [[created]]));
- `).Execute()
-
- return err
- }, func(db dbx.Builder) error {
- if _, err := db.DropTable("_logs").Execute(); err != nil {
- return err
- }
-
- _, err := db.NewQuery(`
- CREATE TABLE {{_requests}} (
- [[id]] TEXT PRIMARY KEY NOT NULL,
- [[url]] TEXT DEFAULT "" NOT NULL,
- [[method]] TEXT DEFAULT "get" NOT NULL,
- [[status]] INTEGER DEFAULT 200 NOT NULL,
- [[auth]] TEXT DEFAULT "guest" NOT NULL,
- [[ip]] TEXT DEFAULT "127.0.0.1" NOT NULL,
- [[referer]] TEXT DEFAULT "" NOT NULL,
- [[userAgent]] TEXT DEFAULT "" NOT NULL,
- [[meta]] JSON DEFAULT "{}" NOT NULL,
- [[created]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
- [[updated]] TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
- );
-
- CREATE INDEX _request_status_idx on {{_requests}} ([[status]]);
- CREATE INDEX _request_auth_idx on {{_requests}} ([[auth]]);
- CREATE INDEX _request_ip_idx on {{_requests}} ([[ip]]);
- CREATE INDEX _request_created_hour_idx on {{_requests}} (strftime('%Y-%m-%d %H:00:00', [[created]]));
- `).Execute()
-
- return err
- })
-}
diff --git a/models/admin.go b/models/admin.go
deleted file mode 100644
index 047aff76..00000000
--- a/models/admin.go
+++ /dev/null
@@ -1,67 +0,0 @@
-package models
-
-import (
- "errors"
-
- "github.com/pocketbase/pocketbase/tools/security"
- "github.com/pocketbase/pocketbase/tools/types"
- "golang.org/x/crypto/bcrypt"
-)
-
-var (
- _ Model = (*Admin)(nil)
-)
-
-type Admin struct {
- BaseModel
-
- Avatar int `db:"avatar" json:"avatar"`
- Email string `db:"email" json:"email"`
- TokenKey string `db:"tokenKey" json:"-"`
- PasswordHash string `db:"passwordHash" json:"-"`
- LastResetSentAt types.DateTime `db:"lastResetSentAt" json:"-"`
-}
-
-// TableName returns the Admin model SQL table name.
-func (m *Admin) TableName() string {
- return "_admins"
-}
-
-// ValidatePassword validates a plain password against the model's password.
-func (m *Admin) ValidatePassword(password string) bool {
- bytePassword := []byte(password)
- bytePasswordHash := []byte(m.PasswordHash)
-
- // comparing the password with the hash
- err := bcrypt.CompareHashAndPassword(bytePasswordHash, bytePassword)
-
- // nil means it is a match
- return err == nil
-}
-
-// SetPassword sets cryptographically secure string to `model.Password`.
-//
-// Additionally this method also resets the LastResetSentAt and the TokenKey fields.
-func (m *Admin) SetPassword(password string) error {
- if password == "" {
- return errors.New("The provided plain password is empty")
- }
-
- // hash the password
- hashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), 12)
- if err != nil {
- return err
- }
-
- m.PasswordHash = string(hashedPassword)
- m.LastResetSentAt = types.DateTime{} // reset
-
- // invalidate previously issued tokens
- return m.RefreshTokenKey()
-}
-
-// RefreshTokenKey generates and sets new random token key.
-func (m *Admin) RefreshTokenKey() error {
- m.TokenKey = security.RandomString(50)
- return nil
-}
diff --git a/models/admin_test.go b/models/admin_test.go
deleted file mode 100644
index 6730d229..00000000
--- a/models/admin_test.go
+++ /dev/null
@@ -1,112 +0,0 @@
-package models_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestAdminTableName(t *testing.T) {
- t.Parallel()
-
- m := models.Admin{}
- if m.TableName() != "_admins" {
- t.Fatalf("Unexpected table name, got %q", m.TableName())
- }
-}
-
-func TestAdminValidatePassword(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- admin models.Admin
- password string
- expected bool
- }{
- {
- // empty passwordHash + empty pass
- models.Admin{},
- "",
- false,
- },
- {
- // empty passwordHash + nonempty pass
- models.Admin{},
- "123456",
- false,
- },
- {
- // nonempty passwordHash + empty pass
- models.Admin{PasswordHash: "$2a$10$SKk/Y/Yc925PBtsSYBvq3Ous9Jy18m4KTn6b/PQQ.Y9QVjy3o/Fv."},
- "",
- false,
- },
- {
- // nonempty passwordHash + wrong pass
- models.Admin{PasswordHash: "$2a$10$SKk/Y/Yc925PBtsSYBvq3Ous9Jy18m4KTn6b/PQQ.Y9QVjy3o/Fv."},
- "654321",
- false,
- },
- {
- // nonempty passwordHash + correct pass
- models.Admin{PasswordHash: "$2a$10$SKk/Y/Yc925PBtsSYBvq3Ous9Jy18m4KTn6b/PQQ.Y9QVjy3o/Fv."},
- "123456",
- true,
- },
- }
-
- for i, s := range scenarios {
- result := s.admin.ValidatePassword(s.password)
- if result != s.expected {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestAdminSetPassword(t *testing.T) {
- t.Parallel()
-
- m := models.Admin{
- // 123456
- PasswordHash: "$2a$10$SKk/Y/Yc925PBtsSYBvq3Ous9Jy18m4KTn6b/PQQ.Y9QVjy3o/Fv.",
- LastResetSentAt: types.NowDateTime(),
- TokenKey: "test",
- }
-
- // empty pass
- err1 := m.SetPassword("")
- if err1 == nil {
- t.Fatal("Expected empty password error")
- }
-
- err2 := m.SetPassword("654321")
- if err2 != nil {
- t.Fatalf("Expected nil, got error %v", err2)
- }
-
- if !m.ValidatePassword("654321") {
- t.Fatalf("Password is invalid")
- }
-
- if m.TokenKey == "test" {
- t.Fatalf("Expected TokenKey to change, got %v", m.TokenKey)
- }
-
- if !m.LastResetSentAt.IsZero() {
- t.Fatalf("Expected LastResetSentAt to be zero datetime, got %v", m.LastResetSentAt)
- }
-}
-
-func TestAdminRefreshTokenKey(t *testing.T) {
- t.Parallel()
-
- m := models.Admin{TokenKey: "test"}
-
- m.RefreshTokenKey()
-
- // empty pass
- if m.TokenKey == "" || m.TokenKey == "test" {
- t.Fatalf("Expected TokenKey to change, got %q", m.TokenKey)
- }
-}
diff --git a/models/backup_file_info.go b/models/backup_file_info.go
deleted file mode 100644
index 794900f3..00000000
--- a/models/backup_file_info.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package models
-
-import "github.com/pocketbase/pocketbase/tools/types"
-
-type BackupFileInfo struct {
- Key string `json:"key"`
- Size int64 `json:"size"`
- Modified types.DateTime `json:"modified"`
-}
diff --git a/models/base.go b/models/base.go
deleted file mode 100644
index 44f5a76d..00000000
--- a/models/base.go
+++ /dev/null
@@ -1,122 +0,0 @@
-// Package models implements all PocketBase DB models and DTOs.
-package models
-
-import (
- "github.com/pocketbase/pocketbase/tools/security"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-const (
- // DefaultIdLength is the default length of the generated model id.
- DefaultIdLength = 15
-
- // DefaultIdAlphabet is the default characters set used for generating the model id.
- DefaultIdAlphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
-)
-
-// ColumnValueMapper defines an interface for custom db model data serialization.
-type ColumnValueMapper interface {
- // ColumnValueMap returns the data to be used when persisting the model.
- ColumnValueMap() map[string]any
-}
-
-// FilesManager defines an interface with common methods that files manager models should implement.
-type FilesManager interface {
- // BaseFilesPath returns the storage dir path used by the interface instance.
- BaseFilesPath() string
-}
-
-// Model defines an interface with common methods that all db models should have.
-type Model interface {
- TableName() string
- IsNew() bool
- MarkAsNew()
- MarkAsNotNew()
- HasId() bool
- GetId() string
- SetId(id string)
- GetCreated() types.DateTime
- GetUpdated() types.DateTime
- RefreshId()
- RefreshCreated()
- RefreshUpdated()
-}
-
-// -------------------------------------------------------------------
-// BaseModel
-// -------------------------------------------------------------------
-
-// BaseModel defines common fields and methods used by all other models.
-type BaseModel struct {
- isNotNew bool
-
- Id string `db:"id" json:"id"`
- Created types.DateTime `db:"created" json:"created"`
- Updated types.DateTime `db:"updated" json:"updated"`
-}
-
-// HasId returns whether the model has a nonzero id.
-func (m *BaseModel) HasId() bool {
- return m.GetId() != ""
-}
-
-// GetId returns the model id.
-func (m *BaseModel) GetId() string {
- return m.Id
-}
-
-// SetId sets the model id to the provided string value.
-func (m *BaseModel) SetId(id string) {
- m.Id = id
-}
-
-// MarkAsNew marks the model as "new" (aka. enforces m.IsNew() to be true).
-func (m *BaseModel) MarkAsNew() {
- m.isNotNew = false
-}
-
-// MarkAsNotNew marks the model as "not new" (aka. enforces m.IsNew() to be false)
-func (m *BaseModel) MarkAsNotNew() {
- m.isNotNew = true
-}
-
-// IsNew indicates what type of db query (insert or update)
-// should be used with the model instance.
-func (m *BaseModel) IsNew() bool {
- return !m.isNotNew
-}
-
-// GetCreated returns the model Created datetime.
-func (m *BaseModel) GetCreated() types.DateTime {
- return m.Created
-}
-
-// GetUpdated returns the model Updated datetime.
-func (m *BaseModel) GetUpdated() types.DateTime {
- return m.Updated
-}
-
-// RefreshId generates and sets a new model id.
-//
-// The generated id is a cryptographically random 15 characters length string.
-func (m *BaseModel) RefreshId() {
- m.Id = security.RandomStringWithAlphabet(DefaultIdLength, DefaultIdAlphabet)
-}
-
-// RefreshCreated updates the model Created field with the current datetime.
-func (m *BaseModel) RefreshCreated() {
- m.Created = types.NowDateTime()
-}
-
-// RefreshUpdated updates the model Updated field with the current datetime.
-func (m *BaseModel) RefreshUpdated() {
- m.Updated = types.NowDateTime()
-}
-
-// PostScan implements the [dbx.PostScanner] interface.
-//
-// It is executed right after the model was populated with the db row values.
-func (m *BaseModel) PostScan() error {
- m.MarkAsNotNew()
- return nil
-}
diff --git a/models/base_test.go b/models/base_test.go
deleted file mode 100644
index 434887f3..00000000
--- a/models/base_test.go
+++ /dev/null
@@ -1,134 +0,0 @@
-package models_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/models"
-)
-
-func TestBaseModelHasId(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- model models.BaseModel
- expected bool
- }{
- {
- models.BaseModel{},
- false,
- },
- {
- models.BaseModel{Id: ""},
- false,
- },
- {
- models.BaseModel{Id: "abc"},
- true,
- },
- }
-
- for i, s := range scenarios {
- result := s.model.HasId()
- if result != s.expected {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestBaseModelId(t *testing.T) {
- t.Parallel()
-
- m := models.BaseModel{}
-
- if m.GetId() != "" {
- t.Fatalf("Expected empty id value, got %v", m.GetId())
- }
-
- m.SetId("test")
-
- if m.GetId() != "test" {
- t.Fatalf("Expected %q id, got %v", "test", m.GetId())
- }
-
- m.RefreshId()
-
- if len(m.GetId()) != 15 {
- t.Fatalf("Expected 15 chars id, got %v", m.GetId())
- }
-}
-
-func TestBaseModelIsNew(t *testing.T) {
- t.Parallel()
-
- m0 := models.BaseModel{}
- m1 := models.BaseModel{Id: ""}
- m2 := models.BaseModel{Id: "test"}
- m3 := models.BaseModel{}
- m3.MarkAsNotNew()
- m4 := models.BaseModel{Id: "test"}
- m4.MarkAsNotNew()
- m5 := models.BaseModel{Id: "test"}
- m5.MarkAsNew()
- m5.MarkAsNotNew()
- m6 := models.BaseModel{}
- m6.RefreshId()
- m7 := models.BaseModel{}
- m7.MarkAsNotNew()
- m7.RefreshId()
- m8 := models.BaseModel{}
- m8.PostScan()
-
- scenarios := []struct {
- model models.BaseModel
- expected bool
- }{
- {m0, true},
- {m1, true},
- {m2, true},
- {m3, false},
- {m4, false},
- {m5, false},
- {m6, true},
- {m7, false},
- {m8, false},
- }
-
- for i, s := range scenarios {
- result := s.model.IsNew()
- if result != s.expected {
- t.Errorf("(%d) Expected IsNew %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestBaseModelCreated(t *testing.T) {
- t.Parallel()
-
- m := models.BaseModel{}
-
- if !m.GetCreated().IsZero() {
- t.Fatalf("Expected zero datetime, got %v", m.GetCreated())
- }
-
- m.RefreshCreated()
-
- if m.GetCreated().IsZero() {
- t.Fatalf("Expected non-zero datetime, got %v", m.GetCreated())
- }
-}
-
-func TestBaseModelUpdated(t *testing.T) {
- t.Parallel()
-
- m := models.BaseModel{}
-
- if !m.GetUpdated().IsZero() {
- t.Fatalf("Expected zero datetime, got %v", m.GetUpdated())
- }
-
- m.RefreshUpdated()
-
- if m.GetUpdated().IsZero() {
- t.Fatalf("Expected non-zero datetime, got %v", m.GetUpdated())
- }
-}
diff --git a/models/collection.go b/models/collection.go
deleted file mode 100644
index f9a3eaa5..00000000
--- a/models/collection.go
+++ /dev/null
@@ -1,220 +0,0 @@
-package models
-
-import (
- "encoding/json"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-var (
- _ Model = (*Collection)(nil)
- _ FilesManager = (*Collection)(nil)
-)
-
-const (
- CollectionTypeBase = "base"
- CollectionTypeAuth = "auth"
- CollectionTypeView = "view"
-)
-
-type Collection struct {
- BaseModel
-
- Name string `db:"name" json:"name"`
- Type string `db:"type" json:"type"`
- System bool `db:"system" json:"system"`
- Schema schema.Schema `db:"schema" json:"schema"`
- Indexes types.JsonArray[string] `db:"indexes" json:"indexes"`
-
- // rules
- ListRule *string `db:"listRule" json:"listRule"`
- ViewRule *string `db:"viewRule" json:"viewRule"`
- CreateRule *string `db:"createRule" json:"createRule"`
- UpdateRule *string `db:"updateRule" json:"updateRule"`
- DeleteRule *string `db:"deleteRule" json:"deleteRule"`
-
- Options types.JsonMap `db:"options" json:"options"`
-}
-
-// TableName returns the Collection model SQL table name.
-func (m *Collection) TableName() string {
- return "_collections"
-}
-
-// BaseFilesPath returns the storage dir path used by the collection.
-func (m *Collection) BaseFilesPath() string {
- return m.Id
-}
-
-// IsBase checks if the current collection has "base" type.
-func (m *Collection) IsBase() bool {
- return m.Type == CollectionTypeBase
-}
-
-// IsAuth checks if the current collection has "auth" type.
-func (m *Collection) IsAuth() bool {
- return m.Type == CollectionTypeAuth
-}
-
-// IsView checks if the current collection has "view" type.
-func (m *Collection) IsView() bool {
- return m.Type == CollectionTypeView
-}
-
-// MarshalJSON implements the [json.Marshaler] interface.
-func (m Collection) MarshalJSON() ([]byte, error) {
- type alias Collection // prevent recursion
-
- m.NormalizeOptions()
-
- return json.Marshal(alias(m))
-}
-
-// BaseOptions decodes the current collection options and returns them
-// as new [CollectionBaseOptions] instance.
-func (m *Collection) BaseOptions() CollectionBaseOptions {
- result := CollectionBaseOptions{}
- m.DecodeOptions(&result)
- return result
-}
-
-// AuthOptions decodes the current collection options and returns them
-// as new [CollectionAuthOptions] instance.
-func (m *Collection) AuthOptions() CollectionAuthOptions {
- result := CollectionAuthOptions{}
- m.DecodeOptions(&result)
- return result
-}
-
-// ViewOptions decodes the current collection options and returns them
-// as new [CollectionViewOptions] instance.
-func (m *Collection) ViewOptions() CollectionViewOptions {
- result := CollectionViewOptions{}
- m.DecodeOptions(&result)
- return result
-}
-
-// NormalizeOptions updates the current collection options with a
-// new normalized state based on the collection type.
-func (m *Collection) NormalizeOptions() error {
- var typedOptions any
- switch m.Type {
- case CollectionTypeAuth:
- typedOptions = m.AuthOptions()
- case CollectionTypeView:
- typedOptions = m.ViewOptions()
- default:
- typedOptions = m.BaseOptions()
- }
-
- // serialize
- raw, err := json.Marshal(typedOptions)
- if err != nil {
- return err
- }
-
- // load into a new JsonMap
- m.Options = types.JsonMap{}
- if err := json.Unmarshal(raw, &m.Options); err != nil {
- return err
- }
-
- return nil
-}
-
-// DecodeOptions decodes the current collection options into the
-// provided "result" (must be a pointer).
-func (m *Collection) DecodeOptions(result any) error {
- // raw serialize
- raw, err := json.Marshal(m.Options)
- if err != nil {
- return err
- }
-
- // decode into the provided result
- if err := json.Unmarshal(raw, result); err != nil {
- return err
- }
-
- return nil
-}
-
-// SetOptions normalizes and unmarshals the specified options into m.Options.
-func (m *Collection) SetOptions(typedOptions any) error {
- // serialize
- raw, err := json.Marshal(typedOptions)
- if err != nil {
- return err
- }
-
- m.Options = types.JsonMap{}
- if err := json.Unmarshal(raw, &m.Options); err != nil {
- return err
- }
-
- return m.NormalizeOptions()
-}
-
-// -------------------------------------------------------------------
-
-// CollectionBaseOptions defines the "base" Collection.Options fields.
-type CollectionBaseOptions struct {
-}
-
-// Validate implements [validation.Validatable] interface.
-func (o CollectionBaseOptions) Validate() error {
- return nil
-}
-
-// -------------------------------------------------------------------
-
-// CollectionAuthOptions defines the "auth" Collection.Options fields.
-type CollectionAuthOptions struct {
- ManageRule *string `form:"manageRule" json:"manageRule"`
- AllowOAuth2Auth bool `form:"allowOAuth2Auth" json:"allowOAuth2Auth"`
- AllowUsernameAuth bool `form:"allowUsernameAuth" json:"allowUsernameAuth"`
- AllowEmailAuth bool `form:"allowEmailAuth" json:"allowEmailAuth"`
- RequireEmail bool `form:"requireEmail" json:"requireEmail"`
- ExceptEmailDomains []string `form:"exceptEmailDomains" json:"exceptEmailDomains"`
- OnlyVerified bool `form:"onlyVerified" json:"onlyVerified"`
- OnlyEmailDomains []string `form:"onlyEmailDomains" json:"onlyEmailDomains"`
- MinPasswordLength int `form:"minPasswordLength" json:"minPasswordLength"`
-}
-
-// Validate implements [validation.Validatable] interface.
-func (o CollectionAuthOptions) Validate() error {
- return validation.ValidateStruct(&o,
- validation.Field(&o.ManageRule, validation.NilOrNotEmpty),
- validation.Field(
- &o.ExceptEmailDomains,
- validation.When(len(o.OnlyEmailDomains) > 0, validation.Empty).Else(validation.Each(is.Domain)),
- ),
- validation.Field(
- &o.OnlyEmailDomains,
- validation.When(len(o.ExceptEmailDomains) > 0, validation.Empty).Else(validation.Each(is.Domain)),
- ),
- validation.Field(
- &o.MinPasswordLength,
- validation.When(o.AllowUsernameAuth || o.AllowEmailAuth, validation.Required),
- validation.Min(5),
- validation.Max(72),
- ),
- )
-}
-
-// -------------------------------------------------------------------
-
-// CollectionViewOptions defines the "view" Collection.Options fields.
-type CollectionViewOptions struct {
- Query string `form:"query" json:"query"`
-}
-
-// Validate implements [validation.Validatable] interface.
-func (o CollectionViewOptions) Validate() error {
- return validation.ValidateStruct(&o,
- validation.Field(&o.Query, validation.Required),
- )
-}
diff --git a/models/collection_test.go b/models/collection_test.go
deleted file mode 100644
index ab8c0525..00000000
--- a/models/collection_test.go
+++ /dev/null
@@ -1,522 +0,0 @@
-package models_test
-
-import (
- "encoding/json"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestCollectionTableName(t *testing.T) {
- t.Parallel()
-
- m := models.Collection{}
- if m.TableName() != "_collections" {
- t.Fatalf("Unexpected table name, got %q", m.TableName())
- }
-}
-
-func TestCollectionBaseFilesPath(t *testing.T) {
- t.Parallel()
-
- m := models.Collection{}
-
- m.RefreshId()
-
- expected := m.Id
- if m.BaseFilesPath() != expected {
- t.Fatalf("Expected path %s, got %s", expected, m.BaseFilesPath())
- }
-}
-
-func TestCollectionIsBase(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collection models.Collection
- expected bool
- }{
- {models.Collection{}, false},
- {models.Collection{Type: "unknown"}, false},
- {models.Collection{Type: models.CollectionTypeBase}, true},
- {models.Collection{Type: models.CollectionTypeAuth}, false},
- }
-
- for i, s := range scenarios {
- result := s.collection.IsBase()
- if result != s.expected {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestCollectionIsAuth(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collection models.Collection
- expected bool
- }{
- {models.Collection{}, false},
- {models.Collection{Type: "unknown"}, false},
- {models.Collection{Type: models.CollectionTypeBase}, false},
- {models.Collection{Type: models.CollectionTypeAuth}, true},
- }
-
- for i, s := range scenarios {
- result := s.collection.IsAuth()
- if result != s.expected {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestCollectionMarshalJSON(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- name string
- collection models.Collection
- expected string
- }{
- {
- "no type",
- models.Collection{Name: "test"},
- `{"id":"","created":"","updated":"","name":"test","type":"","system":false,"schema":[],"indexes":[],"listRule":null,"viewRule":null,"createRule":null,"updateRule":null,"deleteRule":null,"options":{}}`,
- },
- {
- "unknown type + non empty options",
- models.Collection{Name: "test", Type: "unknown", ListRule: types.Pointer("test_list"), Options: types.JsonMap{"test": 123}, Indexes: types.JsonArray[string]{"idx_test"}},
- `{"id":"","created":"","updated":"","name":"test","type":"unknown","system":false,"schema":[],"indexes":["idx_test"],"listRule":"test_list","viewRule":null,"createRule":null,"updateRule":null,"deleteRule":null,"options":{}}`,
- },
- {
- "base type + non empty options",
- models.Collection{Name: "test", Type: models.CollectionTypeBase, ListRule: types.Pointer("test_list"), Options: types.JsonMap{"test": 123}},
- `{"id":"","created":"","updated":"","name":"test","type":"base","system":false,"schema":[],"indexes":[],"listRule":"test_list","viewRule":null,"createRule":null,"updateRule":null,"deleteRule":null,"options":{}}`,
- },
- {
- "auth type + non empty options",
- models.Collection{BaseModel: models.BaseModel{Id: "test"}, Type: models.CollectionTypeAuth, Options: types.JsonMap{"test": 123, "allowOAuth2Auth": true, "minPasswordLength": 4, "onlyVerified": true}},
- `{"id":"test","created":"","updated":"","name":"","type":"auth","system":false,"schema":[],"indexes":[],"listRule":null,"viewRule":null,"createRule":null,"updateRule":null,"deleteRule":null,"options":{"allowEmailAuth":false,"allowOAuth2Auth":true,"allowUsernameAuth":false,"exceptEmailDomains":null,"manageRule":null,"minPasswordLength":4,"onlyEmailDomains":null,"onlyVerified":true,"requireEmail":false}}`,
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- result, err := s.collection.MarshalJSON()
- if err != nil {
- t.Fatalf("Unexpected error %v", err)
- }
-
- if string(result) != s.expected {
- t.Fatalf("Expected\n%v\ngot\n%v", s.expected, string(result))
- }
- })
- }
-}
-
-func TestCollectionBaseOptions(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- name string
- collection models.Collection
- expected string
- }{
- {
- "no type",
- models.Collection{Options: types.JsonMap{"test": 123}},
- "{}",
- },
- {
- "unknown type",
- models.Collection{Type: "anything", Options: types.JsonMap{"test": 123}},
- "{}",
- },
- {
- "different type",
- models.Collection{Type: models.CollectionTypeAuth, Options: types.JsonMap{"test": 123, "minPasswordLength": 4}},
- "{}",
- },
- {
- "base type",
- models.Collection{Type: models.CollectionTypeBase, Options: types.JsonMap{"test": 123}},
- "{}",
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- result := s.collection.BaseOptions()
-
- encoded, err := json.Marshal(result)
- if err != nil {
- t.Fatal(err)
- }
-
- if strEncoded := string(encoded); strEncoded != s.expected {
- t.Fatalf("Expected \n%v \ngot \n%v", s.expected, strEncoded)
- }
- })
- }
-}
-
-func TestCollectionAuthOptions(t *testing.T) {
- t.Parallel()
-
- options := types.JsonMap{"test": 123, "minPasswordLength": 4}
- expectedSerialization := `{"manageRule":null,"allowOAuth2Auth":false,"allowUsernameAuth":false,"allowEmailAuth":false,"requireEmail":false,"exceptEmailDomains":null,"onlyVerified":false,"onlyEmailDomains":null,"minPasswordLength":4}`
-
- scenarios := []struct {
- name string
- collection models.Collection
- expected string
- }{
- {
- "no type",
- models.Collection{Options: options},
- expectedSerialization,
- },
- {
- "unknown type",
- models.Collection{Type: "anything", Options: options},
- expectedSerialization,
- },
- {
- "different type",
- models.Collection{Type: models.CollectionTypeBase, Options: options},
- expectedSerialization,
- },
- {
- "auth type",
- models.Collection{Type: models.CollectionTypeAuth, Options: options},
- expectedSerialization,
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- result := s.collection.AuthOptions()
-
- encoded, err := json.Marshal(result)
- if err != nil {
- t.Fatal(err)
- }
-
- if strEncoded := string(encoded); strEncoded != s.expected {
- t.Fatalf("Expected \n%v \ngot \n%v", s.expected, strEncoded)
- }
- })
- }
-}
-
-func TestCollectionViewOptions(t *testing.T) {
- t.Parallel()
-
- options := types.JsonMap{"query": "select id from demo1", "minPasswordLength": 4}
- expectedSerialization := `{"query":"select id from demo1"}`
-
- scenarios := []struct {
- name string
- collection models.Collection
- expected string
- }{
- {
- "no type",
- models.Collection{Options: options},
- expectedSerialization,
- },
- {
- "unknown type",
- models.Collection{Type: "anything", Options: options},
- expectedSerialization,
- },
- {
- "different type",
- models.Collection{Type: models.CollectionTypeBase, Options: options},
- expectedSerialization,
- },
- {
- "view type",
- models.Collection{Type: models.CollectionTypeView, Options: options},
- expectedSerialization,
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- result := s.collection.ViewOptions()
-
- encoded, err := json.Marshal(result)
- if err != nil {
- t.Fatal(err)
- }
-
- if strEncoded := string(encoded); strEncoded != s.expected {
- t.Fatalf("Expected \n%v \ngot \n%v", s.expected, strEncoded)
- }
- })
- }
-}
-
-func TestNormalizeOptions(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- name string
- collection models.Collection
- expected string // serialized options
- }{
- {
- "unknown type",
- models.Collection{Type: "unknown", Options: types.JsonMap{"test": 123, "minPasswordLength": 4}},
- "{}",
- },
- {
- "base type",
- models.Collection{Type: models.CollectionTypeBase, Options: types.JsonMap{"test": 123, "minPasswordLength": 4}},
- "{}",
- },
- {
- "auth type",
- models.Collection{Type: models.CollectionTypeAuth, Options: types.JsonMap{"test": 123, "minPasswordLength": 4}},
- `{"allowEmailAuth":false,"allowOAuth2Auth":false,"allowUsernameAuth":false,"exceptEmailDomains":null,"manageRule":null,"minPasswordLength":4,"onlyEmailDomains":null,"onlyVerified":false,"requireEmail":false}`,
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- if err := s.collection.NormalizeOptions(); err != nil {
- t.Fatalf("Unexpected error %v", err)
- }
-
- encoded, err := json.Marshal(s.collection.Options)
- if err != nil {
- t.Fatal(err)
- }
-
- if strEncoded := string(encoded); strEncoded != s.expected {
- t.Fatalf("Expected \n%v \ngot \n%v", s.expected, strEncoded)
- }
- })
- }
-}
-
-func TestDecodeOptions(t *testing.T) {
- t.Parallel()
-
- m := models.Collection{
- Options: types.JsonMap{"test": 123},
- }
-
- result := struct {
- Test int
- }{}
-
- if err := m.DecodeOptions(&result); err != nil {
- t.Fatal(err)
- }
-
- if result.Test != 123 {
- t.Fatalf("Expected %v, got %v", 123, result.Test)
- }
-}
-
-func TestSetOptions(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- name string
- collection models.Collection
- options any
- expected string // serialized options
- }{
- {
- "no type",
- models.Collection{},
- map[string]any{},
- "{}",
- },
- {
- "unknown type + non empty options",
- models.Collection{Type: "unknown", Options: types.JsonMap{"test": 123}},
- map[string]any{"test": 456, "minPasswordLength": 4},
- "{}",
- },
- {
- "base type",
- models.Collection{Type: models.CollectionTypeBase, Options: types.JsonMap{"test": 123}},
- map[string]any{"test": 456, "minPasswordLength": 4},
- "{}",
- },
- {
- "auth type",
- models.Collection{Type: models.CollectionTypeAuth, Options: types.JsonMap{"test": 123}},
- map[string]any{"test": 456, "minPasswordLength": 4},
- `{"allowEmailAuth":false,"allowOAuth2Auth":false,"allowUsernameAuth":false,"exceptEmailDomains":null,"manageRule":null,"minPasswordLength":4,"onlyEmailDomains":null,"onlyVerified":false,"requireEmail":false}`,
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- if err := s.collection.SetOptions(s.options); err != nil {
- t.Fatalf("Unexpected error %v", err)
- }
-
- encoded, err := json.Marshal(s.collection.Options)
- if err != nil {
- t.Fatal(err)
- }
-
- if strEncoded := string(encoded); strEncoded != s.expected {
- t.Fatalf("Expected\n%v\ngot\n%v", s.expected, strEncoded)
- }
- })
- }
-}
-
-func TestCollectionBaseOptionsValidate(t *testing.T) {
- t.Parallel()
-
- opt := models.CollectionBaseOptions{}
- if err := opt.Validate(); err != nil {
- t.Fatal(err)
- }
-}
-
-func TestCollectionAuthOptionsValidate(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- name string
- options models.CollectionAuthOptions
- expectedErrors []string
- }{
- {
- "empty",
- models.CollectionAuthOptions{},
- nil,
- },
- {
- "empty string ManageRule",
- models.CollectionAuthOptions{ManageRule: types.Pointer("")},
- []string{"manageRule"},
- },
- {
- "minPasswordLength < 5",
- models.CollectionAuthOptions{MinPasswordLength: 3},
- []string{"minPasswordLength"},
- },
- {
- "minPasswordLength > 72",
- models.CollectionAuthOptions{MinPasswordLength: 73},
- []string{"minPasswordLength"},
- },
- {
- "both OnlyDomains and ExceptDomains set",
- models.CollectionAuthOptions{
- OnlyEmailDomains: []string{"example.com", "test.com"},
- ExceptEmailDomains: []string{"example.com", "test.com"},
- },
- []string{"onlyEmailDomains", "exceptEmailDomains"},
- },
- {
- "only OnlyDomains set",
- models.CollectionAuthOptions{
- OnlyEmailDomains: []string{"example.com", "test.com"},
- },
- []string{},
- },
- {
- "only ExceptEmailDomains set",
- models.CollectionAuthOptions{
- ExceptEmailDomains: []string{"example.com", "test.com"},
- },
- []string{},
- },
- {
- "all fields with valid data",
- models.CollectionAuthOptions{
- ManageRule: types.Pointer("test"),
- AllowOAuth2Auth: true,
- AllowUsernameAuth: true,
- AllowEmailAuth: true,
- RequireEmail: true,
- ExceptEmailDomains: []string{"example.com", "test.com"},
- OnlyEmailDomains: nil,
- MinPasswordLength: 5,
- },
- []string{},
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- result := s.options.Validate()
-
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Fatalf("Failed to parse errors %v", result)
- }
-
- if len(errs) != len(s.expectedErrors) {
- t.Fatalf("Expected error keys %v, got errors \n%v", s.expectedErrors, result)
- }
-
- for key := range errs {
- if !list.ExistInSlice(key, s.expectedErrors) {
- t.Fatalf("Unexpected error key %q in \n%v", key, errs)
- }
- }
- })
- }
-}
-
-func TestCollectionViewOptionsValidate(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- name string
- options models.CollectionViewOptions
- expectedErrors []string
- }{
- {
- "empty",
- models.CollectionViewOptions{},
- []string{"query"},
- },
- {
- "valid data",
- models.CollectionViewOptions{
- Query: "test123",
- },
- []string{},
- },
- }
-
- for _, s := range scenarios {
- t.Run(s.name, func(t *testing.T) {
- result := s.options.Validate()
-
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Fatalf("Failed to parse errors %v", result)
- }
-
- if len(errs) != len(s.expectedErrors) {
- t.Fatalf("Expected error keys %v, got errors \n%v", s.expectedErrors, result)
- }
-
- for key := range errs {
- if !list.ExistInSlice(key, s.expectedErrors) {
- t.Fatalf("Unexpected error key %q in \n%v", key, errs)
- }
- }
- })
- }
-}
diff --git a/models/external_auth.go b/models/external_auth.go
deleted file mode 100644
index bf9e0314..00000000
--- a/models/external_auth.go
+++ /dev/null
@@ -1,16 +0,0 @@
-package models
-
-var _ Model = (*ExternalAuth)(nil)
-
-type ExternalAuth struct {
- BaseModel
-
- CollectionId string `db:"collectionId" json:"collectionId"`
- RecordId string `db:"recordId" json:"recordId"`
- Provider string `db:"provider" json:"provider"`
- ProviderId string `db:"providerId" json:"providerId"`
-}
-
-func (m *ExternalAuth) TableName() string {
- return "_externalAuths"
-}
diff --git a/models/external_auth_test.go b/models/external_auth_test.go
deleted file mode 100644
index 7688daa2..00000000
--- a/models/external_auth_test.go
+++ /dev/null
@@ -1,16 +0,0 @@
-package models_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/models"
-)
-
-func TestExternalAuthTableName(t *testing.T) {
- t.Parallel()
-
- m := models.ExternalAuth{}
- if m.TableName() != "_externalAuths" {
- t.Fatalf("Unexpected table name, got %q", m.TableName())
- }
-}
diff --git a/models/log.go b/models/log.go
deleted file mode 100644
index b8153c35..00000000
--- a/models/log.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package models
-
-import (
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-var _ Model = (*Log)(nil)
-
-type Log struct {
- BaseModel
-
- Data types.JsonMap `db:"data" json:"data"`
- Message string `db:"message" json:"message"`
- Level int `db:"level" json:"level"`
-}
-
-func (m *Log) TableName() string {
- return "_logs"
-}
diff --git a/models/param.go b/models/param.go
deleted file mode 100644
index cf5ef053..00000000
--- a/models/param.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package models
-
-import (
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-var _ Model = (*Param)(nil)
-
-const (
- ParamAppSettings = "settings"
-)
-
-type Param struct {
- BaseModel
-
- Key string `db:"key" json:"key"`
- Value types.JsonRaw `db:"value" json:"value"`
-}
-
-func (m *Param) TableName() string {
- return "_params"
-}
diff --git a/models/param_test.go b/models/param_test.go
deleted file mode 100644
index 0ea03c64..00000000
--- a/models/param_test.go
+++ /dev/null
@@ -1,16 +0,0 @@
-package models_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/models"
-)
-
-func TestParamTableName(t *testing.T) {
- t.Parallel()
-
- m := models.Param{}
- if m.TableName() != "_params" {
- t.Fatalf("Unexpected table name, got %q", m.TableName())
- }
-}
diff --git a/models/record.go b/models/record.go
deleted file mode 100644
index f15e8b86..00000000
--- a/models/record.go
+++ /dev/null
@@ -1,962 +0,0 @@
-package models
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "regexp"
- "strconv"
- "time"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/security"
- "github.com/pocketbase/pocketbase/tools/store"
- "github.com/pocketbase/pocketbase/tools/types"
- "github.com/spf13/cast"
- "golang.org/x/crypto/bcrypt"
-)
-
-var (
- _ Model = (*Record)(nil)
- _ ColumnValueMapper = (*Record)(nil)
- _ FilesManager = (*Record)(nil)
-)
-
-type Record struct {
- BaseModel
-
- collection *Collection
-
- exportUnknown bool // whether to export unknown fields
- ignoreEmailVisibility bool // whether to ignore the emailVisibility flag for auth collections
- loaded bool
- originalData map[string]any // the original (aka. first loaded) model data
- expand *store.Store[any] // expanded relations
- data *store.Store[any] // any custom data in addition to the base model fields
-}
-
-// NewRecord initializes a new empty Record model.
-func NewRecord(collection *Collection) *Record {
- return &Record{
- collection: collection,
- data: store.New[any](nil),
- }
-}
-
-// nullStringMapValue returns the raw string value if it exist and
-// its not NULL, otherwise - nil.
-func nullStringMapValue(data dbx.NullStringMap, key string) any {
- nullString, ok := data[key]
-
- if ok && nullString.Valid {
- return nullString.String
- }
-
- return nil
-}
-
-// NewRecordFromNullStringMap initializes a single new Record model
-// with data loaded from the provided NullStringMap.
-//
-// Note that this method is intended to load and Scan data from a database
-// result and calls PostScan() which marks the record as "not new".
-func NewRecordFromNullStringMap(collection *Collection, data dbx.NullStringMap) *Record {
- resultMap := make(map[string]any, len(data))
-
- // load schema fields
- for _, field := range collection.Schema.Fields() {
- resultMap[field.Name] = nullStringMapValue(data, field.Name)
- }
-
- // load base model fields
- for _, name := range schema.BaseModelFieldNames() {
- resultMap[name] = nullStringMapValue(data, name)
- }
-
- // load auth fields
- if collection.IsAuth() {
- for _, name := range schema.AuthFieldNames() {
- resultMap[name] = nullStringMapValue(data, name)
- }
- }
-
- record := NewRecord(collection)
-
- record.Load(resultMap)
- record.PostScan()
-
- return record
-}
-
-// NewRecordsFromNullStringMaps initializes a new Record model for
-// each row in the provided NullStringMap slice.
-//
-// Note that this method is intended to load and Scan data from a database
-// result and calls PostScan() for each record marking them as "not new".
-func NewRecordsFromNullStringMaps(collection *Collection, rows []dbx.NullStringMap) []*Record {
- result := make([]*Record, len(rows))
-
- for i, row := range rows {
- result[i] = NewRecordFromNullStringMap(collection, row)
- }
-
- return result
-}
-
-// TableName returns the table name associated to the current Record model.
-func (m *Record) TableName() string {
- return m.collection.Name
-}
-
-// Collection returns the Collection model associated to the current Record model.
-func (m *Record) Collection() *Collection {
- return m.collection
-}
-
-// OriginalCopy returns a copy of the current record model populated
-// with its ORIGINAL data state (aka. the initially loaded) and
-// everything else reset to the defaults.
-func (m *Record) OriginalCopy() *Record {
- newRecord := NewRecord(m.collection)
- newRecord.Load(m.originalData)
-
- if m.IsNew() {
- newRecord.MarkAsNew()
- } else {
- newRecord.MarkAsNotNew()
- }
-
- return newRecord
-}
-
-// CleanCopy returns a copy of the current record model populated only
-// with its LATEST data state and everything else reset to the defaults.
-func (m *Record) CleanCopy() *Record {
- newRecord := NewRecord(m.collection)
- newRecord.Load(m.data.GetAll())
- newRecord.Id = m.Id
- newRecord.Created = m.Created
- newRecord.Updated = m.Updated
-
- if m.IsNew() {
- newRecord.MarkAsNew()
- } else {
- newRecord.MarkAsNotNew()
- }
-
- return newRecord
-}
-
-// Expand returns a shallow copy of the current Record model expand data.
-func (m *Record) Expand() map[string]any {
- if m.expand == nil {
- m.expand = store.New[any](nil)
- }
-
- return m.expand.GetAll()
-}
-
-// SetExpand shallow copies the provided data to the current Record model's expand.
-func (m *Record) SetExpand(expand map[string]any) {
- if m.expand == nil {
- m.expand = store.New[any](nil)
- }
-
- m.expand.Reset(expand)
-}
-
-// MergeExpand merges recursively the provided expand data into
-// the current model's expand (if any).
-//
-// Note that if an expanded prop with the same key is a slice (old or new expand)
-// then both old and new records will be merged into a new slice (aka. a :merge: [b,c] => [a,b,c]).
-// Otherwise the "old" expanded record will be replace with the "new" one (aka. a :merge: aNew => aNew).
-func (m *Record) MergeExpand(expand map[string]any) {
- // nothing to merge
- if len(expand) == 0 {
- return
- }
-
- // no old expand
- if m.expand == nil {
- m.expand = store.New(expand)
- return
- }
-
- oldExpand := m.expand.GetAll()
-
- for key, new := range expand {
- old, ok := oldExpand[key]
- if !ok {
- oldExpand[key] = new
- continue
- }
-
- var wasOldSlice bool
- var oldSlice []*Record
- switch v := old.(type) {
- case *Record:
- oldSlice = []*Record{v}
- case []*Record:
- wasOldSlice = true
- oldSlice = v
- default:
- // invalid old expand data -> assign directly the new
- // (no matter whether new is valid or not)
- oldExpand[key] = new
- continue
- }
-
- var wasNewSlice bool
- var newSlice []*Record
- switch v := new.(type) {
- case *Record:
- newSlice = []*Record{v}
- case []*Record:
- wasNewSlice = true
- newSlice = v
- default:
- // invalid new expand data -> skip
- continue
- }
-
- oldIndexed := make(map[string]*Record, len(oldSlice))
- for _, oldRecord := range oldSlice {
- oldIndexed[oldRecord.Id] = oldRecord
- }
-
- for _, newRecord := range newSlice {
- oldRecord := oldIndexed[newRecord.Id]
- if oldRecord != nil {
- // note: there is no need to update oldSlice since oldRecord is a reference
- oldRecord.MergeExpand(newRecord.Expand())
- } else {
- // missing new entry
- oldSlice = append(oldSlice, newRecord)
- }
- }
-
- if wasOldSlice || wasNewSlice || len(oldSlice) == 0 {
- oldExpand[key] = oldSlice
- } else {
- oldExpand[key] = oldSlice[0]
- }
- }
-
- m.expand.Reset(oldExpand)
-}
-
-// SchemaData returns a shallow copy ONLY of the defined record schema fields data.
-func (m *Record) SchemaData() map[string]any {
- result := make(map[string]any, len(m.collection.Schema.Fields()))
-
- data := m.data.GetAll()
-
- for _, field := range m.collection.Schema.Fields() {
- if v, ok := data[field.Name]; ok {
- result[field.Name] = v
- }
- }
-
- return result
-}
-
-// UnknownData returns a shallow copy ONLY of the unknown record fields data,
-// aka. fields that are neither one of the base and special system ones,
-// nor defined by the collection schema.
-func (m *Record) UnknownData() map[string]any {
- if m.data == nil {
- return nil
- }
-
- return m.extractUnknownData(m.data.GetAll())
-}
-
-// IgnoreEmailVisibility toggles the flag to ignore the auth record email visibility check.
-func (m *Record) IgnoreEmailVisibility(state bool) {
- m.ignoreEmailVisibility = state
-}
-
-// WithUnknownData toggles the export/serialization of unknown data fields
-// (false by default).
-func (m *Record) WithUnknownData(state bool) {
- m.exportUnknown = state
-}
-
-// Set sets the provided key-value data pair for the current Record model.
-//
-// If the record collection has field with name matching the provided "key",
-// the value will be further normalized according to the field rules.
-func (m *Record) Set(key string, value any) {
- switch key {
- case schema.FieldNameId:
- m.Id = cast.ToString(value)
- case schema.FieldNameCreated:
- m.Created, _ = types.ParseDateTime(value)
- case schema.FieldNameUpdated:
- m.Updated, _ = types.ParseDateTime(value)
- case schema.FieldNameExpand:
- m.SetExpand(cast.ToStringMap(value))
- default:
- var v = value
-
- if field := m.Collection().Schema.GetFieldByName(key); field != nil {
- v = field.PrepareValue(value)
- } else if m.collection.IsAuth() {
- // normalize auth fields
- switch key {
- case schema.FieldNameEmailVisibility, schema.FieldNameVerified:
- v = cast.ToBool(value)
- case schema.FieldNameLastResetSentAt, schema.FieldNameLastVerificationSentAt, schema.FieldNameLastLoginAlertSentAt:
- v, _ = types.ParseDateTime(value)
- case schema.FieldNameUsername, schema.FieldNameEmail, schema.FieldNameTokenKey, schema.FieldNamePasswordHash:
- v = cast.ToString(value)
- }
- }
-
- if m.data == nil {
- m.data = store.New[any](nil)
- }
-
- m.data.Set(key, v)
- }
-}
-
-// Get returns a normalized single record model data value for "key".
-func (m *Record) Get(key string) any {
- switch key {
- case schema.FieldNameId:
- return m.Id
- case schema.FieldNameCreated:
- return m.Created
- case schema.FieldNameUpdated:
- return m.Updated
- default:
- var v any
- if m.data != nil {
- v = m.data.Get(key)
- }
-
- // normalize the field value in case it is missing or an incorrect type was set
- // to ensure that the DB will always have normalized columns value.
- if field := m.Collection().Schema.GetFieldByName(key); field != nil {
- v = field.PrepareValue(v)
- } else if m.collection.IsAuth() {
- switch key {
- case schema.FieldNameEmailVisibility, schema.FieldNameVerified:
- v = cast.ToBool(v)
- case schema.FieldNameLastResetSentAt, schema.FieldNameLastVerificationSentAt, schema.FieldNameLastLoginAlertSentAt:
- v, _ = types.ParseDateTime(v)
- case schema.FieldNameUsername, schema.FieldNameEmail, schema.FieldNameTokenKey, schema.FieldNamePasswordHash:
- v = cast.ToString(v)
- }
- }
-
- return v
- }
-}
-
-// GetBool returns the data value for "key" as a bool.
-func (m *Record) GetBool(key string) bool {
- return cast.ToBool(m.Get(key))
-}
-
-// GetString returns the data value for "key" as a string.
-func (m *Record) GetString(key string) string {
- return cast.ToString(m.Get(key))
-}
-
-// GetInt returns the data value for "key" as an int.
-func (m *Record) GetInt(key string) int {
- return cast.ToInt(m.Get(key))
-}
-
-// GetFloat returns the data value for "key" as a float64.
-func (m *Record) GetFloat(key string) float64 {
- return cast.ToFloat64(m.Get(key))
-}
-
-// GetTime returns the data value for "key" as a [time.Time] instance.
-func (m *Record) GetTime(key string) time.Time {
- return cast.ToTime(m.Get(key))
-}
-
-// GetDateTime returns the data value for "key" as a DateTime instance.
-func (m *Record) GetDateTime(key string) types.DateTime {
- d, _ := types.ParseDateTime(m.Get(key))
- return d
-}
-
-// GetStringSlice returns the data value for "key" as a slice of unique strings.
-func (m *Record) GetStringSlice(key string) []string {
- return list.ToUniqueStringSlice(m.Get(key))
-}
-
-// ExpandedOne retrieves a single relation Record from the already
-// loaded expand data of the current model.
-//
-// If the requested expand relation is multiple, this method returns
-// only first available Record from the expanded relation.
-//
-// Returns nil if there is no such expand relation loaded.
-func (m *Record) ExpandedOne(relField string) *Record {
- if m.expand == nil {
- return nil
- }
-
- rel := m.expand.Get(relField)
-
- switch v := rel.(type) {
- case *Record:
- return v
- case []*Record:
- if len(v) > 0 {
- return v[0]
- }
- }
-
- return nil
-}
-
-// ExpandedAll retrieves a slice of relation Records from the already
-// loaded expand data of the current model.
-//
-// If the requested expand relation is single, this method normalizes
-// the return result and will wrap the single model as a slice.
-//
-// Returns nil slice if there is no such expand relation loaded.
-func (m *Record) ExpandedAll(relField string) []*Record {
- if m.expand == nil {
- return nil
- }
-
- rel := m.expand.Get(relField)
-
- switch v := rel.(type) {
- case *Record:
- return []*Record{v}
- case []*Record:
- return v
- }
-
- return nil
-}
-
-// Retrieves the "key" json field value and unmarshals it into "result".
-//
-// Example
-//
-// result := struct {
-// FirstName string `json:"first_name"`
-// }{}
-// err := m.UnmarshalJSONField("my_field_name", &result)
-func (m *Record) UnmarshalJSONField(key string, result any) error {
- return json.Unmarshal([]byte(m.GetString(key)), &result)
-}
-
-// BaseFilesPath returns the storage dir path used by the record.
-func (m *Record) BaseFilesPath() string {
- return fmt.Sprintf("%s/%s", m.Collection().BaseFilesPath(), m.Id)
-}
-
-// FindFileFieldByFile returns the first file type field for which
-// any of the record's data contains the provided filename.
-func (m *Record) FindFileFieldByFile(filename string) *schema.SchemaField {
- for _, field := range m.Collection().Schema.Fields() {
- if field.Type == schema.FieldTypeFile {
- names := m.GetStringSlice(field.Name)
- if list.ExistInSlice(filename, names) {
- return field
- }
- }
- }
- return nil
-}
-
-// Load bulk loads the provided data into the current Record model.
-func (m *Record) Load(data map[string]any) {
- if !m.loaded {
- m.loaded = true
- m.originalData = data
- }
-
- for k, v := range data {
- m.Set(k, v)
- }
-}
-
-// ColumnValueMap implements [ColumnValueMapper] interface.
-func (m *Record) ColumnValueMap() map[string]any {
- result := make(map[string]any, len(m.collection.Schema.Fields())+3)
-
- // export schema field values
- for _, field := range m.collection.Schema.Fields() {
- result[field.Name] = m.getNormalizeDataValueForDB(field.Name)
- }
-
- // export auth collection fields
- if m.collection.IsAuth() {
- for _, name := range schema.AuthFieldNames() {
- result[name] = m.getNormalizeDataValueForDB(name)
- }
- }
-
- // export base model fields
- result[schema.FieldNameId] = m.getNormalizeDataValueForDB(schema.FieldNameId)
- result[schema.FieldNameCreated] = m.getNormalizeDataValueForDB(schema.FieldNameCreated)
- result[schema.FieldNameUpdated] = m.getNormalizeDataValueForDB(schema.FieldNameUpdated)
-
- return result
-}
-
-// PublicExport exports only the record fields that are safe to be public.
-//
-// For auth records, to force the export of the email field you need to set
-// `m.IgnoreEmailVisibility(true)`.
-func (m *Record) PublicExport() map[string]any {
- result := make(map[string]any, len(m.collection.Schema.Fields())+5)
-
- // export unknown data fields if allowed
- if m.exportUnknown {
- for k, v := range m.UnknownData() {
- result[k] = v
- }
- }
-
- // export schema field values
- for _, field := range m.collection.Schema.Fields() {
- result[field.Name] = m.Get(field.Name)
- }
-
- // export some of the safe auth collection fields
- if m.collection.IsAuth() {
- result[schema.FieldNameVerified] = m.Verified()
- result[schema.FieldNameUsername] = m.Username()
- result[schema.FieldNameEmailVisibility] = m.EmailVisibility()
- if m.ignoreEmailVisibility || m.EmailVisibility() {
- result[schema.FieldNameEmail] = m.Email()
- }
- }
-
- // export base model fields
- result[schema.FieldNameId] = m.GetId()
- if created := m.GetCreated(); !m.Collection().IsView() || !created.IsZero() {
- result[schema.FieldNameCreated] = created
- }
- if updated := m.GetUpdated(); !m.Collection().IsView() || !updated.IsZero() {
- result[schema.FieldNameUpdated] = updated
- }
-
- // add helper collection reference fields
- result[schema.FieldNameCollectionId] = m.collection.Id
- result[schema.FieldNameCollectionName] = m.collection.Name
-
- // add expand (if set)
- if m.expand != nil && m.expand.Length() > 0 {
- result[schema.FieldNameExpand] = m.expand.GetAll()
- }
-
- return result
-}
-
-// MarshalJSON implements the [json.Marshaler] interface.
-//
-// Only the data exported by `PublicExport()` will be serialized.
-func (m Record) MarshalJSON() ([]byte, error) {
- return json.Marshal(m.PublicExport())
-}
-
-// UnmarshalJSON implements the [json.Unmarshaler] interface.
-func (m *Record) UnmarshalJSON(data []byte) error {
- result := map[string]any{}
-
- if err := json.Unmarshal(data, &result); err != nil {
- return err
- }
-
- m.Load(result)
-
- return nil
-}
-
-// ReplaceModifers returns a new map with applied modifier
-// values based on the current record and the specified data.
-//
-// The resolved modifier keys will be removed.
-//
-// Multiple modifiers will be applied one after another,
-// while reusing the previous base key value result (eg. 1; -5; +2 => -2).
-//
-// Example usage:
-//
-// newData := record.ReplaceModifers(data)
-// // record: {"field": 10}
-// // data: {"field+": 5}
-// // newData: {"field": 15}
-func (m *Record) ReplaceModifers(data map[string]any) map[string]any {
- var clone = shallowCopy(data)
- if len(clone) == 0 {
- return clone
- }
-
- var recordDataCache map[string]any
-
- // export recordData lazily
- recordData := func() map[string]any {
- if recordDataCache == nil {
- recordDataCache = m.SchemaData()
- }
- return recordDataCache
- }
-
- modifiers := schema.FieldValueModifiers()
-
- for _, field := range m.Collection().Schema.Fields() {
- key := field.Name
-
- for _, m := range modifiers {
- if mv, mOk := clone[key+m]; mOk {
- if _, ok := clone[key]; !ok {
- // get base value from the merged data
- clone[key] = recordData()[key]
- }
-
- clone[key] = field.PrepareValueWithModifier(clone[key], m, mv)
- delete(clone, key+m)
- }
- }
-
- if field.Type != schema.FieldTypeFile {
- continue
- }
-
- // -----------------------------------------------------------
- // legacy file field modifiers (kept for backward compatibility)
- // -----------------------------------------------------------
-
- var oldNames []string
- var toDelete []string
- if _, ok := clone[key]; ok {
- oldNames = list.ToUniqueStringSlice(clone[key])
- } else {
- // get oldNames from the model
- oldNames = list.ToUniqueStringSlice(recordData()[key])
- }
-
- // search for individual file name to delete (eg. "file.test.png = null")
- for _, name := range oldNames {
- suffixedKey := key + "." + name
- if v, ok := clone[suffixedKey]; ok && cast.ToString(v) == "" {
- toDelete = append(toDelete, name)
- delete(clone, suffixedKey)
- continue
- }
- }
-
- // search for individual file index to delete (eg. "file.0 = null")
- keyExp, _ := regexp.Compile(`^` + regexp.QuoteMeta(key) + `\.\d+$`)
- for indexedKey := range clone {
- if keyExp.MatchString(indexedKey) && cast.ToString(clone[indexedKey]) == "" {
- index, indexErr := strconv.Atoi(indexedKey[len(key)+1:])
- if indexErr != nil || index < 0 || index >= len(oldNames) {
- continue
- }
- toDelete = append(toDelete, oldNames[index])
- delete(clone, indexedKey)
- }
- }
-
- if toDelete != nil {
- clone[key] = field.PrepareValue(list.SubtractSlice(oldNames, toDelete))
- }
- }
-
- return clone
-}
-
-// getNormalizeDataValueForDB returns the "key" data value formatted for db storage.
-func (m *Record) getNormalizeDataValueForDB(key string) any {
- var val any
-
- // normalize auth fields
- if m.collection.IsAuth() {
- switch key {
- case schema.FieldNameEmailVisibility, schema.FieldNameVerified:
- return m.GetBool(key)
- case schema.FieldNameLastResetSentAt, schema.FieldNameLastVerificationSentAt, schema.FieldNameLastLoginAlertSentAt:
- return m.GetDateTime(key)
- case schema.FieldNameUsername, schema.FieldNameEmail, schema.FieldNameTokenKey, schema.FieldNamePasswordHash:
- return m.GetString(key)
- }
- }
-
- val = m.Get(key)
-
- switch ids := val.(type) {
- case []string:
- // encode string slice
- return append(types.JsonArray[string]{}, ids...)
- case []int:
- // encode int slice
- return append(types.JsonArray[int]{}, ids...)
- case []float64:
- // encode float64 slice
- return append(types.JsonArray[float64]{}, ids...)
- case []any:
- // encode interface slice
- return append(types.JsonArray[any]{}, ids...)
- default:
- // no changes
- return val
- }
-}
-
-// shallowCopy shallow copy data into a new map.
-func shallowCopy(data map[string]any) map[string]any {
- result := make(map[string]any, len(data))
-
- for k, v := range data {
- result[k] = v
- }
-
- return result
-}
-
-func (m *Record) extractUnknownData(data map[string]any) map[string]any {
- knownFields := map[string]struct{}{}
-
- for _, name := range schema.SystemFieldNames() {
- knownFields[name] = struct{}{}
- }
- for _, name := range schema.BaseModelFieldNames() {
- knownFields[name] = struct{}{}
- }
-
- for _, f := range m.collection.Schema.Fields() {
- knownFields[f.Name] = struct{}{}
- }
-
- if m.collection.IsAuth() {
- for _, name := range schema.AuthFieldNames() {
- knownFields[name] = struct{}{}
- }
- }
-
- result := map[string]any{}
-
- for k, v := range data {
- if _, ok := knownFields[k]; !ok {
- result[k] = v
- }
- }
-
- return result
-}
-
-// -------------------------------------------------------------------
-// Auth helpers
-// -------------------------------------------------------------------
-
-var notAuthRecordErr = errors.New("Not an auth collection record.")
-
-// Username returns the "username" auth record data value.
-func (m *Record) Username() string {
- return m.GetString(schema.FieldNameUsername)
-}
-
-// SetUsername sets the "username" auth record data value.
-//
-// This method doesn't check whether the provided value is a valid username.
-//
-// Returns an error if the record is not from an auth collection.
-func (m *Record) SetUsername(username string) error {
- if !m.collection.IsAuth() {
- return notAuthRecordErr
- }
-
- m.Set(schema.FieldNameUsername, username)
-
- return nil
-}
-
-// Email returns the "email" auth record data value.
-func (m *Record) Email() string {
- return m.GetString(schema.FieldNameEmail)
-}
-
-// SetEmail sets the "email" auth record data value.
-//
-// This method doesn't check whether the provided value is a valid email.
-//
-// Returns an error if the record is not from an auth collection.
-func (m *Record) SetEmail(email string) error {
- if !m.collection.IsAuth() {
- return notAuthRecordErr
- }
-
- m.Set(schema.FieldNameEmail, email)
-
- return nil
-}
-
-// Verified returns the "emailVisibility" auth record data value.
-func (m *Record) EmailVisibility() bool {
- return m.GetBool(schema.FieldNameEmailVisibility)
-}
-
-// SetEmailVisibility sets the "emailVisibility" auth record data value.
-//
-// Returns an error if the record is not from an auth collection.
-func (m *Record) SetEmailVisibility(visible bool) error {
- if !m.collection.IsAuth() {
- return notAuthRecordErr
- }
-
- m.Set(schema.FieldNameEmailVisibility, visible)
-
- return nil
-}
-
-// Verified returns the "verified" auth record data value.
-func (m *Record) Verified() bool {
- return m.GetBool(schema.FieldNameVerified)
-}
-
-// SetVerified sets the "verified" auth record data value.
-//
-// Returns an error if the record is not from an auth collection.
-func (m *Record) SetVerified(verified bool) error {
- if !m.collection.IsAuth() {
- return notAuthRecordErr
- }
-
- m.Set(schema.FieldNameVerified, verified)
-
- return nil
-}
-
-// TokenKey returns the "tokenKey" auth record data value.
-func (m *Record) TokenKey() string {
- return m.GetString(schema.FieldNameTokenKey)
-}
-
-// SetTokenKey sets the "tokenKey" auth record data value.
-//
-// Returns an error if the record is not from an auth collection.
-func (m *Record) SetTokenKey(key string) error {
- if !m.collection.IsAuth() {
- return notAuthRecordErr
- }
-
- m.Set(schema.FieldNameTokenKey, key)
-
- return nil
-}
-
-// RefreshTokenKey generates and sets new random auth record "tokenKey".
-//
-// Returns an error if the record is not from an auth collection.
-func (m *Record) RefreshTokenKey() error {
- return m.SetTokenKey(security.RandomString(50))
-}
-
-// LastResetSentAt returns the "lastResentSentAt" auth record data value.
-func (m *Record) LastResetSentAt() types.DateTime {
- return m.GetDateTime(schema.FieldNameLastResetSentAt)
-}
-
-// SetLastResetSentAt sets the "lastResentSentAt" auth record data value.
-//
-// Returns an error if the record is not from an auth collection.
-func (m *Record) SetLastResetSentAt(dateTime types.DateTime) error {
- if !m.collection.IsAuth() {
- return notAuthRecordErr
- }
-
- m.Set(schema.FieldNameLastResetSentAt, dateTime)
-
- return nil
-}
-
-// LastVerificationSentAt returns the "lastVerificationSentAt" auth record data value.
-func (m *Record) LastVerificationSentAt() types.DateTime {
- return m.GetDateTime(schema.FieldNameLastVerificationSentAt)
-}
-
-// SetLastVerificationSentAt sets an "lastVerificationSentAt" auth record data value.
-//
-// Returns an error if the record is not from an auth collection.
-func (m *Record) SetLastVerificationSentAt(dateTime types.DateTime) error {
- if !m.collection.IsAuth() {
- return notAuthRecordErr
- }
-
- m.Set(schema.FieldNameLastVerificationSentAt, dateTime)
-
- return nil
-}
-
-// LastLoginAlertSentAt returns the "lastLoginAlertSentAt" auth record data value.
-func (m *Record) LastLoginAlertSentAt() types.DateTime {
- return m.GetDateTime(schema.FieldNameLastLoginAlertSentAt)
-}
-
-// SetLastLoginAlertSentAt sets an "lastLoginAlertSentAt" auth record data value.
-//
-// Returns an error if the record is not from an auth collection.
-func (m *Record) SetLastLoginAlertSentAt(dateTime types.DateTime) error {
- if !m.collection.IsAuth() {
- return notAuthRecordErr
- }
-
- m.Set(schema.FieldNameLastLoginAlertSentAt, dateTime)
-
- return nil
-}
-
-// PasswordHash returns the "passwordHash" auth record data value.
-func (m *Record) PasswordHash() string {
- return m.GetString(schema.FieldNamePasswordHash)
-}
-
-// ValidatePassword validates a plain password against the auth record password.
-//
-// Returns false if the password is incorrect or record is not from an auth collection.
-func (m *Record) ValidatePassword(password string) bool {
- if !m.collection.IsAuth() {
- return false
- }
-
- err := bcrypt.CompareHashAndPassword([]byte(m.PasswordHash()), []byte(password))
-
- return err == nil
-}
-
-// SetPassword sets cryptographically secure string to the auth record "password" field.
-// This method also resets the "lastResetSentAt" and the "tokenKey" fields.
-//
-// Returns an error if the record is not from an auth collection or
-// an empty password is provided.
-func (m *Record) SetPassword(password string) error {
- if !m.collection.IsAuth() {
- return notAuthRecordErr
- }
-
- if password == "" {
- return errors.New("The provided plain password is empty")
- }
-
- // hash the password
- hashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), 12)
- if err != nil {
- return err
- }
-
- m.Set(schema.FieldNamePasswordHash, string(hashedPassword))
- m.Set(schema.FieldNameLastResetSentAt, types.DateTime{})
-
- // invalidate previously issued tokens
- return m.RefreshTokenKey()
-}
diff --git a/models/record_test.go b/models/record_test.go
deleted file mode 100644
index b43f9203..00000000
--- a/models/record_test.go
+++ /dev/null
@@ -1,2199 +0,0 @@
-package models_test
-
-import (
- "bytes"
- "database/sql"
- "encoding/json"
- "testing"
- "time"
-
- "github.com/pocketbase/dbx"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestNewRecord(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Name: "test_collection",
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "test",
- Type: schema.FieldTypeText,
- },
- ),
- }
-
- m := models.NewRecord(collection)
-
- if m.Collection().Name != collection.Name {
- t.Fatalf("Expected collection with name %q, got %q", collection.Id, m.Collection().Id)
- }
-
- if len(m.SchemaData()) != 0 {
- t.Fatalf("Expected empty schema data, got %v", m.SchemaData())
- }
-}
-
-func TestNewRecordFromNullStringMap(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Name: "test",
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field3",
- Type: schema.FieldTypeBool,
- },
- &schema.SchemaField{
- Name: "field4",
- Type: schema.FieldTypeNumber,
- },
- &schema.SchemaField{
- Name: "field5",
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{
- Values: []string{"test1", "test2"},
- MaxSelect: 1,
- },
- },
- &schema.SchemaField{
- Name: "field6",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{
- MaxSelect: 2,
- MaxSize: 1,
- },
- },
- ),
- }
-
- data := dbx.NullStringMap{
- "id": sql.NullString{
- String: "test_id",
- Valid: true,
- },
- "created": sql.NullString{
- String: "2022-01-01 10:00:00.123Z",
- Valid: true,
- },
- "updated": sql.NullString{
- String: "2022-01-01 10:00:00.456Z",
- Valid: true,
- },
- // auth collection specific fields
- "username": sql.NullString{
- String: "test_username",
- Valid: true,
- },
- "email": sql.NullString{
- String: "test_email",
- Valid: true,
- },
- "emailVisibility": sql.NullString{
- String: "true",
- Valid: true,
- },
- "verified": sql.NullString{
- String: "",
- Valid: false,
- },
- "tokenKey": sql.NullString{
- String: "test_tokenKey",
- Valid: true,
- },
- "passwordHash": sql.NullString{
- String: "test_passwordHash",
- Valid: true,
- },
- "lastResetSentAt": sql.NullString{
- String: "2022-01-02 10:00:00.123Z",
- Valid: true,
- },
- "lastVerificationSentAt": sql.NullString{
- String: "2022-02-03 10:00:00.456Z",
- Valid: true,
- },
- // custom schema fields
- "field1": sql.NullString{
- String: "test",
- Valid: true,
- },
- "field2": sql.NullString{
- String: "test",
- Valid: false, // test invalid db serialization
- },
- "field3": sql.NullString{
- String: "true",
- Valid: true,
- },
- "field4": sql.NullString{
- String: "123.123",
- Valid: true,
- },
- "field5": sql.NullString{
- String: `["test1","test2"]`, // will select only the last elem
- Valid: true,
- },
- "field6": sql.NullString{
- String: "test", // will be converted to slice
- Valid: true,
- },
- "unknown": sql.NullString{
- String: "test",
- Valid: true,
- },
- }
-
- scenarios := []struct {
- collectionType string
- expectedJson string
- }{
- {
- models.CollectionTypeBase,
- `{"collectionId":"","collectionName":"test","created":"2022-01-01 10:00:00.123Z","field1":"test","field2":"","field3":true,"field4":123.123,"field5":"test2","field6":["test"],"id":"test_id","updated":"2022-01-01 10:00:00.456Z"}`,
- },
- {
- models.CollectionTypeAuth,
- `{"collectionId":"","collectionName":"test","created":"2022-01-01 10:00:00.123Z","email":"test_email","emailVisibility":true,"field1":"test","field2":"","field3":true,"field4":123.123,"field5":"test2","field6":["test"],"id":"test_id","updated":"2022-01-01 10:00:00.456Z","username":"test_username","verified":false}`,
- },
- }
-
- for i, s := range scenarios {
- collection.Type = s.collectionType
- m := models.NewRecordFromNullStringMap(collection, data)
- m.IgnoreEmailVisibility(true)
-
- encoded, err := m.MarshalJSON()
- if err != nil {
- t.Errorf("(%d) Unexpected error: %v", i, err)
- continue
- }
-
- if string(encoded) != s.expectedJson {
- t.Errorf("(%d) Expected \n%v \ngot \n%v", i, s.expectedJson, string(encoded))
- }
-
- // additional data checks
- if collection.IsAuth() {
- if v := m.GetString(schema.FieldNamePasswordHash); v != "test_passwordHash" {
- t.Errorf("(%d) Expected %q, got %q", i, "test_passwordHash", v)
- }
- if v := m.GetString(schema.FieldNameTokenKey); v != "test_tokenKey" {
- t.Errorf("(%d) Expected %q, got %q", i, "test_tokenKey", v)
- }
- if v := m.GetString(schema.FieldNameLastResetSentAt); v != "2022-01-02 10:00:00.123Z" {
- t.Errorf("(%d) Expected %q, got %q", i, "2022-01-02 10:00:00.123Z", v)
- }
- if v := m.GetString(schema.FieldNameLastVerificationSentAt); v != "2022-02-03 10:00:00.456Z" {
- t.Errorf("(%d) Expected %q, got %q", i, "2022-01-02 10:00:00.123Z", v)
- }
- }
- }
-}
-
-func TestNewRecordsFromNullStringMaps(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Name: "test",
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Type: schema.FieldTypeNumber,
- },
- &schema.SchemaField{
- Name: "field3",
- Type: schema.FieldTypeUrl,
- },
- ),
- }
-
- data := []dbx.NullStringMap{
- {
- "id": sql.NullString{
- String: "test_id1",
- Valid: true,
- },
- "created": sql.NullString{
- String: "2022-01-01 10:00:00.123Z",
- Valid: true,
- },
- "updated": sql.NullString{
- String: "2022-01-01 10:00:00.456Z",
- Valid: true,
- },
- // partial auth fields
- "email": sql.NullString{
- String: "test_email",
- Valid: true,
- },
- "tokenKey": sql.NullString{
- String: "test_tokenKey",
- Valid: true,
- },
- "emailVisibility": sql.NullString{
- String: "true",
- Valid: true,
- },
- // custom schema fields
- "field1": sql.NullString{
- String: "test",
- Valid: true,
- },
- "field2": sql.NullString{
- String: "123.123",
- Valid: true,
- },
- "field3": sql.NullString{
- String: "test",
- Valid: false, // should force resolving to empty string
- },
- "unknown": sql.NullString{
- String: "test",
- Valid: true,
- },
- },
- {
- "field3": sql.NullString{
- String: "test",
- Valid: true,
- },
- "email": sql.NullString{
- String: "test_email",
- Valid: true,
- },
- "emailVisibility": sql.NullString{
- String: "false",
- Valid: true,
- },
- },
- }
-
- scenarios := []struct {
- collectionType string
- expectedJson string
- }{
- {
- models.CollectionTypeBase,
- `[{"collectionId":"","collectionName":"test","created":"2022-01-01 10:00:00.123Z","field1":"test","field2":123.123,"field3":"","id":"test_id1","updated":"2022-01-01 10:00:00.456Z"},{"collectionId":"","collectionName":"test","created":"","field1":"","field2":0,"field3":"test","id":"","updated":""}]`,
- },
- {
- models.CollectionTypeAuth,
- `[{"collectionId":"","collectionName":"test","created":"2022-01-01 10:00:00.123Z","email":"test_email","emailVisibility":true,"field1":"test","field2":123.123,"field3":"","id":"test_id1","updated":"2022-01-01 10:00:00.456Z","username":"","verified":false},{"collectionId":"","collectionName":"test","created":"","emailVisibility":false,"field1":"","field2":0,"field3":"test","id":"","updated":"","username":"","verified":false}]`,
- },
- }
-
- for i, s := range scenarios {
- collection.Type = s.collectionType
- result := models.NewRecordsFromNullStringMaps(collection, data)
-
- encoded, err := json.Marshal(result)
- if err != nil {
- t.Errorf("(%d) Unexpected error: %v", i, err)
- continue
- }
-
- if string(encoded) != s.expectedJson {
- t.Errorf("(%d) Expected \n%v \ngot \n%v", i, s.expectedJson, string(encoded))
- }
- }
-}
-
-func TestRecordTableName(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{}
- collection.Name = "test"
- collection.RefreshId()
-
- m := models.NewRecord(collection)
-
- if m.TableName() != collection.Name {
- t.Fatalf("Expected table %q, got %q", collection.Name, m.TableName())
- }
-}
-
-func TestRecordCollection(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{}
- collection.RefreshId()
-
- m := models.NewRecord(collection)
-
- if m.Collection().Id != collection.Id {
- t.Fatalf("Expected collection with id %v, got %v", collection.Id, m.Collection().Id)
- }
-}
-
-func TestRecordOriginalCopy(t *testing.T) {
- t.Parallel()
-
- m := models.NewRecord(&models.Collection{})
- m.Load(map[string]any{"f": "123"})
-
- // change the field
- m.Set("f", "456")
-
- if v := m.GetString("f"); v != "456" {
- t.Fatalf("Expected f to be %q, got %q", "456", v)
- }
-
- if v := m.OriginalCopy().GetString("f"); v != "123" {
- t.Fatalf("Expected the initial/original f to be %q, got %q", "123", v)
- }
-
- // loading new data shouldn't affect the original state
- m.Load(map[string]any{"f": "789"})
-
- if v := m.GetString("f"); v != "789" {
- t.Fatalf("Expected f to be %q, got %q", "789", v)
- }
-
- if v := m.OriginalCopy().GetString("f"); v != "123" {
- t.Fatalf("Expected the initial/original f still to be %q, got %q", "123", v)
- }
-}
-
-func TestRecordCleanCopy(t *testing.T) {
- t.Parallel()
-
- m := models.NewRecord(&models.Collection{
- Name: "cname",
- Type: models.CollectionTypeAuth,
- })
- m.Load(map[string]any{
- "id": "id1",
- "created": "2023-01-01 00:00:00.000Z",
- "updated": "2023-01-02 00:00:00.000Z",
- "username": "test",
- "verified": true,
- "email": "test@example.com",
- "unknown": "456",
- })
-
- // make a change to ensure that the latest data is targeted
- m.Set("id", "id2")
-
- // allow the special flags and options to check whether they will be ignored
- m.SetExpand(map[string]any{"test": 123})
- m.IgnoreEmailVisibility(true)
- m.WithUnknownData(true)
-
- copy := m.CleanCopy()
- copyExport, _ := copy.MarshalJSON()
-
- expectedExport := []byte(`{"collectionId":"","collectionName":"cname","created":"2023-01-01 00:00:00.000Z","emailVisibility":false,"id":"id2","updated":"2023-01-02 00:00:00.000Z","username":"test","verified":true}`)
- if !bytes.Equal(copyExport, expectedExport) {
- t.Fatalf("Expected clean export \n%s, \ngot \n%s", expectedExport, copyExport)
- }
-}
-
-func TestRecordSetAndGetExpand(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{}
- m := models.NewRecord(collection)
-
- data := map[string]any{"test": 123}
-
- m.SetExpand(data)
-
- // change the original data to check if it was shallow copied
- data["test"] = 456
-
- expand := m.Expand()
- if v, ok := expand["test"]; !ok || v != 123 {
- t.Fatalf("Expected expand.test to be %v, got %v", 123, v)
- }
-}
-
-func TestRecordMergeExpand(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{}
- m := models.NewRecord(collection)
- m.Id = "m"
-
- // a
- a := models.NewRecord(collection)
- a.Id = "a"
- a1 := models.NewRecord(collection)
- a1.Id = "a1"
- a2 := models.NewRecord(collection)
- a2.Id = "a2"
- a3 := models.NewRecord(collection)
- a3.Id = "a3"
- a31 := models.NewRecord(collection)
- a31.Id = "a31"
- a32 := models.NewRecord(collection)
- a32.Id = "a32"
- a.SetExpand(map[string]any{
- "a1": a1,
- "a23": []*models.Record{a2, a3},
- })
- a3.SetExpand(map[string]any{
- "a31": a31,
- "a32": []*models.Record{a32},
- })
-
- // b
- b := models.NewRecord(collection)
- b.Id = "b"
- b1 := models.NewRecord(collection)
- b1.Id = "b1"
- b.SetExpand(map[string]any{
- "b1": b1,
- })
-
- // c
- c := models.NewRecord(collection)
- c.Id = "c"
-
- // load initial expand
- m.SetExpand(map[string]any{
- "a": a,
- "b": b,
- "c": []*models.Record{c},
- })
-
- // a (new)
- aNew := models.NewRecord(collection)
- aNew.Id = a.Id
- a3New := models.NewRecord(collection)
- a3New.Id = a3.Id
- a32New := models.NewRecord(collection)
- a32New.Id = "a32New"
- a33New := models.NewRecord(collection)
- a33New.Id = "a33New"
- a3New.SetExpand(map[string]any{
- "a32": []*models.Record{a32New},
- "a33New": a33New,
- })
- aNew.SetExpand(map[string]any{
- "a23": []*models.Record{a2, a3New},
- })
-
- // b (new)
- bNew := models.NewRecord(collection)
- bNew.Id = "bNew"
- dNew := models.NewRecord(collection)
- dNew.Id = "dNew"
-
- // merge expands
- m.MergeExpand(map[string]any{
- "a": aNew,
- "b": []*models.Record{bNew},
- "dNew": dNew,
- })
-
- result := m.Expand()
-
- raw, err := json.Marshal(result)
- if err != nil {
- t.Fatal(err)
- }
- rawStr := string(raw)
-
- expected := `{"a":{"collectionId":"","collectionName":"","created":"","expand":{"a1":{"collectionId":"","collectionName":"","created":"","id":"a1","updated":""},"a23":[{"collectionId":"","collectionName":"","created":"","id":"a2","updated":""},{"collectionId":"","collectionName":"","created":"","expand":{"a31":{"collectionId":"","collectionName":"","created":"","id":"a31","updated":""},"a32":[{"collectionId":"","collectionName":"","created":"","id":"a32","updated":""},{"collectionId":"","collectionName":"","created":"","id":"a32New","updated":""}],"a33New":{"collectionId":"","collectionName":"","created":"","id":"a33New","updated":""}},"id":"a3","updated":""}]},"id":"a","updated":""},"b":[{"collectionId":"","collectionName":"","created":"","expand":{"b1":{"collectionId":"","collectionName":"","created":"","id":"b1","updated":""}},"id":"b","updated":""},{"collectionId":"","collectionName":"","created":"","id":"bNew","updated":""}],"c":[{"collectionId":"","collectionName":"","created":"","id":"c","updated":""}],"dNew":{"collectionId":"","collectionName":"","created":"","id":"dNew","updated":""}}`
-
- if expected != rawStr {
- t.Fatalf("Expected \n%v, \ngot \n%v", expected, rawStr)
- }
-}
-
-func TestRecordMergeExpandNilCheck(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{}
-
- scenarios := []struct {
- name string
- expand map[string]any
- expected string
- }{
- {
- "nil expand",
- nil,
- `{"collectionId":"","collectionName":"","created":"","id":"","updated":""}`,
- },
- {
- "empty expand",
- map[string]any{},
- `{"collectionId":"","collectionName":"","created":"","id":"","updated":""}`,
- },
- {
- "non-empty expand",
- map[string]any{"test": models.NewRecord(collection)},
- `{"collectionId":"","collectionName":"","created":"","expand":{"test":{"collectionId":"","collectionName":"","created":"","id":"","updated":""}},"id":"","updated":""}`,
- },
- }
-
- for _, s := range scenarios {
- m := models.NewRecord(collection)
- m.MergeExpand(s.expand)
-
- raw, err := json.Marshal(m)
- if err != nil {
- t.Fatal(err)
- }
- rawStr := string(raw)
-
- if rawStr != s.expected {
- t.Fatalf("[%s] Expected \n%v, \ngot \n%v", s.name, s.expected, rawStr)
- }
- }
-}
-
-func TestRecordExpandedRel(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{}
-
- main := models.NewRecord(collection)
-
- single := models.NewRecord(collection)
- single.Id = "single"
-
- multiple1 := models.NewRecord(collection)
- multiple1.Id = "multiple1"
-
- multiple2 := models.NewRecord(collection)
- multiple2.Id = "multiple2"
-
- main.SetExpand(map[string]any{
- "single": single,
- "multiple": []*models.Record{multiple1, multiple2},
- })
-
- if v := main.ExpandedOne("missing"); v != nil {
- t.Fatalf("Expected nil, got %v", v)
- }
-
- if v := main.ExpandedOne("single"); v == nil || v.Id != "single" {
- t.Fatalf("Expected record with id %q, got %v", "single", v)
- }
-
- if v := main.ExpandedOne("multiple"); v == nil || v.Id != "multiple1" {
- t.Fatalf("Expected record with id %q, got %v", "multiple1", v)
- }
-}
-
-func TestRecordExpandedAll(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{}
-
- main := models.NewRecord(collection)
-
- single := models.NewRecord(collection)
- single.Id = "single"
-
- multiple1 := models.NewRecord(collection)
- multiple1.Id = "multiple1"
-
- multiple2 := models.NewRecord(collection)
- multiple2.Id = "multiple2"
-
- main.SetExpand(map[string]any{
- "single": single,
- "multiple": []*models.Record{multiple1, multiple2},
- })
-
- if v := main.ExpandedAll("missing"); v != nil {
- t.Fatalf("Expected nil, got %v", v)
- }
-
- if v := main.ExpandedAll("single"); len(v) != 1 || v[0].Id != "single" {
- t.Fatalf("Expected [single] slice, got %v", v)
- }
-
- if v := main.ExpandedAll("multiple"); len(v) != 2 || v[0].Id != "multiple1" || v[1].Id != "multiple2" {
- t.Fatalf("Expected [multiple1, multiple2] slice, got %v", v)
- }
-}
-
-func TestRecordSchemaData(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Type: models.CollectionTypeAuth,
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Type: schema.FieldTypeNumber,
- },
- ),
- }
-
- m := models.NewRecord(collection)
- m.Set("email", "test@example.com")
- m.Set("field1", 123)
- m.Set("field2", 456)
- m.Set("unknown", 789)
-
- encoded, err := json.Marshal(m.SchemaData())
- if err != nil {
- t.Fatal(err)
- }
-
- expected := `{"field1":"123","field2":456}`
-
- if v := string(encoded); v != expected {
- t.Fatalf("Expected \n%v \ngot \n%v", v, expected)
- }
-}
-
-func TestRecordUnknownData(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Type: schema.FieldTypeNumber,
- },
- ),
- }
-
- data := map[string]any{
- "id": "test_id",
- "created": "2022-01-01 00:00:00.000",
- "updated": "2022-01-01 00:00:00.000",
- "collectionId": "test_collectionId",
- "collectionName": "test_collectionName",
- "expand": "test_expand",
- "field1": "test_field1",
- "field2": "test_field1",
- "unknown1": "test_unknown1",
- "unknown2": "test_unknown2",
- "passwordHash": "test_passwordHash",
- "username": "test_username",
- "emailVisibility": true,
- "email": "test_email",
- "verified": true,
- "tokenKey": "test_tokenKey",
- "lastResetSentAt": "2022-01-01 00:00:00.000",
- "lastVerificationSentAt": "2022-01-01 00:00:00.000",
- }
-
- scenarios := []struct {
- collectionType string
- expectedKeys []string
- }{
- {
- models.CollectionTypeBase,
- []string{
- "unknown1",
- "unknown2",
- "passwordHash",
- "username",
- "emailVisibility",
- "email",
- "verified",
- "tokenKey",
- "lastResetSentAt",
- "lastVerificationSentAt",
- },
- },
- {
- models.CollectionTypeAuth,
- []string{"unknown1", "unknown2"},
- },
- }
-
- for i, s := range scenarios {
- collection.Type = s.collectionType
- m := models.NewRecord(collection)
- m.Load(data)
-
- result := m.UnknownData()
-
- if len(result) != len(s.expectedKeys) {
- t.Errorf("(%d) Expected data \n%v \ngot \n%v", i, s.expectedKeys, result)
- continue
- }
-
- for _, key := range s.expectedKeys {
- if _, ok := result[key]; !ok {
- t.Errorf("(%d) Missing expected key %q in \n%v", i, key, result)
- }
- }
- }
-}
-
-func TestRecordSetAndGet(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Type: schema.FieldTypeNumber,
- },
- // fields that are not explicitly set to check
- // the default retrieval value (single and multiple)
- &schema.SchemaField{
- Name: "field3",
- Type: schema.FieldTypeBool,
- },
- &schema.SchemaField{
- Name: "field4",
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{MaxSelect: 2},
- },
- &schema.SchemaField{
- Name: "field5",
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)},
- },
- ),
- }
-
- m := models.NewRecord(collection)
- m.Set("id", "test_id")
- m.Set("created", "2022-09-15 00:00:00.123Z")
- m.Set("updated", "invalid")
- m.Set("field1", 123) // should be casted to string
- m.Set("field2", "invlaid") // should be casted to zero-number
- m.Set("unknown", 456) // undefined fields are allowed but not exported by default
- m.Set("expand", map[string]any{"test": 123}) // should store the value in m.expand
-
- if v := m.Get("id"); v != "test_id" {
- t.Fatalf("Expected id %q, got %q", "test_id", v)
- }
-
- if v := m.GetString("created"); v != "2022-09-15 00:00:00.123Z" {
- t.Fatalf("Expected created %q, got %q", "2022-09-15 00:00:00.123Z", v)
- }
-
- if v := m.GetString("updated"); v != "" {
- t.Fatalf("Expected updated to be empty, got %q", v)
- }
-
- if v, ok := m.Get("field1").(string); !ok || v != "123" {
- t.Fatalf("Expected field1 %#v, got %#v", "123", m.Get("field1"))
- }
-
- if v, ok := m.Get("field2").(float64); !ok || v != 0.0 {
- t.Fatalf("Expected field2 %#v, got %#v", 0.0, m.Get("field2"))
- }
-
- if v, ok := m.Get("field3").(bool); !ok || v != false {
- t.Fatalf("Expected field3 %#v, got %#v", false, m.Get("field3"))
- }
-
- if v, ok := m.Get("field4").([]string); !ok || len(v) != 0 {
- t.Fatalf("Expected field4 %#v, got %#v", "[]", m.Get("field4"))
- }
-
- if v, ok := m.Get("field5").(string); !ok || len(v) != 0 {
- t.Fatalf("Expected field5 %#v, got %#v", "", m.Get("field5"))
- }
-
- if v := m.Get("unknown"); v != 456 {
- t.Fatalf("Expected unknown %v, got %v", 456, v)
- }
-
- if m.Expand()["test"] != 123 {
- t.Fatalf("Expected expand to be %v, got %v", map[string]any{"test": 123}, m.Expand())
- }
-}
-
-func TestRecordGetBool(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- value any
- expected bool
- }{
- {nil, false},
- {"", false},
- {0, false},
- {1, true},
- {[]string{"true"}, false},
- {time.Now(), false},
- {"test", false},
- {"false", false},
- {"true", true},
- {false, false},
- {true, true},
- }
-
- collection := &models.Collection{}
-
- for i, s := range scenarios {
- m := models.NewRecord(collection)
- m.Set("test", s.value)
-
- result := m.GetBool("test")
- if result != s.expected {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestRecordGetString(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- value any
- expected string
- }{
- {nil, ""},
- {"", ""},
- {0, "0"},
- {1.4, "1.4"},
- {[]string{"true"}, ""},
- {map[string]int{"test": 1}, ""},
- {[]byte("abc"), "abc"},
- {"test", "test"},
- {false, "false"},
- {true, "true"},
- }
-
- collection := &models.Collection{}
-
- for i, s := range scenarios {
- m := models.NewRecord(collection)
- m.Set("test", s.value)
-
- result := m.GetString("test")
- if result != s.expected {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestRecordGetInt(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- value any
- expected int
- }{
- {nil, 0},
- {"", 0},
- {[]string{"true"}, 0},
- {map[string]int{"test": 1}, 0},
- {time.Now(), 0},
- {"test", 0},
- {123, 123},
- {2.4, 2},
- {"123", 123},
- {"123.5", 0},
- {false, 0},
- {true, 1},
- }
-
- collection := &models.Collection{}
-
- for i, s := range scenarios {
- m := models.NewRecord(collection)
- m.Set("test", s.value)
-
- result := m.GetInt("test")
- if result != s.expected {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestRecordGetFloat(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- value any
- expected float64
- }{
- {nil, 0},
- {"", 0},
- {[]string{"true"}, 0},
- {map[string]int{"test": 1}, 0},
- {time.Now(), 0},
- {"test", 0},
- {123, 123},
- {2.4, 2.4},
- {"123", 123},
- {"123.5", 123.5},
- {false, 0},
- {true, 1},
- }
-
- collection := &models.Collection{}
-
- for i, s := range scenarios {
- m := models.NewRecord(collection)
- m.Set("test", s.value)
-
- result := m.GetFloat("test")
- if result != s.expected {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestRecordGetTime(t *testing.T) {
- t.Parallel()
-
- nowTime := time.Now()
- testTime, _ := time.Parse(types.DefaultDateLayout, "2022-01-01 08:00:40.000Z")
-
- scenarios := []struct {
- value any
- expected time.Time
- }{
- {nil, time.Time{}},
- {"", time.Time{}},
- {false, time.Time{}},
- {true, time.Time{}},
- {"test", time.Time{}},
- {[]string{"true"}, time.Time{}},
- {map[string]int{"test": 1}, time.Time{}},
- {1641024040, testTime},
- {"2022-01-01 08:00:40.000", testTime},
- {nowTime, nowTime},
- }
-
- collection := &models.Collection{}
-
- for i, s := range scenarios {
- m := models.NewRecord(collection)
- m.Set("test", s.value)
-
- result := m.GetTime("test")
- if !result.Equal(s.expected) {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestRecordGetDateTime(t *testing.T) {
- t.Parallel()
-
- nowTime := time.Now()
- testTime, _ := time.Parse(types.DefaultDateLayout, "2022-01-01 08:00:40.000Z")
-
- scenarios := []struct {
- value any
- expected time.Time
- }{
- {nil, time.Time{}},
- {"", time.Time{}},
- {false, time.Time{}},
- {true, time.Time{}},
- {"test", time.Time{}},
- {[]string{"true"}, time.Time{}},
- {map[string]int{"test": 1}, time.Time{}},
- {1641024040, testTime},
- {"2022-01-01 08:00:40.000", testTime},
- {nowTime, nowTime},
- }
-
- collection := &models.Collection{}
-
- for i, s := range scenarios {
- m := models.NewRecord(collection)
- m.Set("test", s.value)
-
- result := m.GetDateTime("test")
- if !result.Time().Equal(s.expected) {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, result)
- }
- }
-}
-
-func TestRecordGetStringSlice(t *testing.T) {
- t.Parallel()
-
- nowTime := time.Now()
-
- scenarios := []struct {
- value any
- expected []string
- }{
- {nil, []string{}},
- {"", []string{}},
- {false, []string{"false"}},
- {true, []string{"true"}},
- {nowTime, []string{}},
- {123, []string{"123"}},
- {"test", []string{"test"}},
- {map[string]int{"test": 1}, []string{}},
- {`["test1", "test2"]`, []string{"test1", "test2"}},
- {[]int{123, 123, 456}, []string{"123", "456"}},
- {[]string{"test", "test", "123"}, []string{"test", "123"}},
- }
-
- collection := &models.Collection{}
-
- for i, s := range scenarios {
- m := models.NewRecord(collection)
- m.Set("test", s.value)
-
- result := m.GetStringSlice("test")
-
- if len(result) != len(s.expected) {
- t.Errorf("(%d) Expected %d elements, got %d: %v", i, len(s.expected), len(result), result)
- continue
- }
-
- for _, v := range result {
- if !list.ExistInSlice(v, s.expected) {
- t.Errorf("(%d) Cannot find %v in %v", i, v, s.expected)
- }
- }
- }
-}
-
-func TestRecordUnmarshalJSONField(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Schema: schema.NewSchema(&schema.SchemaField{
- Name: "field",
- Type: schema.FieldTypeJson,
- }),
- }
- m := models.NewRecord(collection)
-
- var testPointer *string
- var testStr string
- var testInt int
- var testBool bool
- var testSlice []int
- var testMap map[string]any
-
- scenarios := []struct {
- value any
- destination any
- expectError bool
- expectedJson string
- }{
- {nil, testStr, true, `""`},
- {"", testStr, false, `""`},
- {1, testInt, false, `1`},
- {true, testBool, false, `true`},
- {[]int{1, 2, 3}, testSlice, false, `[1,2,3]`},
- {map[string]any{"test": 123}, testMap, false, `{"test":123}`},
- // json encoded values
- {`null`, testPointer, false, `null`},
- {`true`, testBool, false, `true`},
- {`456`, testInt, false, `456`},
- {`"test"`, testStr, false, `"test"`},
- {`[4,5,6]`, testSlice, false, `[4,5,6]`},
- {`{"test":456}`, testMap, false, `{"test":456}`},
- }
-
- for i, s := range scenarios {
- m.Set("field", s.value)
-
- err := m.UnmarshalJSONField("field", &s.destination)
- hasErr := err != nil
-
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr %v, got %v", i, s.expectError, hasErr)
- continue
- }
-
- raw, _ := json.Marshal(s.destination)
- if v := string(raw); v != s.expectedJson {
- t.Errorf("(%d) Expected %q, got %q", i, s.expectedJson, v)
- }
- }
-}
-
-func TestRecordBaseFilesPath(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{}
- collection.RefreshId()
- collection.Name = "test"
-
- m := models.NewRecord(collection)
- m.RefreshId()
-
- expected := collection.BaseFilesPath() + "/" + m.Id
- result := m.BaseFilesPath()
-
- if result != expected {
- t.Fatalf("Expected %q, got %q", expected, result)
- }
-}
-
-func TestRecordFindFileFieldByFile(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{
- MaxSelect: 1,
- MaxSize: 1,
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{
- MaxSelect: 2,
- MaxSize: 1,
- },
- },
- ),
- }
-
- m := models.NewRecord(collection)
- m.Set("field1", "test")
- m.Set("field2", "test.png")
- m.Set("field3", []string{"test1.png", "test2.png"})
-
- scenarios := []struct {
- filename string
- expectField string
- }{
- {"", ""},
- {"test", ""},
- {"test2", ""},
- {"test.png", "field2"},
- {"test2.png", "field3"},
- }
-
- for i, s := range scenarios {
- result := m.FindFileFieldByFile(s.filename)
-
- var fieldName string
- if result != nil {
- fieldName = result.Name
- }
-
- if s.expectField != fieldName {
- t.Errorf("(%d) Expected field %v, got %v", i, s.expectField, result)
- continue
- }
- }
-}
-
-func TestRecordLoadAndData(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Type: schema.FieldTypeNumber,
- },
- ),
- }
-
- data := map[string]any{
- "id": "test_id",
- "created": "2022-01-01 10:00:00.123Z",
- "updated": "2022-01-01 10:00:00.456Z",
- "field1": "test_field",
- "field2": "123", // should be casted to float
- "unknown": "test_unknown",
- // auth collection sepcific casting test
- "passwordHash": "test_passwordHash",
- "emailVisibility": "12345", // should be casted to bool only for auth collections
- "username": 123, // should be casted to string only for auth collections
- "email": "test_email",
- "verified": true,
- "tokenKey": "test_tokenKey",
- "lastResetSentAt": "2022-01-01 11:00:00.000", // should be casted to DateTime only for auth collections
- "lastVerificationSentAt": "2022-01-01 12:00:00.000", // should be casted to DateTime only for auth collections
- }
-
- scenarios := []struct {
- collectionType string
- }{
- {models.CollectionTypeBase},
- {models.CollectionTypeAuth},
- }
-
- for i, s := range scenarios {
- collection.Type = s.collectionType
- m := models.NewRecord(collection)
-
- m.Load(data)
-
- expectations := map[string]any{}
- for k, v := range data {
- expectations[k] = v
- }
-
- expectations["created"], _ = types.ParseDateTime("2022-01-01 10:00:00.123Z")
- expectations["updated"], _ = types.ParseDateTime("2022-01-01 10:00:00.456Z")
- expectations["field2"] = 123.0
-
- // extra casting test
- if collection.IsAuth() {
- lastResetSentAt, _ := types.ParseDateTime(expectations["lastResetSentAt"])
- lastVerificationSentAt, _ := types.ParseDateTime(expectations["lastVerificationSentAt"])
- expectations["emailVisibility"] = false
- expectations["username"] = "123"
- expectations["verified"] = true
- expectations["lastResetSentAt"] = lastResetSentAt
- expectations["lastVerificationSentAt"] = lastVerificationSentAt
- }
-
- for k, v := range expectations {
- if m.Get(k) != v {
- t.Errorf("(%d) Expected field %s to be %v, got %v", i, k, v, m.Get(k))
- }
- }
- }
-}
-
-func TestRecordColumnValueMap(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{
- MaxSelect: 1,
- MaxSize: 1,
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{
- MaxSelect: 2,
- Values: []string{"test1", "test2", "test3"},
- },
- },
- &schema.SchemaField{
- Name: "field4",
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{
- MaxSelect: types.Pointer(2),
- },
- },
- ),
- }
-
- scenarios := []struct {
- collectionType string
- expectedJson string
- }{
- {
- models.CollectionTypeBase,
- `{"created":"2022-01-01 10:00:30.123Z","field1":"test","field2":"test.png","field3":["test1","test2"],"field4":["test11","test12"],"id":"test_id","updated":""}`,
- },
- {
- models.CollectionTypeAuth,
- `{"created":"2022-01-01 10:00:30.123Z","email":"test_email","emailVisibility":true,"field1":"test","field2":"test.png","field3":["test1","test2"],"field4":["test11","test12"],"id":"test_id","lastLoginAlertSentAt":"","lastResetSentAt":"2022-01-02 10:00:30.123Z","lastVerificationSentAt":"","passwordHash":"test_passwordHash","tokenKey":"test_tokenKey","updated":"","username":"test_username","verified":false}`,
- },
- }
-
- created, _ := types.ParseDateTime("2022-01-01 10:00:30.123Z")
- lastResetSentAt, _ := types.ParseDateTime("2022-01-02 10:00:30.123Z")
- data := map[string]any{
- "id": "test_id",
- "created": created,
- "field1": "test",
- "field2": "test.png",
- "field3": []string{"test1", "test2"},
- "field4": []string{"test11", "test12", "test11"}, // strip duplicate,
- "unknown": "test_unknown",
- "passwordHash": "test_passwordHash",
- "username": "test_username",
- "emailVisibility": true,
- "email": "test_email",
- "verified": "invalid", // should be casted
- "tokenKey": "test_tokenKey",
- "lastResetSentAt": lastResetSentAt,
- }
-
- m := models.NewRecord(collection)
-
- for i, s := range scenarios {
- collection.Type = s.collectionType
-
- m.Load(data)
-
- result := m.ColumnValueMap()
-
- encoded, err := json.Marshal(result)
- if err != nil {
- t.Errorf("(%d) Unexpected error %v", i, err)
- continue
- }
-
- if str := string(encoded); str != s.expectedJson {
- t.Errorf("(%d) Expected \n%v \ngot \n%v", i, s.expectedJson, str)
- }
- }
-}
-
-func TestRecordPublicExportAndMarshalJSON(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Name: "c_name",
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{
- MaxSelect: 1,
- MaxSize: 1,
- },
- },
- &schema.SchemaField{
- Name: "field3",
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{
- MaxSelect: 2,
- Values: []string{"test1", "test2", "test3"},
- },
- },
- ),
- }
- collection.Id = "c_id"
-
- scenarios := []struct {
- collectionType string
- exportHidden bool
- exportUnknown bool
- expectedJson string
- }{
- // base
- {
- models.CollectionTypeBase,
- false,
- false,
- `{"collectionId":"c_id","collectionName":"c_name","created":"2022-01-01 10:00:30.123Z","expand":{"test":123},"field1":"test","field2":"test.png","field3":["test1","test2"],"id":"test_id","updated":""}`,
- },
- {
- models.CollectionTypeBase,
- true,
- false,
- `{"collectionId":"c_id","collectionName":"c_name","created":"2022-01-01 10:00:30.123Z","expand":{"test":123},"field1":"test","field2":"test.png","field3":["test1","test2"],"id":"test_id","updated":""}`,
- },
- {
- models.CollectionTypeBase,
- false,
- true,
- `{"collectionId":"c_id","collectionName":"c_name","created":"2022-01-01 10:00:30.123Z","email":"test_email","emailVisibility":"test_invalid","expand":{"test":123},"field1":"test","field2":"test.png","field3":["test1","test2"],"id":"test_id","lastResetSentAt":"2022-01-02 10:00:30.123Z","lastVerificationSentAt":"test_lastVerificationSentAt","passwordHash":"test_passwordHash","tokenKey":"test_tokenKey","unknown":"test_unknown","updated":"","username":123,"verified":true}`,
- },
- {
- models.CollectionTypeBase,
- true,
- true,
- `{"collectionId":"c_id","collectionName":"c_name","created":"2022-01-01 10:00:30.123Z","email":"test_email","emailVisibility":"test_invalid","expand":{"test":123},"field1":"test","field2":"test.png","field3":["test1","test2"],"id":"test_id","lastResetSentAt":"2022-01-02 10:00:30.123Z","lastVerificationSentAt":"test_lastVerificationSentAt","passwordHash":"test_passwordHash","tokenKey":"test_tokenKey","unknown":"test_unknown","updated":"","username":123,"verified":true}`,
- },
-
- // auth
- {
- models.CollectionTypeAuth,
- false,
- false,
- `{"collectionId":"c_id","collectionName":"c_name","created":"2022-01-01 10:00:30.123Z","emailVisibility":false,"expand":{"test":123},"field1":"test","field2":"test.png","field3":["test1","test2"],"id":"test_id","updated":"","username":"123","verified":true}`,
- },
- {
- models.CollectionTypeAuth,
- true,
- false,
- `{"collectionId":"c_id","collectionName":"c_name","created":"2022-01-01 10:00:30.123Z","email":"test_email","emailVisibility":false,"expand":{"test":123},"field1":"test","field2":"test.png","field3":["test1","test2"],"id":"test_id","updated":"","username":"123","verified":true}`,
- },
- {
- models.CollectionTypeAuth,
- false,
- true,
- `{"collectionId":"c_id","collectionName":"c_name","created":"2022-01-01 10:00:30.123Z","emailVisibility":false,"expand":{"test":123},"field1":"test","field2":"test.png","field3":["test1","test2"],"id":"test_id","unknown":"test_unknown","updated":"","username":"123","verified":true}`,
- },
- {
- models.CollectionTypeAuth,
- true,
- true,
- `{"collectionId":"c_id","collectionName":"c_name","created":"2022-01-01 10:00:30.123Z","email":"test_email","emailVisibility":false,"expand":{"test":123},"field1":"test","field2":"test.png","field3":["test1","test2"],"id":"test_id","unknown":"test_unknown","updated":"","username":"123","verified":true}`,
- },
- }
-
- created, _ := types.ParseDateTime("2022-01-01 10:00:30.123Z")
- lastResetSentAt, _ := types.ParseDateTime("2022-01-02 10:00:30.123Z")
-
- data := map[string]any{
- "id": "test_id",
- "created": created,
- "field1": "test",
- "field2": "test.png",
- "field3": []string{"test1", "test2"},
- "expand": map[string]any{"test": 123},
- "collectionId": "m_id", // should be always ignored
- "collectionName": "m_name", // should be always ignored
- "unknown": "test_unknown",
- "passwordHash": "test_passwordHash",
- "username": 123, // for auth collections should be casted to string
- "emailVisibility": "test_invalid", // for auth collections should be casted to bool
- "email": "test_email",
- "verified": true,
- "tokenKey": "test_tokenKey",
- "lastResetSentAt": lastResetSentAt,
- "lastVerificationSentAt": "test_lastVerificationSentAt",
- }
-
- m := models.NewRecord(collection)
-
- for i, s := range scenarios {
- collection.Type = s.collectionType
-
- m.Load(data)
- m.IgnoreEmailVisibility(s.exportHidden)
- m.WithUnknownData(s.exportUnknown)
-
- exportResult, err := json.Marshal(m.PublicExport())
- if err != nil {
- t.Errorf("(%d) Unexpected error %v", i, err)
- continue
- }
- exportResultStr := string(exportResult)
-
- // MarshalJSON and PublicExport should return the same
- marshalResult, err := m.MarshalJSON()
- if err != nil {
- t.Errorf("(%d) Unexpected error %v", i, err)
- continue
- }
- marshalResultStr := string(marshalResult)
-
- if exportResultStr != marshalResultStr {
- t.Errorf("(%d) Expected the PublicExport to be the same as MarshalJSON, but got \n%v \nvs \n%v", i, exportResultStr, marshalResultStr)
- }
-
- if exportResultStr != s.expectedJson {
- t.Errorf("(%d) Expected json \n%v \ngot \n%v", i, s.expectedJson, exportResultStr)
- }
- }
-}
-
-func TestRecordUnmarshalJSON(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "field1",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "field2",
- Type: schema.FieldTypeNumber,
- },
- ),
- }
-
- data := map[string]any{
- "id": "test_id",
- "created": "2022-01-01 10:00:00.123Z",
- "updated": "2022-01-01 10:00:00.456Z",
- "field1": "test_field",
- "field2": "123", // should be casted to float
- "unknown": "test_unknown",
- // auth collection sepcific casting test
- "passwordHash": "test_passwordHash",
- "emailVisibility": "12345", // should be casted to bool only for auth collections
- "username": 123.123, // should be casted to string only for auth collections
- "email": "test_email",
- "verified": true,
- "tokenKey": "test_tokenKey",
- "lastResetSentAt": "2022-01-01 11:00:00.000", // should be casted to DateTime only for auth collections
- "lastVerificationSentAt": "2022-01-01 12:00:00.000", // should be casted to DateTime only for auth collections
- }
- dataRaw, err := json.Marshal(data)
- if err != nil {
- t.Fatalf("Unexpected data marshal error %v", err)
- }
-
- scenarios := []struct {
- collectionType string
- }{
- {models.CollectionTypeBase},
- {models.CollectionTypeAuth},
- }
-
- // with invalid data
- m0 := models.NewRecord(collection)
- if err := m0.UnmarshalJSON([]byte("test")); err == nil {
- t.Fatal("Expected error, got nil")
- }
-
- // with valid data (it should be pretty much the same as load)
- for i, s := range scenarios {
- collection.Type = s.collectionType
- m := models.NewRecord(collection)
-
- err := m.UnmarshalJSON(dataRaw)
- if err != nil {
- t.Errorf("(%d) Unexpected error %v", i, err)
- continue
- }
-
- expectations := map[string]any{}
- for k, v := range data {
- expectations[k] = v
- }
-
- expectations["created"], _ = types.ParseDateTime("2022-01-01 10:00:00.123Z")
- expectations["updated"], _ = types.ParseDateTime("2022-01-01 10:00:00.456Z")
- expectations["field2"] = 123.0
-
- // extra casting test
- if collection.IsAuth() {
- lastResetSentAt, _ := types.ParseDateTime(expectations["lastResetSentAt"])
- lastVerificationSentAt, _ := types.ParseDateTime(expectations["lastVerificationSentAt"])
- expectations["emailVisibility"] = false
- expectations["username"] = "123.123"
- expectations["verified"] = true
- expectations["lastResetSentAt"] = lastResetSentAt
- expectations["lastVerificationSentAt"] = lastVerificationSentAt
- }
-
- for k, v := range expectations {
- if m.Get(k) != v {
- t.Errorf("(%d) Expected field %s to be %v, got %v", i, k, v, m.Get(k))
- }
- }
- }
-}
-
-func TestRecordReplaceModifers(t *testing.T) {
- t.Parallel()
-
- collection := &models.Collection{
- Schema: schema.NewSchema(
- &schema.SchemaField{
- Name: "text",
- Type: schema.FieldTypeText,
- },
- &schema.SchemaField{
- Name: "number",
- Type: schema.FieldTypeNumber,
- },
- &schema.SchemaField{
- Name: "rel_one",
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)},
- },
- &schema.SchemaField{
- Name: "rel_many",
- Type: schema.FieldTypeRelation,
- },
- &schema.SchemaField{
- Name: "select_one",
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{MaxSelect: 1},
- },
- &schema.SchemaField{
- Name: "select_many",
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{MaxSelect: 10},
- },
- &schema.SchemaField{
- Name: "file_one",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 1},
- },
- &schema.SchemaField{
- Name: "file_one_index",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 1},
- },
- &schema.SchemaField{
- Name: "file_one_name",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 1},
- },
- &schema.SchemaField{
- Name: "file_many",
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 10},
- },
- ),
- }
-
- record := models.NewRecord(collection)
-
- record.Load(map[string]any{
- "text": "test",
- "number": 10,
- "rel_one": "a",
- "rel_many": []string{"a", "b"},
- "select_one": "a",
- "select_many": []string{"a", "b", "c"},
- "file_one": "a",
- "file_one_index": "b",
- "file_one_name": "c",
- "file_many": []string{"a", "b", "c", "d", "e", "f"},
- })
-
- result := record.ReplaceModifers(map[string]any{
- "text-": "m-",
- "text+": "m+",
- "number-": 3,
- "number+": 5,
- "rel_one-": "a",
- "rel_one+": "b",
- "rel_many-": []string{"a"},
- "rel_many+": []string{"c", "d", "e"},
- "select_one-": "a",
- "select_one+": "c",
- "select_many-": []string{"b", "c"},
- "select_many+": []string{"d", "e"},
- "file_one+": "skip", // should be ignored
- "file_one-": "a",
- "file_one_index.0": "",
- "file_one_name.c": "",
- "file_many+": []string{"e", "f"}, // should be ignored
- "file_many-": []string{"c", "d"},
- "file_many.f": nil,
- "file_many.0": nil,
- })
-
- raw, err := json.Marshal(result)
- if err != nil {
- t.Fatal(err)
- }
-
- expected := `{"file_many":["b","e"],"file_one":"","file_one_index":"","file_one_name":"","number":12,"rel_many":["b","c","d","e"],"rel_one":"b","select_many":["a","d","e"],"select_one":"c","text":"test"}`
-
- if v := string(raw); v != expected {
- t.Fatalf("Expected \n%s, \ngot \n%s", expected, v)
- }
-}
-
-// -------------------------------------------------------------------
-// Auth helpers:
-// -------------------------------------------------------------------
-
-func TestRecordUsername(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collectionType string
- expectError bool
- }{
- {models.CollectionTypeBase, true},
- {models.CollectionTypeAuth, false},
- }
-
- testValue := "test 1232 !@#%" // formatting isn't checked
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
-
- if s.expectError {
- if err := m.SetUsername(testValue); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- if v := m.Username(); v != "" {
- t.Fatalf("(%d) Expected empty string, got %q", i, v)
- }
- // verify that nothing is stored in the record data slice
- if v := m.Get(schema.FieldNameUsername); v != nil {
- t.Fatalf("(%d) Didn't expect data field %q: %v", i, schema.FieldNameUsername, v)
- }
- } else {
- if err := m.SetUsername(testValue); err != nil {
- t.Fatalf("(%d) Expected nil, got error %v", i, err)
- }
- if v := m.Username(); v != testValue {
- t.Fatalf("(%d) Expected %q, got %q", i, testValue, v)
- }
- // verify that the field is stored in the record data slice
- if v := m.Get(schema.FieldNameUsername); v != testValue {
- t.Fatalf("(%d) Expected data field value %q, got %q", i, testValue, v)
- }
- }
- }
-}
-
-func TestRecordEmail(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collectionType string
- expectError bool
- }{
- {models.CollectionTypeBase, true},
- {models.CollectionTypeAuth, false},
- }
-
- testValue := "test 1232 !@#%" // formatting isn't checked
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
-
- if s.expectError {
- if err := m.SetEmail(testValue); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- if v := m.Email(); v != "" {
- t.Fatalf("(%d) Expected empty string, got %q", i, v)
- }
- // verify that nothing is stored in the record data slice
- if v := m.Get(schema.FieldNameEmail); v != nil {
- t.Fatalf("(%d) Didn't expect data field %q: %v", i, schema.FieldNameEmail, v)
- }
- } else {
- if err := m.SetEmail(testValue); err != nil {
- t.Fatalf("(%d) Expected nil, got error %v", i, err)
- }
- if v := m.Email(); v != testValue {
- t.Fatalf("(%d) Expected %q, got %q", i, testValue, v)
- }
- // verify that the field is stored in the record data slice
- if v := m.Get(schema.FieldNameEmail); v != testValue {
- t.Fatalf("(%d) Expected data field value %q, got %q", i, testValue, v)
- }
- }
- }
-}
-
-func TestRecordEmailVisibility(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collectionType string
- value bool
- expectError bool
- }{
- {models.CollectionTypeBase, true, true},
- {models.CollectionTypeBase, true, true},
- {models.CollectionTypeAuth, false, false},
- {models.CollectionTypeAuth, true, false},
- }
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
-
- if s.expectError {
- if err := m.SetEmailVisibility(s.value); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- if v := m.EmailVisibility(); v != false {
- t.Fatalf("(%d) Expected empty string, got %v", i, v)
- }
- // verify that nothing is stored in the record data slice
- if v := m.Get(schema.FieldNameEmailVisibility); v != nil {
- t.Fatalf("(%d) Didn't expect data field %q: %v", i, schema.FieldNameEmailVisibility, v)
- }
- } else {
- if err := m.SetEmailVisibility(s.value); err != nil {
- t.Fatalf("(%d) Expected nil, got error %v", i, err)
- }
- if v := m.EmailVisibility(); v != s.value {
- t.Fatalf("(%d) Expected %v, got %v", i, s.value, v)
- }
- // verify that the field is stored in the record data slice
- if v := m.Get(schema.FieldNameEmailVisibility); v != s.value {
- t.Fatalf("(%d) Expected data field value %v, got %v", i, s.value, v)
- }
- }
- }
-}
-
-func TestRecordEmailVerified(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collectionType string
- value bool
- expectError bool
- }{
- {models.CollectionTypeBase, true, true},
- {models.CollectionTypeBase, true, true},
- {models.CollectionTypeAuth, false, false},
- {models.CollectionTypeAuth, true, false},
- }
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
-
- if s.expectError {
- if err := m.SetVerified(s.value); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- if v := m.Verified(); v != false {
- t.Fatalf("(%d) Expected empty string, got %v", i, v)
- }
- // verify that nothing is stored in the record data slice
- if v := m.Get(schema.FieldNameVerified); v != nil {
- t.Fatalf("(%d) Didn't expect data field %q: %v", i, schema.FieldNameVerified, v)
- }
- } else {
- if err := m.SetVerified(s.value); err != nil {
- t.Fatalf("(%d) Expected nil, got error %v", i, err)
- }
- if v := m.Verified(); v != s.value {
- t.Fatalf("(%d) Expected %v, got %v", i, s.value, v)
- }
- // verify that the field is stored in the record data slice
- if v := m.Get(schema.FieldNameVerified); v != s.value {
- t.Fatalf("(%d) Expected data field value %v, got %v", i, s.value, v)
- }
- }
- }
-}
-
-func TestRecordTokenKey(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collectionType string
- expectError bool
- }{
- {models.CollectionTypeBase, true},
- {models.CollectionTypeAuth, false},
- }
-
- testValue := "test 1232 !@#%" // formatting isn't checked
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
-
- if s.expectError {
- if err := m.SetTokenKey(testValue); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- if v := m.TokenKey(); v != "" {
- t.Fatalf("(%d) Expected empty string, got %q", i, v)
- }
- // verify that nothing is stored in the record data slice
- if v := m.Get(schema.FieldNameTokenKey); v != nil {
- t.Fatalf("(%d) Didn't expect data field %q: %v", i, schema.FieldNameTokenKey, v)
- }
- } else {
- if err := m.SetTokenKey(testValue); err != nil {
- t.Fatalf("(%d) Expected nil, got error %v", i, err)
- }
- if v := m.TokenKey(); v != testValue {
- t.Fatalf("(%d) Expected %q, got %q", i, testValue, v)
- }
- // verify that the field is stored in the record data slice
- if v := m.Get(schema.FieldNameTokenKey); v != testValue {
- t.Fatalf("(%d) Expected data field value %q, got %q", i, testValue, v)
- }
- }
- }
-}
-
-func TestRecordRefreshTokenKey(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collectionType string
- expectError bool
- }{
- {models.CollectionTypeBase, true},
- {models.CollectionTypeAuth, false},
- }
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
-
- if s.expectError {
- if err := m.RefreshTokenKey(); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- if v := m.TokenKey(); v != "" {
- t.Fatalf("(%d) Expected empty string, got %q", i, v)
- }
- // verify that nothing is stored in the record data slice
- if v := m.Get(schema.FieldNameTokenKey); v != nil {
- t.Fatalf("(%d) Didn't expect data field %q: %v", i, schema.FieldNameTokenKey, v)
- }
- } else {
- if err := m.RefreshTokenKey(); err != nil {
- t.Fatalf("(%d) Expected nil, got error %v", i, err)
- }
- if v := m.TokenKey(); len(v) != 50 {
- t.Fatalf("(%d) Expected 50 chars, got %d", i, len(v))
- }
- // verify that the field is stored in the record data slice
- if v := m.Get(schema.FieldNameTokenKey); v != m.TokenKey() {
- t.Fatalf("(%d) Expected data field value %q, got %q", i, m.TokenKey(), v)
- }
- }
- }
-}
-
-func TestRecordLastPasswordLoginAlertSentAt(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collectionType string
- expectError bool
- }{
- {models.CollectionTypeBase, true},
- {models.CollectionTypeAuth, false},
- }
-
- testValue, err := types.ParseDateTime("2022-01-01 00:00:00.123Z")
- if err != nil {
- t.Fatal(err)
- }
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
-
- if s.expectError {
- if err := m.SetLastLoginAlertSentAt(testValue); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- if v := m.LastLoginAlertSentAt(); !v.IsZero() {
- t.Fatalf("(%d) Expected empty value, got %v", i, v)
- }
- // verify that nothing is stored in the record data slice
- if v := m.Get(schema.FieldNameLastLoginAlertSentAt); v != nil {
- t.Fatalf("(%d) Didn't expect data field %q: %v", i, schema.FieldNameLastLoginAlertSentAt, v)
- }
- } else {
- if err := m.SetLastLoginAlertSentAt(testValue); err != nil {
- t.Fatalf("(%d) Expected nil, got error %v", i, err)
- }
- if v := m.LastLoginAlertSentAt(); v != testValue {
- t.Fatalf("(%d) Expected %v, got %v", i, testValue, v)
- }
- // verify that the field is stored in the record data slice
- if v := m.Get(schema.FieldNameLastLoginAlertSentAt); v != testValue {
- t.Fatalf("(%d) Expected data field value %v, got %v", i, testValue, v)
- }
- }
- }
-}
-
-func TestRecordLastResetSentAt(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collectionType string
- expectError bool
- }{
- {models.CollectionTypeBase, true},
- {models.CollectionTypeAuth, false},
- }
-
- testValue, err := types.ParseDateTime("2022-01-01 00:00:00.123Z")
- if err != nil {
- t.Fatal(err)
- }
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
-
- if s.expectError {
- if err := m.SetLastResetSentAt(testValue); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- if v := m.LastResetSentAt(); !v.IsZero() {
- t.Fatalf("(%d) Expected empty value, got %v", i, v)
- }
- // verify that nothing is stored in the record data slice
- if v := m.Get(schema.FieldNameLastResetSentAt); v != nil {
- t.Fatalf("(%d) Didn't expect data field %q: %v", i, schema.FieldNameLastResetSentAt, v)
- }
- } else {
- if err := m.SetLastResetSentAt(testValue); err != nil {
- t.Fatalf("(%d) Expected nil, got error %v", i, err)
- }
- if v := m.LastResetSentAt(); v != testValue {
- t.Fatalf("(%d) Expected %v, got %v", i, testValue, v)
- }
- // verify that the field is stored in the record data slice
- if v := m.Get(schema.FieldNameLastResetSentAt); v != testValue {
- t.Fatalf("(%d) Expected data field value %v, got %v", i, testValue, v)
- }
- }
- }
-}
-
-func TestRecordLastVerificationSentAt(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collectionType string
- expectError bool
- }{
- {models.CollectionTypeBase, true},
- {models.CollectionTypeAuth, false},
- }
-
- testValue, err := types.ParseDateTime("2022-01-01 00:00:00.123Z")
- if err != nil {
- t.Fatal(err)
- }
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
-
- if s.expectError {
- if err := m.SetLastVerificationSentAt(testValue); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- if v := m.LastVerificationSentAt(); !v.IsZero() {
- t.Fatalf("(%d) Expected empty value, got %v", i, v)
- }
- // verify that nothing is stored in the record data slice
- if v := m.Get(schema.FieldNameLastVerificationSentAt); v != nil {
- t.Fatalf("(%d) Didn't expect data field %q: %v", i, schema.FieldNameLastVerificationSentAt, v)
- }
- } else {
- if err := m.SetLastVerificationSentAt(testValue); err != nil {
- t.Fatalf("(%d) Expected nil, got error %v", i, err)
- }
- if v := m.LastVerificationSentAt(); v != testValue {
- t.Fatalf("(%d) Expected %v, got %v", i, testValue, v)
- }
- // verify that the field is stored in the record data slice
- if v := m.Get(schema.FieldNameLastVerificationSentAt); v != testValue {
- t.Fatalf("(%d) Expected data field value %v, got %v", i, testValue, v)
- }
- }
- }
-}
-
-func TestRecordPasswordHash(t *testing.T) {
- t.Parallel()
-
- m := models.NewRecord(&models.Collection{})
-
- if v := m.PasswordHash(); v != "" {
- t.Errorf("Expected PasswordHash() to be empty, got %v", v)
- }
-
- m.Set(schema.FieldNamePasswordHash, "test")
-
- if v := m.PasswordHash(); v != "test" {
- t.Errorf("Expected PasswordHash() to be 'test', got %v", v)
- }
-}
-
-func TestRecordValidatePassword(t *testing.T) {
- t.Parallel()
-
- // 123456
- hash := "$2a$10$YKU8mPP8sTE3xZrpuM.xQuq27KJ7aIJB2oUeKPsDDqZshbl5g5cDK"
-
- scenarios := []struct {
- collectionType string
- password string
- hash string
- expected bool
- }{
- {models.CollectionTypeBase, "123456", hash, false},
- {models.CollectionTypeAuth, "", "", false},
- {models.CollectionTypeAuth, "", hash, false},
- {models.CollectionTypeAuth, "123456", hash, true},
- {models.CollectionTypeAuth, "654321", hash, false},
- }
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
- m.Set(schema.FieldNamePasswordHash, hash)
-
- if v := m.ValidatePassword(s.password); v != s.expected {
- t.Errorf("(%d) Expected %v, got %v", i, s.expected, v)
- }
- }
-}
-
-func TestRecordSetPassword(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- collectionType string
- password string
- expectError bool
- }{
- {models.CollectionTypeBase, "", true},
- {models.CollectionTypeBase, "123456", true},
- {models.CollectionTypeAuth, "", true},
- {models.CollectionTypeAuth, "123456", false},
- }
-
- for i, s := range scenarios {
- collection := &models.Collection{Type: s.collectionType}
- m := models.NewRecord(collection)
-
- if s.expectError {
- if err := m.SetPassword(s.password); err == nil {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- if v := m.GetString(schema.FieldNamePasswordHash); v != "" {
- t.Errorf("(%d) Expected empty hash, got %q", i, v)
- }
- } else {
- if err := m.SetPassword(s.password); err != nil {
- t.Errorf("(%d) Expected nil, got err", i)
- }
- if v := m.GetString(schema.FieldNamePasswordHash); v == "" {
- t.Errorf("(%d) Expected non empty hash", i)
- }
- if !m.ValidatePassword(s.password) {
- t.Errorf("(%d) Expected true, got false", i)
- }
- }
- }
-}
diff --git a/models/request.go b/models/request.go
deleted file mode 100644
index 0b1784c0..00000000
--- a/models/request.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package models
-
-import "github.com/pocketbase/pocketbase/tools/types"
-
-var _ Model = (*Request)(nil)
-
-// list with the supported values for `Request.Auth`
-const (
- RequestAuthGuest = "guest"
- RequestAuthAdmin = "admin"
- RequestAuthRecord = "authRecord"
-)
-
-// Deprecated: Replaced by the Log model and will be removed in a future version.
-type Request struct {
- BaseModel
-
- Url string `db:"url" json:"url"`
- Method string `db:"method" json:"method"`
- Status int `db:"status" json:"status"`
- Auth string `db:"auth" json:"auth"`
- UserIp string `db:"userIp" json:"userIp"`
- RemoteIp string `db:"remoteIp" json:"remoteIp"`
- Referer string `db:"referer" json:"referer"`
- UserAgent string `db:"userAgent" json:"userAgent"`
- Meta types.JsonMap `db:"meta" json:"meta"`
-}
-
-func (m *Request) TableName() string {
- return "_requests"
-}
diff --git a/models/request_info.go b/models/request_info.go
deleted file mode 100644
index 216dd32f..00000000
--- a/models/request_info.go
+++ /dev/null
@@ -1,41 +0,0 @@
-package models
-
-import (
- "strings"
-
- "github.com/pocketbase/pocketbase/models/schema"
-)
-
-const (
- RequestInfoContextDefault = "default"
- RequestInfoContextRealtime = "realtime"
- RequestInfoContextProtectedFile = "protectedFile"
- RequestInfoContextOAuth2 = "oauth2"
-)
-
-// RequestInfo defines a HTTP request data struct, usually used
-// as part of the `@request.*` filter resolver.
-type RequestInfo struct {
- Context string `json:"context"`
- Query map[string]any `json:"query"`
- Data map[string]any `json:"data"`
- Headers map[string]any `json:"headers"`
- AuthRecord *Record `json:"authRecord"`
- Admin *Admin `json:"admin"`
- Method string `json:"method"`
-}
-
-// HasModifierDataKeys loosely checks if the current struct has any modifier Data keys.
-func (r *RequestInfo) HasModifierDataKeys() bool {
- allModifiers := schema.FieldValueModifiers()
-
- for key := range r.Data {
- for _, m := range allModifiers {
- if strings.HasSuffix(key, m) {
- return true
- }
- }
- }
-
- return false
-}
diff --git a/models/request_info_test.go b/models/request_info_test.go
deleted file mode 100644
index 157ddac1..00000000
--- a/models/request_info_test.go
+++ /dev/null
@@ -1,60 +0,0 @@
-package models_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/models"
-)
-
-func TestRequestInfoHasModifierDataKeys(t *testing.T) {
- t.Parallel()
-
- scenarios := []struct {
- name string
- requestInfo *models.RequestInfo
- expected bool
- }{
- {
- "empty",
- &models.RequestInfo{},
- false,
- },
- {
- "Data with regular fields",
- &models.RequestInfo{
- Query: map[string]any{"data+": "demo"}, // should be ignored
- Data: map[string]any{"a": 123, "b": "test", "c.d": false},
- },
- false,
- },
- {
- "Data with +modifier fields",
- &models.RequestInfo{
- Data: map[string]any{"a+": 123, "b": "test", "c.d": false},
- },
- true,
- },
- {
- "Data with -modifier fields",
- &models.RequestInfo{
- Data: map[string]any{"a": 123, "b-": "test", "c.d": false},
- },
- true,
- },
- {
- "Data with mixed modifier fields",
- &models.RequestInfo{
- Data: map[string]any{"a": 123, "b-": "test", "c.d+": false},
- },
- true,
- },
- }
-
- for _, s := range scenarios {
- result := s.requestInfo.HasModifierDataKeys()
-
- if result != s.expected {
- t.Fatalf("[%s] Expected %v, got %v", s.name, s.expected, result)
- }
- }
-}
diff --git a/models/request_test.go b/models/request_test.go
deleted file mode 100644
index 0f1f99e5..00000000
--- a/models/request_test.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package models_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/models"
-)
-
-func TestRequestTableName(t *testing.T) {
- m := models.Request{}
- if m.TableName() != "_requests" {
- t.Fatalf("Unexpected table name, got %q", m.TableName())
- }
-}
diff --git a/models/schema/schema.go b/models/schema/schema.go
deleted file mode 100644
index d48b0550..00000000
--- a/models/schema/schema.go
+++ /dev/null
@@ -1,240 +0,0 @@
-// Package schema implements custom Schema and SchemaField datatypes
-// for handling the Collection schema definitions.
-package schema
-
-import (
- "database/sql/driver"
- "encoding/json"
- "fmt"
- "strconv"
- "strings"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-// NewSchema creates a new Schema instance with the provided fields.
-func NewSchema(fields ...*SchemaField) Schema {
- s := Schema{}
-
- for _, f := range fields {
- s.AddField(f)
- }
-
- return s
-}
-
-// Schema defines a dynamic db schema as a slice of `SchemaField`s.
-type Schema struct {
- fields []*SchemaField
-}
-
-// Fields returns the registered schema fields.
-func (s *Schema) Fields() []*SchemaField {
- return s.fields
-}
-
-// InitFieldsOptions calls `InitOptions()` for all schema fields.
-func (s *Schema) InitFieldsOptions() error {
- for _, field := range s.Fields() {
- if err := field.InitOptions(); err != nil {
- return err
- }
- }
- return nil
-}
-
-// Clone creates a deep clone of the current schema.
-func (s *Schema) Clone() (*Schema, error) {
- copyRaw, err := json.Marshal(s)
- if err != nil {
- return nil, err
- }
-
- result := &Schema{}
- if err := json.Unmarshal(copyRaw, result); err != nil {
- return nil, err
- }
-
- return result, nil
-}
-
-// AsMap returns a map with all registered schema field.
-// The returned map is indexed with each field name.
-func (s *Schema) AsMap() map[string]*SchemaField {
- result := map[string]*SchemaField{}
-
- for _, field := range s.fields {
- result[field.Name] = field
- }
-
- return result
-}
-
-// GetFieldById returns a single field by its id.
-func (s *Schema) GetFieldById(id string) *SchemaField {
- for _, field := range s.fields {
- if field.Id == id {
- return field
- }
- }
- return nil
-}
-
-// GetFieldByName returns a single field by its name.
-func (s *Schema) GetFieldByName(name string) *SchemaField {
- for _, field := range s.fields {
- if field.Name == name {
- return field
- }
- }
- return nil
-}
-
-// RemoveField removes a single schema field by its id.
-//
-// This method does nothing if field with `id` doesn't exist.
-func (s *Schema) RemoveField(id string) {
- for i, field := range s.fields {
- if field.Id == id {
- s.fields = append(s.fields[:i], s.fields[i+1:]...)
- return
- }
- }
-}
-
-// AddField registers the provided newField to the current schema.
-//
-// If field with `newField.Id` already exist, the existing field is
-// replaced with the new one.
-//
-// Otherwise the new field is appended to the other schema fields.
-func (s *Schema) AddField(newField *SchemaField) {
- if newField.Id == "" {
- // set default id
- newField.Id = strings.ToLower(security.PseudorandomString(8))
- }
-
- for i, field := range s.fields {
- // replace existing
- if field.Id == newField.Id {
- s.fields[i] = newField
- return
- }
- }
-
- // add new field
- s.fields = append(s.fields, newField)
-}
-
-// Validate makes Schema validatable by implementing [validation.Validatable] interface.
-//
-// Internally calls each individual field's validator and additionally
-// checks for invalid renamed fields and field name duplications.
-func (s Schema) Validate() error {
- return validation.Validate(&s.fields, validation.By(func(value any) error {
- fields := s.fields // use directly the schema value to avoid unnecessary interface casting
-
- ids := []string{}
- names := []string{}
- for i, field := range fields {
- if list.ExistInSlice(field.Id, ids) {
- return validation.Errors{
- strconv.Itoa(i): validation.Errors{
- "id": validation.NewError(
- "validation_duplicated_field_id",
- "Duplicated or invalid schema field id",
- ),
- },
- }
- }
-
- // field names are used as db columns and should be case insensitive
- nameLower := strings.ToLower(field.Name)
-
- if list.ExistInSlice(nameLower, names) {
- return validation.Errors{
- strconv.Itoa(i): validation.Errors{
- "name": validation.NewError(
- "validation_duplicated_field_name",
- "Duplicated or invalid schema field name",
- ),
- },
- }
- }
-
- ids = append(ids, field.Id)
- names = append(names, nameLower)
- }
-
- return nil
- }))
-}
-
-// MarshalJSON implements the [json.Marshaler] interface.
-func (s Schema) MarshalJSON() ([]byte, error) {
- if s.fields == nil {
- s.fields = []*SchemaField{}
- }
- return json.Marshal(s.fields)
-}
-
-// UnmarshalJSON implements the [json.Unmarshaler] interface.
-//
-// On success, all schema field options are auto initialized.
-func (s *Schema) UnmarshalJSON(data []byte) error {
- fields := []*SchemaField{}
- if err := json.Unmarshal(data, &fields); err != nil {
- return err
- }
-
- s.fields = []*SchemaField{}
-
- for _, f := range fields {
- s.AddField(f)
- }
-
- for _, field := range s.fields {
- if err := field.InitOptions(); err != nil {
- // ignore the error and remove the invalid field
- s.RemoveField(field.Id)
- }
- }
-
- return nil
-}
-
-// Value implements the [driver.Valuer] interface.
-func (s Schema) Value() (driver.Value, error) {
- if s.fields == nil {
- // initialize an empty slice to ensure that `[]` is returned
- s.fields = []*SchemaField{}
- }
-
- data, err := json.Marshal(s.fields)
-
- return string(data), err
-}
-
-// Scan implements [sql.Scanner] interface to scan the provided value
-// into the current Schema instance.
-func (s *Schema) Scan(value any) error {
- var data []byte
- switch v := value.(type) {
- case nil:
- // no cast needed
- case []byte:
- data = v
- case string:
- data = []byte(v)
- default:
- return fmt.Errorf("Failed to unmarshal Schema value %q.", value)
- }
-
- if len(data) == 0 {
- data = []byte("[]")
- }
-
- return s.UnmarshalJSON(data)
-}
diff --git a/models/schema/schema_field.go b/models/schema/schema_field.go
deleted file mode 100644
index 38a735f1..00000000
--- a/models/schema/schema_field.go
+++ /dev/null
@@ -1,730 +0,0 @@
-package schema
-
-import (
- "encoding/json"
- "errors"
- "regexp"
- "strconv"
- "strings"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/pocketbase/tools/filesystem"
- "github.com/pocketbase/pocketbase/tools/list"
- "github.com/pocketbase/pocketbase/tools/types"
- "github.com/spf13/cast"
-)
-
-var schemaFieldNameRegex = regexp.MustCompile(`^\w+$`)
-
-// field value modifiers
-const (
- FieldValueModifierAdd string = "+"
- FieldValueModifierSubtract string = "-"
-)
-
-// FieldValueModifiers returns a list with all available field modifier tokens.
-func FieldValueModifiers() []string {
- return []string{
- FieldValueModifierAdd,
- FieldValueModifierSubtract,
- }
-}
-
-// commonly used field names
-const (
- FieldNameId string = "id"
- FieldNameCreated string = "created"
- FieldNameUpdated string = "updated"
- FieldNameCollectionId string = "collectionId"
- FieldNameCollectionName string = "collectionName"
- FieldNameExpand string = "expand"
- FieldNameUsername string = "username"
- FieldNameEmail string = "email"
- FieldNameEmailVisibility string = "emailVisibility"
- FieldNameVerified string = "verified"
- FieldNameTokenKey string = "tokenKey"
- FieldNamePasswordHash string = "passwordHash"
- FieldNameLastResetSentAt string = "lastResetSentAt"
- FieldNameLastVerificationSentAt string = "lastVerificationSentAt"
- FieldNameLastLoginAlertSentAt string = "lastLoginAlertSentAt"
-)
-
-// BaseModelFieldNames returns the field names that all models have (id, created, updated).
-func BaseModelFieldNames() []string {
- return []string{
- FieldNameId,
- FieldNameCreated,
- FieldNameUpdated,
- }
-}
-
-// SystemFields returns special internal field names that are usually readonly.
-func SystemFieldNames() []string {
- return []string{
- FieldNameCollectionId,
- FieldNameCollectionName,
- FieldNameExpand,
- }
-}
-
-// AuthFieldNames returns the reserved "auth" collection auth field names.
-func AuthFieldNames() []string {
- return []string{
- FieldNameUsername,
- FieldNameEmail,
- FieldNameEmailVisibility,
- FieldNameVerified,
- FieldNameTokenKey,
- FieldNamePasswordHash,
- FieldNameLastResetSentAt,
- FieldNameLastVerificationSentAt,
- FieldNameLastLoginAlertSentAt,
- }
-}
-
-// All valid field types
-const (
- FieldTypeText string = "text"
- FieldTypeNumber string = "number"
- FieldTypeBool string = "bool"
- FieldTypeEmail string = "email"
- FieldTypeUrl string = "url"
- FieldTypeEditor string = "editor"
- FieldTypeDate string = "date"
- FieldTypeSelect string = "select"
- FieldTypeJson string = "json"
- FieldTypeFile string = "file"
- FieldTypeRelation string = "relation"
-
- // Deprecated: Will be removed in v0.9+
- FieldTypeUser string = "user"
-)
-
-// FieldTypes returns slice with all supported field types.
-func FieldTypes() []string {
- return []string{
- FieldTypeText,
- FieldTypeNumber,
- FieldTypeBool,
- FieldTypeEmail,
- FieldTypeUrl,
- FieldTypeEditor,
- FieldTypeDate,
- FieldTypeSelect,
- FieldTypeJson,
- FieldTypeFile,
- FieldTypeRelation,
- }
-}
-
-// ArraybleFieldTypes returns slice with all array value supported field types.
-func ArraybleFieldTypes() []string {
- return []string{
- FieldTypeSelect,
- FieldTypeFile,
- FieldTypeRelation,
- }
-}
-
-// SchemaField defines a single schema field structure.
-type SchemaField struct {
- System bool `form:"system" json:"system"`
- Id string `form:"id" json:"id"`
- Name string `form:"name" json:"name"`
- Type string `form:"type" json:"type"`
- Required bool `form:"required" json:"required"`
-
- // Presentable indicates whether the field is suitable for
- // visualization purposes (eg. in the Admin UI relation views).
- Presentable bool `form:"presentable" json:"presentable"`
-
- // Deprecated: This field is no-op and will be removed in future versions.
- // Please use the collection.Indexes field to define a unique constraint.
- Unique bool `form:"unique" json:"unique"`
-
- Options any `form:"options" json:"options"`
-}
-
-// ColDefinition returns the field db column type definition as string.
-func (f *SchemaField) ColDefinition() string {
- switch f.Type {
- case FieldTypeNumber:
- return "NUMERIC DEFAULT 0 NOT NULL"
- case FieldTypeBool:
- return "BOOLEAN DEFAULT FALSE NOT NULL"
- case FieldTypeJson:
- return "JSON DEFAULT NULL"
- default:
- if opt, ok := f.Options.(MultiValuer); ok && opt.IsMultiple() {
- return "JSON DEFAULT '[]' NOT NULL"
- }
-
- return "TEXT DEFAULT '' NOT NULL"
- }
-}
-
-// String serializes and returns the current field as string.
-func (f SchemaField) String() string {
- data, _ := f.MarshalJSON()
- return string(data)
-}
-
-// MarshalJSON implements the [json.Marshaler] interface.
-func (f SchemaField) MarshalJSON() ([]byte, error) {
- type alias SchemaField // alias to prevent recursion
-
- f.InitOptions()
-
- return json.Marshal(alias(f))
-}
-
-// UnmarshalJSON implements the [json.Unmarshaler] interface.
-//
-// The schema field options are auto initialized on success.
-func (f *SchemaField) UnmarshalJSON(data []byte) error {
- type alias *SchemaField // alias to prevent recursion
-
- a := alias(f)
-
- if err := json.Unmarshal(data, a); err != nil {
- return err
- }
-
- return f.InitOptions()
-}
-
-// Validate makes `SchemaField` validatable by implementing [validation.Validatable] interface.
-func (f SchemaField) Validate() error {
- // init field options (if not already)
- f.InitOptions()
-
- excludeNames := BaseModelFieldNames()
- // exclude special filter literals
- excludeNames = append(excludeNames, "null", "true", "false", "_rowid_")
- // exclude system literals
- excludeNames = append(excludeNames, SystemFieldNames()...)
-
- return validation.ValidateStruct(&f,
- validation.Field(&f.Options, validation.Required, validation.By(f.checkOptions)),
- validation.Field(&f.Id, validation.Required, validation.Length(5, 255)),
- validation.Field(
- &f.Name,
- validation.Required,
- validation.Length(1, 255),
- validation.Match(schemaFieldNameRegex),
- validation.NotIn(list.ToInterfaceSlice(excludeNames)...),
- validation.By(f.checkForVia),
- ),
- validation.Field(&f.Type, validation.Required, validation.In(list.ToInterfaceSlice(FieldTypes())...)),
- // currently file fields cannot be unique because a proper
- // hash/content check could cause performance issues
- validation.Field(&f.Unique, validation.When(f.Type == FieldTypeFile, validation.Empty)),
- )
-}
-
-func (f *SchemaField) checkOptions(value any) error {
- v, ok := value.(FieldOptions)
- if !ok {
- return validation.NewError("validation_invalid_options", "Failed to initialize field options")
- }
-
- return v.Validate()
-}
-
-// @todo merge with the collections during the refactoring
-func (f *SchemaField) checkForVia(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil
- }
-
- if strings.Contains(strings.ToLower(v), "_via_") {
- return validation.NewError("validation_invalid_name", "The name of the field cannot contain '_via_'.")
- }
-
- return nil
-}
-
-// InitOptions initializes the current field options based on its type.
-//
-// Returns error on unknown field type.
-func (f *SchemaField) InitOptions() error {
- if _, ok := f.Options.(FieldOptions); ok {
- return nil // already inited
- }
-
- serialized, err := json.Marshal(f.Options)
- if err != nil {
- return err
- }
-
- var options any
- switch f.Type {
- case FieldTypeText:
- options = &TextOptions{}
- case FieldTypeNumber:
- options = &NumberOptions{}
- case FieldTypeBool:
- options = &BoolOptions{}
- case FieldTypeEmail:
- options = &EmailOptions{}
- case FieldTypeUrl:
- options = &UrlOptions{}
- case FieldTypeEditor:
- options = &EditorOptions{}
- case FieldTypeDate:
- options = &DateOptions{}
- case FieldTypeSelect:
- options = &SelectOptions{}
- case FieldTypeJson:
- options = &JsonOptions{}
- case FieldTypeFile:
- options = &FileOptions{}
- case FieldTypeRelation:
- options = &RelationOptions{}
-
- // Deprecated: Will be removed in v0.9+
- case FieldTypeUser:
- options = &UserOptions{}
-
- default:
- return errors.New("Missing or unknown field field type.")
- }
-
- if err := json.Unmarshal(serialized, options); err != nil {
- return err
- }
-
- f.Options = options
-
- return nil
-}
-
-// PrepareValue returns normalized and properly formatted field value.
-func (f *SchemaField) PrepareValue(value any) any {
- // init field options (if not already)
- f.InitOptions()
-
- switch f.Type {
- case FieldTypeText, FieldTypeEmail, FieldTypeUrl, FieldTypeEditor:
- return cast.ToString(value)
- case FieldTypeJson:
- val := value
-
- if str, ok := val.(string); ok {
- // in order to support seamlessly both json and multipart/form-data requests,
- // the following normalization rules are applied for plain string values:
- // - "true" is converted to the json `true`
- // - "false" is converted to the json `false`
- // - "null" is converted to the json `null`
- // - "[1,2,3]" is converted to the json `[1,2,3]`
- // - "{\"a\":1,\"b\":2}" is converted to the json `{"a":1,"b":2}`
- // - numeric strings are converted to json number
- // - double quoted strings are left as they are (aka. without normalizations)
- // - any other string (empty string too) is double quoted
- if str == "" {
- val = strconv.Quote(str)
- } else if str == "null" || str == "true" || str == "false" {
- val = str
- } else if ((str[0] >= '0' && str[0] <= '9') ||
- str[0] == '-' ||
- str[0] == '"' ||
- str[0] == '[' ||
- str[0] == '{') &&
- is.JSON.Validate(str) == nil {
- val = str
- } else {
- val = strconv.Quote(str)
- }
- }
-
- val, _ = types.ParseJsonRaw(val)
- return val
- case FieldTypeNumber:
- return cast.ToFloat64(value)
- case FieldTypeBool:
- return cast.ToBool(value)
- case FieldTypeDate:
- val, _ := types.ParseDateTime(value)
- return val
- case FieldTypeSelect:
- val := list.ToUniqueStringSlice(value)
-
- options, _ := f.Options.(*SelectOptions)
- if !options.IsMultiple() {
- if len(val) > 0 {
- return val[len(val)-1] // the last selected
- }
- return ""
- }
-
- return val
- case FieldTypeFile:
- val := list.ToUniqueStringSlice(value)
-
- options, _ := f.Options.(*FileOptions)
- if !options.IsMultiple() {
- if len(val) > 0 {
- return val[len(val)-1] // the last selected
- }
- return ""
- }
-
- return val
- case FieldTypeRelation:
- ids := list.ToUniqueStringSlice(value)
-
- options, _ := f.Options.(*RelationOptions)
- if !options.IsMultiple() {
- if len(ids) > 0 {
- return ids[len(ids)-1] // the last selected
- }
- return ""
- }
-
- return ids
- default:
- return value // unmodified
- }
-}
-
-// PrepareValueWithModifier returns normalized and properly formatted field value
-// by "merging" baseValue with the modifierValue based on the specified modifier (+ or -).
-func (f *SchemaField) PrepareValueWithModifier(baseValue any, modifier string, modifierValue any) any {
- resolvedValue := baseValue
-
- switch f.Type {
- case FieldTypeNumber:
- switch modifier {
- case FieldValueModifierAdd:
- resolvedValue = cast.ToFloat64(baseValue) + cast.ToFloat64(modifierValue)
- case FieldValueModifierSubtract:
- resolvedValue = cast.ToFloat64(baseValue) - cast.ToFloat64(modifierValue)
- }
- case FieldTypeSelect, FieldTypeRelation:
- switch modifier {
- case FieldValueModifierAdd:
- resolvedValue = append(
- list.ToUniqueStringSlice(baseValue),
- list.ToUniqueStringSlice(modifierValue)...,
- )
- case FieldValueModifierSubtract:
- resolvedValue = list.SubtractSlice(
- list.ToUniqueStringSlice(baseValue),
- list.ToUniqueStringSlice(modifierValue),
- )
- }
- case FieldTypeFile:
- // note: file for now supports only the subtract modifier
- if modifier == FieldValueModifierSubtract {
- resolvedValue = list.SubtractSlice(
- list.ToUniqueStringSlice(baseValue),
- list.ToUniqueStringSlice(modifierValue),
- )
- }
- }
-
- return f.PrepareValue(resolvedValue)
-}
-
-// -------------------------------------------------------------------
-
-// MultiValuer defines common interface methods that every multi-valued (eg. with MaxSelect) field option struct has.
-type MultiValuer interface {
- IsMultiple() bool
-}
-
-// FieldOptions defines common interface methods that every field option struct has.
-type FieldOptions interface {
- Validate() error
-}
-
-type TextOptions struct {
- Min *int `form:"min" json:"min"`
- Max *int `form:"max" json:"max"`
- Pattern string `form:"pattern" json:"pattern"`
-}
-
-func (o TextOptions) Validate() error {
- minVal := 0
- if o.Min != nil {
- minVal = *o.Min
- }
-
- return validation.ValidateStruct(&o,
- validation.Field(&o.Min, validation.Min(0)),
- validation.Field(&o.Max, validation.Min(minVal)),
- validation.Field(&o.Pattern, validation.By(o.checkRegex)),
- )
-}
-
-func (o *TextOptions) checkRegex(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil // nothing to check
- }
-
- if _, err := regexp.Compile(v); err != nil {
- return validation.NewError("validation_invalid_regex", err.Error())
- }
-
- return nil
-}
-
-// -------------------------------------------------------------------
-
-type NumberOptions struct {
- Min *float64 `form:"min" json:"min"`
- Max *float64 `form:"max" json:"max"`
- NoDecimal bool `form:"noDecimal" json:"noDecimal"`
-}
-
-func (o NumberOptions) Validate() error {
- var maxRules []validation.Rule
- if o.Min != nil && o.Max != nil {
- maxRules = append(maxRules, validation.Min(*o.Min), validation.By(o.checkNoDecimal))
- }
-
- return validation.ValidateStruct(&o,
- validation.Field(&o.Min, validation.By(o.checkNoDecimal)),
- validation.Field(&o.Max, maxRules...),
- )
-}
-
-func (o *NumberOptions) checkNoDecimal(value any) error {
- v, _ := value.(*float64)
- if v == nil || !o.NoDecimal {
- return nil // nothing to check
- }
-
- if *v != float64(int64(*v)) {
- return validation.NewError("validation_no_decimal_constraint", "Decimal numbers are not allowed.")
- }
-
- return nil
-}
-
-// -------------------------------------------------------------------
-
-type BoolOptions struct {
-}
-
-func (o BoolOptions) Validate() error {
- return nil
-}
-
-// -------------------------------------------------------------------
-
-type EmailOptions struct {
- ExceptDomains []string `form:"exceptDomains" json:"exceptDomains"`
- OnlyDomains []string `form:"onlyDomains" json:"onlyDomains"`
-}
-
-func (o EmailOptions) Validate() error {
- return validation.ValidateStruct(&o,
- validation.Field(
- &o.ExceptDomains,
- validation.When(len(o.OnlyDomains) > 0, validation.Empty).Else(validation.Each(is.Domain)),
- ),
- validation.Field(
- &o.OnlyDomains,
- validation.When(len(o.ExceptDomains) > 0, validation.Empty).Else(validation.Each(is.Domain)),
- ),
- )
-}
-
-// -------------------------------------------------------------------
-
-type UrlOptions struct {
- ExceptDomains []string `form:"exceptDomains" json:"exceptDomains"`
- OnlyDomains []string `form:"onlyDomains" json:"onlyDomains"`
-}
-
-func (o UrlOptions) Validate() error {
- return validation.ValidateStruct(&o,
- validation.Field(
- &o.ExceptDomains,
- validation.When(len(o.OnlyDomains) > 0, validation.Empty).Else(validation.Each(is.Domain)),
- ),
- validation.Field(
- &o.OnlyDomains,
- validation.When(len(o.ExceptDomains) > 0, validation.Empty).Else(validation.Each(is.Domain)),
- ),
- )
-}
-
-// -------------------------------------------------------------------
-
-type EditorOptions struct {
- // ConvertUrls is usually used to instruct the editor whether to
- // apply url conversion (eg. stripping the domain name in case the
- // urls are using the same domain as the one where the editor is loaded).
- //
- // (see also https://www.tiny.cloud/docs/tinymce/6/url-handling/#convert_urls)
- ConvertUrls bool `form:"convertUrls" json:"convertUrls"`
-}
-
-func (o EditorOptions) Validate() error {
- return nil
-}
-
-// -------------------------------------------------------------------
-
-type DateOptions struct {
- Min types.DateTime `form:"min" json:"min"`
- Max types.DateTime `form:"max" json:"max"`
-}
-
-func (o DateOptions) Validate() error {
- return validation.ValidateStruct(&o,
- validation.Field(&o.Max, validation.By(o.checkRange(o.Min, o.Max))),
- )
-}
-
-func (o *DateOptions) checkRange(min types.DateTime, max types.DateTime) validation.RuleFunc {
- return func(value any) error {
- v, _ := value.(types.DateTime)
-
- if v.IsZero() || min.IsZero() || max.IsZero() {
- return nil // nothing to check
- }
-
- return validation.Date(types.DefaultDateLayout).
- Min(min.Time()).
- Max(max.Time()).
- Validate(v.String())
- }
-}
-
-// -------------------------------------------------------------------
-
-type SelectOptions struct {
- MaxSelect int `form:"maxSelect" json:"maxSelect"`
- Values []string `form:"values" json:"values"`
-}
-
-func (o SelectOptions) Validate() error {
- max := len(o.Values)
- if max == 0 {
- max = 1
- }
-
- return validation.ValidateStruct(&o,
- validation.Field(&o.Values, validation.Required),
- validation.Field(
- &o.MaxSelect,
- validation.Required,
- validation.Min(1),
- validation.Max(max),
- ),
- )
-}
-
-// IsMultiple implements MultiValuer interface and checks whether the
-// current field options support multiple values.
-func (o SelectOptions) IsMultiple() bool {
- return o.MaxSelect > 1
-}
-
-// -------------------------------------------------------------------
-
-type JsonOptions struct {
- MaxSize int `form:"maxSize" json:"maxSize"`
-}
-
-func (o JsonOptions) Validate() error {
- return validation.ValidateStruct(&o,
- validation.Field(&o.MaxSize, validation.Required, validation.Min(1)),
- )
-}
-
-// -------------------------------------------------------------------
-
-var _ MultiValuer = (*FileOptions)(nil)
-
-type FileOptions struct {
- MimeTypes []string `form:"mimeTypes" json:"mimeTypes"`
- Thumbs []string `form:"thumbs" json:"thumbs"`
- MaxSelect int `form:"maxSelect" json:"maxSelect"`
- MaxSize int `form:"maxSize" json:"maxSize"`
- Protected bool `form:"protected" json:"protected"`
-}
-
-func (o FileOptions) Validate() error {
- return validation.ValidateStruct(&o,
- validation.Field(&o.MaxSelect, validation.Required, validation.Min(1)),
- validation.Field(&o.MaxSize, validation.Required, validation.Min(1)),
- validation.Field(&o.Thumbs, validation.Each(
- validation.NotIn("0x0", "0x0t", "0x0b", "0x0f"),
- validation.Match(filesystem.ThumbSizeRegex),
- )),
- )
-}
-
-// IsMultiple implements MultiValuer interface and checks whether the
-// current field options support multiple values.
-func (o FileOptions) IsMultiple() bool {
- return o.MaxSelect > 1
-}
-
-// -------------------------------------------------------------------
-
-var _ MultiValuer = (*RelationOptions)(nil)
-
-type RelationOptions struct {
- // CollectionId is the id of the related collection.
- CollectionId string `form:"collectionId" json:"collectionId"`
-
- // CascadeDelete indicates whether the root model should be deleted
- // in case of delete of all linked relations.
- CascadeDelete bool `form:"cascadeDelete" json:"cascadeDelete"`
-
- // MinSelect indicates the min number of allowed relation records
- // that could be linked to the main model.
- //
- // If nil no limits are applied.
- MinSelect *int `form:"minSelect" json:"minSelect"`
-
- // MaxSelect indicates the max number of allowed relation records
- // that could be linked to the main model.
- //
- // If nil no limits are applied.
- MaxSelect *int `form:"maxSelect" json:"maxSelect"`
-
- // Deprecated: This field is no-op and will be removed in future versions.
- // Instead use the individula SchemaField.Presentable option for each field in the relation collection.
- DisplayFields []string `form:"displayFields" json:"displayFields"`
-}
-
-func (o RelationOptions) Validate() error {
- minVal := 0
- if o.MinSelect != nil {
- minVal = *o.MinSelect
- }
-
- return validation.ValidateStruct(&o,
- validation.Field(&o.CollectionId, validation.Required),
- validation.Field(&o.MinSelect, validation.Min(0)),
- validation.Field(&o.MaxSelect, validation.NilOrNotEmpty, validation.Min(minVal)),
- )
-}
-
-// IsMultiple implements MultiValuer interface and checks whether the
-// current field options support multiple values.
-func (o RelationOptions) IsMultiple() bool {
- return o.MaxSelect == nil || *o.MaxSelect > 1
-}
-
-// -------------------------------------------------------------------
-
-// Deprecated: Will be removed in v0.9+
-type UserOptions struct {
- MaxSelect int `form:"maxSelect" json:"maxSelect"`
- CascadeDelete bool `form:"cascadeDelete" json:"cascadeDelete"`
-}
-
-// Deprecated: Will be removed in v0.9+
-func (o UserOptions) Validate() error {
- return nil
-}
diff --git a/models/schema/schema_field_test.go b/models/schema/schema_field_test.go
deleted file mode 100644
index abc7c244..00000000
--- a/models/schema/schema_field_test.go
+++ /dev/null
@@ -1,2278 +0,0 @@
-package schema_test
-
-import (
- "encoding/json"
- "fmt"
- "testing"
- "time"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestBaseModelFieldNames(t *testing.T) {
- result := schema.BaseModelFieldNames()
- expected := 3
-
- if len(result) != expected {
- t.Fatalf("Expected %d field names, got %d (%v)", expected, len(result), result)
- }
-}
-
-func TestSystemFieldNames(t *testing.T) {
- result := schema.SystemFieldNames()
- expected := 3
-
- if len(result) != expected {
- t.Fatalf("Expected %d field names, got %d (%v)", expected, len(result), result)
- }
-}
-
-func TestAuthFieldNames(t *testing.T) {
- result := schema.AuthFieldNames()
- expected := 9
-
- if len(result) != expected {
- t.Fatalf("Expected %d auth field names, got %d (%v)", expected, len(result), result)
- }
-}
-
-func TestFieldTypes(t *testing.T) {
- result := schema.FieldTypes()
- expected := 11
-
- if len(result) != expected {
- t.Fatalf("Expected %d types, got %d (%v)", expected, len(result), result)
- }
-}
-
-func TestArraybleFieldTypes(t *testing.T) {
- result := schema.ArraybleFieldTypes()
- expected := 3
-
- if len(result) != expected {
- t.Fatalf("Expected %d arrayble types, got %d (%v)", expected, len(result), result)
- }
-}
-
-func TestSchemaFieldColDefinition(t *testing.T) {
- scenarios := []struct {
- field schema.SchemaField
- expected string
- }{
- {
- schema.SchemaField{Type: schema.FieldTypeText, Name: "test"},
- "TEXT DEFAULT '' NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeNumber, Name: "test"},
- "NUMERIC DEFAULT 0 NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeBool, Name: "test"},
- "BOOLEAN DEFAULT FALSE NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeEmail, Name: "test"},
- "TEXT DEFAULT '' NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeUrl, Name: "test"},
- "TEXT DEFAULT '' NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeEditor, Name: "test"},
- "TEXT DEFAULT '' NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeDate, Name: "test"},
- "TEXT DEFAULT '' NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeJson, Name: "test"},
- "JSON DEFAULT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeSelect, Name: "test"},
- "TEXT DEFAULT '' NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeSelect, Name: "test_multiple", Options: &schema.SelectOptions{MaxSelect: 2}},
- "JSON DEFAULT '[]' NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeFile, Name: "test"},
- "TEXT DEFAULT '' NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeFile, Name: "test_multiple", Options: &schema.FileOptions{MaxSelect: 2}},
- "JSON DEFAULT '[]' NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeRelation, Name: "test", Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)}},
- "TEXT DEFAULT '' NOT NULL",
- },
- {
- schema.SchemaField{Type: schema.FieldTypeRelation, Name: "test_multiple", Options: &schema.RelationOptions{MaxSelect: nil}},
- "JSON DEFAULT '[]' NOT NULL",
- },
- }
-
- for i, s := range scenarios {
- def := s.field.ColDefinition()
- if def != s.expected {
- t.Errorf("(%d) Expected definition %q, got %q", i, s.expected, def)
- }
- }
-}
-
-func TestSchemaFieldString(t *testing.T) {
- f := schema.SchemaField{
- Id: "abc",
- Name: "test",
- Type: schema.FieldTypeText,
- Required: true,
- Presentable: true,
- System: true,
- Options: &schema.TextOptions{
- Pattern: "test",
- },
- }
-
- result := f.String()
- expected := `{"system":true,"id":"abc","name":"test","type":"text","required":true,"presentable":true,"unique":false,"options":{"min":null,"max":null,"pattern":"test"}}`
-
- if result != expected {
- t.Errorf("Expected \n%v, got \n%v", expected, result)
- }
-}
-
-func TestSchemaFieldMarshalJSON(t *testing.T) {
- scenarios := []struct {
- field schema.SchemaField
- expected string
- }{
- // empty
- {
- schema.SchemaField{},
- `{"system":false,"id":"","name":"","type":"","required":false,"presentable":false,"unique":false,"options":null}`,
- },
- // without defined options
- {
- schema.SchemaField{
- Id: "abc",
- Name: "test",
- Type: schema.FieldTypeText,
- Required: true,
- Presentable: true,
- System: true,
- },
- `{"system":true,"id":"abc","name":"test","type":"text","required":true,"presentable":true,"unique":false,"options":{"min":null,"max":null,"pattern":""}}`,
- },
- // with defined options
- {
- schema.SchemaField{
- Name: "test",
- Type: schema.FieldTypeText,
- Required: true,
- Unique: false,
- System: true,
- Options: &schema.TextOptions{
- Pattern: "test",
- },
- },
- `{"system":true,"id":"","name":"test","type":"text","required":true,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":"test"}}`,
- },
- }
-
- for i, s := range scenarios {
- result, err := s.field.MarshalJSON()
- if err != nil {
- t.Fatalf("(%d) %v", i, err)
- }
-
- if string(result) != s.expected {
- t.Errorf("(%d), Expected \n%v, got \n%v", i, s.expected, string(result))
- }
- }
-}
-
-func TestSchemaFieldUnmarshalJSON(t *testing.T) {
- scenarios := []struct {
- data []byte
- expectError bool
- expectJson string
- }{
- {
- nil,
- true,
- `{"system":false,"id":"","name":"","type":"","required":false,"presentable":false,"unique":false,"options":null}`,
- },
- {
- []byte{},
- true,
- `{"system":false,"id":"","name":"","type":"","required":false,"presentable":false,"unique":false,"options":null}`,
- },
- {
- []byte(`{"system": true}`),
- true,
- `{"system":true,"id":"","name":"","type":"","required":false,"presentable":false,"unique":false,"options":null}`,
- },
- {
- []byte(`{"invalid"`),
- true,
- `{"system":false,"id":"","name":"","type":"","required":false,"presentable":false,"unique":false,"options":null}`,
- },
- {
- []byte(`{"type":"text","system":true}`),
- false,
- `{"system":true,"id":"","name":"","type":"text","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":""}}`,
- },
- {
- []byte(`{"type":"text","options":{"pattern":"test"}}`),
- false,
- `{"system":false,"id":"","name":"","type":"text","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":"test"}}`,
- },
- }
-
- for i, s := range scenarios {
- f := schema.SchemaField{}
- err := f.UnmarshalJSON(s.data)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected hasErr %v, got %v (%v)", i, s.expectError, hasErr, err)
- }
-
- if f.String() != s.expectJson {
- t.Errorf("(%d), Expected json \n%v, got \n%v", i, s.expectJson, f.String())
- }
- }
-}
-
-func TestSchemaFieldValidate(t *testing.T) {
- scenarios := []struct {
- name string
- field schema.SchemaField
- expectedErrors []string
- }{
- {
- "empty field",
- schema.SchemaField{},
- []string{"id", "options", "name", "type"},
- },
- {
- "missing id",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "",
- Name: "test",
- },
- []string{"id"},
- },
- {
- "invalid id length check",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234",
- Name: "test",
- },
- []string{"id"},
- },
- {
- "valid id length check",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "12345",
- Name: "test",
- },
- []string{},
- },
- {
- "invalid name format",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: "test!@#",
- },
- []string{"name"},
- },
- {
- "name with _via_",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: "a_via_b",
- },
- []string{"name"},
- },
- {
- "reserved name (null)",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: "null",
- },
- []string{"name"},
- },
- {
- "reserved name (true)",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: "null",
- },
- []string{"name"},
- },
- {
- "reserved name (false)",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: "false",
- },
- []string{"name"},
- },
- {
- "reserved name (_rowid_)",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: "_rowid_",
- },
- []string{"name"},
- },
- {
- "reserved name (id)",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: schema.FieldNameId,
- },
- []string{"name"},
- },
- {
- "reserved name (created)",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: schema.FieldNameCreated,
- },
- []string{"name"},
- },
- {
- "reserved name (updated)",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: schema.FieldNameUpdated,
- },
- []string{"name"},
- },
- {
- "reserved name (collectionId)",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: schema.FieldNameCollectionId,
- },
- []string{"name"},
- },
- {
- "reserved name (collectionName)",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: schema.FieldNameCollectionName,
- },
- []string{"name"},
- },
- {
- "reserved name (expand)",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: schema.FieldNameExpand,
- },
- []string{"name"},
- },
- {
- "valid name",
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Id: "1234567890",
- Name: "test",
- },
- []string{},
- },
- {
- "unique check for type file",
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Id: "1234567890",
- Name: "test",
- Unique: true,
- Options: &schema.FileOptions{MaxSelect: 1, MaxSize: 1},
- },
- []string{"unique"},
- },
- {
- "trigger options validator (auto init)",
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Id: "1234567890",
- Name: "test",
- },
- []string{"options"},
- },
- {
- "trigger options validator (invalid option field value)",
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Id: "1234567890",
- Name: "test",
- Options: &schema.FileOptions{MaxSelect: 0, MaxSize: 0},
- },
- []string{"options"},
- },
- {
- "trigger options validator (valid option field value)",
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Id: "1234567890",
- Name: "test",
- Options: &schema.FileOptions{MaxSelect: 1, MaxSize: 1},
- },
- []string{},
- },
- }
-
- for _, s := range scenarios {
- result := s.field.Validate()
-
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Errorf("[%s] Failed to parse errors %v", s.name, result)
- continue
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("[%s] Expected error keys %v, got %v", s.name, s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("[%s] Missing expected error key %q in %v", s.name, k, errs)
- }
- }
- }
-}
-
-func TestSchemaFieldInitOptions(t *testing.T) {
- scenarios := []struct {
- field schema.SchemaField
- expectError bool
- expectJson string
- }{
- {
- schema.SchemaField{},
- true,
- `{"system":false,"id":"","name":"","type":"","required":false,"presentable":false,"unique":false,"options":null}`,
- },
- {
- schema.SchemaField{Type: "unknown"},
- true,
- `{"system":false,"id":"","name":"","type":"unknown","required":false,"presentable":false,"unique":false,"options":null}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeText},
- false,
- `{"system":false,"id":"","name":"","type":"text","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":""}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeNumber},
- false,
- `{"system":false,"id":"","name":"","type":"number","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"noDecimal":false}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeBool},
- false,
- `{"system":false,"id":"","name":"","type":"bool","required":false,"presentable":false,"unique":false,"options":{}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeEmail},
- false,
- `{"system":false,"id":"","name":"","type":"email","required":false,"presentable":false,"unique":false,"options":{"exceptDomains":null,"onlyDomains":null}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeUrl},
- false,
- `{"system":false,"id":"","name":"","type":"url","required":false,"presentable":false,"unique":false,"options":{"exceptDomains":null,"onlyDomains":null}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeEditor},
- false,
- `{"system":false,"id":"","name":"","type":"editor","required":false,"presentable":false,"unique":false,"options":{"convertUrls":false}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeDate},
- false,
- `{"system":false,"id":"","name":"","type":"date","required":false,"presentable":false,"unique":false,"options":{"min":"","max":""}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeSelect},
- false,
- `{"system":false,"id":"","name":"","type":"select","required":false,"presentable":false,"unique":false,"options":{"maxSelect":0,"values":null}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeJson},
- false,
- `{"system":false,"id":"","name":"","type":"json","required":false,"presentable":false,"unique":false,"options":{"maxSize":0}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeFile},
- false,
- `{"system":false,"id":"","name":"","type":"file","required":false,"presentable":false,"unique":false,"options":{"mimeTypes":null,"thumbs":null,"maxSelect":0,"maxSize":0,"protected":false}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeRelation},
- false,
- `{"system":false,"id":"","name":"","type":"relation","required":false,"presentable":false,"unique":false,"options":{"collectionId":"","cascadeDelete":false,"minSelect":null,"maxSelect":null,"displayFields":null}}`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeUser},
- false,
- `{"system":false,"id":"","name":"","type":"user","required":false,"presentable":false,"unique":false,"options":{"maxSelect":0,"cascadeDelete":false}}`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeText,
- Options: &schema.TextOptions{Pattern: "test"},
- },
- false,
- `{"system":false,"id":"","name":"","type":"text","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":"test"}}`,
- },
- }
-
- for i, s := range scenarios {
- t.Run(fmt.Sprintf("s%d_%s", i, s.field.Type), func(t *testing.T) {
- err := s.field.InitOptions()
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Fatalf("Expected %v, got %v (%v)", s.expectError, hasErr, err)
- }
-
- if s.field.String() != s.expectJson {
- t.Fatalf(" Expected\n%v\ngot\n%v", s.expectJson, s.field.String())
- }
- })
- }
-}
-
-func TestSchemaFieldPrepareValue(t *testing.T) {
- scenarios := []struct {
- field schema.SchemaField
- value any
- expectJson string
- }{
- {schema.SchemaField{Type: "unknown"}, "test", `"test"`},
- {schema.SchemaField{Type: "unknown"}, 123, "123"},
- {schema.SchemaField{Type: "unknown"}, []int{1, 2, 1}, "[1,2,1]"},
-
- // text
- {schema.SchemaField{Type: schema.FieldTypeText}, nil, `""`},
- {schema.SchemaField{Type: schema.FieldTypeText}, "", `""`},
- {schema.SchemaField{Type: schema.FieldTypeText}, []int{1, 2}, `""`},
- {schema.SchemaField{Type: schema.FieldTypeText}, "test", `"test"`},
- {schema.SchemaField{Type: schema.FieldTypeText}, 123, `"123"`},
-
- // email
- {schema.SchemaField{Type: schema.FieldTypeEmail}, nil, `""`},
- {schema.SchemaField{Type: schema.FieldTypeEmail}, "", `""`},
- {schema.SchemaField{Type: schema.FieldTypeEmail}, []int{1, 2}, `""`},
- {schema.SchemaField{Type: schema.FieldTypeEmail}, "test", `"test"`},
- {schema.SchemaField{Type: schema.FieldTypeEmail}, 123, `"123"`},
-
- // url
- {schema.SchemaField{Type: schema.FieldTypeUrl}, nil, `""`},
- {schema.SchemaField{Type: schema.FieldTypeUrl}, "", `""`},
- {schema.SchemaField{Type: schema.FieldTypeUrl}, []int{1, 2}, `""`},
- {schema.SchemaField{Type: schema.FieldTypeUrl}, "test", `"test"`},
- {schema.SchemaField{Type: schema.FieldTypeUrl}, 123, `"123"`},
-
- // editor
- {schema.SchemaField{Type: schema.FieldTypeEditor}, nil, `""`},
- {schema.SchemaField{Type: schema.FieldTypeEditor}, "", `""`},
- {schema.SchemaField{Type: schema.FieldTypeEditor}, []int{1, 2}, `""`},
- {schema.SchemaField{Type: schema.FieldTypeEditor}, "test", `"test"`},
- {schema.SchemaField{Type: schema.FieldTypeEditor}, 123, `"123"`},
-
- // json
- {schema.SchemaField{Type: schema.FieldTypeJson}, nil, "null"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, "null", "null"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, 123, "123"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, -123, "-123"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, "123", "123"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, "-123", "-123"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, 123.456, "123.456"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, -123.456, "-123.456"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, "123.456", "123.456"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, "-123.456", "-123.456"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, "123.456 abc", `"123.456 abc"`}, // invalid numeric string
- {schema.SchemaField{Type: schema.FieldTypeJson}, "-a123", `"-a123"`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, true, "true"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, "true", "true"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, false, "false"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, "false", "false"},
- {schema.SchemaField{Type: schema.FieldTypeJson}, "", `""`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, `test`, `"test"`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, `"test"`, `"test"`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, `{test":1}`, `"{test\":1}"`}, // invalid object string
- {schema.SchemaField{Type: schema.FieldTypeJson}, `[1 2 3]`, `"[1 2 3]"`}, // invalid array string
- {schema.SchemaField{Type: schema.FieldTypeJson}, map[string]int{}, `{}`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, `{}`, `{}`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, map[string]int{"test": 123}, `{"test":123}`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, `{"test":123}`, `{"test":123}`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, []int{}, `[]`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, `[]`, `[]`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, []int{1, 2, 1}, `[1,2,1]`},
- {schema.SchemaField{Type: schema.FieldTypeJson}, `[1,2,1]`, `[1,2,1]`},
-
- // number
- {schema.SchemaField{Type: schema.FieldTypeNumber}, nil, "0"},
- {schema.SchemaField{Type: schema.FieldTypeNumber}, "", "0"},
- {schema.SchemaField{Type: schema.FieldTypeNumber}, "test", "0"},
- {schema.SchemaField{Type: schema.FieldTypeNumber}, 1, "1"},
- {schema.SchemaField{Type: schema.FieldTypeNumber}, 1.5, "1.5"},
- {schema.SchemaField{Type: schema.FieldTypeNumber}, "1.5", "1.5"},
-
- // bool
- {schema.SchemaField{Type: schema.FieldTypeBool}, nil, "false"},
- {schema.SchemaField{Type: schema.FieldTypeBool}, 1, "true"},
- {schema.SchemaField{Type: schema.FieldTypeBool}, 0, "false"},
- {schema.SchemaField{Type: schema.FieldTypeBool}, "", "false"},
- {schema.SchemaField{Type: schema.FieldTypeBool}, "test", "false"},
- {schema.SchemaField{Type: schema.FieldTypeBool}, "false", "false"},
- {schema.SchemaField{Type: schema.FieldTypeBool}, "true", "true"},
- {schema.SchemaField{Type: schema.FieldTypeBool}, false, "false"},
- {schema.SchemaField{Type: schema.FieldTypeBool}, true, "true"},
-
- // date
- {schema.SchemaField{Type: schema.FieldTypeDate}, nil, `""`},
- {schema.SchemaField{Type: schema.FieldTypeDate}, "", `""`},
- {schema.SchemaField{Type: schema.FieldTypeDate}, "test", `""`},
- {schema.SchemaField{Type: schema.FieldTypeDate}, 1641024040, `"2022-01-01 08:00:40.000Z"`},
- {schema.SchemaField{Type: schema.FieldTypeDate}, "2022-01-01 11:27:10.123", `"2022-01-01 11:27:10.123Z"`},
- {schema.SchemaField{Type: schema.FieldTypeDate}, "2022-01-01 11:27:10.123Z", `"2022-01-01 11:27:10.123Z"`},
- {schema.SchemaField{Type: schema.FieldTypeDate}, types.DateTime{}, `""`},
- {schema.SchemaField{Type: schema.FieldTypeDate}, time.Time{}, `""`},
-
- // select (single)
- {schema.SchemaField{Type: schema.FieldTypeSelect}, nil, `""`},
- {schema.SchemaField{Type: schema.FieldTypeSelect}, "", `""`},
- {schema.SchemaField{Type: schema.FieldTypeSelect}, 123, `"123"`},
- {schema.SchemaField{Type: schema.FieldTypeSelect}, "test", `"test"`},
- {schema.SchemaField{Type: schema.FieldTypeSelect}, []string{"test1", "test2"}, `"test2"`},
- {
- // no values validation/filtering
- schema.SchemaField{
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{
- Values: []string{"test1", "test2"},
- },
- },
- "test",
- `"test"`,
- },
- // select (multiple)
- {
- schema.SchemaField{
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{MaxSelect: 2},
- },
- nil,
- `[]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{MaxSelect: 2},
- },
- "",
- `[]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{MaxSelect: 2},
- },
- []string{},
- `[]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{MaxSelect: 2},
- },
- 123,
- `["123"]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{MaxSelect: 2},
- },
- "test",
- `["test"]`,
- },
- {
- // no values validation
- schema.SchemaField{
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{MaxSelect: 2},
- },
- []string{"test1", "test2", "test3"},
- `["test1","test2","test3"]`,
- },
- {
- // duplicated values
- schema.SchemaField{
- Type: schema.FieldTypeSelect,
- Options: &schema.SelectOptions{MaxSelect: 2},
- },
- []string{"test1", "test2", "test1"},
- `["test1","test2"]`,
- },
-
- // file (single)
- {schema.SchemaField{Type: schema.FieldTypeFile}, nil, `""`},
- {schema.SchemaField{Type: schema.FieldTypeFile}, "", `""`},
- {schema.SchemaField{Type: schema.FieldTypeFile}, 123, `"123"`},
- {schema.SchemaField{Type: schema.FieldTypeFile}, "test", `"test"`},
- {schema.SchemaField{Type: schema.FieldTypeFile}, []string{"test1", "test2"}, `"test2"`},
- // file (multiple)
- {
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 2},
- },
- nil,
- `[]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 2},
- },
- "",
- `[]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 2},
- },
- []string{},
- `[]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 2},
- },
- 123,
- `["123"]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 2},
- },
- "test",
- `["test"]`,
- },
- {
- // no values validation
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 2},
- },
- []string{"test1", "test2", "test3"},
- `["test1","test2","test3"]`,
- },
- {
- // duplicated values
- schema.SchemaField{
- Type: schema.FieldTypeFile,
- Options: &schema.FileOptions{MaxSelect: 2},
- },
- []string{"test1", "test2", "test1"},
- `["test1","test2"]`,
- },
-
- // relation (single)
- {
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)},
- },
- nil,
- `""`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)},
- },
- "",
- `""`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)},
- },
- 123,
- `"123"`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)},
- },
- "abc",
- `"abc"`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)},
- },
- "1ba88b4f-e9da-42f0-9764-9a55c953e724",
- `"1ba88b4f-e9da-42f0-9764-9a55c953e724"`,
- },
- {
- schema.SchemaField{Type: schema.FieldTypeRelation, Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)}},
- []string{"1ba88b4f-e9da-42f0-9764-9a55c953e724", "2ba88b4f-e9da-42f0-9764-9a55c953e724"},
- `"2ba88b4f-e9da-42f0-9764-9a55c953e724"`,
- },
- // relation (multiple)
- {
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(2)},
- },
- nil,
- `[]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(2)},
- },
- "",
- `[]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(2)},
- },
- []string{},
- `[]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(2)},
- },
- 123,
- `["123"]`,
- },
- {
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(2)},
- },
- []string{"", "abc"},
- `["abc"]`,
- },
- {
- // no values validation
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(2)},
- },
- []string{"1ba88b4f-e9da-42f0-9764-9a55c953e724", "2ba88b4f-e9da-42f0-9764-9a55c953e724"},
- `["1ba88b4f-e9da-42f0-9764-9a55c953e724","2ba88b4f-e9da-42f0-9764-9a55c953e724"]`,
- },
- {
- // duplicated values
- schema.SchemaField{
- Type: schema.FieldTypeRelation,
- Options: &schema.RelationOptions{MaxSelect: types.Pointer(2)},
- },
- []string{"1ba88b4f-e9da-42f0-9764-9a55c953e724", "2ba88b4f-e9da-42f0-9764-9a55c953e724", "1ba88b4f-e9da-42f0-9764-9a55c953e724"},
- `["1ba88b4f-e9da-42f0-9764-9a55c953e724","2ba88b4f-e9da-42f0-9764-9a55c953e724"]`,
- },
- }
-
- for i, s := range scenarios {
- result := s.field.PrepareValue(s.value)
-
- encoded, err := json.Marshal(result)
- if err != nil {
- t.Errorf("(%d) %v", i, err)
- continue
- }
-
- if string(encoded) != s.expectJson {
- t.Errorf("(%d), Expected %v, got %v", i, s.expectJson, string(encoded))
- }
- }
-}
-
-func TestSchemaFieldPrepareValueWithModifier(t *testing.T) {
- scenarios := []struct {
- name string
- field schema.SchemaField
- baseValue any
- modifier string
- modifierValue any
- expectJson string
- }{
- // text
- {
- "text with '+' modifier",
- schema.SchemaField{Type: schema.FieldTypeText},
- "base",
- "+",
- "new",
- `"base"`,
- },
- {
- "text with '-' modifier",
- schema.SchemaField{Type: schema.FieldTypeText},
- "base",
- "-",
- "new",
- `"base"`,
- },
- {
- "text with unknown modifier",
- schema.SchemaField{Type: schema.FieldTypeText},
- "base",
- "?",
- "new",
- `"base"`,
- },
- {
- "text cast check",
- schema.SchemaField{Type: schema.FieldTypeText},
- 123,
- "?",
- "new",
- `"123"`,
- },
-
- // number
- {
- "number with '+' modifier",
- schema.SchemaField{Type: schema.FieldTypeNumber},
- 1,
- "+",
- 4,
- `5`,
- },
- {
- "number with '-' modifier",
- schema.SchemaField{Type: schema.FieldTypeNumber},
- 1,
- "-",
- 4,
- `-3`,
- },
- {
- "number with unknown modifier",
- schema.SchemaField{Type: schema.FieldTypeNumber},
- "1",
- "?",
- 4,
- `1`,
- },
- {
- "number cast check",
- schema.SchemaField{Type: schema.FieldTypeNumber},
- "test",
- "+",
- "4",
- `4`,
- },
-
- // bool
- {
- "bool with '+' modifier",
- schema.SchemaField{Type: schema.FieldTypeBool},
- true,
- "+",
- false,
- `true`,
- },
- {
- "bool with '-' modifier",
- schema.SchemaField{Type: schema.FieldTypeBool},
- true,
- "-",
- false,
- `true`,
- },
- {
- "bool with unknown modifier",
- schema.SchemaField{Type: schema.FieldTypeBool},
- true,
- "?",
- false,
- `true`,
- },
- {
- "bool cast check",
- schema.SchemaField{Type: schema.FieldTypeBool},
- "true",
- "?",
- false,
- `true`,
- },
-
- // email
- {
- "email with '+' modifier",
- schema.SchemaField{Type: schema.FieldTypeEmail},
- "base",
- "+",
- "new",
- `"base"`,
- },
- {
- "email with '-' modifier",
- schema.SchemaField{Type: schema.FieldTypeEmail},
- "base",
- "-",
- "new",
- `"base"`,
- },
- {
- "email with unknown modifier",
- schema.SchemaField{Type: schema.FieldTypeEmail},
- "base",
- "?",
- "new",
- `"base"`,
- },
- {
- "email cast check",
- schema.SchemaField{Type: schema.FieldTypeEmail},
- 123,
- "?",
- "new",
- `"123"`,
- },
-
- // url
- {
- "url with '+' modifier",
- schema.SchemaField{Type: schema.FieldTypeUrl},
- "base",
- "+",
- "new",
- `"base"`,
- },
- {
- "url with '-' modifier",
- schema.SchemaField{Type: schema.FieldTypeUrl},
- "base",
- "-",
- "new",
- `"base"`,
- },
- {
- "url with unknown modifier",
- schema.SchemaField{Type: schema.FieldTypeUrl},
- "base",
- "?",
- "new",
- `"base"`,
- },
- {
- "url cast check",
- schema.SchemaField{Type: schema.FieldTypeUrl},
- 123,
- "-",
- "new",
- `"123"`,
- },
-
- // editor
- {
- "editor with '+' modifier",
- schema.SchemaField{Type: schema.FieldTypeEditor},
- "base",
- "+",
- "new",
- `"base"`,
- },
- {
- "editor with '-' modifier",
- schema.SchemaField{Type: schema.FieldTypeEditor},
- "base",
- "-",
- "new",
- `"base"`,
- },
- {
- "editor with unknown modifier",
- schema.SchemaField{Type: schema.FieldTypeEditor},
- "base",
- "?",
- "new",
- `"base"`,
- },
- {
- "editor cast check",
- schema.SchemaField{Type: schema.FieldTypeEditor},
- 123,
- "-",
- "new",
- `"123"`,
- },
-
- // date
- {
- "date with '+' modifier",
- schema.SchemaField{Type: schema.FieldTypeDate},
- "2023-01-01 00:00:00.123",
- "+",
- "2023-02-01 00:00:00.456",
- `"2023-01-01 00:00:00.123Z"`,
- },
- {
- "date with '-' modifier",
- schema.SchemaField{Type: schema.FieldTypeDate},
- "2023-01-01 00:00:00.123Z",
- "-",
- "2023-02-01 00:00:00.456Z",
- `"2023-01-01 00:00:00.123Z"`,
- },
- {
- "date with unknown modifier",
- schema.SchemaField{Type: schema.FieldTypeDate},
- "2023-01-01 00:00:00.123",
- "?",
- "2023-01-01 00:00:00.456",
- `"2023-01-01 00:00:00.123Z"`,
- },
- {
- "date cast check",
- schema.SchemaField{Type: schema.FieldTypeDate},
- 1672524000, // 2022-12-31 22:00:00.000Z
- "+",
- 100,
- `"2022-12-31 22:00:00.000Z"`,
- },
-
- // json
- {
- "json with '+' modifier",
- schema.SchemaField{Type: schema.FieldTypeJson},
- 10,
- "+",
- 5,
- `10`,
- },
- {
- "json with '+' modifier (slice)",
- schema.SchemaField{Type: schema.FieldTypeJson},
- []string{"a", "b"},
- "+",
- "c",
- `["a","b"]`,
- },
- {
- "json with '-' modifier",
- schema.SchemaField{Type: schema.FieldTypeJson},
- 10,
- "-",
- 5,
- `10`,
- },
- {
- "json with '-' modifier (slice)",
- schema.SchemaField{Type: schema.FieldTypeJson},
- `["a","b"]`,
- "-",
- "c",
- `["a","b"]`,
- },
- {
- "json with unknown modifier",
- schema.SchemaField{Type: schema.FieldTypeJson},
- `"base"`,
- "?",
- `"new"`,
- `"base"`,
- },
-
- // single select
- {
- "single select with '+' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 1}},
- "",
- "+",
- "b",
- `"b"`,
- },
- {
- "single select with '+' modifier (nonempty base)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 1}},
- "a",
- "+",
- "b",
- `"b"`,
- },
- {
- "single select with '-' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 1}},
- "",
- "-",
- "a",
- `""`,
- },
- {
- "single select with '-' modifier (nonempty base and empty modifier value)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 1}},
- "a",
- "-",
- "",
- `"a"`,
- },
- {
- "single select with '-' modifier (nonempty base and different value)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 1}},
- "a",
- "-",
- "b",
- `"a"`,
- },
- {
- "single select with '-' modifier (nonempty base and matching value)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 1}},
- "a",
- "-",
- "a",
- `""`,
- },
- {
- "single select with '-' modifier (nonempty base and matching value in a slice)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 1}},
- "a",
- "-",
- []string{"b", "a", "c", "123"},
- `""`,
- },
- {
- "single select with unknown modifier (nonempty)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 1}},
- "",
- "?",
- "a",
- `""`,
- },
-
- // multi select
- {
- "multi select with '+' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 10}},
- nil,
- "+",
- "b",
- `["b"]`,
- },
- {
- "multi select with '+' modifier (nonempty base)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 10}},
- []string{"a"},
- "+",
- []string{"b", "c"},
- `["a","b","c"]`,
- },
- {
- "multi select with '+' modifier (nonempty base; already existing value)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 10}},
- []string{"a", "b"},
- "+",
- "b",
- `["a","b"]`,
- },
- {
- "multi select with '-' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 10}},
- nil,
- "-",
- []string{"a"},
- `[]`,
- },
- {
- "multi select with '-' modifier (nonempty base and empty modifier value)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 10}},
- "a",
- "-",
- "",
- `["a"]`,
- },
- {
- "multi select with '-' modifier (nonempty base and different value)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 10}},
- "a",
- "-",
- "b",
- `["a"]`,
- },
- {
- "multi select with '-' modifier (nonempty base and matching value)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 10}},
- []string{"a", "b", "c", "d"},
- "-",
- "c",
- `["a","b","d"]`,
- },
- {
- "multi select with '-' modifier (nonempty base and matching value in a slice)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 10}},
- []string{"a", "b", "c", "d"},
- "-",
- []string{"b", "a", "123"},
- `["c","d"]`,
- },
- {
- "multi select with unknown modifier (nonempty)",
- schema.SchemaField{Type: schema.FieldTypeSelect, Options: &schema.SelectOptions{MaxSelect: 10}},
- []string{"a", "b"},
- "?",
- "a",
- `["a","b"]`,
- },
-
- // single relation
- {
- "single relation with '+' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeRelation, Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)}},
- "",
- "+",
- "b",
- `"b"`,
- },
- {
- "single relation with '+' modifier (nonempty base)",
- schema.SchemaField{Type: schema.FieldTypeRelation, Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)}},
- "a",
- "+",
- "b",
- `"b"`,
- },
- {
- "single relation with '-' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeRelation, Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)}},
- "",
- "-",
- "a",
- `""`,
- },
- {
- "single relation with '-' modifier (nonempty base and empty modifier value)",
- schema.SchemaField{Type: schema.FieldTypeRelation, Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)}},
- "a",
- "-",
- "",
- `"a"`,
- },
- {
- "single relation with '-' modifier (nonempty base and different value)",
- schema.SchemaField{Type: schema.FieldTypeRelation, Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)}},
- "a",
- "-",
- "b",
- `"a"`,
- },
- {
- "single relation with '-' modifier (nonempty base and matching value)",
- schema.SchemaField{Type: schema.FieldTypeRelation, Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)}},
- "a",
- "-",
- "a",
- `""`,
- },
- {
- "single relation with '-' modifier (nonempty base and matching value in a slice)",
- schema.SchemaField{Type: schema.FieldTypeRelation, Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)}},
- "a",
- "-",
- []string{"b", "a", "c", "123"},
- `""`,
- },
- {
- "single relation with unknown modifier (nonempty)",
- schema.SchemaField{Type: schema.FieldTypeRelation, Options: &schema.RelationOptions{MaxSelect: types.Pointer(1)}},
- "",
- "?",
- "a",
- `""`,
- },
-
- // multi relation
- {
- "multi relation with '+' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeRelation},
- nil,
- "+",
- "b",
- `["b"]`,
- },
- {
- "multi relation with '+' modifier (nonempty base)",
- schema.SchemaField{Type: schema.FieldTypeRelation},
- []string{"a"},
- "+",
- []string{"b", "c"},
- `["a","b","c"]`,
- },
- {
- "multi relation with '+' modifier (nonempty base; already existing value)",
- schema.SchemaField{Type: schema.FieldTypeRelation},
- []string{"a", "b"},
- "+",
- "b",
- `["a","b"]`,
- },
- {
- "multi relation with '-' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeRelation},
- nil,
- "-",
- []string{"a"},
- `[]`,
- },
- {
- "multi relation with '-' modifier (nonempty base and empty modifier value)",
- schema.SchemaField{Type: schema.FieldTypeRelation},
- "a",
- "-",
- "",
- `["a"]`,
- },
- {
- "multi relation with '-' modifier (nonempty base and different value)",
- schema.SchemaField{Type: schema.FieldTypeRelation},
- "a",
- "-",
- "b",
- `["a"]`,
- },
- {
- "multi relation with '-' modifier (nonempty base and matching value)",
- schema.SchemaField{Type: schema.FieldTypeRelation},
- []string{"a", "b", "c", "d"},
- "-",
- "c",
- `["a","b","d"]`,
- },
- {
- "multi relation with '-' modifier (nonempty base and matching value in a slice)",
- schema.SchemaField{Type: schema.FieldTypeRelation},
- []string{"a", "b", "c", "d"},
- "-",
- []string{"b", "a", "123"},
- `["c","d"]`,
- },
- {
- "multi relation with unknown modifier (nonempty)",
- schema.SchemaField{Type: schema.FieldTypeRelation},
- []string{"a", "b"},
- "?",
- "a",
- `["a","b"]`,
- },
-
- // single file
- {
- "single file with '+' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 1}},
- "",
- "+",
- "b",
- `""`,
- },
- {
- "single file with '+' modifier (nonempty base)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 1}},
- "a",
- "+",
- "b",
- `"a"`,
- },
- {
- "single file with '-' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 1}},
- "",
- "-",
- "a",
- `""`,
- },
- {
- "single file with '-' modifier (nonempty base and empty modifier value)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 1}},
- "a",
- "-",
- "",
- `"a"`,
- },
- {
- "single file with '-' modifier (nonempty base and different value)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 1}},
- "a",
- "-",
- "b",
- `"a"`,
- },
- {
- "single file with '-' modifier (nonempty base and matching value)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 1}},
- "a",
- "-",
- "a",
- `""`,
- },
- {
- "single file with '-' modifier (nonempty base and matching value in a slice)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 1}},
- "a",
- "-",
- []string{"b", "a", "c", "123"},
- `""`,
- },
- {
- "single file with unknown modifier (nonempty)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 1}},
- "",
- "?",
- "a",
- `""`,
- },
-
- // multi file
- {
- "multi file with '+' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 10}},
- nil,
- "+",
- "b",
- `[]`,
- },
- {
- "multi file with '+' modifier (nonempty base)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 10}},
- []string{"a"},
- "+",
- []string{"b", "c"},
- `["a"]`,
- },
- {
- "multi file with '+' modifier (nonempty base; already existing value)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 10}},
- []string{"a", "b"},
- "+",
- "b",
- `["a","b"]`,
- },
- {
- "multi file with '-' modifier (empty base)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 10}},
- nil,
- "-",
- []string{"a"},
- `[]`,
- },
- {
- "multi file with '-' modifier (nonempty base and empty modifier value)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 10}},
- "a",
- "-",
- "",
- `["a"]`,
- },
- {
- "multi file with '-' modifier (nonempty base and different value)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 10}},
- "a",
- "-",
- "b",
- `["a"]`,
- },
- {
- "multi file with '-' modifier (nonempty base and matching value)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 10}},
- []string{"a", "b", "c", "d"},
- "-",
- "c",
- `["a","b","d"]`,
- },
- {
- "multi file with '-' modifier (nonempty base and matching value in a slice)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 10}},
- []string{"a", "b", "c", "d"},
- "-",
- []string{"b", "a", "123"},
- `["c","d"]`,
- },
- {
- "multi file with unknown modifier (nonempty)",
- schema.SchemaField{Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 10}},
- []string{"a", "b"},
- "?",
- "a",
- `["a","b"]`,
- },
- }
-
- for _, s := range scenarios {
- result := s.field.PrepareValueWithModifier(s.baseValue, s.modifier, s.modifierValue)
-
- encoded, err := json.Marshal(result)
- if err != nil {
- t.Fatalf("[%s] %v", s.name, err)
- }
-
- if string(encoded) != s.expectJson {
- t.Fatalf("[%s], Expected %v, got %v", s.name, s.expectJson, string(encoded))
- }
- }
-}
-
-// -------------------------------------------------------------------
-
-type fieldOptionsScenario struct {
- name string
- options schema.FieldOptions
- expectedErrors []string
-}
-
-func checkFieldOptionsScenarios(t *testing.T, scenarios []fieldOptionsScenario) {
- for i, s := range scenarios {
- result := s.options.Validate()
-
- prefix := fmt.Sprintf("%d", i)
- if s.name != "" {
- prefix = s.name
- }
-
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Errorf("[%s] Failed to parse errors %v", prefix, result)
- continue
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("[%s] Expected error keys %v, got %v", prefix, s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("[%s] Missing expected error key %q in %v", prefix, k, errs)
- }
- }
- }
-}
-
-func TestTextOptionsValidate(t *testing.T) {
- minus := -1
- number0 := 0
- number1 := 10
- number2 := 20
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.TextOptions{},
- []string{},
- },
- {
- "min - failure",
- schema.TextOptions{
- Min: &minus,
- },
- []string{"min"},
- },
- {
- "min - success",
- schema.TextOptions{
- Min: &number0,
- },
- []string{},
- },
- {
- "max - failure without min",
- schema.TextOptions{
- Max: &minus,
- },
- []string{"max"},
- },
- {
- "max - failure with min",
- schema.TextOptions{
- Min: &number2,
- Max: &number1,
- },
- []string{"max"},
- },
- {
- "max - success",
- schema.TextOptions{
- Min: &number1,
- Max: &number2,
- },
- []string{},
- },
- {
- "pattern - failure",
- schema.TextOptions{Pattern: "(test"},
- []string{"pattern"},
- },
- {
- "pattern - success",
- schema.TextOptions{Pattern: `^\#?\w+$`},
- []string{},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestNumberOptionsValidate(t *testing.T) {
- int1 := 10.0
- int2 := 20.0
-
- decimal1 := 10.5
- decimal2 := 20.5
-
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.NumberOptions{},
- []string{},
- },
- {
- "max - without min",
- schema.NumberOptions{
- Max: &int1,
- },
- []string{},
- },
- {
- "max - failure with min",
- schema.NumberOptions{
- Min: &int2,
- Max: &int1,
- },
- []string{"max"},
- },
- {
- "max - success with min",
- schema.NumberOptions{
- Min: &int1,
- Max: &int2,
- },
- []string{},
- },
- {
- "NoDecimal range failure",
- schema.NumberOptions{
- Min: &decimal1,
- Max: &decimal2,
- NoDecimal: true,
- },
- []string{"min", "max"},
- },
- {
- "NoDecimal range success",
- schema.NumberOptions{
- Min: &int1,
- Max: &int2,
- NoDecimal: true,
- },
- []string{},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestBoolOptionsValidate(t *testing.T) {
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.BoolOptions{},
- []string{},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestEmailOptionsValidate(t *testing.T) {
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.EmailOptions{},
- []string{},
- },
- {
- "ExceptDomains failure",
- schema.EmailOptions{
- ExceptDomains: []string{"invalid"},
- },
- []string{"exceptDomains"},
- },
- {
- "ExceptDomains success",
- schema.EmailOptions{
- ExceptDomains: []string{"example.com", "sub.example.com"},
- },
- []string{},
- },
- {
- "OnlyDomains check",
- schema.EmailOptions{
- OnlyDomains: []string{"invalid"},
- },
- []string{"onlyDomains"},
- },
- {
- "OnlyDomains success",
- schema.EmailOptions{
- OnlyDomains: []string{"example.com", "sub.example.com"},
- },
- []string{},
- },
- {
- "OnlyDomains + ExceptDomains at the same time",
- schema.EmailOptions{
- ExceptDomains: []string{"test1.com"},
- OnlyDomains: []string{"test2.com"},
- },
- []string{"exceptDomains", "onlyDomains"},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestUrlOptionsValidate(t *testing.T) {
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.UrlOptions{},
- []string{},
- },
- {
- "ExceptDomains failure",
- schema.UrlOptions{
- ExceptDomains: []string{"invalid"},
- },
- []string{"exceptDomains"},
- },
- {
- "ExceptDomains success",
- schema.UrlOptions{
- ExceptDomains: []string{"example.com", "sub.example.com"},
- },
- []string{},
- },
- {
- "OnlyDomains check",
- schema.UrlOptions{
- OnlyDomains: []string{"invalid"},
- },
- []string{"onlyDomains"},
- },
- {
- "OnlyDomains success",
- schema.UrlOptions{
- OnlyDomains: []string{"example.com", "sub.example.com"},
- },
- []string{},
- },
- {
- "OnlyDomains + ExceptDomains at the same time",
- schema.UrlOptions{
- ExceptDomains: []string{"test1.com"},
- OnlyDomains: []string{"test2.com"},
- },
- []string{"exceptDomains", "onlyDomains"},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestEditorOptionsValidate(t *testing.T) {
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.EditorOptions{},
- []string{},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestDateOptionsValidate(t *testing.T) {
- date1 := types.NowDateTime()
- date2, _ := types.ParseDateTime(date1.Time().AddDate(1, 0, 0))
-
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.DateOptions{},
- []string{},
- },
- {
- "min only",
- schema.DateOptions{
- Min: date1,
- },
- []string{},
- },
- {
- "max only",
- schema.DateOptions{
- Min: date1,
- },
- []string{},
- },
- {
- "zero min + max",
- schema.DateOptions{
- Min: types.DateTime{},
- Max: date1,
- },
- []string{},
- },
- {
- "min + zero max",
- schema.DateOptions{
- Min: date1,
- Max: types.DateTime{},
- },
- []string{},
- },
- {
- "min > max",
- schema.DateOptions{
- Min: date2,
- Max: date1,
- },
- []string{"max"},
- },
- {
- "min == max",
- schema.DateOptions{
- Min: date1,
- Max: date1,
- },
- []string{"max"},
- },
- {
- "min < max",
- schema.DateOptions{
- Min: date1,
- Max: date2,
- },
- []string{},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestSelectOptionsValidate(t *testing.T) {
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.SelectOptions{},
- []string{"values", "maxSelect"},
- },
- {
- "MaxSelect <= 0",
- schema.SelectOptions{
- Values: []string{"test1", "test2"},
- MaxSelect: 0,
- },
- []string{"maxSelect"},
- },
- {
- "MaxSelect > Values",
- schema.SelectOptions{
- Values: []string{"test1", "test2"},
- MaxSelect: 3,
- },
- []string{"maxSelect"},
- },
- {
- "MaxSelect <= Values",
- schema.SelectOptions{
- Values: []string{"test1", "test2"},
- MaxSelect: 2,
- },
- []string{},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestSelectOptionsIsMultiple(t *testing.T) {
- scenarios := []struct {
- maxSelect int
- expect bool
- }{
- {-1, false},
- {0, false},
- {1, false},
- {2, true},
- }
-
- for i, s := range scenarios {
- opt := schema.SelectOptions{
- MaxSelect: s.maxSelect,
- }
-
- if v := opt.IsMultiple(); v != s.expect {
- t.Errorf("[%d] Expected %v, got %v", i, s.expect, v)
- }
- }
-}
-
-func TestJsonOptionsValidate(t *testing.T) {
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.JsonOptions{},
- []string{"maxSize"},
- },
- {
- "MaxSize < 0",
- schema.JsonOptions{MaxSize: -1},
- []string{"maxSize"},
- },
- {
- "MaxSize > 0",
- schema.JsonOptions{MaxSize: 1},
- []string{},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestFileOptionsValidate(t *testing.T) {
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.FileOptions{},
- []string{"maxSelect", "maxSize"},
- },
- {
- "MaxSelect <= 0 && maxSize <= 0",
- schema.FileOptions{
- MaxSize: 0,
- MaxSelect: 0,
- },
- []string{"maxSelect", "maxSize"},
- },
- {
- "MaxSelect > 0 && maxSize > 0",
- schema.FileOptions{
- MaxSize: 2,
- MaxSelect: 1,
- },
- []string{},
- },
- {
- "invalid thumbs format",
- schema.FileOptions{
- MaxSize: 1,
- MaxSelect: 2,
- Thumbs: []string{"100", "200x100"},
- },
- []string{"thumbs"},
- },
- {
- "invalid thumbs format - zero width and height",
- schema.FileOptions{
- MaxSize: 1,
- MaxSelect: 2,
- Thumbs: []string{"0x0", "0x0t", "0x0b", "0x0f"},
- },
- []string{"thumbs"},
- },
- {
- "valid thumbs format",
- schema.FileOptions{
- MaxSize: 1,
- MaxSelect: 2,
- Thumbs: []string{
- "100x100", "200x100", "0x100", "100x0",
- "10x10t", "10x10b", "10x10f",
- },
- },
- []string{},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestFileOptionsIsMultiple(t *testing.T) {
- scenarios := []struct {
- maxSelect int
- expect bool
- }{
- {-1, false},
- {0, false},
- {1, false},
- {2, true},
- }
-
- for i, s := range scenarios {
- opt := schema.FileOptions{
- MaxSelect: s.maxSelect,
- }
-
- if v := opt.IsMultiple(); v != s.expect {
- t.Errorf("[%d] Expected %v, got %v", i, s.expect, v)
- }
- }
-}
-
-func TestRelationOptionsValidate(t *testing.T) {
- scenarios := []fieldOptionsScenario{
- {
- "empty",
- schema.RelationOptions{},
- []string{"collectionId"},
- },
- {
- "empty CollectionId",
- schema.RelationOptions{
- CollectionId: "",
- MaxSelect: types.Pointer(1),
- },
- []string{"collectionId"},
- },
- {
- "MinSelect < 0",
- schema.RelationOptions{
- CollectionId: "abc",
- MinSelect: types.Pointer(-1),
- },
- []string{"minSelect"},
- },
- {
- "MinSelect >= 0",
- schema.RelationOptions{
- CollectionId: "abc",
- MinSelect: types.Pointer(0),
- },
- []string{},
- },
- {
- "MaxSelect <= 0",
- schema.RelationOptions{
- CollectionId: "abc",
- MaxSelect: types.Pointer(0),
- },
- []string{"maxSelect"},
- },
- {
- "MaxSelect > 0 && nonempty CollectionId",
- schema.RelationOptions{
- CollectionId: "abc",
- MaxSelect: types.Pointer(1),
- },
- []string{},
- },
- {
- "MinSelect < MaxSelect",
- schema.RelationOptions{
- CollectionId: "abc",
- MinSelect: nil,
- MaxSelect: types.Pointer(1),
- },
- []string{},
- },
- {
- "MinSelect = MaxSelect (non-zero)",
- schema.RelationOptions{
- CollectionId: "abc",
- MinSelect: types.Pointer(1),
- MaxSelect: types.Pointer(1),
- },
- []string{},
- },
- {
- "MinSelect = MaxSelect (both zero)",
- schema.RelationOptions{
- CollectionId: "abc",
- MinSelect: types.Pointer(0),
- MaxSelect: types.Pointer(0),
- },
- []string{"maxSelect"},
- },
- {
- "MinSelect > MaxSelect",
- schema.RelationOptions{
- CollectionId: "abc",
- MinSelect: types.Pointer(2),
- MaxSelect: types.Pointer(1),
- },
- []string{"maxSelect"},
- },
- }
-
- checkFieldOptionsScenarios(t, scenarios)
-}
-
-func TestRelationOptionsIsMultiple(t *testing.T) {
- scenarios := []struct {
- maxSelect *int
- expect bool
- }{
- {nil, true},
- {types.Pointer(-1), false},
- {types.Pointer(0), false},
- {types.Pointer(1), false},
- {types.Pointer(2), true},
- }
-
- for i, s := range scenarios {
- opt := schema.RelationOptions{
- MaxSelect: s.maxSelect,
- }
-
- if v := opt.IsMultiple(); v != s.expect {
- t.Errorf("[%d] Expected %v, got %v", i, s.expect, v)
- }
- }
-}
diff --git a/models/schema/schema_test.go b/models/schema/schema_test.go
deleted file mode 100644
index 7ff4afac..00000000
--- a/models/schema/schema_test.go
+++ /dev/null
@@ -1,414 +0,0 @@
-package schema_test
-
-import (
- "testing"
-
- "github.com/pocketbase/pocketbase/models/schema"
-)
-
-func TestNewSchemaAndFields(t *testing.T) {
- testSchema := schema.NewSchema(
- &schema.SchemaField{Id: "id1", Name: "test1"},
- &schema.SchemaField{Name: "test2"},
- &schema.SchemaField{Id: "id1", Name: "test1_new"}, // should replace the original id1 field
- )
-
- fields := testSchema.Fields()
-
- if len(fields) != 2 {
- t.Fatalf("Expected 2 fields, got %d (%v)", len(fields), fields)
- }
-
- for _, f := range fields {
- if f.Id == "" {
- t.Fatalf("Expected field id to be set, found empty id for field %v", f)
- }
- }
-
- if fields[0].Name != "test1_new" {
- t.Fatalf("Expected field with name test1_new, got %s", fields[0].Name)
- }
-
- if fields[1].Name != "test2" {
- t.Fatalf("Expected field with name test2, got %s", fields[1].Name)
- }
-}
-
-func TestSchemaInitFieldsOptions(t *testing.T) {
- f0 := &schema.SchemaField{Name: "test1", Type: "unknown"}
- schema0 := schema.NewSchema(f0)
-
- err0 := schema0.InitFieldsOptions()
- if err0 == nil {
- t.Fatalf("Expected unknown field schema to fail, got nil")
- }
-
- // ---
-
- f1 := &schema.SchemaField{Name: "test1", Type: schema.FieldTypeText}
- f2 := &schema.SchemaField{Name: "test2", Type: schema.FieldTypeEmail}
- schema1 := schema.NewSchema(f1, f2)
-
- err1 := schema1.InitFieldsOptions()
- if err1 != nil {
- t.Fatal(err1)
- }
-
- if _, ok := f1.Options.(*schema.TextOptions); !ok {
- t.Fatalf("Failed to init f1 options")
- }
-
- if _, ok := f2.Options.(*schema.EmailOptions); !ok {
- t.Fatalf("Failed to init f2 options")
- }
-}
-
-func TestSchemaClone(t *testing.T) {
- f1 := &schema.SchemaField{Name: "test1", Type: schema.FieldTypeText}
- f2 := &schema.SchemaField{Name: "test2", Type: schema.FieldTypeEmail}
- s1 := schema.NewSchema(f1, f2)
-
- s2, err := s1.Clone()
- if err != nil {
- t.Fatal(err)
- }
-
- s1Encoded, _ := s1.MarshalJSON()
- s2Encoded, _ := s2.MarshalJSON()
-
- if string(s1Encoded) != string(s2Encoded) {
- t.Fatalf("Expected the cloned schema to be equal, got %v VS\n %v", s1, s2)
- }
-
- // change in one schema shouldn't result to change in the other
- // (aka. check if it is a deep clone)
- s1.Fields()[0].Name = "test1_update"
- if s2.Fields()[0].Name != "test1" {
- t.Fatalf("Expected s2 field name to not change, got %q", s2.Fields()[0].Name)
- }
-}
-
-func TestSchemaAsMap(t *testing.T) {
- f1 := &schema.SchemaField{Name: "test1", Type: schema.FieldTypeText}
- f2 := &schema.SchemaField{Name: "test2", Type: schema.FieldTypeEmail}
- testSchema := schema.NewSchema(f1, f2)
-
- result := testSchema.AsMap()
-
- if len(result) != 2 {
- t.Fatalf("Expected 2 map elements, got %d (%v)", len(result), result)
- }
-
- expectedIndexes := []string{f1.Name, f2.Name}
-
- for _, index := range expectedIndexes {
- if _, ok := result[index]; !ok {
- t.Fatalf("Missing index %q", index)
- }
- }
-}
-
-func TestSchemaGetFieldByName(t *testing.T) {
- f1 := &schema.SchemaField{Name: "test1", Type: schema.FieldTypeText}
- f2 := &schema.SchemaField{Name: "test2", Type: schema.FieldTypeText}
- testSchema := schema.NewSchema(f1, f2)
-
- // missing field
- result1 := testSchema.GetFieldByName("missing")
- if result1 != nil {
- t.Fatalf("Found unexpected field %v", result1)
- }
-
- // existing field
- result2 := testSchema.GetFieldByName("test1")
- if result2 == nil || result2.Name != "test1" {
- t.Fatalf("Cannot find field with Name 'test1', got %v ", result2)
- }
-}
-
-func TestSchemaGetFieldById(t *testing.T) {
- f1 := &schema.SchemaField{Id: "id1", Name: "test1", Type: schema.FieldTypeText}
- f2 := &schema.SchemaField{Id: "id2", Name: "test2", Type: schema.FieldTypeText}
- testSchema := schema.NewSchema(f1, f2)
-
- // missing field id
- result1 := testSchema.GetFieldById("test1")
- if result1 != nil {
- t.Fatalf("Found unexpected field %v", result1)
- }
-
- // existing field id
- result2 := testSchema.GetFieldById("id2")
- if result2 == nil || result2.Id != "id2" {
- t.Fatalf("Cannot find field with id 'id2', got %v ", result2)
- }
-}
-
-func TestSchemaRemoveField(t *testing.T) {
- f1 := &schema.SchemaField{Id: "id1", Name: "test1", Type: schema.FieldTypeText}
- f2 := &schema.SchemaField{Id: "id2", Name: "test2", Type: schema.FieldTypeText}
- f3 := &schema.SchemaField{Id: "id3", Name: "test3", Type: schema.FieldTypeText}
- testSchema := schema.NewSchema(f1, f2, f3)
-
- testSchema.RemoveField("id2")
- testSchema.RemoveField("test3") // should do nothing
-
- expected := []string{"test1", "test3"}
-
- if len(testSchema.Fields()) != len(expected) {
- t.Fatalf("Expected %d, got %d (%v)", len(expected), len(testSchema.Fields()), testSchema)
- }
-
- for _, name := range expected {
- if f := testSchema.GetFieldByName(name); f == nil {
- t.Fatalf("Missing field %q", name)
- }
- }
-}
-
-func TestSchemaAddField(t *testing.T) {
- f1 := &schema.SchemaField{Name: "test1", Type: schema.FieldTypeText}
- f2 := &schema.SchemaField{Id: "f2Id", Name: "test2", Type: schema.FieldTypeText}
- f3 := &schema.SchemaField{Id: "f3Id", Name: "test3", Type: schema.FieldTypeText}
- testSchema := schema.NewSchema(f1, f2, f3)
-
- f2New := &schema.SchemaField{Id: "f2Id", Name: "test2_new", Type: schema.FieldTypeEmail}
- f4 := &schema.SchemaField{Name: "test4", Type: schema.FieldTypeUrl}
-
- testSchema.AddField(f2New)
- testSchema.AddField(f4)
-
- if len(testSchema.Fields()) != 4 {
- t.Fatalf("Expected %d, got %d (%v)", 4, len(testSchema.Fields()), testSchema)
- }
-
- // check if each field has id
- for _, f := range testSchema.Fields() {
- if f.Id == "" {
- t.Fatalf("Expected field id to be set, found empty id for field %v", f)
- }
- }
-
- // check if f2 field was replaced
- if f := testSchema.GetFieldById("f2Id"); f == nil || f.Type != schema.FieldTypeEmail {
- t.Fatalf("Expected f2 field to be replaced, found %v", f)
- }
-
- // check if f4 was added
- if f := testSchema.GetFieldByName("test4"); f == nil || f.Name != "test4" {
- t.Fatalf("Expected f4 field to be added, found %v", f)
- }
-}
-
-func TestSchemaValidate(t *testing.T) {
- // emulate duplicated field ids
- duplicatedIdsSchema := schema.NewSchema(
- &schema.SchemaField{Id: "id1", Name: "test1", Type: schema.FieldTypeText},
- &schema.SchemaField{Id: "id2", Name: "test2", Type: schema.FieldTypeText},
- )
- duplicatedIdsSchema.Fields()[1].Id = "id1" // manually set existing id
-
- scenarios := []struct {
- schema schema.Schema
- expectError bool
- }{
- // no fields
- {
- schema.NewSchema(),
- false,
- },
- // duplicated field ids
- {
- duplicatedIdsSchema,
- true,
- },
- // duplicated field names (case insensitive)
- {
- schema.NewSchema(
- &schema.SchemaField{Name: "test", Type: schema.FieldTypeText},
- &schema.SchemaField{Name: "TeSt", Type: schema.FieldTypeText},
- ),
- true,
- },
- // failure - base individual fields validation
- {
- schema.NewSchema(
- &schema.SchemaField{Name: "", Type: schema.FieldTypeText},
- ),
- true,
- },
- // success - base individual fields validation
- {
- schema.NewSchema(
- &schema.SchemaField{Name: "test", Type: schema.FieldTypeText},
- ),
- false,
- },
- // failure - individual field options validation
- {
- schema.NewSchema(
- &schema.SchemaField{Name: "test", Type: schema.FieldTypeFile},
- ),
- true,
- },
- // success - individual field options validation
- {
- schema.NewSchema(
- &schema.SchemaField{Name: "test", Type: schema.FieldTypeFile, Options: &schema.FileOptions{MaxSelect: 1, MaxSize: 1}},
- ),
- false,
- },
- }
-
- for i, s := range scenarios {
- err := s.schema.Validate()
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected %v, got %v (%v)", i, s.expectError, hasErr, err)
- continue
- }
- }
-}
-
-func TestSchemaMarshalJSON(t *testing.T) {
- f1 := &schema.SchemaField{Id: "f1id", Name: "test1", Type: schema.FieldTypeText}
- f2 := &schema.SchemaField{
- Id: "f2id",
- Name: "test2",
- Type: schema.FieldTypeText,
- Options: &schema.TextOptions{Pattern: "test"},
- }
- testSchema := schema.NewSchema(f1, f2)
-
- result, err := testSchema.MarshalJSON()
- if err != nil {
- t.Fatal(err)
- }
-
- expected := `[{"system":false,"id":"f1id","name":"test1","type":"text","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":""}},{"system":false,"id":"f2id","name":"test2","type":"text","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":"test"}}]`
-
- if string(result) != expected {
- t.Fatalf("Expected %s, got %s", expected, string(result))
- }
-}
-
-func TestSchemaUnmarshalJSON(t *testing.T) {
- encoded := `[{"system":false,"id":"fid1", "name":"test1","type":"text","required":false,"unique":false,"options":{"min":null,"max":null,"pattern":""}},{"system":false,"name":"test2","type":"text","required":false,"unique":false,"options":{"min":null,"max":null,"pattern":"test"}}]`
- testSchema := schema.Schema{}
- testSchema.AddField(&schema.SchemaField{Name: "tempField", Type: schema.FieldTypeUrl})
- err := testSchema.UnmarshalJSON([]byte(encoded))
- if err != nil {
- t.Fatal(err)
- }
-
- fields := testSchema.Fields()
- if len(fields) != 2 {
- t.Fatalf("Expected 2 fields, found %v", fields)
- }
-
- f1 := testSchema.GetFieldByName("test1")
- if f1 == nil {
- t.Fatal("Expected to find field 'test1', got nil")
- }
- if f1.Id != "fid1" {
- t.Fatalf("Expected fid1 id, got %s", f1.Id)
- }
- _, ok := f1.Options.(*schema.TextOptions)
- if !ok {
- t.Fatal("'test1' field options are not inited.")
- }
-
- f2 := testSchema.GetFieldByName("test2")
- if f2 == nil {
- t.Fatal("Expected to find field 'test2', got nil")
- }
- if f2.Id == "" {
- t.Fatal("Expected f2 id to be set, got empty string")
- }
- o2, ok := f2.Options.(*schema.TextOptions)
- if !ok {
- t.Fatal("'test2' field options are not inited.")
- }
- if o2.Pattern != "test" {
- t.Fatalf("Expected pattern to be %q, got %q", "test", o2.Pattern)
- }
-}
-
-func TestSchemaValue(t *testing.T) {
- // empty schema
- s1 := schema.Schema{}
- v1, err := s1.Value()
- if err != nil {
- t.Fatal(err)
- }
- if v1 != "[]" {
- t.Fatalf("Expected nil, got %v", v1)
- }
-
- // schema with fields
- f1 := &schema.SchemaField{Id: "f1id", Name: "test1", Type: schema.FieldTypeText}
- s2 := schema.NewSchema(f1)
-
- v2, err := s2.Value()
- if err != nil {
- t.Fatal(err)
- }
- expected := `[{"system":false,"id":"f1id","name":"test1","type":"text","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":""}}]`
-
- if v2 != expected {
- t.Fatalf("Expected %v, got %v", expected, v2)
- }
-}
-
-func TestSchemaScan(t *testing.T) {
- scenarios := []struct {
- data any
- expectError bool
- expectJson string
- }{
- {nil, false, "[]"},
- {"", false, "[]"},
- {[]byte{}, false, "[]"},
- {"[]", false, "[]"},
- {"invalid", true, "[]"},
- {123, true, "[]"},
- // no field type
- {`[{}]`, true, `[]`},
- // unknown field type
- {
- `[{"system":false,"id":"123","name":"test1","type":"unknown","required":false,"presentable":false,"unique":false}]`,
- true,
- `[]`,
- },
- // without options
- {
- `[{"system":false,"id":"123","name":"test1","type":"text","required":false,"presentable":false,"unique":false}]`,
- false,
- `[{"system":false,"id":"123","name":"test1","type":"text","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":""}}]`,
- },
- // with options
- {
- `[{"system":false,"id":"123","name":"test1","type":"text","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":"test"}}]`,
- false,
- `[{"system":false,"id":"123","name":"test1","type":"text","required":false,"presentable":false,"unique":false,"options":{"min":null,"max":null,"pattern":"test"}}]`,
- },
- }
-
- for i, s := range scenarios {
- testSchema := schema.Schema{}
-
- err := testSchema.Scan(s.data)
-
- hasErr := err != nil
- if hasErr != s.expectError {
- t.Errorf("(%d) Expected %v, got %v (%v)", i, s.expectError, hasErr, err)
- continue
- }
-
- json, _ := testSchema.MarshalJSON()
- if string(json) != s.expectJson {
- t.Errorf("(%d) Expected json %v, got %v", i, s.expectJson, string(json))
- }
- }
-}
diff --git a/models/settings/settings.go b/models/settings/settings.go
deleted file mode 100644
index 32f1ebeb..00000000
--- a/models/settings/settings.go
+++ /dev/null
@@ -1,703 +0,0 @@
-package settings
-
-import (
- "encoding/json"
- "errors"
- "fmt"
- "strings"
- "sync"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/go-ozzo/ozzo-validation/v4/is"
- "github.com/pocketbase/pocketbase/tools/auth"
- "github.com/pocketbase/pocketbase/tools/cron"
- "github.com/pocketbase/pocketbase/tools/mailer"
- "github.com/pocketbase/pocketbase/tools/rest"
- "github.com/pocketbase/pocketbase/tools/security"
-)
-
-// SecretMask is the default settings secrets replacement value
-// (see Settings.RedactClone()).
-const SecretMask string = "******"
-
-// Settings defines common app configuration options.
-type Settings struct {
- mux sync.RWMutex
-
- Meta MetaConfig `form:"meta" json:"meta"`
- Logs LogsConfig `form:"logs" json:"logs"`
- Smtp SmtpConfig `form:"smtp" json:"smtp"`
- S3 S3Config `form:"s3" json:"s3"`
- Backups BackupsConfig `form:"backups" json:"backups"`
-
- AdminAuthToken TokenConfig `form:"adminAuthToken" json:"adminAuthToken"`
- AdminPasswordResetToken TokenConfig `form:"adminPasswordResetToken" json:"adminPasswordResetToken"`
- AdminFileToken TokenConfig `form:"adminFileToken" json:"adminFileToken"`
- RecordAuthToken TokenConfig `form:"recordAuthToken" json:"recordAuthToken"`
- RecordPasswordResetToken TokenConfig `form:"recordPasswordResetToken" json:"recordPasswordResetToken"`
- RecordEmailChangeToken TokenConfig `form:"recordEmailChangeToken" json:"recordEmailChangeToken"`
- RecordVerificationToken TokenConfig `form:"recordVerificationToken" json:"recordVerificationToken"`
- RecordFileToken TokenConfig `form:"recordFileToken" json:"recordFileToken"`
-
- // Deprecated: Will be removed in v0.9+
- EmailAuth EmailAuthConfig `form:"emailAuth" json:"emailAuth"`
-
- GoogleAuth AuthProviderConfig `form:"googleAuth" json:"googleAuth"`
- FacebookAuth AuthProviderConfig `form:"facebookAuth" json:"facebookAuth"`
- GithubAuth AuthProviderConfig `form:"githubAuth" json:"githubAuth"`
- GitlabAuth AuthProviderConfig `form:"gitlabAuth" json:"gitlabAuth"`
- DiscordAuth AuthProviderConfig `form:"discordAuth" json:"discordAuth"`
- TwitterAuth AuthProviderConfig `form:"twitterAuth" json:"twitterAuth"`
- MicrosoftAuth AuthProviderConfig `form:"microsoftAuth" json:"microsoftAuth"`
- SpotifyAuth AuthProviderConfig `form:"spotifyAuth" json:"spotifyAuth"`
- KakaoAuth AuthProviderConfig `form:"kakaoAuth" json:"kakaoAuth"`
- TwitchAuth AuthProviderConfig `form:"twitchAuth" json:"twitchAuth"`
- StravaAuth AuthProviderConfig `form:"stravaAuth" json:"stravaAuth"`
- GiteeAuth AuthProviderConfig `form:"giteeAuth" json:"giteeAuth"`
- LivechatAuth AuthProviderConfig `form:"livechatAuth" json:"livechatAuth"`
- GiteaAuth AuthProviderConfig `form:"giteaAuth" json:"giteaAuth"`
- OIDCAuth AuthProviderConfig `form:"oidcAuth" json:"oidcAuth"`
- OIDC2Auth AuthProviderConfig `form:"oidc2Auth" json:"oidc2Auth"`
- OIDC3Auth AuthProviderConfig `form:"oidc3Auth" json:"oidc3Auth"`
- AppleAuth AuthProviderConfig `form:"appleAuth" json:"appleAuth"`
- InstagramAuth AuthProviderConfig `form:"instagramAuth" json:"instagramAuth"`
- VKAuth AuthProviderConfig `form:"vkAuth" json:"vkAuth"`
- YandexAuth AuthProviderConfig `form:"yandexAuth" json:"yandexAuth"`
- PatreonAuth AuthProviderConfig `form:"patreonAuth" json:"patreonAuth"`
- MailcowAuth AuthProviderConfig `form:"mailcowAuth" json:"mailcowAuth"`
- BitbucketAuth AuthProviderConfig `form:"bitbucketAuth" json:"bitbucketAuth"`
- PlanningcenterAuth AuthProviderConfig `form:"planningcenterAuth" json:"planningcenterAuth"`
-}
-
-// New creates and returns a new default Settings instance.
-func New() *Settings {
- return &Settings{
- Meta: MetaConfig{
- AppName: "Acme",
- AppUrl: "http://localhost:8090",
- HideControls: false,
- SenderName: "Support",
- SenderAddress: "support@example.com",
- VerificationTemplate: defaultVerificationTemplate,
- ResetPasswordTemplate: defaultResetPasswordTemplate,
- ConfirmEmailChangeTemplate: defaultConfirmEmailChangeTemplate,
- },
- Logs: LogsConfig{
- MaxDays: 5,
- LogIp: true,
- },
- Smtp: SmtpConfig{
- Enabled: false,
- Host: "smtp.example.com",
- Port: 587,
- Username: "",
- Password: "",
- Tls: false,
- },
- Backups: BackupsConfig{
- CronMaxKeep: 3,
- },
- AdminAuthToken: TokenConfig{
- Secret: security.RandomString(50),
- Duration: 1209600, // 14 days
- },
- AdminPasswordResetToken: TokenConfig{
- Secret: security.RandomString(50),
- Duration: 1800, // 30 minutes
- },
- AdminFileToken: TokenConfig{
- Secret: security.RandomString(50),
- Duration: 120, // 2 minutes
- },
- RecordAuthToken: TokenConfig{
- Secret: security.RandomString(50),
- Duration: 1209600, // 14 days
- },
- RecordPasswordResetToken: TokenConfig{
- Secret: security.RandomString(50),
- Duration: 1800, // 30 minutes
- },
- RecordVerificationToken: TokenConfig{
- Secret: security.RandomString(50),
- Duration: 604800, // 7 days
- },
- RecordFileToken: TokenConfig{
- Secret: security.RandomString(50),
- Duration: 120, // 2 minutes
- },
- RecordEmailChangeToken: TokenConfig{
- Secret: security.RandomString(50),
- Duration: 1800, // 30 minutes
- },
- GoogleAuth: AuthProviderConfig{
- Enabled: false,
- },
- FacebookAuth: AuthProviderConfig{
- Enabled: false,
- },
- GithubAuth: AuthProviderConfig{
- Enabled: false,
- },
- GitlabAuth: AuthProviderConfig{
- Enabled: false,
- },
- DiscordAuth: AuthProviderConfig{
- Enabled: false,
- },
- TwitterAuth: AuthProviderConfig{
- Enabled: false,
- },
- MicrosoftAuth: AuthProviderConfig{
- Enabled: false,
- },
- SpotifyAuth: AuthProviderConfig{
- Enabled: false,
- },
- KakaoAuth: AuthProviderConfig{
- Enabled: false,
- },
- TwitchAuth: AuthProviderConfig{
- Enabled: false,
- },
- StravaAuth: AuthProviderConfig{
- Enabled: false,
- },
- GiteeAuth: AuthProviderConfig{
- Enabled: false,
- },
- LivechatAuth: AuthProviderConfig{
- Enabled: false,
- },
- GiteaAuth: AuthProviderConfig{
- Enabled: false,
- },
- OIDCAuth: AuthProviderConfig{
- Enabled: false,
- },
- OIDC2Auth: AuthProviderConfig{
- Enabled: false,
- },
- OIDC3Auth: AuthProviderConfig{
- Enabled: false,
- },
- AppleAuth: AuthProviderConfig{
- Enabled: false,
- },
- InstagramAuth: AuthProviderConfig{
- Enabled: false,
- },
- VKAuth: AuthProviderConfig{
- Enabled: false,
- },
- YandexAuth: AuthProviderConfig{
- Enabled: false,
- },
- PatreonAuth: AuthProviderConfig{
- Enabled: false,
- },
- MailcowAuth: AuthProviderConfig{
- Enabled: false,
- },
- BitbucketAuth: AuthProviderConfig{
- Enabled: false,
- },
- PlanningcenterAuth: AuthProviderConfig{
- Enabled: false,
- },
- }
-}
-
-// Validate makes Settings validatable by implementing [validation.Validatable] interface.
-func (s *Settings) Validate() error {
- s.mux.Lock()
- defer s.mux.Unlock()
-
- return validation.ValidateStruct(s,
- validation.Field(&s.Meta),
- validation.Field(&s.Logs),
- validation.Field(&s.AdminAuthToken),
- validation.Field(&s.AdminPasswordResetToken),
- validation.Field(&s.AdminFileToken),
- validation.Field(&s.RecordAuthToken),
- validation.Field(&s.RecordPasswordResetToken),
- validation.Field(&s.RecordEmailChangeToken),
- validation.Field(&s.RecordVerificationToken),
- validation.Field(&s.RecordFileToken),
- validation.Field(&s.Smtp),
- validation.Field(&s.S3),
- validation.Field(&s.Backups),
- validation.Field(&s.GoogleAuth),
- validation.Field(&s.FacebookAuth),
- validation.Field(&s.GithubAuth),
- validation.Field(&s.GitlabAuth),
- validation.Field(&s.DiscordAuth),
- validation.Field(&s.TwitterAuth),
- validation.Field(&s.MicrosoftAuth),
- validation.Field(&s.SpotifyAuth),
- validation.Field(&s.KakaoAuth),
- validation.Field(&s.TwitchAuth),
- validation.Field(&s.StravaAuth),
- validation.Field(&s.GiteeAuth),
- validation.Field(&s.LivechatAuth),
- validation.Field(&s.GiteaAuth),
- validation.Field(&s.OIDCAuth),
- validation.Field(&s.OIDC2Auth),
- validation.Field(&s.OIDC3Auth),
- validation.Field(&s.AppleAuth),
- validation.Field(&s.InstagramAuth),
- validation.Field(&s.VKAuth),
- validation.Field(&s.YandexAuth),
- validation.Field(&s.PatreonAuth),
- validation.Field(&s.MailcowAuth),
- validation.Field(&s.BitbucketAuth),
- validation.Field(&s.PlanningcenterAuth),
- )
-}
-
-// Merge merges `other` settings into the current one.
-func (s *Settings) Merge(other *Settings) error {
- s.mux.Lock()
- defer s.mux.Unlock()
-
- bytes, err := json.Marshal(other)
- if err != nil {
- return err
- }
-
- return json.Unmarshal(bytes, s)
-}
-
-// Clone creates a new deep copy of the current settings.
-func (s *Settings) Clone() (*Settings, error) {
- clone := &Settings{}
- if err := clone.Merge(s); err != nil {
- return nil, err
- }
- return clone, nil
-}
-
-// RedactClone creates a new deep copy of the current settings,
-// while replacing the secret values with `******`.
-func (s *Settings) RedactClone() (*Settings, error) {
- clone, err := s.Clone()
- if err != nil {
- return nil, err
- }
-
- sensitiveFields := []*string{
- &clone.Smtp.Password,
- &clone.S3.Secret,
- &clone.Backups.S3.Secret,
- &clone.AdminAuthToken.Secret,
- &clone.AdminPasswordResetToken.Secret,
- &clone.AdminFileToken.Secret,
- &clone.RecordAuthToken.Secret,
- &clone.RecordPasswordResetToken.Secret,
- &clone.RecordEmailChangeToken.Secret,
- &clone.RecordVerificationToken.Secret,
- &clone.RecordFileToken.Secret,
- &clone.GoogleAuth.ClientSecret,
- &clone.FacebookAuth.ClientSecret,
- &clone.GithubAuth.ClientSecret,
- &clone.GitlabAuth.ClientSecret,
- &clone.DiscordAuth.ClientSecret,
- &clone.TwitterAuth.ClientSecret,
- &clone.MicrosoftAuth.ClientSecret,
- &clone.SpotifyAuth.ClientSecret,
- &clone.KakaoAuth.ClientSecret,
- &clone.TwitchAuth.ClientSecret,
- &clone.StravaAuth.ClientSecret,
- &clone.GiteeAuth.ClientSecret,
- &clone.LivechatAuth.ClientSecret,
- &clone.GiteaAuth.ClientSecret,
- &clone.OIDCAuth.ClientSecret,
- &clone.OIDC2Auth.ClientSecret,
- &clone.OIDC3Auth.ClientSecret,
- &clone.AppleAuth.ClientSecret,
- &clone.InstagramAuth.ClientSecret,
- &clone.VKAuth.ClientSecret,
- &clone.YandexAuth.ClientSecret,
- &clone.PatreonAuth.ClientSecret,
- &clone.MailcowAuth.ClientSecret,
- &clone.BitbucketAuth.ClientSecret,
- &clone.PlanningcenterAuth.ClientSecret,
- }
-
- // mask all sensitive fields
- for _, v := range sensitiveFields {
- if v != nil && *v != "" {
- *v = SecretMask
- }
- }
-
- return clone, nil
-}
-
-// NamedAuthProviderConfigs returns a map with all registered OAuth2
-// provider configurations (indexed by their name identifier).
-func (s *Settings) NamedAuthProviderConfigs() map[string]AuthProviderConfig {
- s.mux.RLock()
- defer s.mux.RUnlock()
-
- return map[string]AuthProviderConfig{
- auth.NameGoogle: s.GoogleAuth,
- auth.NameFacebook: s.FacebookAuth,
- auth.NameGithub: s.GithubAuth,
- auth.NameGitlab: s.GitlabAuth,
- auth.NameDiscord: s.DiscordAuth,
- auth.NameTwitter: s.TwitterAuth,
- auth.NameMicrosoft: s.MicrosoftAuth,
- auth.NameSpotify: s.SpotifyAuth,
- auth.NameKakao: s.KakaoAuth,
- auth.NameTwitch: s.TwitchAuth,
- auth.NameStrava: s.StravaAuth,
- auth.NameGitee: s.GiteeAuth,
- auth.NameLivechat: s.LivechatAuth,
- auth.NameGitea: s.GiteaAuth,
- auth.NameOIDC: s.OIDCAuth,
- auth.NameOIDC + "2": s.OIDC2Auth,
- auth.NameOIDC + "3": s.OIDC3Auth,
- auth.NameApple: s.AppleAuth,
- auth.NameInstagram: s.InstagramAuth,
- auth.NameVK: s.VKAuth,
- auth.NameYandex: s.YandexAuth,
- auth.NamePatreon: s.PatreonAuth,
- auth.NameMailcow: s.MailcowAuth,
- auth.NameBitbucket: s.BitbucketAuth,
- auth.NamePlanningcenter: s.PlanningcenterAuth,
- }
-}
-
-// -------------------------------------------------------------------
-
-type TokenConfig struct {
- Secret string `form:"secret" json:"secret"`
- Duration int64 `form:"duration" json:"duration"`
-}
-
-// Validate makes TokenConfig validatable by implementing [validation.Validatable] interface.
-func (c TokenConfig) Validate() error {
- return validation.ValidateStruct(&c,
- validation.Field(&c.Secret, validation.Required, validation.Length(30, 300)),
- validation.Field(&c.Duration, validation.Required, validation.Min(5), validation.Max(63072000)),
- )
-}
-
-// -------------------------------------------------------------------
-
-type SmtpConfig struct {
- Enabled bool `form:"enabled" json:"enabled"`
- Host string `form:"host" json:"host"`
- Port int `form:"port" json:"port"`
- Username string `form:"username" json:"username"`
- Password string `form:"password" json:"password"`
-
- // SMTP AUTH - PLAIN (default) or LOGIN
- AuthMethod string `form:"authMethod" json:"authMethod"`
-
- // Whether to enforce TLS encryption for the mail server connection.
- //
- // When set to false StartTLS command is send, leaving the server
- // to decide whether to upgrade the connection or not.
- Tls bool `form:"tls" json:"tls"`
-
- // LocalName is optional domain name or IP address used for the
- // EHLO/HELO exchange (if not explicitly set, defaults to "localhost").
- //
- // This is required only by some SMTP servers, such as Gmail SMTP-relay.
- LocalName string `form:"localName" json:"localName"`
-}
-
-// Validate makes SmtpConfig validatable by implementing [validation.Validatable] interface.
-func (c SmtpConfig) Validate() error {
- return validation.ValidateStruct(&c,
- validation.Field(
- &c.Host,
- validation.When(c.Enabled, validation.Required),
- is.Host,
- ),
- validation.Field(
- &c.Port,
- validation.When(c.Enabled, validation.Required),
- validation.Min(0),
- ),
- validation.Field(
- &c.AuthMethod,
- // don't require it for backward compatibility
- // (fallback internally to PLAIN)
- // validation.When(c.Enabled, validation.Required),
- validation.In(mailer.SmtpAuthLogin, mailer.SmtpAuthPlain),
- ),
- validation.Field(&c.LocalName, is.Host),
- )
-}
-
-// -------------------------------------------------------------------
-
-type S3Config struct {
- Enabled bool `form:"enabled" json:"enabled"`
- Bucket string `form:"bucket" json:"bucket"`
- Region string `form:"region" json:"region"`
- Endpoint string `form:"endpoint" json:"endpoint"`
- AccessKey string `form:"accessKey" json:"accessKey"`
- Secret string `form:"secret" json:"secret"`
- ForcePathStyle bool `form:"forcePathStyle" json:"forcePathStyle"`
-}
-
-// Validate makes S3Config validatable by implementing [validation.Validatable] interface.
-func (c S3Config) Validate() error {
- return validation.ValidateStruct(&c,
- validation.Field(&c.Endpoint, is.URL, validation.When(c.Enabled, validation.Required)),
- validation.Field(&c.Bucket, validation.When(c.Enabled, validation.Required)),
- validation.Field(&c.Region, validation.When(c.Enabled, validation.Required)),
- validation.Field(&c.AccessKey, validation.When(c.Enabled, validation.Required)),
- validation.Field(&c.Secret, validation.When(c.Enabled, validation.Required)),
- )
-}
-
-// -------------------------------------------------------------------
-
-type BackupsConfig struct {
- // Cron is a cron expression to schedule auto backups, eg. "* * * * *".
- //
- // Leave it empty to disable the auto backups functionality.
- Cron string `form:"cron" json:"cron"`
-
- // CronMaxKeep is the max number of cron generated backups to
- // keep before removing older entries.
- //
- // This field works only when the cron config has valid cron expression.
- CronMaxKeep int `form:"cronMaxKeep" json:"cronMaxKeep"`
-
- // S3 is an optional S3 storage config specifying where to store the app backups.
- S3 S3Config `form:"s3" json:"s3"`
-}
-
-// Validate makes BackupsConfig validatable by implementing [validation.Validatable] interface.
-func (c BackupsConfig) Validate() error {
- return validation.ValidateStruct(&c,
- validation.Field(&c.S3),
- validation.Field(&c.Cron, validation.By(checkCronExpression)),
- validation.Field(
- &c.CronMaxKeep,
- validation.When(c.Cron != "", validation.Required),
- validation.Min(1),
- ),
- )
-}
-
-func checkCronExpression(value any) error {
- v, _ := value.(string)
- if v == "" {
- return nil // nothing to check
- }
-
- _, err := cron.NewSchedule(v)
- if err != nil {
- return validation.NewError("validation_invalid_cron", err.Error())
- }
-
- return nil
-}
-
-// -------------------------------------------------------------------
-
-type MetaConfig struct {
- AppName string `form:"appName" json:"appName"`
- AppUrl string `form:"appUrl" json:"appUrl"`
- HideControls bool `form:"hideControls" json:"hideControls"`
- SenderName string `form:"senderName" json:"senderName"`
- SenderAddress string `form:"senderAddress" json:"senderAddress"`
- VerificationTemplate EmailTemplate `form:"verificationTemplate" json:"verificationTemplate"`
- ResetPasswordTemplate EmailTemplate `form:"resetPasswordTemplate" json:"resetPasswordTemplate"`
- ConfirmEmailChangeTemplate EmailTemplate `form:"confirmEmailChangeTemplate" json:"confirmEmailChangeTemplate"`
-}
-
-// Validate makes MetaConfig validatable by implementing [validation.Validatable] interface.
-func (c MetaConfig) Validate() error {
- return validation.ValidateStruct(&c,
- validation.Field(&c.AppName, validation.Required, validation.Length(1, 255)),
- validation.Field(&c.AppUrl, validation.Required, is.URL),
- validation.Field(&c.SenderName, validation.Required, validation.Length(1, 255)),
- validation.Field(&c.SenderAddress, is.EmailFormat, validation.Required),
- validation.Field(&c.VerificationTemplate, validation.Required),
- validation.Field(&c.ResetPasswordTemplate, validation.Required),
- validation.Field(&c.ConfirmEmailChangeTemplate, validation.Required),
- )
-}
-
-type EmailTemplate struct {
- Body string `form:"body" json:"body"`
- Subject string `form:"subject" json:"subject"`
- ActionUrl string `form:"actionUrl" json:"actionUrl"`
- Hidden bool `form:"hidden" json:"hidden"`
-}
-
-// Validate makes EmailTemplate validatable by implementing [validation.Validatable] interface.
-func (t EmailTemplate) Validate() error {
- return validation.ValidateStruct(&t,
- validation.Field(&t.Subject, validation.Required),
- validation.Field(
- &t.Body,
- validation.Required,
- validation.By(checkPlaceholderParams(EmailPlaceholderActionUrl)),
- ),
- validation.Field(
- &t.ActionUrl,
- validation.Required,
- validation.By(checkPlaceholderParams(EmailPlaceholderToken)),
- ),
- )
-}
-
-func checkPlaceholderParams(params ...string) validation.RuleFunc {
- return func(value any) error {
- v, _ := value.(string)
-
- for _, param := range params {
- if !strings.Contains(v, param) {
- return validation.NewError(
- "validation_missing_required_param",
- fmt.Sprintf("Missing required parameter %q", param),
- )
- }
- }
-
- return nil
- }
-}
-
-// Resolve replaces the placeholder parameters in the current email
-// template and returns its components as ready-to-use strings.
-func (t EmailTemplate) Resolve(
- appName string,
- appUrl,
- token string,
-) (subject, body, actionUrl string) {
- // replace action url placeholder params (if any)
- actionUrlParams := map[string]string{
- EmailPlaceholderAppName: appName,
- EmailPlaceholderAppUrl: appUrl,
- EmailPlaceholderToken: token,
- }
- actionUrl = t.ActionUrl
- for k, v := range actionUrlParams {
- actionUrl = strings.ReplaceAll(actionUrl, k, v)
- }
- actionUrl, _ = rest.NormalizeUrl(actionUrl)
-
- // replace body placeholder params (if any)
- bodyParams := map[string]string{
- EmailPlaceholderAppName: appName,
- EmailPlaceholderAppUrl: appUrl,
- EmailPlaceholderToken: token,
- EmailPlaceholderActionUrl: actionUrl,
- }
- body = t.Body
- for k, v := range bodyParams {
- body = strings.ReplaceAll(body, k, v)
- }
-
- // replace subject placeholder params (if any)
- subjectParams := map[string]string{
- EmailPlaceholderAppName: appName,
- EmailPlaceholderAppUrl: appUrl,
- }
- subject = t.Subject
- for k, v := range subjectParams {
- subject = strings.ReplaceAll(subject, k, v)
- }
-
- return subject, body, actionUrl
-}
-
-// -------------------------------------------------------------------
-
-type LogsConfig struct {
- MaxDays int `form:"maxDays" json:"maxDays"`
- MinLevel int `form:"minLevel" json:"minLevel"`
- LogIp bool `form:"logIp" json:"logIp"`
-}
-
-// Validate makes LogsConfig validatable by implementing [validation.Validatable] interface.
-func (c LogsConfig) Validate() error {
- return validation.ValidateStruct(&c,
- validation.Field(&c.MaxDays, validation.Min(0)),
- )
-}
-
-// -------------------------------------------------------------------
-
-type AuthProviderConfig struct {
- Enabled bool `form:"enabled" json:"enabled"`
- ClientId string `form:"clientId" json:"clientId"`
- ClientSecret string `form:"clientSecret" json:"clientSecret"`
- AuthUrl string `form:"authUrl" json:"authUrl"`
- TokenUrl string `form:"tokenUrl" json:"tokenUrl"`
- UserApiUrl string `form:"userApiUrl" json:"userApiUrl"`
- DisplayName string `form:"displayName" json:"displayName"`
- PKCE *bool `form:"pkce" json:"pkce"`
-}
-
-// Validate makes `ProviderConfig` validatable by implementing [validation.Validatable] interface.
-func (c AuthProviderConfig) Validate() error {
- return validation.ValidateStruct(&c,
- validation.Field(&c.ClientId, validation.When(c.Enabled, validation.Required)),
- validation.Field(&c.ClientSecret, validation.When(c.Enabled, validation.Required)),
- validation.Field(&c.AuthUrl, is.URL),
- validation.Field(&c.TokenUrl, is.URL),
- validation.Field(&c.UserApiUrl, is.URL),
- )
-}
-
-// SetupProvider loads the current AuthProviderConfig into the specified provider.
-func (c AuthProviderConfig) SetupProvider(provider auth.Provider) error {
- if !c.Enabled {
- return errors.New("the provider is not enabled")
- }
-
- if c.ClientId != "" {
- provider.SetClientId(c.ClientId)
- }
-
- if c.ClientSecret != "" {
- provider.SetClientSecret(c.ClientSecret)
- }
-
- if c.AuthUrl != "" {
- provider.SetAuthUrl(c.AuthUrl)
- }
-
- if c.UserApiUrl != "" {
- provider.SetUserApiUrl(c.UserApiUrl)
- }
-
- if c.TokenUrl != "" {
- provider.SetTokenUrl(c.TokenUrl)
- }
-
- if c.DisplayName != "" {
- provider.SetDisplayName(c.DisplayName)
- }
-
- if c.PKCE != nil {
- provider.SetPKCE(*c.PKCE)
- }
-
- return nil
-}
-
-// -------------------------------------------------------------------
-
-// Deprecated: Will be removed in v0.9+
-type EmailAuthConfig struct {
- Enabled bool `form:"enabled" json:"enabled"`
- ExceptDomains []string `form:"exceptDomains" json:"exceptDomains"`
- OnlyDomains []string `form:"onlyDomains" json:"onlyDomains"`
- MinPasswordLength int `form:"minPasswordLength" json:"minPasswordLength"`
-}
-
-// Deprecated: Will be removed in v0.9+
-func (c EmailAuthConfig) Validate() error {
- return nil
-}
diff --git a/models/settings/settings_templates.go b/models/settings/settings_templates.go
deleted file mode 100644
index a0f913a3..00000000
--- a/models/settings/settings_templates.go
+++ /dev/null
@@ -1,54 +0,0 @@
-package settings
-
-// Common settings placeholder tokens
-const (
- EmailPlaceholderAppName string = "{APP_NAME}"
- EmailPlaceholderAppUrl string = "{APP_URL}"
- EmailPlaceholderToken string = "{TOKEN}"
- EmailPlaceholderActionUrl string = "{ACTION_URL}"
-)
-
-var defaultVerificationTemplate = EmailTemplate{
- Subject: "Verify your " + EmailPlaceholderAppName + " email",
- Body: `Hello,
-Thank you for joining us at ` + EmailPlaceholderAppName + `.
-Click on the button below to verify your email address.
-
- Verify
-
-
- Thanks,
- ` + EmailPlaceholderAppName + ` team
-
`,
- ActionUrl: EmailPlaceholderAppUrl + "/_/#/auth/confirm-verification/" + EmailPlaceholderToken,
-}
-
-var defaultResetPasswordTemplate = EmailTemplate{
- Subject: "Reset your " + EmailPlaceholderAppName + " password",
- Body: `Hello,
-Click on the button below to reset your password.
-
- Reset password
-
-If you didn't ask to reset your password, you can ignore this email.
-
- Thanks,
- ` + EmailPlaceholderAppName + ` team
-
`,
- ActionUrl: EmailPlaceholderAppUrl + "/_/#/auth/confirm-password-reset/" + EmailPlaceholderToken,
-}
-
-var defaultConfirmEmailChangeTemplate = EmailTemplate{
- Subject: "Confirm your " + EmailPlaceholderAppName + " new email address",
- Body: `Hello,
-Click on the button below to confirm your new email address.
-
- Confirm new email
-
-If you didn't ask to change your email address, you can ignore this email.
-
- Thanks,
- ` + EmailPlaceholderAppName + ` team
-
`,
- ActionUrl: EmailPlaceholderAppUrl + "/_/#/auth/confirm-email-change/" + EmailPlaceholderToken,
-}
diff --git a/models/settings/settings_test.go b/models/settings/settings_test.go
deleted file mode 100644
index e54ceb5e..00000000
--- a/models/settings/settings_test.go
+++ /dev/null
@@ -1,1034 +0,0 @@
-package settings_test
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
- "strings"
- "testing"
-
- validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/pocketbase/models/settings"
- "github.com/pocketbase/pocketbase/tools/auth"
- "github.com/pocketbase/pocketbase/tools/mailer"
- "github.com/pocketbase/pocketbase/tools/types"
-)
-
-func TestSettingsValidate(t *testing.T) {
- s := settings.New()
-
- // set invalid settings data
- s.Meta.AppName = ""
- s.Logs.MaxDays = -10
- s.Smtp.Enabled = true
- s.Smtp.Host = ""
- s.S3.Enabled = true
- s.S3.Endpoint = "invalid"
- s.AdminAuthToken.Duration = -10
- s.AdminPasswordResetToken.Duration = -10
- s.AdminFileToken.Duration = -10
- s.RecordAuthToken.Duration = -10
- s.RecordPasswordResetToken.Duration = -10
- s.RecordEmailChangeToken.Duration = -10
- s.RecordVerificationToken.Duration = -10
- s.RecordFileToken.Duration = -10
- s.GoogleAuth.Enabled = true
- s.GoogleAuth.ClientId = ""
- s.FacebookAuth.Enabled = true
- s.FacebookAuth.ClientId = ""
- s.GithubAuth.Enabled = true
- s.GithubAuth.ClientId = ""
- s.GitlabAuth.Enabled = true
- s.GitlabAuth.ClientId = ""
- s.DiscordAuth.Enabled = true
- s.DiscordAuth.ClientId = ""
- s.TwitterAuth.Enabled = true
- s.TwitterAuth.ClientId = ""
- s.MicrosoftAuth.Enabled = true
- s.MicrosoftAuth.ClientId = ""
- s.SpotifyAuth.Enabled = true
- s.SpotifyAuth.ClientId = ""
- s.KakaoAuth.Enabled = true
- s.KakaoAuth.ClientId = ""
- s.TwitchAuth.Enabled = true
- s.TwitchAuth.ClientId = ""
- s.StravaAuth.Enabled = true
- s.StravaAuth.ClientId = ""
- s.GiteeAuth.Enabled = true
- s.GiteeAuth.ClientId = ""
- s.LivechatAuth.Enabled = true
- s.LivechatAuth.ClientId = ""
- s.GiteaAuth.Enabled = true
- s.GiteaAuth.ClientId = ""
- s.OIDCAuth.Enabled = true
- s.OIDCAuth.ClientId = ""
- s.OIDC2Auth.Enabled = true
- s.OIDC2Auth.ClientId = ""
- s.OIDC3Auth.Enabled = true
- s.OIDC3Auth.ClientId = ""
- s.AppleAuth.Enabled = true
- s.AppleAuth.ClientId = ""
- s.InstagramAuth.Enabled = true
- s.InstagramAuth.ClientId = ""
- s.VKAuth.Enabled = true
- s.VKAuth.ClientId = ""
- s.YandexAuth.Enabled = true
- s.YandexAuth.ClientId = ""
- s.PatreonAuth.Enabled = true
- s.PatreonAuth.ClientId = ""
- s.MailcowAuth.Enabled = true
- s.MailcowAuth.ClientId = ""
- s.BitbucketAuth.Enabled = true
- s.BitbucketAuth.ClientId = ""
- s.PlanningcenterAuth.Enabled = true
- s.PlanningcenterAuth.ClientId = ""
-
- // check if Validate() is triggering the members validate methods.
- err := s.Validate()
- if err == nil {
- t.Fatalf("Expected error, got nil")
- }
-
- expectations := []string{
- `"meta":{`,
- `"logs":{`,
- `"smtp":{`,
- `"s3":{`,
- `"adminAuthToken":{`,
- `"adminPasswordResetToken":{`,
- `"adminFileToken":{`,
- `"recordAuthToken":{`,
- `"recordPasswordResetToken":{`,
- `"recordEmailChangeToken":{`,
- `"recordVerificationToken":{`,
- `"recordFileToken":{`,
- `"googleAuth":{`,
- `"facebookAuth":{`,
- `"githubAuth":{`,
- `"gitlabAuth":{`,
- `"discordAuth":{`,
- `"twitterAuth":{`,
- `"microsoftAuth":{`,
- `"spotifyAuth":{`,
- `"kakaoAuth":{`,
- `"twitchAuth":{`,
- `"stravaAuth":{`,
- `"giteeAuth":{`,
- `"livechatAuth":{`,
- `"giteaAuth":{`,
- `"oidcAuth":{`,
- `"oidc2Auth":{`,
- `"oidc3Auth":{`,
- `"appleAuth":{`,
- `"instagramAuth":{`,
- `"vkAuth":{`,
- `"yandexAuth":{`,
- `"patreonAuth":{`,
- `"mailcowAuth":{`,
- `"bitbucketAuth":{`,
- `"planningcenterAuth":{`,
- }
-
- errBytes, _ := json.Marshal(err)
- jsonErr := string(errBytes)
- for _, expected := range expectations {
- if !strings.Contains(jsonErr, expected) {
- t.Errorf("Expected error key %s in %v", expected, jsonErr)
- }
- }
-}
-
-func TestSettingsMerge(t *testing.T) {
- s1 := settings.New()
- s1.Meta.AppUrl = "old_app_url"
-
- s2 := settings.New()
- s2.Meta.AppName = "test"
- s2.Logs.MaxDays = 123
- s2.Smtp.Host = "test"
- s2.Smtp.Enabled = true
- s2.S3.Enabled = true
- s2.S3.Endpoint = "test"
- s2.Backups.Cron = "* * * * *"
- s2.AdminAuthToken.Duration = 1
- s2.AdminPasswordResetToken.Duration = 2
- s2.AdminFileToken.Duration = 2
- s2.RecordAuthToken.Duration = 3
- s2.RecordPasswordResetToken.Duration = 4
- s2.RecordEmailChangeToken.Duration = 5
- s2.RecordVerificationToken.Duration = 6
- s2.RecordFileToken.Duration = 7
- s2.GoogleAuth.Enabled = true
- s2.GoogleAuth.ClientId = "google_test"
- s2.FacebookAuth.Enabled = true
- s2.FacebookAuth.ClientId = "facebook_test"
- s2.GithubAuth.Enabled = true
- s2.GithubAuth.ClientId = "github_test"
- s2.GitlabAuth.Enabled = true
- s2.GitlabAuth.ClientId = "gitlab_test"
- s2.DiscordAuth.Enabled = true
- s2.DiscordAuth.ClientId = "discord_test"
- s2.TwitterAuth.Enabled = true
- s2.TwitterAuth.ClientId = "twitter_test"
- s2.MicrosoftAuth.Enabled = true
- s2.MicrosoftAuth.ClientId = "microsoft_test"
- s2.SpotifyAuth.Enabled = true
- s2.SpotifyAuth.ClientId = "spotify_test"
- s2.KakaoAuth.Enabled = true
- s2.KakaoAuth.ClientId = "kakao_test"
- s2.TwitchAuth.Enabled = true
- s2.TwitchAuth.ClientId = "twitch_test"
- s2.StravaAuth.Enabled = true
- s2.StravaAuth.ClientId = "strava_test"
- s2.GiteeAuth.Enabled = true
- s2.GiteeAuth.ClientId = "gitee_test"
- s2.LivechatAuth.Enabled = true
- s2.LivechatAuth.ClientId = "livechat_test"
- s2.GiteaAuth.Enabled = true
- s2.GiteaAuth.ClientId = "gitea_test"
- s2.OIDCAuth.Enabled = true
- s2.OIDCAuth.ClientId = "oidc_test"
- s2.OIDC2Auth.Enabled = true
- s2.OIDC2Auth.ClientId = "oidc2_test"
- s2.OIDC3Auth.Enabled = true
- s2.OIDC3Auth.ClientId = "oidc3_test"
- s2.AppleAuth.Enabled = true
- s2.AppleAuth.ClientId = "apple_test"
- s2.InstagramAuth.Enabled = true
- s2.InstagramAuth.ClientId = "instagram_test"
- s2.VKAuth.Enabled = true
- s2.VKAuth.ClientId = "vk_test"
- s2.YandexAuth.Enabled = true
- s2.YandexAuth.ClientId = "yandex_test"
- s2.PatreonAuth.Enabled = true
- s2.PatreonAuth.ClientId = "patreon_test"
- s2.MailcowAuth.Enabled = true
- s2.MailcowAuth.ClientId = "mailcow_test"
- s2.BitbucketAuth.Enabled = true
- s2.BitbucketAuth.ClientId = "bitbucket_test"
- s2.PlanningcenterAuth.Enabled = true
- s2.PlanningcenterAuth.ClientId = "planningcenter_test"
-
- if err := s1.Merge(s2); err != nil {
- t.Fatal(err)
- }
-
- s1Encoded, err := json.Marshal(s1)
- if err != nil {
- t.Fatal(err)
- }
-
- s2Encoded, err := json.Marshal(s2)
- if err != nil {
- t.Fatal(err)
- }
-
- if string(s1Encoded) != string(s2Encoded) {
- t.Fatalf("Expected the same serialization, got %v VS %v", string(s1Encoded), string(s2Encoded))
- }
-}
-
-func TestSettingsClone(t *testing.T) {
- s1 := settings.New()
-
- s2, err := s1.Clone()
- if err != nil {
- t.Fatal(err)
- }
-
- s1Bytes, err := json.Marshal(s1)
- if err != nil {
- t.Fatal(err)
- }
-
- s2Bytes, err := json.Marshal(s2)
- if err != nil {
- t.Fatal(err)
- }
-
- if string(s1Bytes) != string(s2Bytes) {
- t.Fatalf("Expected equivalent serialization, got %v VS %v", string(s1Bytes), string(s2Bytes))
- }
-
- // verify that it is a deep copy
- s1.Meta.AppName = "new"
- if s1.Meta.AppName == s2.Meta.AppName {
- t.Fatalf("Expected s1 and s2 to have different Meta.AppName, got %s", s1.Meta.AppName)
- }
-}
-
-func TestSettingsRedactClone(t *testing.T) {
- testSecret := "test_secret"
-
- s1 := settings.New()
-
- // control fields
- s1.Meta.AppName = "test123"
-
- // secrets
- s1.Smtp.Password = testSecret
- s1.S3.Secret = testSecret
- s1.Backups.S3.Secret = testSecret
- s1.AdminAuthToken.Secret = testSecret
- s1.AdminPasswordResetToken.Secret = testSecret
- s1.AdminFileToken.Secret = testSecret
- s1.RecordAuthToken.Secret = testSecret
- s1.RecordPasswordResetToken.Secret = testSecret
- s1.RecordEmailChangeToken.Secret = testSecret
- s1.RecordVerificationToken.Secret = testSecret
- s1.RecordFileToken.Secret = testSecret
- s1.GoogleAuth.ClientSecret = testSecret
- s1.FacebookAuth.ClientSecret = testSecret
- s1.GithubAuth.ClientSecret = testSecret
- s1.GitlabAuth.ClientSecret = testSecret
- s1.DiscordAuth.ClientSecret = testSecret
- s1.TwitterAuth.ClientSecret = testSecret
- s1.MicrosoftAuth.ClientSecret = testSecret
- s1.SpotifyAuth.ClientSecret = testSecret
- s1.KakaoAuth.ClientSecret = testSecret
- s1.TwitchAuth.ClientSecret = testSecret
- s1.StravaAuth.ClientSecret = testSecret
- s1.GiteeAuth.ClientSecret = testSecret
- s1.LivechatAuth.ClientSecret = testSecret
- s1.GiteaAuth.ClientSecret = testSecret
- s1.OIDCAuth.ClientSecret = testSecret
- s1.OIDC2Auth.ClientSecret = testSecret
- s1.OIDC3Auth.ClientSecret = testSecret
- s1.AppleAuth.ClientSecret = testSecret
- s1.InstagramAuth.ClientSecret = testSecret
- s1.VKAuth.ClientSecret = testSecret
- s1.YandexAuth.ClientSecret = testSecret
- s1.PatreonAuth.ClientSecret = testSecret
- s1.MailcowAuth.ClientSecret = testSecret
- s1.BitbucketAuth.ClientSecret = testSecret
- s1.PlanningcenterAuth.ClientSecret = testSecret
-
- s1Bytes, err := json.Marshal(s1)
- if err != nil {
- t.Fatal(err)
- }
-
- s2, err := s1.RedactClone()
- if err != nil {
- t.Fatal(err)
- }
-
- s2Bytes, err := json.Marshal(s2)
- if err != nil {
- t.Fatal(err)
- }
-
- if bytes.Equal(s1Bytes, s2Bytes) {
- t.Fatalf("Expected the 2 settings to differ, got \n%s", s2Bytes)
- }
-
- if strings.Contains(string(s2Bytes), testSecret) {
- t.Fatalf("Expected %q secret to be replaced with mask, got \n%s", testSecret, s2Bytes)
- }
-
- if !strings.Contains(string(s2Bytes), settings.SecretMask) {
- t.Fatalf("Expected the secrets to be replaced with the secret mask, got \n%s", s2Bytes)
- }
-
- if !strings.Contains(string(s2Bytes), `"appName":"test123"`) {
- t.Fatalf("Missing control field in \n%s", s2Bytes)
- }
-}
-
-func TestNamedAuthProviderConfigs(t *testing.T) {
- s := settings.New()
-
- s.GoogleAuth.ClientId = "google_test"
- s.FacebookAuth.ClientId = "facebook_test"
- s.GithubAuth.ClientId = "github_test"
- s.GitlabAuth.ClientId = "gitlab_test"
- s.GitlabAuth.Enabled = true // control
- s.DiscordAuth.ClientId = "discord_test"
- s.TwitterAuth.ClientId = "twitter_test"
- s.MicrosoftAuth.ClientId = "microsoft_test"
- s.SpotifyAuth.ClientId = "spotify_test"
- s.KakaoAuth.ClientId = "kakao_test"
- s.TwitchAuth.ClientId = "twitch_test"
- s.StravaAuth.ClientId = "strava_test"
- s.GiteeAuth.ClientId = "gitee_test"
- s.LivechatAuth.ClientId = "livechat_test"
- s.GiteaAuth.ClientId = "gitea_test"
- s.OIDCAuth.ClientId = "oidc_test"
- s.OIDC2Auth.ClientId = "oidc2_test"
- s.OIDC3Auth.ClientId = "oidc3_test"
- s.AppleAuth.ClientId = "apple_test"
- s.InstagramAuth.ClientId = "instagram_test"
- s.VKAuth.ClientId = "vk_test"
- s.YandexAuth.ClientId = "yandex_test"
- s.PatreonAuth.ClientId = "patreon_test"
- s.MailcowAuth.ClientId = "mailcow_test"
- s.BitbucketAuth.ClientId = "bitbucket_test"
- s.PlanningcenterAuth.ClientId = "planningcenter_test"
-
- result := s.NamedAuthProviderConfigs()
-
- encoded, err := json.Marshal(result)
- if err != nil {
- t.Fatal(err)
- }
- encodedStr := string(encoded)
-
- expectedParts := []string{
- `"discord":{"enabled":false,"clientId":"discord_test"`,
- `"facebook":{"enabled":false,"clientId":"facebook_test"`,
- `"github":{"enabled":false,"clientId":"github_test"`,
- `"gitlab":{"enabled":true,"clientId":"gitlab_test"`,
- `"google":{"enabled":false,"clientId":"google_test"`,
- `"microsoft":{"enabled":false,"clientId":"microsoft_test"`,
- `"spotify":{"enabled":false,"clientId":"spotify_test"`,
- `"twitter":{"enabled":false,"clientId":"twitter_test"`,
- `"kakao":{"enabled":false,"clientId":"kakao_test"`,
- `"twitch":{"enabled":false,"clientId":"twitch_test"`,
- `"strava":{"enabled":false,"clientId":"strava_test"`,
- `"gitee":{"enabled":false,"clientId":"gitee_test"`,
- `"livechat":{"enabled":false,"clientId":"livechat_test"`,
- `"gitea":{"enabled":false,"clientId":"gitea_test"`,
- `"oidc":{"enabled":false,"clientId":"oidc_test"`,
- `"oidc2":{"enabled":false,"clientId":"oidc2_test"`,
- `"oidc3":{"enabled":false,"clientId":"oidc3_test"`,
- `"apple":{"enabled":false,"clientId":"apple_test"`,
- `"instagram":{"enabled":false,"clientId":"instagram_test"`,
- `"vk":{"enabled":false,"clientId":"vk_test"`,
- `"yandex":{"enabled":false,"clientId":"yandex_test"`,
- `"patreon":{"enabled":false,"clientId":"patreon_test"`,
- `"mailcow":{"enabled":false,"clientId":"mailcow_test"`,
- `"bitbucket":{"enabled":false,"clientId":"bitbucket_test"`,
- `"planningcenter":{"enabled":false,"clientId":"planningcenter_test"`,
- }
- for _, p := range expectedParts {
- if !strings.Contains(encodedStr, p) {
- t.Fatalf("Expected \n%s \nin \n%s", p, encodedStr)
- }
- }
-}
-
-func TestTokenConfigValidate(t *testing.T) {
- scenarios := []struct {
- config settings.TokenConfig
- expectError bool
- }{
- // zero values
- {
- settings.TokenConfig{},
- true,
- },
- // invalid data
- {
- settings.TokenConfig{
- Secret: strings.Repeat("a", 5),
- Duration: 4,
- },
- true,
- },
- // valid secret but invalid duration
- {
- settings.TokenConfig{
- Secret: strings.Repeat("a", 30),
- Duration: 63072000 + 1,
- },
- true,
- },
- // valid data
- {
- settings.TokenConfig{
- Secret: strings.Repeat("a", 30),
- Duration: 100,
- },
- false,
- },
- }
-
- for i, scenario := range scenarios {
- result := scenario.config.Validate()
-
- if result != nil && !scenario.expectError {
- t.Errorf("(%d) Didn't expect error, got %v", i, result)
- }
-
- if result == nil && scenario.expectError {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- }
-}
-
-func TestSmtpConfigValidate(t *testing.T) {
- scenarios := []struct {
- config settings.SmtpConfig
- expectError bool
- }{
- // zero values (disabled)
- {
- settings.SmtpConfig{},
- false,
- },
- // zero values (enabled)
- {
- settings.SmtpConfig{Enabled: true},
- true,
- },
- // invalid data
- {
- settings.SmtpConfig{
- Enabled: true,
- Host: "test:test:test",
- Port: -10,
- },
- true,
- },
- // invalid auth method
- {
- settings.SmtpConfig{
- Enabled: true,
- Host: "example.com",
- Port: 100,
- AuthMethod: "example",
- },
- true,
- },
- // valid data (no explicit auth method)
- {
- settings.SmtpConfig{
- Enabled: true,
- Host: "example.com",
- Port: 100,
- Tls: true,
- },
- false,
- },
- // valid data (explicit auth method - login)
- {
- settings.SmtpConfig{
- Enabled: true,
- Host: "example.com",
- Port: 100,
- AuthMethod: mailer.SmtpAuthLogin,
- },
- false,
- },
- // invalid ehlo/helo name
- {
- settings.SmtpConfig{
- Enabled: true,
- Host: "example.com",
- Port: 100,
- LocalName: "invalid!",
- },
- true,
- },
- // valid ehlo/helo name
- {
- settings.SmtpConfig{
- Enabled: true,
- Host: "example.com",
- Port: 100,
- LocalName: "example.com",
- },
- false,
- },
- }
-
- for i, scenario := range scenarios {
- result := scenario.config.Validate()
-
- if result != nil && !scenario.expectError {
- t.Errorf("(%d) Didn't expect error, got %v", i, result)
- }
-
- if result == nil && scenario.expectError {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- }
-}
-
-func TestS3ConfigValidate(t *testing.T) {
- scenarios := []struct {
- config settings.S3Config
- expectError bool
- }{
- // zero values (disabled)
- {
- settings.S3Config{},
- false,
- },
- // zero values (enabled)
- {
- settings.S3Config{Enabled: true},
- true,
- },
- // invalid data
- {
- settings.S3Config{
- Enabled: true,
- Endpoint: "test:test:test",
- },
- true,
- },
- // valid data (url endpoint)
- {
- settings.S3Config{
- Enabled: true,
- Endpoint: "https://localhost:8090",
- Bucket: "test",
- Region: "test",
- AccessKey: "test",
- Secret: "test",
- },
- false,
- },
- // valid data (hostname endpoint)
- {
- settings.S3Config{
- Enabled: true,
- Endpoint: "example.com",
- Bucket: "test",
- Region: "test",
- AccessKey: "test",
- Secret: "test",
- },
- false,
- },
- }
-
- for i, scenario := range scenarios {
- result := scenario.config.Validate()
-
- if result != nil && !scenario.expectError {
- t.Errorf("(%d) Didn't expect error, got %v", i, result)
- }
-
- if result == nil && scenario.expectError {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- }
-}
-
-func TestMetaConfigValidate(t *testing.T) {
- invalidTemplate := settings.EmailTemplate{
- Subject: "test",
- ActionUrl: "test",
- Body: "test",
- }
-
- noPlaceholdersTemplate := settings.EmailTemplate{
- Subject: "test",
- ActionUrl: "http://example.com",
- Body: "test",
- }
-
- withPlaceholdersTemplate := settings.EmailTemplate{
- Subject: "test",
- ActionUrl: "http://example.com" + settings.EmailPlaceholderToken,
- Body: "test" + settings.EmailPlaceholderActionUrl,
- }
-
- scenarios := []struct {
- config settings.MetaConfig
- expectError bool
- }{
- // zero values
- {
- settings.MetaConfig{},
- true,
- },
- // invalid data
- {
- settings.MetaConfig{
- AppName: strings.Repeat("a", 300),
- AppUrl: "test",
- SenderName: strings.Repeat("a", 300),
- SenderAddress: "invalid_email",
- VerificationTemplate: invalidTemplate,
- ResetPasswordTemplate: invalidTemplate,
- ConfirmEmailChangeTemplate: invalidTemplate,
- },
- true,
- },
- // invalid data (missing required placeholders)
- {
- settings.MetaConfig{
- AppName: "test",
- AppUrl: "https://example.com",
- SenderName: "test",
- SenderAddress: "test@example.com",
- VerificationTemplate: noPlaceholdersTemplate,
- ResetPasswordTemplate: noPlaceholdersTemplate,
- ConfirmEmailChangeTemplate: noPlaceholdersTemplate,
- },
- true,
- },
- // valid data
- {
- settings.MetaConfig{
- AppName: "test",
- AppUrl: "https://example.com",
- SenderName: "test",
- SenderAddress: "test@example.com",
- VerificationTemplate: withPlaceholdersTemplate,
- ResetPasswordTemplate: withPlaceholdersTemplate,
- ConfirmEmailChangeTemplate: withPlaceholdersTemplate,
- },
- false,
- },
- }
-
- for i, scenario := range scenarios {
- result := scenario.config.Validate()
-
- if result != nil && !scenario.expectError {
- t.Errorf("(%d) Didn't expect error, got %v", i, result)
- }
-
- if result == nil && scenario.expectError {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- }
-}
-
-func TestBackupsConfigValidate(t *testing.T) {
- scenarios := []struct {
- name string
- config settings.BackupsConfig
- expectedErrors []string
- }{
- {
- "zero value",
- settings.BackupsConfig{},
- []string{},
- },
- {
- "invalid cron",
- settings.BackupsConfig{
- Cron: "invalid",
- CronMaxKeep: 0,
- },
- []string{"cron", "cronMaxKeep"},
- },
- {
- "invalid enabled S3",
- settings.BackupsConfig{
- S3: settings.S3Config{
- Enabled: true,
- },
- },
- []string{"s3"},
- },
- {
- "valid data",
- settings.BackupsConfig{
- S3: settings.S3Config{
- Enabled: true,
- Endpoint: "example.com",
- Bucket: "test",
- Region: "test",
- AccessKey: "test",
- Secret: "test",
- },
- Cron: "*/10 * * * *",
- CronMaxKeep: 1,
- },
- []string{},
- },
- }
-
- for _, s := range scenarios {
- result := s.config.Validate()
-
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Errorf("[%s] Failed to parse errors %v", s.name, result)
- continue
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("[%s] Expected error keys %v, got %v", s.name, s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("[%s] Missing expected error key %q in %v", s.name, k, errs)
- }
- }
- }
-}
-
-func TestEmailTemplateValidate(t *testing.T) {
- scenarios := []struct {
- emailTemplate settings.EmailTemplate
- expectedErrors []string
- }{
- // require values
- {
- settings.EmailTemplate{},
- []string{"subject", "actionUrl", "body"},
- },
- // missing placeholders
- {
- settings.EmailTemplate{
- Subject: "test",
- ActionUrl: "test",
- Body: "test",
- },
- []string{"actionUrl", "body"},
- },
- // valid data
- {
- settings.EmailTemplate{
- Subject: "test",
- ActionUrl: "test" + settings.EmailPlaceholderToken,
- Body: "test" + settings.EmailPlaceholderActionUrl,
- },
- []string{},
- },
- }
-
- for i, s := range scenarios {
- result := s.emailTemplate.Validate()
-
- // parse errors
- errs, ok := result.(validation.Errors)
- if !ok && result != nil {
- t.Errorf("(%d) Failed to parse errors %v", i, result)
- continue
- }
-
- // check errors
- if len(errs) > len(s.expectedErrors) {
- t.Errorf("(%d) Expected error keys %v, got %v", i, s.expectedErrors, errs)
- }
- for _, k := range s.expectedErrors {
- if _, ok := errs[k]; !ok {
- t.Errorf("(%d) Missing expected error key %q in %v", i, k, errs)
- }
- }
- }
-}
-
-func TestEmailTemplateResolve(t *testing.T) {
- allPlaceholders := settings.EmailPlaceholderActionUrl + settings.EmailPlaceholderToken + settings.EmailPlaceholderAppName + settings.EmailPlaceholderAppUrl
-
- scenarios := []struct {
- emailTemplate settings.EmailTemplate
- expectedSubject string
- expectedBody string
- expectedActionUrl string
- }{
- // no placeholders
- {
- emailTemplate: settings.EmailTemplate{
- Subject: "subject:",
- Body: "body:",
- ActionUrl: "/actionUrl////",
- },
- expectedSubject: "subject:",
- expectedActionUrl: "/actionUrl/",
- expectedBody: "body:",
- },
- // with placeholders
- {
- emailTemplate: settings.EmailTemplate{
- ActionUrl: "/actionUrl////" + allPlaceholders,
- Subject: "subject:" + allPlaceholders,
- Body: "body:" + allPlaceholders,
- },
- expectedActionUrl: fmt.Sprintf(
- "/actionUrl/%%7BACTION_URL%%7D%s%s%s",
- "token_test",
- "name_test",
- "url_test",
- ),
- expectedSubject: fmt.Sprintf(
- "subject:%s%s%s%s",
- settings.EmailPlaceholderActionUrl,
- settings.EmailPlaceholderToken,
- "name_test",
- "url_test",
- ),
- expectedBody: fmt.Sprintf(
- "body:%s%s%s%s",
- fmt.Sprintf(
- "/actionUrl/%%7BACTION_URL%%7D%s%s%s",
- "token_test",
- "name_test",
- "url_test",
- ),
- "token_test",
- "name_test",
- "url_test",
- ),
- },
- }
-
- for i, s := range scenarios {
- subject, body, actionUrl := s.emailTemplate.Resolve("name_test", "url_test", "token_test")
-
- if s.expectedSubject != subject {
- t.Errorf("(%d) Expected subject %q got %q", i, s.expectedSubject, subject)
- }
-
- if s.expectedBody != body {
- t.Errorf("(%d) Expected body \n%v got \n%v", i, s.expectedBody, body)
- }
-
- if s.expectedActionUrl != actionUrl {
- t.Errorf("(%d) Expected actionUrl \n%v got \n%v", i, s.expectedActionUrl, actionUrl)
- }
- }
-}
-
-func TestLogsConfigValidate(t *testing.T) {
- scenarios := []struct {
- config settings.LogsConfig
- expectError bool
- }{
- // zero values
- {
- settings.LogsConfig{},
- false,
- },
- // invalid data
- {
- settings.LogsConfig{MaxDays: -10},
- true,
- },
- // valid data
- {
- settings.LogsConfig{MaxDays: 1},
- false,
- },
- }
-
- for i, scenario := range scenarios {
- result := scenario.config.Validate()
-
- if result != nil && !scenario.expectError {
- t.Errorf("(%d) Didn't expect error, got %v", i, result)
- }
-
- if result == nil && scenario.expectError {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- }
-}
-
-func TestAuthProviderConfigValidate(t *testing.T) {
- scenarios := []struct {
- config settings.AuthProviderConfig
- expectError bool
- }{
- // zero values (disabled)
- {
- settings.AuthProviderConfig{},
- false,
- },
- // zero values (enabled)
- {
- settings.AuthProviderConfig{Enabled: true},
- true,
- },
- // invalid data
- {
- settings.AuthProviderConfig{
- Enabled: true,
- ClientId: "",
- ClientSecret: "",
- AuthUrl: "test",
- TokenUrl: "test",
- UserApiUrl: "test",
- },
- true,
- },
- // valid data (only the required)
- {
- settings.AuthProviderConfig{
- Enabled: true,
- ClientId: "test",
- ClientSecret: "test",
- },
- false,
- },
- // valid data (fill all fields)
- {
- settings.AuthProviderConfig{
- Enabled: true,
- ClientId: "test",
- ClientSecret: "test",
- DisplayName: "test",
- PKCE: types.Pointer(true),
- AuthUrl: "https://example.com",
- TokenUrl: "https://example.com",
- UserApiUrl: "https://example.com",
- },
- false,
- },
- }
-
- for i, scenario := range scenarios {
- result := scenario.config.Validate()
-
- if result != nil && !scenario.expectError {
- t.Errorf("(%d) Didn't expect error, got %v", i, result)
- }
-
- if result == nil && scenario.expectError {
- t.Errorf("(%d) Expected error, got nil", i)
- }
- }
-}
-
-func TestAuthProviderConfigSetupProvider(t *testing.T) {
- provider := auth.NewGithubProvider()
-
- // disabled config
- c1 := settings.AuthProviderConfig{Enabled: false}
- if err := c1.SetupProvider(provider); err == nil {
- t.Errorf("Expected error, got nil")
- }
-
- c2 := settings.AuthProviderConfig{
- Enabled: true,
- ClientId: "test_ClientId",
- ClientSecret: "test_ClientSecret",
- AuthUrl: "test_AuthUrl",
- UserApiUrl: "test_UserApiUrl",
- TokenUrl: "test_TokenUrl",
- DisplayName: "test_DisplayName",
- PKCE: types.Pointer(true),
- }
- if err := c2.SetupProvider(provider); err != nil {
- t.Error(err)
- }
-
- if provider.ClientId() != c2.ClientId {
- t.Fatalf("Expected ClientId %s, got %s", c2.ClientId, provider.ClientId())
- }
-
- if provider.ClientSecret() != c2.ClientSecret {
- t.Fatalf("Expected ClientSecret %s, got %s", c2.ClientSecret, provider.ClientSecret())
- }
-
- if provider.AuthUrl() != c2.AuthUrl {
- t.Fatalf("Expected AuthUrl %s, got %s", c2.AuthUrl, provider.AuthUrl())
- }
-
- if provider.UserApiUrl() != c2.UserApiUrl {
- t.Fatalf("Expected UserApiUrl %s, got %s", c2.UserApiUrl, provider.UserApiUrl())
- }
-
- if provider.TokenUrl() != c2.TokenUrl {
- t.Fatalf("Expected TokenUrl %s, got %s", c2.TokenUrl, provider.TokenUrl())
- }
-
- if provider.DisplayName() != c2.DisplayName {
- t.Fatalf("Expected DisplayName %s, got %s", c2.DisplayName, provider.DisplayName())
- }
-
- if provider.PKCE() != *c2.PKCE {
- t.Fatalf("Expected PKCE %v, got %v", *c2.PKCE, provider.PKCE())
- }
-}
diff --git a/models/table_info.go b/models/table_info.go
deleted file mode 100644
index ef62bf88..00000000
--- a/models/table_info.go
+++ /dev/null
@@ -1,15 +0,0 @@
-package models
-
-import "github.com/pocketbase/pocketbase/tools/types"
-
-type TableInfoRow struct {
- // the `db:"pk"` tag has special semantic so we cannot rename
- // the original field without specifying a custom mapper
- PK int
-
- Index int `db:"cid"`
- Name string `db:"name"`
- Type string `db:"type"`
- NotNull bool `db:"notnull"`
- DefaultValue types.JsonRaw `db:"dflt_value"`
-}
diff --git a/plugins/ghupdate/ghupdate.go b/plugins/ghupdate/ghupdate.go
index f85a771c..6d98ac34 100644
--- a/plugins/ghupdate/ghupdate.go
+++ b/plugins/ghupdate/ghupdate.go
@@ -252,6 +252,7 @@ func (p *plugin) update(withBackup bool) error {
fmt.Print("\n")
color.Cyan("Here is a list with some of the %s changes:", latest.Tag)
// remove the update command note to avoid "stuttering"
+ // (@todo consider moving to a config option)
releaseNotes := strings.TrimSpace(strings.Replace(latest.Body, "> _To update the prebuilt executable you can run `./"+p.config.ArchiveExecutable+" update`._", "", 1))
color.Cyan(releaseNotes)
fmt.Print("\n")
diff --git a/plugins/ghupdate/ghupdate_test.go b/plugins/ghupdate/ghupdate_test.go
index fc6bb97c..e692cc8e 100644
--- a/plugins/ghupdate/ghupdate_test.go
+++ b/plugins/ghupdate/ghupdate_test.go
@@ -26,11 +26,13 @@ func TestCompareVersions(t *testing.T) {
{"3.2.4", "3.2.3", -1},
}
- for i, s := range scenarios {
- result := compareVersions(s.a, s.b)
+ for _, s := range scenarios {
+ t.Run(s.a+"VS"+s.b, func(t *testing.T) {
+ result := compareVersions(s.a, s.b)
- if result != s.expected {
- t.Fatalf("[%d] Expected %q vs %q to result in %d, got %d", i, s.a, s.b, s.expected, result)
- }
+ if result != s.expected {
+ t.Fatalf("Expected %q vs %q to result in %d, got %d", s.a, s.b, s.expected, result)
+ }
+ })
}
}
diff --git a/plugins/jsvm/binds.go b/plugins/jsvm/binds.go
index d85962a0..68b4799a 100644
--- a/plugins/jsvm/binds.go
+++ b/plugins/jsvm/binds.go
@@ -12,29 +12,23 @@ import (
"os/exec"
"path/filepath"
"reflect"
+ "slices"
"strings"
"time"
"github.com/dop251/goja"
validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/labstack/echo/v5"
- "github.com/labstack/echo/v5/middleware"
+ "github.com/golang-jwt/jwt/v4"
"github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase/apis"
"github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
"github.com/pocketbase/pocketbase/forms"
"github.com/pocketbase/pocketbase/mails"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
- "github.com/pocketbase/pocketbase/tokens"
- "github.com/pocketbase/pocketbase/tools/cron"
"github.com/pocketbase/pocketbase/tools/filesystem"
"github.com/pocketbase/pocketbase/tools/hook"
"github.com/pocketbase/pocketbase/tools/inflector"
- "github.com/pocketbase/pocketbase/tools/list"
"github.com/pocketbase/pocketbase/tools/mailer"
- "github.com/pocketbase/pocketbase/tools/rest"
+ "github.com/pocketbase/pocketbase/tools/router"
"github.com/pocketbase/pocketbase/tools/security"
"github.com/pocketbase/pocketbase/tools/subscriptions"
"github.com/pocketbase/pocketbase/tools/types"
@@ -49,11 +43,11 @@ func hooksBinds(app core.App, loader *goja.Runtime, executors *vmsPool) {
appType := reflect.TypeOf(app)
appValue := reflect.ValueOf(app)
totalMethods := appType.NumMethod()
- excludeHooks := []string{"OnBeforeServe"}
+ excludeHooks := []string{"OnServe"}
for i := 0; i < totalMethods; i++ {
method := appType.Method(i)
- if !strings.HasPrefix(method.Name, "On") || list.ExistInSlice(method.Name, excludeHooks) {
+ if !strings.HasPrefix(method.Name, "On") || slices.Contains(excludeHooks, method.Name) {
continue // not a hook or excluded
}
@@ -69,9 +63,9 @@ func hooksBinds(app core.App, loader *goja.Runtime, executors *vmsPool) {
}
hookInstance := appValue.MethodByName(method.Name).Call(tagsAsValues)[0]
- addFunc := hookInstance.MethodByName("Add")
+ hookBindFunc := hookInstance.MethodByName("BindFunc")
- handlerType := addFunc.Type().In(0)
+ handlerType := hookBindFunc.Type().In(0)
handler := reflect.MakeFunc(handlerType, func(args []reflect.Value) (results []reflect.Value) {
handlerArgs := make([]any, len(args))
@@ -84,15 +78,10 @@ func hooksBinds(app core.App, loader *goja.Runtime, executors *vmsPool) {
res, err := executor.RunProgram(pr)
executor.Set("__args", goja.Undefined())
- // check for returned error or false
+ // check for returned error value
if res != nil {
- switch v := res.Export().(type) {
- case error:
- return v
- case bool:
- if !v {
- return hook.StopPropagation
- }
+ if resErr, ok := res.Export().(error); ok {
+ return resErr
}
}
@@ -103,20 +92,16 @@ func hooksBinds(app core.App, loader *goja.Runtime, executors *vmsPool) {
})
// register the wrapped hook handler
- addFunc.Call([]reflect.Value{handler})
+ hookBindFunc.Call([]reflect.Value{handler})
})
}
}
func cronBinds(app core.App, loader *goja.Runtime, executors *vmsPool) {
- scheduler := cron.New()
-
- var wasServeTriggered bool
-
loader.Set("cronAdd", func(jobId, cronExpr, handler string) {
pr := goja.MustCompile("", "{("+handler+").apply(undefined)}", true)
- err := scheduler.Add(jobId, cronExpr, func() {
+ err := app.Cron().Add(jobId, cronExpr, func() {
err := executors.run(func(executor *goja.Runtime) error {
_, err := executor.RunProgram(pr)
return err
@@ -133,32 +118,29 @@ func cronBinds(app core.App, loader *goja.Runtime, executors *vmsPool) {
if err != nil {
panic("[cronAdd] failed to register cron job " + jobId + ": " + err.Error())
}
-
- // start the ticker (if not already)
- if wasServeTriggered && scheduler.Total() > 0 && !scheduler.HasStarted() {
- scheduler.Start()
- }
})
+ // note: it is not necessary needed but it is here for consistency
loader.Set("cronRemove", func(jobId string) {
- scheduler.Remove(jobId)
-
- // stop the ticker if there are no other jobs
- if scheduler.Total() == 0 {
- scheduler.Stop()
- }
+ app.Cron().Remove(jobId)
})
- app.OnBeforeServe().Add(func(e *core.ServeEvent) error {
- // start the ticker (if not already)
- if scheduler.Total() > 0 && !scheduler.HasStarted() {
- scheduler.Start()
- }
+ // register the removal helper also in the executors to allow removing cron jobs from everywhere
+ oldFactory := executors.factory
+ executors.factory = func() *goja.Runtime {
+ vm := oldFactory()
- wasServeTriggered = true
+ vm.Set("cronRemove", func(jobId string) {
+ app.Cron().Remove(jobId)
+ })
- return nil
- })
+ return vm
+ }
+ for _, item := range executors.items {
+ item.vm.Set("cronRemove", func(jobId string) {
+ app.Cron().Remove(jobId)
+ })
+ }
}
func routerBinds(app core.App, loader *goja.Runtime, executors *vmsPool) {
@@ -168,15 +150,15 @@ func routerBinds(app core.App, loader *goja.Runtime, executors *vmsPool) {
panic("[routerAdd] failed to wrap middlewares: " + err.Error())
}
- wrappedHandler, err := wrapHandler(executors, handler)
+ wrappedHandler, err := wrapHandlerFunc(executors, handler)
if err != nil {
panic("[routerAdd] failed to wrap handler: " + err.Error())
}
- app.OnBeforeServe().Add(func(e *core.ServeEvent) error {
- e.Router.Add(strings.ToUpper(method), path, wrappedHandler, wrappedMiddlewares...)
+ app.OnServe().BindFunc(func(e *core.ServeEvent) error {
+ e.Router.Route(strings.ToUpper(method), path, wrappedHandler).Bind(wrappedMiddlewares...)
- return nil
+ return e.Next()
})
})
@@ -186,40 +168,28 @@ func routerBinds(app core.App, loader *goja.Runtime, executors *vmsPool) {
panic("[routerUse] failed to wrap middlewares: " + err.Error())
}
- app.OnBeforeServe().Add(func(e *core.ServeEvent) error {
- e.Router.Use(wrappedMiddlewares...)
- return nil
- })
- })
-
- loader.Set("routerPre", func(middlewares ...goja.Value) {
- wrappedMiddlewares, err := wrapMiddlewares(executors, middlewares...)
- if err != nil {
- panic("[routerPre] failed to wrap middlewares: " + err.Error())
- }
-
- app.OnBeforeServe().Add(func(e *core.ServeEvent) error {
- e.Router.Pre(wrappedMiddlewares...)
- return nil
+ app.OnServe().BindFunc(func(e *core.ServeEvent) error {
+ e.Router.Bind(wrappedMiddlewares...)
+ return e.Next()
})
})
}
-func wrapHandler(executors *vmsPool, handler goja.Value) (echo.HandlerFunc, error) {
+func wrapHandlerFunc(executors *vmsPool, handler goja.Value) (hook.HandlerFunc[*core.RequestEvent], error) {
if handler == nil {
return nil, errors.New("handler must be non-nil")
}
switch h := handler.Export().(type) {
- case echo.HandlerFunc:
- // "native" handler - no need to wrap
+ case hook.HandlerFunc[*core.RequestEvent]:
+ // "native" handler func - no need to wrap
return h, nil
case func(goja.FunctionCall) goja.Value, string:
pr := goja.MustCompile("", "{("+handler.String()+").apply(undefined, __args)}", true)
- wrappedHandler := func(c echo.Context) error {
+ wrappedHandler := func(e *core.RequestEvent) error {
return executors.run(func(executor *goja.Runtime) error {
- executor.Set("__args", []any{c})
+ executor.Set("__args", []any{e})
res, err := executor.RunProgram(pr)
executor.Set("__args", goja.Undefined())
@@ -240,29 +210,44 @@ func wrapHandler(executors *vmsPool, handler goja.Value) (echo.HandlerFunc, erro
}
}
-func wrapMiddlewares(executors *vmsPool, rawMiddlewares ...goja.Value) ([]echo.MiddlewareFunc, error) {
- wrappedMiddlewares := make([]echo.MiddlewareFunc, len(rawMiddlewares))
+type gojaHookHandler struct {
+ priority int
+ id string
+ serializedFunc string
+}
+
+func wrapMiddlewares(executors *vmsPool, rawMiddlewares ...goja.Value) ([]*hook.Handler[*core.RequestEvent], error) {
+ wrappedMiddlewares := make([]*hook.Handler[*core.RequestEvent], len(rawMiddlewares))
for i, m := range rawMiddlewares {
if m == nil {
- return nil, errors.New("middleware func must be non-nil")
+ return nil, errors.New("middleware must be non-nil")
}
switch v := m.Export().(type) {
- case echo.MiddlewareFunc:
- // "native" middleware - no need to wrap
+ case *hook.Handler[*core.RequestEvent]:
+ // "native" middleware handler - no need to wrap
wrappedMiddlewares[i] = v
- case func(goja.FunctionCall) goja.Value, string:
- pr := goja.MustCompile("", "{(("+m.String()+").apply(undefined, __args)).apply(undefined, __args2)}", true)
+ case hook.HandlerFunc[*core.RequestEvent]:
+ // "native" middleware func - wrap as handler
+ wrappedMiddlewares[i] = &hook.Handler[*core.RequestEvent]{
+ Func: v,
+ }
+ case *gojaHookHandler:
+ if v.serializedFunc == "" {
+ return nil, errors.New("missing or invalid Middleware function")
+ }
- wrappedMiddlewares[i] = func(next echo.HandlerFunc) echo.HandlerFunc {
- return func(c echo.Context) error {
+ pr := goja.MustCompile("", "{("+v.serializedFunc+").apply(undefined, __args)}", true)
+
+ wrappedMiddlewares[i] = &hook.Handler[*core.RequestEvent]{
+ Id: v.id,
+ Priority: v.priority,
+ Func: func(e *core.RequestEvent) error {
return executors.run(func(executor *goja.Runtime) error {
- executor.Set("__args", []any{next})
- executor.Set("__args2", []any{c})
+ executor.Set("__args", []any{e})
res, err := executor.RunProgram(pr)
executor.Set("__args", goja.Undefined())
- executor.Set("__args2", goja.Undefined())
// check for returned error
if res != nil {
@@ -273,7 +258,28 @@ func wrapMiddlewares(executors *vmsPool, rawMiddlewares ...goja.Value) ([]echo.M
return err
})
- }
+ },
+ }
+ case func(goja.FunctionCall) goja.Value, string:
+ pr := goja.MustCompile("", "{("+m.String()+").apply(undefined, __args)}", true)
+
+ wrappedMiddlewares[i] = &hook.Handler[*core.RequestEvent]{
+ Func: func(e *core.RequestEvent) error {
+ return executors.run(func(executor *goja.Runtime) error {
+ executor.Set("__args", []any{e})
+ res, err := executor.RunProgram(pr)
+ executor.Set("__args", goja.Undefined())
+
+ // check for returned error
+ if res != nil {
+ if v, ok := res.Export().(error); ok {
+ return v
+ }
+ }
+
+ return err
+ })
+ },
}
default:
return nil, errors.New("unsupported goja middleware type")
@@ -286,9 +292,10 @@ func wrapMiddlewares(executors *vmsPool, rawMiddlewares ...goja.Value) ([]echo.M
func baseBinds(vm *goja.Runtime) {
vm.SetFieldNameMapper(FieldMapper{})
+ // deprecated: use toString
vm.Set("readerToString", func(r io.Reader, maxBytes int) (string, error) {
if maxBytes == 0 {
- maxBytes = rest.DefaultMaxMemory
+ maxBytes = router.DefaultMaxMemory
}
limitReader := io.LimitReader(r, int64(maxBytes))
@@ -301,6 +308,34 @@ func baseBinds(vm *goja.Runtime) {
return string(bodyBytes), nil
})
+ vm.Set("toString", func(raw any, maxReaderBytes int) (string, error) {
+ switch v := raw.(type) {
+ case io.Reader:
+ if maxReaderBytes == 0 {
+ maxReaderBytes = router.DefaultMaxMemory
+ }
+
+ limitReader := io.LimitReader(v, int64(maxReaderBytes))
+
+ bodyBytes, readErr := io.ReadAll(limitReader)
+ if readErr != nil {
+ return "", readErr
+ }
+
+ return string(bodyBytes), nil
+ default:
+ str, err := cast.ToStringE(v)
+ if err == nil {
+ return str, nil
+ }
+
+ // as a last attempt try to json encode the value
+ rawBytes, _ := json.Marshal(raw)
+
+ return string(rawBytes), nil
+ }
+ })
+
vm.Set("sleep", func(milliseconds int64) {
time.Sleep(time.Duration(milliseconds) * time.Millisecond)
})
@@ -313,6 +348,15 @@ func baseBinds(vm *goja.Runtime) {
return elem.Addr().Interface()
})
+ vm.Set("unmarshal", func(data, dst any) error {
+ raw, err := json.Marshal(data)
+ if err != nil {
+ return err
+ }
+
+ return json.Unmarshal(raw, &dst)
+ })
+
vm.Set("DynamicModel", func(call goja.ConstructorCall) *goja.Object {
shape, ok := call.Argument(0).Export().(map[string]any)
if !ok || len(shape) == 0 {
@@ -327,17 +371,17 @@ func baseBinds(vm *goja.Runtime) {
})
vm.Set("Record", func(call goja.ConstructorCall) *goja.Object {
- var instance *models.Record
+ var instance *core.Record
- collection, ok := call.Argument(0).Export().(*models.Collection)
+ collection, ok := call.Argument(0).Export().(*core.Collection)
if ok {
- instance = models.NewRecord(collection)
+ instance = core.NewRecord(collection)
data, ok := call.Argument(1).Export().(map[string]any)
if ok {
instance.Load(data)
}
} else {
- instance = &models.Record{}
+ instance = &core.Record{}
}
instanceValue := vm.ToValue(instance).(*goja.Object)
@@ -347,24 +391,91 @@ func baseBinds(vm *goja.Runtime) {
})
vm.Set("Collection", func(call goja.ConstructorCall) *goja.Object {
- instance := &models.Collection{}
+ instance := &core.Collection{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ registerFactoryAsConstructor(vm, "BaseCollection", core.NewBaseCollection)
+ registerFactoryAsConstructor(vm, "AuthCollection", core.NewAuthCollection)
+ registerFactoryAsConstructor(vm, "ViewCollection", core.NewViewCollection)
+
+ vm.Set("FieldsList", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.FieldsList{}
return structConstructorUnmarshal(vm, call, instance)
})
- vm.Set("Admin", func(call goja.ConstructorCall) *goja.Object {
- instance := &models.Admin{}
- return structConstructorUnmarshal(vm, call, instance)
- })
+ // fields
+ // ---
+ vm.Set("Field", func(call goja.ConstructorCall) *goja.Object {
+ data, _ := call.Argument(0).Export().(map[string]any)
+ rawDataSlice, _ := json.Marshal([]any{data})
- vm.Set("Schema", func(call goja.ConstructorCall) *goja.Object {
- instance := &schema.Schema{}
- return structConstructorUnmarshal(vm, call, instance)
- })
+ fieldsList := core.NewFieldsList()
+ _ = fieldsList.UnmarshalJSON(rawDataSlice)
- vm.Set("SchemaField", func(call goja.ConstructorCall) *goja.Object {
- instance := &schema.SchemaField{}
+ if len(fieldsList) == 0 {
+ return nil
+ }
+
+ field := fieldsList[0]
+
+ fieldValue := vm.ToValue(field).(*goja.Object)
+ fieldValue.SetPrototype(call.This.Prototype())
+
+ return fieldValue
+ })
+ vm.Set("NumberField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.NumberField{}
return structConstructorUnmarshal(vm, call, instance)
})
+ vm.Set("BoolField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.BoolField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("TextField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.TextField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("URLField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.URLField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("EmailField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.EmailField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("EditorField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.EditorField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("PasswordField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.PasswordField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("DateField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.DateField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("AutodateField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.AutodateField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("JSONField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.JSONField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("RelationField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.RelationField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("SelectField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.SelectField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ vm.Set("FileField", func(call goja.ConstructorCall) *goja.Object {
+ instance := &core.FileField{}
+ return structConstructorUnmarshal(vm, call, instance)
+ })
+ // ---
vm.Set("MailerMessage", func(call goja.ConstructorCall) *goja.Object {
instance := &mailer.Message{}
@@ -377,10 +488,28 @@ func baseBinds(vm *goja.Runtime) {
})
vm.Set("RequestInfo", func(call goja.ConstructorCall) *goja.Object {
- instance := &models.RequestInfo{Context: models.RequestInfoContextDefault}
+ instance := &core.RequestInfo{Context: core.RequestInfoContextDefault}
return structConstructor(vm, call, instance)
})
+ // ```js
+ // new Middleware((e) => {
+ // return e.next()
+ // }, 100, "example_middleware")
+ // ```
+ vm.Set("Middleware", func(call goja.ConstructorCall) *goja.Object {
+ instance := &gojaHookHandler{}
+
+ instance.serializedFunc = call.Argument(0).String()
+ instance.priority = cast.ToInt(call.Argument(1).Export())
+ instance.id = cast.ToString(call.Argument(2).Export())
+
+ instanceValue := vm.ToValue(instance).(*goja.Object)
+ instanceValue.SetPrototype(call.This.Prototype())
+
+ return instanceValue
+ })
+
vm.Set("DateTime", func(call goja.ConstructorCall) *goja.Object {
instance := types.NowDateTime()
@@ -406,24 +535,6 @@ func baseBinds(vm *goja.Runtime) {
return instanceValue
})
- vm.Set("Dao", func(call goja.ConstructorCall) *goja.Object {
- concurrentDB, _ := call.Argument(0).Export().(dbx.Builder)
- if concurrentDB == nil {
- panic("[Dao] missing required Dao(concurrentDB, [nonconcurrentDB]) argument")
- }
-
- nonConcurrentDB, _ := call.Argument(1).Export().(dbx.Builder)
- if nonConcurrentDB == nil {
- nonConcurrentDB = concurrentDB
- }
-
- instance := daos.NewMultiDB(concurrentDB, nonConcurrentDB)
- instanceValue := vm.ToValue(instance).(*goja.Object)
- instanceValue.SetPrototype(call.This.Prototype())
-
- return instanceValue
- })
-
vm.Set("Cookie", func(call goja.ConstructorCall) *goja.Object {
instance := &http.Cookie{}
return structConstructor(vm, call, instance)
@@ -462,30 +573,10 @@ func mailsBinds(vm *goja.Runtime) {
obj := vm.NewObject()
vm.Set("$mails", obj)
- // admin
- obj.Set("sendAdminPasswordReset", mails.SendAdminPasswordReset)
-
- // record
obj.Set("sendRecordPasswordReset", mails.SendRecordPasswordReset)
obj.Set("sendRecordVerification", mails.SendRecordVerification)
obj.Set("sendRecordChangeEmail", mails.SendRecordChangeEmail)
-}
-
-func tokensBinds(vm *goja.Runtime) {
- obj := vm.NewObject()
- vm.Set("$tokens", obj)
-
- // admin
- obj.Set("adminAuthToken", tokens.NewAdminAuthToken)
- obj.Set("adminResetPasswordToken", tokens.NewAdminResetPasswordToken)
- obj.Set("adminFileToken", tokens.NewAdminFileToken)
-
- // record
- obj.Set("recordAuthToken", tokens.NewRecordAuthToken)
- obj.Set("recordVerifyToken", tokens.NewRecordVerifyToken)
- obj.Set("recordResetPasswordToken", tokens.NewRecordResetPasswordToken)
- obj.Set("recordChangeEmailToken", tokens.NewRecordChangeEmailToken)
- obj.Set("recordFileToken", tokens.NewRecordFileToken)
+ obj.Set("sendRecordOTP", mails.SendRecordOTP)
}
func securityBinds(vm *goja.Runtime) {
@@ -502,6 +593,7 @@ func securityBinds(vm *goja.Runtime) {
// random
obj.Set("randomString", security.RandomString)
+ obj.Set("randomStringByRegex", security.RandomStringByRegex)
obj.Set("randomStringWithAlphabet", security.RandomStringWithAlphabet)
obj.Set("pseudorandomString", security.PseudorandomString)
obj.Set("pseudorandomStringWithAlphabet", security.PseudorandomStringWithAlphabet)
@@ -513,7 +605,9 @@ func securityBinds(vm *goja.Runtime) {
obj.Set("parseJWT", func(token string, verificationKey string) (map[string]any, error) {
return security.ParseJWT(token, verificationKey)
})
- obj.Set("createJWT", security.NewJWT)
+ obj.Set("createJWT", func(payload jwt.MapClaims, signingKey string, secDuration int) (string, error) {
+ return security.NewJWT(payload, signingKey, time.Duration(secDuration)*time.Second)
+ })
// encryption
obj.Set("encrypt", security.Encrypt)
@@ -535,7 +629,7 @@ func filesystemBinds(vm *goja.Runtime) {
obj.Set("fileFromPath", filesystem.NewFileFromPath)
obj.Set("fileFromBytes", filesystem.NewFileFromBytes)
obj.Set("fileFromMultipart", filesystem.NewFileFromMultipart)
- obj.Set("fileFromUrl", func(url string, secTimeout int) (*filesystem.File, error) {
+ obj.Set("fileFromURL", func(url string, secTimeout int) (*filesystem.File, error) {
if secTimeout == 0 {
secTimeout = 120
}
@@ -543,7 +637,7 @@ func filesystemBinds(vm *goja.Runtime) {
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(secTimeout)*time.Second)
defer cancel()
- return filesystem.NewFileFromUrl(ctx, url)
+ return filesystem.NewFileFromURL(ctx, url)
})
}
@@ -592,24 +686,8 @@ func osBinds(vm *goja.Runtime) {
}
func formsBinds(vm *goja.Runtime) {
- registerFactoryAsConstructor(vm, "AdminLoginForm", forms.NewAdminLogin)
- registerFactoryAsConstructor(vm, "AdminPasswordResetConfirmForm", forms.NewAdminPasswordResetConfirm)
- registerFactoryAsConstructor(vm, "AdminPasswordResetRequestForm", forms.NewAdminPasswordResetRequest)
- registerFactoryAsConstructor(vm, "AdminUpsertForm", forms.NewAdminUpsert)
registerFactoryAsConstructor(vm, "AppleClientSecretCreateForm", forms.NewAppleClientSecretCreate)
- registerFactoryAsConstructor(vm, "CollectionUpsertForm", forms.NewCollectionUpsert)
- registerFactoryAsConstructor(vm, "CollectionsImportForm", forms.NewCollectionsImport)
- registerFactoryAsConstructor(vm, "RealtimeSubscribeForm", forms.NewRealtimeSubscribe)
- registerFactoryAsConstructor(vm, "RecordEmailChangeConfirmForm", forms.NewRecordEmailChangeConfirm)
- registerFactoryAsConstructor(vm, "RecordEmailChangeRequestForm", forms.NewRecordEmailChangeRequest)
- registerFactoryAsConstructor(vm, "RecordOAuth2LoginForm", forms.NewRecordOAuth2Login)
- registerFactoryAsConstructor(vm, "RecordPasswordLoginForm", forms.NewRecordPasswordLogin)
- registerFactoryAsConstructor(vm, "RecordPasswordResetConfirmForm", forms.NewRecordPasswordResetConfirm)
- registerFactoryAsConstructor(vm, "RecordPasswordResetRequestForm", forms.NewRecordPasswordResetRequest)
registerFactoryAsConstructor(vm, "RecordUpsertForm", forms.NewRecordUpsert)
- registerFactoryAsConstructor(vm, "RecordVerificationConfirmForm", forms.NewRecordVerificationConfirm)
- registerFactoryAsConstructor(vm, "RecordVerificationRequestForm", forms.NewRecordVerificationRequest)
- registerFactoryAsConstructor(vm, "SettingsUpsertForm", forms.NewSettingsUpsert)
registerFactoryAsConstructor(vm, "TestEmailSendForm", forms.NewTestEmailSend)
registerFactoryAsConstructor(vm, "TestS3FilesystemForm", forms.NewTestS3Filesystem)
}
@@ -618,33 +696,33 @@ func apisBinds(vm *goja.Runtime) {
obj := vm.NewObject()
vm.Set("$apis", obj)
- obj.Set("staticDirectoryHandler", func(dir string, indexFallback bool) echo.HandlerFunc {
- return apis.StaticDirectoryHandler(os.DirFS(dir), indexFallback)
+ obj.Set("static", func(dir string, indexFallback bool) hook.HandlerFunc[*core.RequestEvent] {
+ return apis.Static(os.DirFS(dir), indexFallback)
})
// middlewares
obj.Set("requireGuestOnly", apis.RequireGuestOnly)
- obj.Set("requireRecordAuth", apis.RequireRecordAuth)
- obj.Set("requireAdminAuth", apis.RequireAdminAuth)
- obj.Set("requireAdminAuthOnlyIfAny", apis.RequireAdminAuthOnlyIfAny)
- obj.Set("requireAdminOrRecordAuth", apis.RequireAdminOrRecordAuth)
- obj.Set("requireAdminOrOwnerAuth", apis.RequireAdminOrOwnerAuth)
- obj.Set("activityLogger", apis.ActivityLogger)
- obj.Set("gzip", middleware.Gzip)
- obj.Set("bodyLimit", middleware.BodyLimit)
+ obj.Set("requireAuth", apis.RequireAuth)
+ obj.Set("requireSuperuserAuth", apis.RequireSuperuserAuth)
+ obj.Set("requireSuperuserAuthOnlyIfAny", apis.RequireSuperuserAuthOnlyIfAny)
+ obj.Set("requireSuperuserOrOwnerAuth", apis.RequireSuperuserOrOwnerAuth)
+ obj.Set("skipSuccessActivityLog", apis.SkipSuccessActivityLog)
+ obj.Set("gzip", apis.Gzip)
+ obj.Set("bodyLimit", apis.BodyLimit)
// record helpers
- obj.Set("requestInfo", apis.RequestInfo)
obj.Set("recordAuthResponse", apis.RecordAuthResponse)
obj.Set("enrichRecord", apis.EnrichRecord)
obj.Set("enrichRecords", apis.EnrichRecords)
// api errors
- registerFactoryAsConstructor(vm, "ApiError", apis.NewApiError)
- registerFactoryAsConstructor(vm, "NotFoundError", apis.NewNotFoundError)
- registerFactoryAsConstructor(vm, "BadRequestError", apis.NewBadRequestError)
- registerFactoryAsConstructor(vm, "ForbiddenError", apis.NewForbiddenError)
- registerFactoryAsConstructor(vm, "UnauthorizedError", apis.NewUnauthorizedError)
+ registerFactoryAsConstructor(vm, "ApiError", router.NewApiError)
+ registerFactoryAsConstructor(vm, "NotFoundError", router.NewNotFoundError)
+ registerFactoryAsConstructor(vm, "BadRequestError", router.NewBadRequestError)
+ registerFactoryAsConstructor(vm, "ForbiddenError", router.NewForbiddenError)
+ registerFactoryAsConstructor(vm, "UnauthorizedError", router.NewUnauthorizedError)
+ registerFactoryAsConstructor(vm, "TooManyRequestsError", router.NewTooManyRequestsError)
+ registerFactoryAsConstructor(vm, "InternalServerError", router.NewInternalServerError)
}
func httpClientBinds(vm *goja.Runtime) {
@@ -661,7 +739,7 @@ func httpClientBinds(vm *goja.Runtime) {
})
type sendResult struct {
- Json any `json:"json"`
+ JSON any `json:"json"`
Headers map[string][]string `json:"headers"`
Cookies map[string]*http.Cookie `json:"cookies"`
Raw string `json:"raw"`
@@ -727,6 +805,8 @@ func httpClientBinds(vm *goja.Runtime) {
reqBody = bytes.NewReader(encoded)
} else {
switch v := config.Body.(type) {
+ case io.Reader:
+ reqBody = v
case FormData:
body, mp, err := v.toMultipart()
if err != nil {
@@ -755,13 +835,6 @@ func httpClientBinds(vm *goja.Runtime) {
req.Header.Set("content-type", contentType)
}
- // @todo consider removing during the refactoring
- //
- // fallback to json content-type
- if req.Header.Get("content-type") == "" {
- req.Header.Set("content-type", "application/json")
- }
-
res, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
@@ -787,12 +860,12 @@ func httpClientBinds(vm *goja.Runtime) {
if len(result.Raw) != 0 {
// try as map
- result.Json = map[string]any{}
- if err := json.Unmarshal(bodyRaw, &result.Json); err != nil {
+ result.JSON = map[string]any{}
+ if err := json.Unmarshal(bodyRaw, &result.JSON); err != nil {
// try as slice
- result.Json = []any{}
- if err := json.Unmarshal(bodyRaw, &result.Json); err != nil {
- result.Json = nil
+ result.JSON = []any{}
+ if err := json.Unmarshal(bodyRaw, &result.JSON); err != nil {
+ result.JSON = nil
}
}
}
@@ -864,7 +937,7 @@ func structConstructor(vm *goja.Runtime, call goja.ConstructorCall, instance any
func structConstructorUnmarshal(vm *goja.Runtime, call goja.ConstructorCall, instance any) *goja.Object {
if data := call.Argument(0).Export(); data != nil {
if raw, err := json.Marshal(data); err == nil {
- json.Unmarshal(raw, instance)
+ _ = json.Unmarshal(raw, instance)
}
}
@@ -893,13 +966,13 @@ func newDynamicModel(shape map[string]any) any {
switch kind := vt.Kind(); kind {
case reflect.Map:
raw, _ := json.Marshal(v)
- newV := types.JsonMap{}
+ newV := types.JSONMap[any]{}
newV.Scan(raw)
v = newV
vt = reflect.TypeOf(v)
case reflect.Slice, reflect.Array:
raw, _ := json.Marshal(v)
- newV := types.JsonArray[any]{}
+ newV := types.JSONArray[any]{}
newV.Scan(raw)
v = newV
vt = reflect.TypeOf(newV)
diff --git a/plugins/jsvm/binds_test.go b/plugins/jsvm/binds_test.go
index d2568dc6..faeaf8ee 100644
--- a/plugins/jsvm/binds_test.go
+++ b/plugins/jsvm/binds_test.go
@@ -15,16 +15,12 @@ import (
"github.com/dop251/goja"
validation "github.com/go-ozzo/ozzo-validation/v4"
- "github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase/apis"
"github.com/pocketbase/pocketbase/core"
- "github.com/pocketbase/pocketbase/daos"
- "github.com/pocketbase/pocketbase/models"
- "github.com/pocketbase/pocketbase/models/schema"
"github.com/pocketbase/pocketbase/tests"
"github.com/pocketbase/pocketbase/tools/filesystem"
"github.com/pocketbase/pocketbase/tools/mailer"
- "github.com/pocketbase/pocketbase/tools/security"
+ "github.com/pocketbase/pocketbase/tools/router"
"github.com/spf13/cast"
)
@@ -47,13 +43,10 @@ func TestBaseBindsCount(t *testing.T) {
vm := goja.New()
baseBinds(vm)
- testBindsCount(vm, "this", 17, t)
+ testBindsCount(vm, "this", 34, t)
}
func TestBaseBindsSleep(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
vm := goja.New()
baseBinds(vm)
vm.Set("reader", strings.NewReader("test"))
@@ -73,9 +66,6 @@ func TestBaseBindsSleep(t *testing.T) {
}
func TestBaseBindsReaderToString(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
vm := goja.New()
baseBinds(vm)
vm.Set("reader", strings.NewReader("test"))
@@ -92,10 +82,63 @@ func TestBaseBindsReaderToString(t *testing.T) {
}
}
-func TestBaseBindsCookie(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
+func TestBaseBindsToString(t *testing.T) {
+ vm := goja.New()
+ baseBinds(vm)
+ vm.Set("scenarios", []struct {
+ Name string
+ Value any
+ Expected string
+ }{
+ {"null", nil, ""},
+ {"string", "test", "test"},
+ {"number", -12.4, "-12.4"},
+ {"bool", true, "true"},
+ {"arr", []int{1, 2, 3}, `[1,2,3]`},
+ {"obj", map[string]any{"test": 123}, `{"test":123}`},
+ {"reader", strings.NewReader("test"), "test"},
+ {"struct", struct {
+ Name string
+ private string
+ }{Name: "123", private: "456"}, `{"Name":"123"}`},
+ })
+ _, err := vm.RunString(`
+ for (let s of scenarios) {
+ let result = toString(s.value)
+
+ if (result != s.expected) {
+ throw new Error('[' + s.name + '] Expected string ' + s.expected + ', got ' + result);
+ }
+ }
+ `)
+ if err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestBaseBindsUnmarshal(t *testing.T) {
+ vm := goja.New()
+ baseBinds(vm)
+ vm.Set("data", &map[string]any{"a": 123})
+
+ _, err := vm.RunString(`
+ unmarshal({"b": 456}, data)
+
+ if (data.a != 123) {
+ throw new Error('Expected data.a 123, got ' + data.a);
+ }
+
+ if (data.b != 456) {
+ throw new Error('Expected data.b 456, got ' + data.b);
+ }
+ `)
+ if err != nil {
+ t.Fatal(err)
+ }
+}
+
+func TestBaseBindsCookie(t *testing.T) {
vm := goja.New()
baseBinds(vm)
@@ -125,9 +168,6 @@ func TestBaseBindsCookie(t *testing.T) {
}
func TestBaseBindsSubscriptionMessage(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
vm := goja.New()
baseBinds(vm)
vm.Set("bytesToString", func(b []byte) string {
@@ -159,7 +199,7 @@ func TestBaseBindsRecord(t *testing.T) {
app, _ := tests.NewTestApp()
defer app.Cleanup()
- collection, err := app.Dao().FindCollectionByNameOrId("users")
+ collection, err := app.FindCachedCollectionByNameOrId("users")
if err != nil {
t.Fatal(err)
}
@@ -175,7 +215,7 @@ func TestBaseBindsRecord(t *testing.T) {
t.Fatal(err)
}
- m1, ok := v1.Export().(*models.Record)
+ m1, ok := v1.Export().(*core.Record)
if !ok {
t.Fatalf("Expected m1 to be models.Record, got \n%v", m1)
}
@@ -187,9 +227,9 @@ func TestBaseBindsRecord(t *testing.T) {
t.Fatal(err)
}
- m2, ok := v2.Export().(*models.Record)
+ m2, ok := v2.Export().(*core.Record)
if !ok {
- t.Fatalf("Expected m2 to be models.Record, got \n%v", m2)
+ t.Fatalf("Expected m2 to be core.Record, got \n%v", m2)
}
if m2.Collection().Name != "users" {
@@ -205,14 +245,14 @@ func TestBaseBindsCollection(t *testing.T) {
vm := goja.New()
baseBinds(vm)
- v, err := vm.RunString(`new Collection({ name: "test", createRule: "@request.auth.id != ''", schema: [{name: "title", "type": "text"}] })`)
+ v, err := vm.RunString(`new Collection({ name: "test", createRule: "@request.auth.id != ''", fields: [{name: "title", "type": "text"}] })`)
if err != nil {
t.Fatal(err)
}
- m, ok := v.Export().(*models.Collection)
+ m, ok := v.Export().(*core.Collection)
if !ok {
- t.Fatalf("Expected models.Collection, got %v", m)
+ t.Fatalf("Expected core.Collection, got %v", m)
}
if m.Name != "test" {
@@ -224,61 +264,174 @@ func TestBaseBindsCollection(t *testing.T) {
t.Fatalf("Expected create rule %q, got %v", "@request.auth.id != ''", m.CreateRule)
}
- if f := m.Schema.GetFieldByName("title"); f == nil {
- t.Fatalf("Expected schema to be set, got %v", m.Schema)
+ if f := m.Fields.GetByName("title"); f == nil {
+ t.Fatalf("Expected fields to be set, got %v", m.Fields)
}
}
-func TestBaseVMAdminBind(t *testing.T) {
+func TestBaseBindsCollectionFactories(t *testing.T) {
vm := goja.New()
baseBinds(vm)
- v, err := vm.RunString(`new Admin({ email: "test@example.com" })`)
+ scenarios := []struct {
+ js string
+ expectedType string
+ }{
+ {"new BaseCollection('test')", core.CollectionTypeBase},
+ {"new ViewCollection('test')", core.CollectionTypeView},
+ {"new AuthCollection('test')", core.CollectionTypeAuth},
+ }
+
+ for _, s := range scenarios {
+ t.Run(s.js, func(t *testing.T) {
+ v, err := vm.RunString(s.js)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ c, ok := v.Export().(*core.Collection)
+ if !ok {
+ t.Fatalf("Expected *core.Collection instance, got %T (%v)", c, c)
+ }
+
+ if c.Name != "test" {
+ t.Fatalf("Expected collection name %q, got %v", "test", c.Name)
+ }
+
+ if c.Type != s.expectedType {
+ t.Fatalf("Expected collection type %q, got %v", s.expectedType, c.Type)
+ }
+ })
+ }
+}
+
+func TestBaseBindsFieldsList(t *testing.T) {
+ vm := goja.New()
+ baseBinds(vm)
+
+ v, err := vm.RunString(`new FieldsList([{name: "title", "type": "text"}])`)
if err != nil {
t.Fatal(err)
}
- m, ok := v.Export().(*models.Admin)
+ m, ok := v.Export().(*core.FieldsList)
if !ok {
- t.Fatalf("Expected models.Admin, got %v", m)
+ t.Fatalf("Expected core.FieldsList, got %v", m)
+ }
+
+ if f := m.GetByName("title"); f == nil {
+ t.Fatalf("Expected fields list to be loaded, got %v", m)
}
}
-func TestBaseBindsSchema(t *testing.T) {
+func TestBaseBindsField(t *testing.T) {
vm := goja.New()
baseBinds(vm)
- v, err := vm.RunString(`new Schema([{name: "title", "type": "text"}])`)
+ v, err := vm.RunString(`new Field({name: "test", "type": "bool"})`)
if err != nil {
t.Fatal(err)
}
- m, ok := v.Export().(*schema.Schema)
+ f, ok := v.Export().(*core.BoolField)
if !ok {
- t.Fatalf("Expected schema.Schema, got %v", m)
+ t.Fatalf("Expected *core.BoolField, got %v", f)
}
- if f := m.GetFieldByName("title"); f == nil {
- t.Fatalf("Expected schema fields to be loaded, got %v", m.Fields())
+ if f.Name != "test" {
+ t.Fatalf("Expected field %q, got %v", "test", f)
}
}
-func TestBaseBindsSchemaField(t *testing.T) {
+func isType[T any](v any) bool {
+ _, ok := v.(T)
+ return ok
+}
+
+func TestBaseBindsNamedFields(t *testing.T) {
+ t.Parallel()
+
vm := goja.New()
baseBinds(vm)
- v, err := vm.RunString(`new SchemaField({name: "title", "type": "text"})`)
- if err != nil {
- t.Fatal(err)
+ scenarios := []struct {
+ js string
+ typeFunc func(v any) bool
+ }{
+ {
+ "new NumberField({name: 'test'})",
+ isType[*core.NumberField],
+ },
+ {
+ "new BoolField({name: 'test'})",
+ isType[*core.BoolField],
+ },
+ {
+ "new TextField({name: 'test'})",
+ isType[*core.TextField],
+ },
+ {
+ "new URLField({name: 'test'})",
+ isType[*core.URLField],
+ },
+ {
+ "new EmailField({name: 'test'})",
+ isType[*core.EmailField],
+ },
+ {
+ "new EditorField({name: 'test'})",
+ isType[*core.EditorField],
+ },
+ {
+ "new PasswordField({name: 'test'})",
+ isType[*core.PasswordField],
+ },
+ {
+ "new DateField({name: 'test'})",
+ isType[*core.DateField],
+ },
+ {
+ "new AutodateField({name: 'test'})",
+ isType[*core.AutodateField],
+ },
+ {
+ "new JSONField({name: 'test'})",
+ isType[*core.JSONField],
+ },
+ {
+ "new RelationField({name: 'test'})",
+ isType[*core.RelationField],
+ },
+ {
+ "new SelectField({name: 'test'})",
+ isType[*core.SelectField],
+ },
+ {
+ "new FileField({name: 'test'})",
+ isType[*core.FileField],
+ },
}
- f, ok := v.Export().(*schema.SchemaField)
- if !ok {
- t.Fatalf("Expected schema.SchemaField, got %v", f)
- }
+ for _, s := range scenarios {
+ t.Run(s.js, func(t *testing.T) {
+ v, err := vm.RunString(s.js)
+ if err != nil {
+ t.Fatal(err)
+ }
- if f.Name != "title" {
- t.Fatalf("Expected field %q, got %v", "title", f)
+ f, ok := v.Export().(core.Field)
+ if !ok {
+ t.Fatalf("Expected core.Field instance, got %T (%v)", f, f)
+ }
+
+ if !s.typeFunc(f) {
+ t.Fatalf("Unexpected field type %T (%v)", f, f)
+ }
+
+ if f.GetName() != "test" {
+ t.Fatalf("Expected field %q, got %v", "test", f)
+ }
+ })
}
}
@@ -363,17 +516,32 @@ func TestBaseBindsRequestInfo(t *testing.T) {
baseBinds(vm)
_, err := vm.RunString(`
- let info = new RequestInfo({
- admin: new Admin({id: "test1"}),
- data: {"name": "test2"}
+ const info = new RequestInfo({
+ body: {"name": "test2"}
});
- if (info.admin?.id != "test1") {
- throw new Error('Expected info.admin.id to be test1, got: ' + info.admin?.id);
+ if (info.body?.name != "test2") {
+ throw new Error('Expected info.body.name to be test2, got: ' + info.body?.name);
}
+ `)
+ if err != nil {
+ t.Fatal(err)
+ }
+}
- if (info.data?.name != "test2") {
- throw new Error('Expected info.data.name to be test2, got: ' + info.data?.name);
+func TestBaseBindsMiddleware(t *testing.T) {
+ vm := goja.New()
+ baseBinds(vm)
+
+ _, err := vm.RunString(`
+ const m = new Middleware(
+ (e) => {},
+ 10,
+ "test"
+ );
+
+ if (!m) {
+ throw new Error('Expected non-empty Middleware instance');
}
`)
if err != nil {
@@ -449,59 +617,12 @@ func TestBaseBindsValidationError(t *testing.T) {
}
}
-func TestBaseBindsDao(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- vm := goja.New()
- baseBinds(vm)
- vm.Set("db", app.Dao().ConcurrentDB())
- vm.Set("db2", app.Dao().NonconcurrentDB())
-
- scenarios := []struct {
- js string
- concurrentDB dbx.Builder
- nonconcurrentDB dbx.Builder
- }{
- {
- js: "new Dao(db)",
- concurrentDB: app.Dao().ConcurrentDB(),
- nonconcurrentDB: app.Dao().ConcurrentDB(),
- },
- {
- js: "new Dao(db, db2)",
- concurrentDB: app.Dao().ConcurrentDB(),
- nonconcurrentDB: app.Dao().NonconcurrentDB(),
- },
- }
-
- for _, s := range scenarios {
- v, err := vm.RunString(s.js)
- if err != nil {
- t.Fatalf("[%s] Failed to execute js script, got %v", s.js, err)
- }
-
- d, ok := v.Export().(*daos.Dao)
- if !ok {
- t.Fatalf("[%s] Expected daos.Dao, got %v", s.js, d)
- }
-
- if d.ConcurrentDB() != s.concurrentDB {
- t.Fatalf("[%s] The ConcurrentDB instances doesn't match", s.js)
- }
-
- if d.NonconcurrentDB() != s.nonconcurrentDB {
- t.Fatalf("[%s] The NonconcurrentDB instances doesn't match", s.js)
- }
- }
-}
-
func TestDbxBinds(t *testing.T) {
app, _ := tests.NewTestApp()
defer app.Cleanup()
vm := goja.New()
- vm.Set("db", app.Dao().DB())
+ vm.Set("db", app.DB())
baseBinds(vm)
dbxBinds(vm)
@@ -602,12 +723,7 @@ func TestMailsBinds(t *testing.T) {
app, _ := tests.NewTestApp()
defer app.Cleanup()
- admin, err := app.Dao().FindAdminByEmail("test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- record, err := app.Dao().FindAuthRecordByEmail("users", "test@example.com")
+ record, err := app.FindAuthRecordByEmail("users", "test@example.com")
if err != nil {
t.Fatal(err)
}
@@ -616,28 +732,27 @@ func TestMailsBinds(t *testing.T) {
baseBinds(vm)
mailsBinds(vm)
vm.Set("$app", app)
- vm.Set("admin", admin)
vm.Set("record", record)
_, vmErr := vm.RunString(`
- $mails.sendAdminPasswordReset($app, admin);
- if (!$app.testMailer.lastMessage.html.includes("/_/#/confirm-password-reset/")) {
- throw new Error("Expected admin password reset email")
- }
-
$mails.sendRecordPasswordReset($app, record);
- if (!$app.testMailer.lastMessage.html.includes("/_/#/auth/confirm-password-reset/")) {
- throw new Error("Expected record password reset email")
+ if (!$app.testMailer.lastMessage().html.includes("/_/#/auth/confirm-password-reset/")) {
+ throw new Error("Expected record password reset email, got:" + JSON.stringify($app.testMailer.lastMessage()))
}
$mails.sendRecordVerification($app, record);
- if (!$app.testMailer.lastMessage.html.includes("/_/#/auth/confirm-verification/")) {
- throw new Error("Expected record verification email")
+ if (!$app.testMailer.lastMessage().html.includes("/_/#/auth/confirm-verification/")) {
+ throw new Error("Expected record verification email, got:" + JSON.stringify($app.testMailer.lastMessage()))
}
$mails.sendRecordChangeEmail($app, record, "new@example.com");
- if (!$app.testMailer.lastMessage.html.includes("/_/#/auth/confirm-email-change/")) {
- throw new Error("Expected record email change email")
+ if (!$app.testMailer.lastMessage().html.includes("/_/#/auth/confirm-email-change/")) {
+ throw new Error("Expected record email change email, got:" + JSON.stringify($app.testMailer.lastMessage()))
+ }
+
+ $mails.sendRecordOTP($app, record, "test_otp_id", "test_otp_pass");
+ if (!$app.testMailer.lastMessage().html.includes("test_otp_pass")) {
+ throw new Error("Expected record OTP email, got:" + JSON.stringify($app.testMailer.lastMessage()))
}
`)
if vmErr != nil {
@@ -645,97 +760,14 @@ func TestMailsBinds(t *testing.T) {
}
}
-func TestTokensBindsCount(t *testing.T) {
- vm := goja.New()
- tokensBinds(vm)
-
- testBindsCount(vm, "$tokens", 8, t)
-}
-
-func TestTokensBinds(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
- admin, err := app.Dao().FindAdminByEmail("test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- record, err := app.Dao().FindAuthRecordByEmail("users", "test@example.com")
- if err != nil {
- t.Fatal(err)
- }
-
- vm := goja.New()
- baseBinds(vm)
- tokensBinds(vm)
- vm.Set("$app", app)
- vm.Set("admin", admin)
- vm.Set("record", record)
-
- sceneraios := []struct {
- js string
- key string
- }{
- {
- `$tokens.adminAuthToken($app, admin)`,
- admin.TokenKey + app.Settings().AdminAuthToken.Secret,
- },
- {
- `$tokens.adminResetPasswordToken($app, admin)`,
- admin.TokenKey + app.Settings().AdminPasswordResetToken.Secret,
- },
- {
- `$tokens.adminFileToken($app, admin)`,
- admin.TokenKey + app.Settings().AdminFileToken.Secret,
- },
- {
- `$tokens.recordAuthToken($app, record)`,
- record.TokenKey() + app.Settings().RecordAuthToken.Secret,
- },
- {
- `$tokens.recordVerifyToken($app, record)`,
- record.TokenKey() + app.Settings().RecordVerificationToken.Secret,
- },
- {
- `$tokens.recordResetPasswordToken($app, record)`,
- record.TokenKey() + app.Settings().RecordPasswordResetToken.Secret,
- },
- {
- `$tokens.recordChangeEmailToken($app, record)`,
- record.TokenKey() + app.Settings().RecordEmailChangeToken.Secret,
- },
- {
- `$tokens.recordFileToken($app, record)`,
- record.TokenKey() + app.Settings().RecordFileToken.Secret,
- },
- }
-
- for _, s := range sceneraios {
- result, err := vm.RunString(s.js)
- if err != nil {
- t.Fatalf("[%s] Failed to execute js script, got %v", s.js, err)
- }
-
- v, _ := result.Export().(string)
-
- if _, err := security.ParseJWT(v, s.key); err != nil {
- t.Fatalf("[%s] Failed to parse JWT %v, got %v", s.js, v, err)
- }
- }
-}
-
func TestSecurityBindsCount(t *testing.T) {
vm := goja.New()
securityBinds(vm)
- testBindsCount(vm, "$security", 15, t)
+ testBindsCount(vm, "$security", 16, t)
}
func TestSecurityCryptoBinds(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
vm := goja.New()
baseBinds(vm)
securityBinds(vm)
@@ -770,9 +802,6 @@ func TestSecurityCryptoBinds(t *testing.T) {
}
func TestSecurityRandomStringBinds(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
vm := goja.New()
baseBinds(vm)
securityBinds(vm)
@@ -785,6 +814,7 @@ func TestSecurityRandomStringBinds(t *testing.T) {
{`$security.randomStringWithAlphabet(7, "abc")`, 7},
{`$security.pseudorandomString(8)`, 8},
{`$security.pseudorandomStringWithAlphabet(9, "abc")`, 9},
+ {`$security.randomStringByRegex("abc")`, 3},
}
for _, s := range sceneraios {
@@ -804,9 +834,6 @@ func TestSecurityRandomStringBinds(t *testing.T) {
}
func TestSecurityJWTBinds(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
sceneraios := []struct {
name string
js string
@@ -864,9 +891,6 @@ func TestSecurityJWTBinds(t *testing.T) {
}
func TestSecurityEncryptAndDecryptBinds(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
vm := goja.New()
baseBinds(vm)
securityBinds(vm)
@@ -903,7 +927,7 @@ func TestFilesystemBinds(t *testing.T) {
vm := goja.New()
vm.Set("mh", &multipart.FileHeader{Filename: "test"})
vm.Set("testFile", filepath.Join(app.DataDir(), "data.db"))
- vm.Set("baseUrl", srv.URL)
+ vm.Set("baseURL", srv.URL)
baseBinds(vm)
filesystemBinds(vm)
@@ -951,9 +975,9 @@ func TestFilesystemBinds(t *testing.T) {
}
}
- // fileFromUrl (success)
+ // fileFromURL (success)
{
- v, err := vm.RunString(`$filesystem.fileFromUrl(baseUrl + "/test")`)
+ v, err := vm.RunString(`$filesystem.fileFromURL(baseURL + "/test")`)
if err != nil {
t.Fatal(err)
}
@@ -961,13 +985,13 @@ func TestFilesystemBinds(t *testing.T) {
file, _ := v.Export().(*filesystem.File)
if file == nil || file.OriginalName != "test" {
- t.Fatalf("[fileFromUrl] Expected file with name %q, got %v", file.OriginalName, file)
+ t.Fatalf("[fileFromURL] Expected file with name %q, got %v", file.OriginalName, file)
}
}
- // fileFromUrl (failure)
+ // fileFromURL (failure)
{
- _, err := vm.RunString(`$filesystem.fileFromUrl(baseUrl + "/error")`)
+ _, err := vm.RunString(`$filesystem.fileFromURL(baseURL + "/error")`)
if err == nil {
t.Fatal("Expected url fetch error")
}
@@ -978,30 +1002,24 @@ func TestFormsBinds(t *testing.T) {
vm := goja.New()
formsBinds(vm)
- testBindsCount(vm, "this", 20, t)
+ testBindsCount(vm, "this", 4, t)
}
func TestApisBindsCount(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
vm := goja.New()
apisBinds(vm)
- testBindsCount(vm, "this", 6, t)
- testBindsCount(vm, "$apis", 14, t)
+ testBindsCount(vm, "this", 8, t)
+ testBindsCount(vm, "$apis", 12, t)
}
func TestApisBindsApiError(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
vm := goja.New()
apisBinds(vm)
scenarios := []struct {
js string
- expectCode int
+ expectStatus int
expectMessage string
expectData string
}{
@@ -1013,8 +1031,12 @@ func TestApisBindsApiError(t *testing.T) {
{"new BadRequestError('test', {'test': 1})", 400, "Test.", `{"test":1}`},
{"new ForbiddenError()", 403, "You are not allowed to perform this request.", "null"},
{"new ForbiddenError('test', {'test': 1})", 403, "Test.", `{"test":1}`},
- {"new UnauthorizedError()", 401, "Missing or invalid authentication token.", "null"},
+ {"new UnauthorizedError()", 401, "Missing or invalid authentication.", "null"},
{"new UnauthorizedError('test', {'test': 1})", 401, "Test.", `{"test":1}`},
+ {"new TooManyRequestsError()", 429, "Too Many Requests.", "null"},
+ {"new TooManyRequestsError('test', {'test': 1})", 429, "Test.", `{"test":1}`},
+ {"new InternalServerError()", 500, "Something went wrong while processing your request.", "null"},
+ {"new InternalServerError('test', {'test': 1})", 500, "Test.", `{"test":1}`},
}
for _, s := range scenarios {
@@ -1024,14 +1046,14 @@ func TestApisBindsApiError(t *testing.T) {
continue
}
- apiErr, ok := v.Export().(*apis.ApiError)
+ apiErr, ok := v.Export().(*router.ApiError)
if !ok {
t.Errorf("[%s] Expected ApiError, got %v", s.js, v)
continue
}
- if apiErr.Code != s.expectCode {
- t.Errorf("[%s] Expected Code %d, got %d", s.js, s.expectCode, apiErr.Code)
+ if apiErr.Status != s.expectStatus {
+ t.Errorf("[%s] Expected Status %d, got %d", s.js, s.expectStatus, apiErr.Status)
}
if apiErr.Message != s.expectMessage {
@@ -1065,7 +1087,7 @@ func TestLoadingDynamicModel(t *testing.T) {
obj: {},
})
- $app.dao().db()
+ $app.db()
.select("text", "bool", "number", "select_many", "json", "('{\"test\": 1}') as obj")
.from("demo1")
.where($dbx.hashExp({"id": "84nmscqy84lsi1t"}))
@@ -1116,7 +1138,7 @@ func TestLoadingArrayOf(t *testing.T) {
text: "",
}))
- $app.dao().db()
+ $app.db()
.select("id", "text")
.from("demo1")
.where($dbx.exp("id='84nmscqy84lsi1t' OR id='al1h9ijdeojtsjy'"))
@@ -1159,6 +1181,8 @@ func TestHttpClientBindsCount(t *testing.T) {
}
func TestHttpClientBindsSend(t *testing.T) {
+ t.Parallel()
+
// start a test server
server := httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
if req.URL.Query().Get("testError") != "" {
@@ -1203,7 +1227,7 @@ func TestHttpClientBindsSend(t *testing.T) {
vm := goja.New()
baseBinds(vm)
httpClientBinds(vm)
- vm.Set("testUrl", server.URL)
+ vm.Set("testURL", server.URL)
_, err := vm.RunString(`
function getNestedVal(data, path) {
@@ -1228,7 +1252,7 @@ func TestHttpClientBindsSend(t *testing.T) {
let testTimeout;
try {
$http.send({
- url: testUrl + "?testTimeout=3",
+ url: testURL + "?testTimeout=3",
timeout: 1
})
} catch (err) {
@@ -1240,20 +1264,20 @@ func TestHttpClientBindsSend(t *testing.T) {
// error response check
const test0 = $http.send({
- url: testUrl + "?testError=1",
+ url: testURL + "?testError=1",
})
// basic fields check
const test1 = $http.send({
method: "post",
- url: testUrl,
+ url: testURL,
headers: {"header1": "123", "header2": "456"},
body: '789',
})
// with custom content-type header
const test2 = $http.send({
- url: testUrl,
+ url: testURL,
headers: {"content-type": "text/plain"},
})
@@ -1261,7 +1285,7 @@ func TestHttpClientBindsSend(t *testing.T) {
const formData = new FormData()
formData.append("title", "123")
const test3 = $http.send({
- url: testUrl,
+ url: testURL,
body: formData,
headers: {"content-type": "text/plain"}, // should be ignored
})
@@ -1277,7 +1301,6 @@ func TestHttpClientBindsSend(t *testing.T) {
"json.method": "POST",
"json.headers.header1": "123",
"json.headers.header2": "456",
- "json.headers.content_type": "application/json", // default
"json.body": "789",
}],
[test2, {
@@ -1334,9 +1357,17 @@ func TestCronBindsCount(t *testing.T) {
defer app.Cleanup()
vm := goja.New()
- cronBinds(app, vm, nil)
+
+ pool := newPool(1, func() *goja.Runtime { return goja.New() })
+
+ cronBinds(app, vm, pool)
testBindsCount(vm, "this", 2, t)
+
+ pool.run(func(poolVM *goja.Runtime) error {
+ testBindsCount(poolVM, "this", 1, t)
+ return nil
+ })
}
func TestHooksBindsCount(t *testing.T) {
@@ -1346,7 +1377,7 @@ func TestHooksBindsCount(t *testing.T) {
vm := goja.New()
hooksBinds(app, vm, nil)
- testBindsCount(vm, "this", 88, t)
+ testBindsCount(vm, "this", 82, t)
}
func TestHooksBinds(t *testing.T) {
@@ -1371,35 +1402,41 @@ func TestHooksBinds(t *testing.T) {
hooksBinds(app, vm, pool)
_, err := vm.RunString(`
- onModelBeforeUpdate((e) => {
+ onModelUpdate((e) => {
result.called++;
+ e.next()
}, "demo1")
- onModelBeforeUpdate((e) => {
+ onModelUpdate((e) => {
throw new Error("example");
}, "demo1")
- onModelBeforeUpdate((e) => {
+ onModelUpdate((e) => {
result.called++;
+ e.next();
}, "demo2")
- onModelBeforeUpdate((e) => {
+ onModelUpdate((e) => {
result.called++;
+ e.next()
}, "demo2")
- onModelBeforeUpdate((e) => {
- return false
+ onModelUpdate((e) => {
+ // stop propagation
}, "demo2")
- onModelBeforeUpdate((e) => {
+ onModelUpdate((e) => {
result.called++;
+ e.next();
}, "demo2")
- onAfterBootstrap(() => {
+ onBootstrap((e) => {
+ e.next()
+
// check hooks propagation and tags filtering
- const recordA = $app.dao().findFirstRecordByFilter("demo2", "1=1")
+ const recordA = $app.findFirstRecordByFilter("demo2", "1=1")
recordA.set("title", "update")
- $app.dao().saveRecord(recordA)
+ $app.save(recordA)
if (result.called != 2) {
throw new Error("Expected result.called to be 2, got " + result.called)
}
@@ -1410,9 +1447,9 @@ func TestHooksBinds(t *testing.T) {
// check error handling
let hasErr = false
try {
- const recordB = $app.dao().findFirstRecordByFilter("demo1", "1=1")
+ const recordB = $app.findFirstRecordByFilter("demo1", "1=1")
recordB.set("text", "update")
- $app.dao().saveRecord(recordB)
+ $app.save(recordB)
} catch (err) {
hasErr = true
}
@@ -1438,7 +1475,7 @@ func TestRouterBindsCount(t *testing.T) {
vm := goja.New()
routerBinds(app, vm, nil)
- testBindsCount(vm, "this", 3, t)
+ testBindsCount(vm, "this", 2, t)
}
func TestRouterBinds(t *testing.T) {
@@ -1446,9 +1483,8 @@ func TestRouterBinds(t *testing.T) {
defer app.Cleanup()
result := &struct {
- AddCount int
- UseCount int
- PreCount int
+ AddCount int
+ WithCount int
}{}
vmFactory := func() *goja.Runtime {
@@ -1467,68 +1503,52 @@ func TestRouterBinds(t *testing.T) {
_, err := vm.RunString(`
routerAdd("GET", "/test", (e) => {
result.addCount++;
- }, (next) => {
- return (c) => {
- result.addCount++;
-
- return next(c);
- }
+ }, (e) => {
+ result.addCount++;
+ return e.next();
})
- routerUse((next) => {
- return (c) => {
- result.useCount++;
+ routerUse((e) => {
+ result.withCount++;
- return next(c)
- }
- })
-
- routerPre((next) => {
- return (c) => {
- result.preCount++;
-
- return next(c)
- }
+ return e.next();
})
`)
if err != nil {
t.Fatal(err)
}
- e, err := apis.InitApi(app)
+ pbRouter, err := apis.NewRouter(app)
if err != nil {
t.Fatal(err)
}
- serveEvent := &core.ServeEvent{
- App: app,
- Router: e,
- }
- if err := app.OnBeforeServe().Trigger(serveEvent); err != nil {
+ serveEvent := new(core.ServeEvent)
+ serveEvent.App = app
+ serveEvent.Router = pbRouter
+ if err = app.OnServe().Trigger(serveEvent); err != nil {
t.Fatal(err)
}
rec := httptest.NewRecorder()
req := httptest.NewRequest("GET", "/test", nil)
- e.ServeHTTP(rec, req)
+
+ mux, err := serveEvent.Router.BuildMux()
+ if err != nil {
+ t.Fatalf("Failed to build router mux: %v", err)
+ }
+ mux.ServeHTTP(rec, req)
if result.AddCount != 2 {
t.Fatalf("Expected AddCount %d, got %d", 2, result.AddCount)
}
- if result.UseCount != 1 {
- t.Fatalf("Expected UseCount %d, got %d", 1, result.UseCount)
- }
-
- if result.PreCount != 1 {
- t.Fatalf("Expected PreCount %d, got %d", 1, result.PreCount)
+ if result.WithCount != 1 {
+ t.Fatalf("Expected WithCount %d, got %d", 1, result.WithCount)
}
}
func TestFilepathBindsCount(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
vm := goja.New()
filepathBinds(vm)
@@ -1536,9 +1556,6 @@ func TestFilepathBindsCount(t *testing.T) {
}
func TestOsBindsCount(t *testing.T) {
- app, _ := tests.NewTestApp()
- defer app.Cleanup()
-
vm := goja.New()
osBinds(vm)
diff --git a/plugins/jsvm/form_data.go b/plugins/jsvm/form_data.go
index d4215761..ff413198 100644
--- a/plugins/jsvm/form_data.go
+++ b/plugins/jsvm/form_data.go
@@ -84,9 +84,7 @@ func (data FormData) Values() []any {
result := make([]any, 0, len(data))
for _, values := range data {
- for _, v := range values {
- result = append(result, v)
- }
+ result = append(result, values...)
}
return result
diff --git a/plugins/jsvm/internal/types/generated/types.d.ts b/plugins/jsvm/internal/types/generated/types.d.ts
index dac56c61..b02be9d0 100644
--- a/plugins/jsvm/internal/types/generated/types.d.ts
+++ b/plugins/jsvm/internal/types/generated/types.d.ts
@@ -1,4 +1,4 @@
-// 1710682789
+// 1727605671
// GENERATED CODE - DO NOT MODIFY BY HAND
// -------------------------------------------------------------------
@@ -55,9 +55,9 @@ declare function cronRemove(jobId: string): void;
* Example:
*
* ```js
- * routerAdd("GET", "/hello", (c) => {
- * return c.json(200, {"message": "Hello!"})
- * }, $apis.requireAdminOrRecordAuth())
+ * routerAdd("GET", "/hello", (e) => {
+ * return e.json(200, {"message": "Hello!"})
+ * }, $apis.requireAuth())
* ```
*
* _Note that this method is available only in pb_hooks context._
@@ -67,8 +67,8 @@ declare function cronRemove(jobId: string): void;
declare function routerAdd(
method: string,
path: string,
- handler: echo.HandlerFunc,
- ...middlewares: Array,
+ handler: (e: core.RequestEvent) => void,
+ ...middlewares: Array<((e: core.RequestEvent) => void)|Middleware>,
): void;
/**
@@ -78,11 +78,9 @@ declare function routerAdd(
* Example:
*
* ```js
- * routerUse((next) => {
- * return (c) => {
- * console.log(c.path())
- * return next(c)
- * }
+ * routerUse((e) => {
+ * console.log(e.request.url.path)
+ * return e.next()
* })
* ```
*
@@ -90,34 +88,7 @@ declare function routerAdd(
*
* @group PocketBase
*/
-declare function routerUse(...middlewares: Array): void;
-
-/**
- * RouterPre registers one or more global middlewares that are executed
- * BEFORE the router processes the request. It is usually used for making
- * changes to the request properties, for example, adding or removing
- * a trailing slash or adding segments to a path so it matches a route.
- *
- * NB! Since the router will not have processed the request yet,
- * middlewares registered at this level won't have access to any path
- * related APIs from echo.Context.
- *
- * Example:
- *
- * ```js
- * routerPre((next) => {
- * return (c) => {
- * console.log(c.request().url)
- * return next(c)
- * }
- * })
- * ```
- *
- * _Note that this method is available only in pb_hooks context._
- *
- * @group PocketBase
- */
-declare function routerPre(...middlewares: Array): void;
+declare function routerUse(...middlewares: Array<((e: core.RequestEvent) => void)|Middleware>): void;
// -------------------------------------------------------------------
// baseBinds
@@ -135,7 +106,7 @@ declare var __hooks: string
//
// See https://www.typescriptlang.org/docs/handbook/2/mapped-types.html#key-remapping-via-as
type excludeHooks = {
- [Property in keyof Type as Exclude]: Type[Property]
+ [Property in keyof Type as Exclude]: Type[Property]
};
// CoreApp without the on* hook methods
@@ -178,22 +149,34 @@ declare var $app: PocketBase
declare var $template: template.Registry
/**
- * readerToString reads the content of the specified io.Reader until
- * EOF or maxBytes are reached.
+ * This method is superseded by toString.
*
- * If maxBytes is not specified it will read up to 32MB.
+ * @deprecated
+ * @group PocketBase
+ */
+declare function readerToString(reader: any, maxBytes?: number): string;
+
+/**
+ * toString stringifies the specified value.
*
- * Note that after this call the reader can't be used anymore.
+ * Support optional second maxBytes argument to limit the max read bytes
+ * when the value is a io.Reader (default to 32MB).
+ *
+ * Types that don't have explicit string representation are json serialized.
*
* Example:
*
* ```js
- * const rawBody = readerToString(c.request().body)
+ * // io.Reader
+ * const ex1 = toString(e.request.body)
+ *
+ * // slice of bytes ("hello")
+ * const ex2 = toString([104, 101, 108, 108, 111])
* ```
*
* @group PocketBase
*/
-declare function readerToString(reader: any, maxBytes?: number): string;
+declare function toString(val: any, maxBytes?: number): string;
/**
* sleep pauses the current goroutine for at least the specified user duration (in ms).
@@ -228,15 +211,18 @@ declare function arrayOf(model: T): Array;
/**
* DynamicModel creates a new dynamic model with fields from the provided data shape.
*
+ * Note that in order to use 0 as double/float initialization number you have to use negative zero (`-0`).
+ *
* Example:
*
* ```js
* const model = new DynamicModel({
- * name: ""
- * age: 0,
- * active: false,
- * roles: [],
- * meta: {}
+ * name: ""
+ * age: 0, // int64
+ * totalSpent: -0, // float64
+ * active: false,
+ * roles: [],
+ * meta: {}
* })
* ```
*
@@ -263,12 +249,12 @@ declare class DynamicModel {
* @group PocketBase
*/
declare const Record: {
- new(collection?: models.Collection, data?: { [key:string]: any }): models.Record
+ new(collection?: core.Collection, data?: { [key:string]: any }): core.Record
// note: declare as "newable" const due to conflict with the Record TS utility type
}
-interface Collection extends models.Collection{} // merge
+interface Collection extends core.Collection{} // merge
/**
* Collection model class.
*
@@ -279,12 +265,13 @@ interface Collection extends models.Collection{} // merge
* listRule: "@request.auth.id != '' || status = 'public'",
* viewRule: "@request.auth.id != '' || status = 'public'",
* deleteRule: "@request.auth.id != ''",
- * schema: [
+ * fields: [
* {
* name: "title",
* type: "text",
* required: true,
- * options: { min: 6, max: 100 },
+ * min: 6,
+ * max: 100,
* },
* {
* name: "description",
@@ -296,44 +283,242 @@ interface Collection extends models.Collection{} // merge
*
* @group PocketBase
*/
-declare class Collection implements models.Collection {
- constructor(data?: Partial)
+declare class Collection implements core.Collection {
+ constructor(data?: Partial<core.Collection>)
}
-interface Admin extends models.Admin{} // merge
+interface BaseCollection extends core.Collection{} // merge
/**
- * Admin model class.
+ * Alias for a "base" collection class.
*
* ```js
- * const admin = new Admin()
- * admin.email = "test@example.com"
- * admin.setPassword(1234567890)
+ * const collection = new BaseCollection({
+ * name: "article",
+ * listRule: "@request.auth.id != '' || status = 'public'",
+ * viewRule: "@request.auth.id != '' || status = 'public'",
+ * deleteRule: "@request.auth.id != ''",
+ * fields: [
+ * {
+ * name: "title",
+ * type: "text",
+ * required: true,
+ * min: 6,
+ * max: 100,
+ * },
+ * {
+ * name: "description",
+ * type: "text",
+ * },
+ * ]
+ * })
* ```
*
* @group PocketBase
*/
-declare class Admin implements models.Admin {
- constructor(data?: Partial)
+declare class BaseCollection implements core.Collection {
+ constructor(data?: Partial<core.Collection>)
}
-interface Schema extends schema.Schema{} // merge
+interface AuthCollection extends core.Collection{} // merge
/**
- * Schema model class, usually used to define the Collection.schema field.
+ * Alias for an "auth" collection class.
+ *
+ * ```js
+ * const collection = new AuthCollection({
+ * name: "clients",
+ * listRule: "@request.auth.id != '' || status = 'public'",
+ * viewRule: "@request.auth.id != '' || status = 'public'",
+ * deleteRule: "@request.auth.id != ''",
+ * fields: [
+ * {
+ * name: "title",
+ * type: "text",
+ * required: true,
+ * min: 6,
+ * max: 100,
+ * },
+ * {
+ * name: "description",
+ * type: "text",
+ * },
+ * ]
+ * })
+ * ```
*
* @group PocketBase
*/
-declare class Schema implements schema.Schema {
- constructor(data?: Partial)
+declare class AuthCollection implements core.Collection {
+ constructor(data?: Partial<core.Collection>)
}
-interface SchemaField extends schema.SchemaField{} // merge
+interface ViewCollection extends core.Collection{} // merge
/**
- * SchemaField model class, usually used as part of the Schema model.
+ * Alias for a "view" collection class.
+ *
+ * ```js
+ * const collection = new ViewCollection({
+ * name: "clients",
+ * listRule: "@request.auth.id != '' || status = 'public'",
+ * viewRule: "@request.auth.id != '' || status = 'public'",
+ * deleteRule: "@request.auth.id != ''",
+ * viewQuery: "SELECT id, title from posts",
+ * })
+ * ```
*
* @group PocketBase
*/
-declare class SchemaField implements schema.SchemaField {
- constructor(data?: Partial)
+declare class ViewCollection implements core.Collection {
+ constructor(data?: Partial<core.Collection>)
+}
+
+interface FieldsList extends core.FieldsList{} // merge
+/**
+ * FieldsList model class, usually used to define the Collection.fields.
+ *
+ * @group PocketBase
+ */
+declare class FieldsList implements core.FieldsList {
+ constructor(data?: Partial<core.FieldsList>)
+}
+
+interface Field extends core.Field{} // merge
+/**
+ * Field model class, usually used as part of the FieldsList model.
+ *
+ * @group PocketBase
+ */
+declare class Field implements core.Field {
+ constructor(data?: Partial<core.Field>)
+}
+
+interface NumberField extends core.NumberField{} // merge
+/**
+ * NumberField class defines a single "number" collection field.
+ *
+ * @group PocketBase
+ */
+declare class NumberField implements core.NumberField {
+ constructor(data?: Partial<core.NumberField>)
+}
+
+interface BoolField extends core.BoolField{} // merge
+/**
+ * BoolField class defines a single "bool" collection field.
+ *
+ * @group PocketBase
+ */
+declare class BoolField implements core.BoolField {
+ constructor(data?: Partial<core.BoolField>)
+}
+
+interface TextField extends core.TextField{} // merge
+/**
+ * TextField class defines a single "text" collection field.
+ *
+ * @group PocketBase
+ */
+declare class TextField implements core.TextField {
+ constructor(data?: Partial<core.TextField>)
+}
+
+interface URLField extends core.URLField{} // merge
+/**
+ * URLField class defines a single "url" collection field.
+ *
+ * @group PocketBase
+ */
+declare class URLField implements core.URLField {
+ constructor(data?: Partial<core.URLField>)
+}
+
+interface EmailField extends core.EmailField{} // merge
+/**
+ * EmailField class defines a single "email" collection field.
+ *
+ * @group PocketBase
+ */
+declare class EmailField implements core.EmailField {
+ constructor(data?: Partial<core.EmailField>)
+}
+
+interface EditorField extends core.EditorField{} // merge
+/**
+ * EditorField class defines a single "editor" collection field.
+ *
+ * @group PocketBase
+ */
+declare class EditorField implements core.EditorField {
+ constructor(data?: Partial<core.EditorField>)
+}
+
+interface PasswordField extends core.PasswordField{} // merge
+/**
+ * PasswordField class defines a single "password" collection field.
+ *
+ * @group PocketBase
+ */
+declare class PasswordField implements core.PasswordField {
+ constructor(data?: Partial<core.PasswordField>)
+}
+
+interface DateField extends core.DateField{} // merge
+/**
+ * DateField class defines a single "date" collection field.
+ *
+ * @group PocketBase
+ */
+declare class DateField implements core.DateField {
+ constructor(data?: Partial<core.DateField>)
+}
+
+interface AutodateField extends core.AutodateField{} // merge
+/**
+ * AutodateField class defines a single "autodate" collection field.
+ *
+ * @group PocketBase
+ */
+declare class AutodateField implements core.AutodateField {
+ constructor(data?: Partial<core.AutodateField>)
+}
+
+interface JSONField extends core.JSONField{} // merge
+/**
+ * JSONField class defines a single "json" collection field.
+ *
+ * @group PocketBase
+ */
+declare class JSONField implements core.JSONField {
+ constructor(data?: Partial<core.JSONField>)
+}
+
+interface RelationField extends core.RelationField{} // merge
+/**
+ * RelationField class defines a single "relation" collection field.
+ *
+ * @group PocketBase
+ */
+declare class RelationField implements core.RelationField {
+ constructor(data?: Partial<core.RelationField>)
+}
+
+interface SelectField extends core.SelectField{} // merge
+/**
+ * SelectField class defines a single "select" collection field.
+ *
+ * @group PocketBase
+ */
+declare class SelectField implements core.SelectField {
+ constructor(data?: Partial<core.SelectField>)
+}
+
+interface FileField extends core.FileField{} // merge
+/**
+ * FileField class defines a single "file" collection field.
+ *
+ * @group PocketBase
+ */
+declare class FileField implements core.FileField {
+ constructor(data?: Partial<core.FileField>)
}
interface MailerMessage extends mailer.Message{} // merge
@@ -381,31 +566,55 @@ declare class Command implements cobra.Command {
constructor(cmd?: Partial)
}
-interface RequestInfo extends models.RequestInfo{} // merge
+interface RequestInfo extends core.RequestInfo{} // merge
/**
- * RequestInfo defines a single models.RequestInfo instance, usually used
+ * RequestInfo defines a single core.RequestInfo instance, usually used
* as part of various filter checks.
*
* Example:
*
* ```js
- * const authRecord = $app.dao().findAuthRecordByEmail("users", "test@example.com")
+ * const authRecord = $app.findAuthRecordByEmail("users", "test@example.com")
*
* const info = new RequestInfo({
- * authRecord: authRecord,
- * data: {"name": 123},
- * headers: {"x-token": "..."},
+ * auth: authRecord,
+ * body: {"name": 123},
+ * headers: {"x-token": "..."},
* })
*
- * const record = $app.dao().findFirstRecordByData("articles", "slug", "hello")
+ * const record = $app.findFirstRecordByData("articles", "slug", "hello")
*
- * const canAccess = $app.dao().canAccessRecord(record, info, "@request.auth.id != '' && @request.data.name = 123")
+ * const canAccess = $app.canAccessRecord(record, info, "@request.auth.id != '' && @request.body.name = 123")
* ```
*
* @group PocketBase
*/
-declare class RequestInfo implements models.RequestInfo {
- constructor(date?: Partial)
+declare class RequestInfo implements core.RequestInfo {
+ constructor(info?: Partial<core.RequestInfo>)
+}
+
+/**
+ * Middleware defines a single request middleware handler.
+ *
+ * This class is usually used when you want to explicitly specify a priority to your custom route middleware.
+ *
+ * Example:
+ *
+ * ```js
+ * routerUse(new Middleware((e) => {
+ * console.log(e.request.url.path)
+ * return e.next()
+ * }, -10))
+ * ```
+ *
+ * @group PocketBase
+ */
+declare class Middleware {
+ constructor(
+ func: string|((e: core.RequestEvent) => void),
+ priority?: number,
+ id?: string,
+ )
}
interface DateTime extends types.DateTime{} // merge
@@ -441,15 +650,6 @@ declare class ValidationError implements ozzo_validation.Error {
constructor(code?: string, message?: string)
}
-interface Dao extends daos.Dao{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class Dao implements daos.Dao {
- constructor(concurrentDB?: dbx.Builder, nonconcurrentDB?: dbx.Builder)
-}
-
interface Cookie extends http.Cookie{} // merge
/**
* A Cookie represents an HTTP cookie as sent in the Set-Cookie header of an
@@ -535,44 +735,21 @@ declare namespace $dbx {
export let notBetween: dbx.notBetween
}
-// -------------------------------------------------------------------
-// tokensBinds
-// -------------------------------------------------------------------
-
-/**
- * `$tokens` defines high level helpers to generate
- * various admins and auth records tokens (auth, forgotten password, etc.).
- *
- * For more control over the generated token, you can check `$security`.
- *
- * @group PocketBase
- */
-declare namespace $tokens {
- let adminAuthToken: tokens.newAdminAuthToken
- let adminResetPasswordToken: tokens.newAdminResetPasswordToken
- let adminFileToken: tokens.newAdminFileToken
- let recordAuthToken: tokens.newRecordAuthToken
- let recordVerifyToken: tokens.newRecordVerifyToken
- let recordResetPasswordToken: tokens.newRecordResetPasswordToken
- let recordChangeEmailToken: tokens.newRecordChangeEmailToken
- let recordFileToken: tokens.newRecordFileToken
-}
-
// -------------------------------------------------------------------
// mailsBinds
// -------------------------------------------------------------------
/**
* `$mails` defines helpers to send common
- * admins and auth records emails like verification, password reset, etc.
+ * auth records emails like verification, password reset, etc.
*
* @group PocketBase
*/
declare namespace $mails {
- let sendAdminPasswordReset: mails.sendAdminPasswordReset
let sendRecordPasswordReset: mails.sendRecordPasswordReset
let sendRecordVerification: mails.sendRecordVerification
let sendRecordChangeEmail: mails.sendRecordChangeEmail
+ let sendRecordOTP: mails.sendRecordOTP
}
// -------------------------------------------------------------------
@@ -588,6 +765,7 @@ declare namespace $mails {
declare namespace $security {
let randomString: security.randomString
let randomStringWithAlphabet: security.randomStringWithAlphabet
+ let randomStringByRegex: security.randomStringByRegex
let pseudorandomString: security.pseudorandomString
let pseudorandomStringWithAlphabet: security.pseudorandomStringWithAlphabet
let encrypt: security.encrypt
@@ -598,7 +776,11 @@ declare namespace $security {
let md5: security.md5
let sha256: security.sha256
let sha512: security.sha512
- let createJWT: security.newJWT
+
+ /**
+ * {@inheritDoc security.newJWT}
+ */
+ export function createJWT(payload: { [key:string]: any }, signingKey: string, secDuration: number): string
/**
* {@inheritDoc security.parseUnverifiedJWT}
@@ -723,42 +905,6 @@ declare namespace $os {
// formsBinds
// -------------------------------------------------------------------
-interface AdminLoginForm extends forms.AdminLogin{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class AdminLoginForm implements forms.AdminLogin {
- constructor(app: CoreApp)
-}
-
-interface AdminPasswordResetConfirmForm extends forms.AdminPasswordResetConfirm{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class AdminPasswordResetConfirmForm implements forms.AdminPasswordResetConfirm {
- constructor(app: CoreApp)
-}
-
-interface AdminPasswordResetRequestForm extends forms.AdminPasswordResetRequest{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class AdminPasswordResetRequestForm implements forms.AdminPasswordResetRequest {
- constructor(app: CoreApp)
-}
-
-interface AdminUpsertForm extends forms.AdminUpsert{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class AdminUpsertForm implements forms.AdminUpsert {
- constructor(app: CoreApp, admin: models.Admin)
-}
-
interface AppleClientSecretCreateForm extends forms.AppleClientSecretCreate{} // merge
/**
* @inheritDoc
@@ -768,119 +914,13 @@ declare class AppleClientSecretCreateForm implements forms.AppleClientSecretCrea
constructor(app: CoreApp)
}
-interface CollectionUpsertForm extends forms.CollectionUpsert{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class CollectionUpsertForm implements forms.CollectionUpsert {
- constructor(app: CoreApp, collection: models.Collection)
-}
-
-interface CollectionsImportForm extends forms.CollectionsImport{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class CollectionsImportForm implements forms.CollectionsImport {
- constructor(app: CoreApp)
-}
-
-interface RealtimeSubscribeForm extends forms.RealtimeSubscribe{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class RealtimeSubscribeForm implements forms.RealtimeSubscribe {}
-
-interface RecordEmailChangeConfirmForm extends forms.RecordEmailChangeConfirm{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class RecordEmailChangeConfirmForm implements forms.RecordEmailChangeConfirm {
- constructor(app: CoreApp, collection: models.Collection)
-}
-
-interface RecordEmailChangeRequestForm extends forms.RecordEmailChangeRequest{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class RecordEmailChangeRequestForm implements forms.RecordEmailChangeRequest {
- constructor(app: CoreApp, record: models.Record)
-}
-
-interface RecordOAuth2LoginForm extends forms.RecordOAuth2Login{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class RecordOAuth2LoginForm implements forms.RecordOAuth2Login {
- constructor(app: CoreApp, collection: models.Collection, optAuthRecord?: models.Record)
-}
-
-interface RecordPasswordLoginForm extends forms.RecordPasswordLogin{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class RecordPasswordLoginForm implements forms.RecordPasswordLogin {
- constructor(app: CoreApp, collection: models.Collection)
-}
-
-interface RecordPasswordResetConfirmForm extends forms.RecordPasswordResetConfirm{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class RecordPasswordResetConfirmForm implements forms.RecordPasswordResetConfirm {
- constructor(app: CoreApp, collection: models.Collection)
-}
-
-interface RecordPasswordResetRequestForm extends forms.RecordPasswordResetRequest{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class RecordPasswordResetRequestForm implements forms.RecordPasswordResetRequest {
- constructor(app: CoreApp, collection: models.Collection)
-}
-
interface RecordUpsertForm extends forms.RecordUpsert{} // merge
/**
* @inheritDoc
* @group PocketBase
*/
declare class RecordUpsertForm implements forms.RecordUpsert {
- constructor(app: CoreApp, record: models.Record)
-}
-
-interface RecordVerificationConfirmForm extends forms.RecordVerificationConfirm{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class RecordVerificationConfirmForm implements forms.RecordVerificationConfirm {
- constructor(app: CoreApp, collection: models.Collection)
-}
-
-interface RecordVerificationRequestForm extends forms.RecordVerificationRequest{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class RecordVerificationRequestForm implements forms.RecordVerificationRequest {
- constructor(app: CoreApp, collection: models.Collection)
-}
-
-interface SettingsUpsertForm extends forms.SettingsUpsert{} // merge
-/**
- * @inheritDoc
- * @group PocketBase
- */
-declare class SettingsUpsertForm implements forms.SettingsUpsert {
- constructor(app: CoreApp)
+ constructor(app: CoreApp, record: core.Record)
}
interface TestEmailSendForm extends forms.TestEmailSend{} // merge
@@ -955,6 +995,26 @@ declare class UnauthorizedError implements apis.ApiError {
constructor(message?: string, data?: any)
}
+interface TooManyRequestsError extends apis.ApiError{} // merge
+/**
+ * TooManyRequestsError returns 429 ApiError.
+ *
+ * @group PocketBase
+ */
+declare class TooManyRequestsError implements apis.ApiError {
+ constructor(message?: string, data?: any)
+}
+
+interface InternalServerError extends apis.ApiError{} // merge
+/**
+ * InternalServerError returns 500 ApiError.
+ *
+ * @group PocketBase
+ */
+declare class InternalServerError implements apis.ApiError {
+ constructor(message?: string, data?: any)
+}
+
/**
* `$apis` defines commonly used PocketBase api helpers and middlewares.
*
@@ -967,21 +1027,19 @@ declare namespace $apis {
* If a file resource is missing and indexFallback is set, the request
* will be forwarded to the base index.html (useful for SPA).
*/
- export function staticDirectoryHandler(dir: string, indexFallback: boolean): echo.HandlerFunc
+ export function static(dir: string, indexFallback: boolean): (e: core.RequestEvent) => void
- let requireGuestOnly: apis.requireGuestOnly
- let requireRecordAuth: apis.requireRecordAuth
- let requireAdminAuth: apis.requireAdminAuth
- let requireAdminAuthOnlyIfAny: apis.requireAdminAuthOnlyIfAny
- let requireAdminOrRecordAuth: apis.requireAdminOrRecordAuth
- let requireAdminOrOwnerAuth: apis.requireAdminOrOwnerAuth
- let activityLogger: apis.activityLogger
- let requestInfo: apis.requestInfo
- let recordAuthResponse: apis.recordAuthResponse
- let gzip: middleware.gzip
- let bodyLimit: middleware.bodyLimit
- let enrichRecord: apis.enrichRecord
- let enrichRecords: apis.enrichRecords
+ let requireGuestOnly: apis.requireGuestOnly
+ let requireAuth: apis.requireAuth
+ let requireSuperuserAuth: apis.requireSuperuserAuth
+ let requireSuperuserAuthOnlyIfAny: apis.requireSuperuserAuthOnlyIfAny
+ let requireSuperuserOrOwnerAuth: apis.requireSuperuserOrOwnerAuth
+ let skipSuccessActivityLog: apis.skipSuccessActivityLog
+ let gzip: apis.gzip
+ let bodyLimit: apis.bodyLimit
+ let recordAuthResponse: apis.recordAuthResponse
+ let enrichRecord: apis.enrichRecord
+ let enrichRecords: apis.enrichRecords
}
// -------------------------------------------------------------------
@@ -1007,9 +1065,10 @@ declare namespace $http {
*
* ```js
* const res = $http.send({
- * url: "https://example.com",
- * body: JSON.stringify({"title": "test"})
- * method: "post",
+ * method: "POST",
+ * url: "https://example.com",
+ * body: JSON.stringify({"title": "test"}),
+ * headers: { 'Content-Type': 'application/json' }
* })
*
* console.log(res.statusCode) // the response HTTP status code
@@ -1026,7 +1085,7 @@ declare namespace $http {
headers?: { [key:string]: string },
timeout?: number, // default to 120
- // deprecated, please use body instead
+ // @deprecated please use body instead
data?: { [key:string]: any },
}): {
statusCode: number,
@@ -1049,96 +1108,90 @@ declare namespace $http {
* @group PocketBase
*/
declare function migrate(
- up: (db: dbx.Builder) => void,
- down?: (db: dbx.Builder) => void
+ up: (txApp: CoreApp) => void,
+ down?: (txApp: CoreApp) => void
): void;
-/** @group PocketBase */declare function onAdminAfterAuthRefreshRequest(handler: (e: core.AdminAuthRefreshEvent) => void): void
-/** @group PocketBase */declare function onAdminAfterAuthWithPasswordRequest(handler: (e: core.AdminAuthWithPasswordEvent) => void): void
-/** @group PocketBase */declare function onAdminAfterConfirmPasswordResetRequest(handler: (e: core.AdminConfirmPasswordResetEvent) => void): void
-/** @group PocketBase */declare function onAdminAfterCreateRequest(handler: (e: core.AdminCreateEvent) => void): void
-/** @group PocketBase */declare function onAdminAfterDeleteRequest(handler: (e: core.AdminDeleteEvent) => void): void
-/** @group PocketBase */declare function onAdminAfterRequestPasswordResetRequest(handler: (e: core.AdminRequestPasswordResetEvent) => void): void
-/** @group PocketBase */declare function onAdminAfterUpdateRequest(handler: (e: core.AdminUpdateEvent) => void): void
-/** @group PocketBase */declare function onAdminAuthRequest(handler: (e: core.AdminAuthEvent) => void): void
-/** @group PocketBase */declare function onAdminBeforeAuthRefreshRequest(handler: (e: core.AdminAuthRefreshEvent) => void): void
-/** @group PocketBase */declare function onAdminBeforeAuthWithPasswordRequest(handler: (e: core.AdminAuthWithPasswordEvent) => void): void
-/** @group PocketBase */declare function onAdminBeforeConfirmPasswordResetRequest(handler: (e: core.AdminConfirmPasswordResetEvent) => void): void
-/** @group PocketBase */declare function onAdminBeforeCreateRequest(handler: (e: core.AdminCreateEvent) => void): void
-/** @group PocketBase */declare function onAdminBeforeDeleteRequest(handler: (e: core.AdminDeleteEvent) => void): void
-/** @group PocketBase */declare function onAdminBeforeRequestPasswordResetRequest(handler: (e: core.AdminRequestPasswordResetEvent) => void): void
-/** @group PocketBase */declare function onAdminBeforeUpdateRequest(handler: (e: core.AdminUpdateEvent) => void): void
-/** @group PocketBase */declare function onAdminViewRequest(handler: (e: core.AdminViewEvent) => void): void
-/** @group PocketBase */declare function onAdminsListRequest(handler: (e: core.AdminsListEvent) => void): void
-/** @group PocketBase */declare function onAfterApiError(handler: (e: core.ApiErrorEvent) => void): void
-/** @group PocketBase */declare function onAfterBootstrap(handler: (e: core.BootstrapEvent) => void): void
-/** @group PocketBase */declare function onBeforeApiError(handler: (e: core.ApiErrorEvent) => void): void
-/** @group PocketBase */declare function onBeforeBootstrap(handler: (e: core.BootstrapEvent) => void): void
-/** @group PocketBase */declare function onCollectionAfterCreateRequest(handler: (e: core.CollectionCreateEvent) => void): void
-/** @group PocketBase */declare function onCollectionAfterDeleteRequest(handler: (e: core.CollectionDeleteEvent) => void): void
-/** @group PocketBase */declare function onCollectionAfterUpdateRequest(handler: (e: core.CollectionUpdateEvent) => void): void
-/** @group PocketBase */declare function onCollectionBeforeCreateRequest(handler: (e: core.CollectionCreateEvent) => void): void
-/** @group PocketBase */declare function onCollectionBeforeDeleteRequest(handler: (e: core.CollectionDeleteEvent) => void): void
-/** @group PocketBase */declare function onCollectionBeforeUpdateRequest(handler: (e: core.CollectionUpdateEvent) => void): void
-/** @group PocketBase */declare function onCollectionViewRequest(handler: (e: core.CollectionViewEvent) => void): void
-/** @group PocketBase */declare function onCollectionsAfterImportRequest(handler: (e: core.CollectionsImportEvent) => void): void
-/** @group PocketBase */declare function onCollectionsBeforeImportRequest(handler: (e: core.CollectionsImportEvent) => void): void
-/** @group PocketBase */declare function onCollectionsListRequest(handler: (e: core.CollectionsListEvent) => void): void
-/** @group PocketBase */declare function onFileAfterTokenRequest(handler: (e: core.FileTokenEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onFileBeforeTokenRequest(handler: (e: core.FileTokenEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onFileDownloadRequest(handler: (e: core.FileDownloadEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onMailerAfterAdminResetPasswordSend(handler: (e: core.MailerAdminEvent) => void): void
-/** @group PocketBase */declare function onMailerAfterRecordChangeEmailSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onMailerAfterRecordResetPasswordSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onMailerAfterRecordVerificationSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onMailerBeforeAdminResetPasswordSend(handler: (e: core.MailerAdminEvent) => void): void
-/** @group PocketBase */declare function onMailerBeforeRecordChangeEmailSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onMailerBeforeRecordResetPasswordSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onMailerBeforeRecordVerificationSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onModelAfterCreate(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onModelAfterDelete(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onModelAfterUpdate(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onModelBeforeCreate(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onModelBeforeDelete(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onModelBeforeUpdate(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRealtimeAfterMessageSend(handler: (e: core.RealtimeMessageEvent) => void): void
-/** @group PocketBase */declare function onRealtimeAfterSubscribeRequest(handler: (e: core.RealtimeSubscribeEvent) => void): void
-/** @group PocketBase */declare function onRealtimeBeforeMessageSend(handler: (e: core.RealtimeMessageEvent) => void): void
-/** @group PocketBase */declare function onRealtimeBeforeSubscribeRequest(handler: (e: core.RealtimeSubscribeEvent) => void): void
-/** @group PocketBase */declare function onRealtimeConnectRequest(handler: (e: core.RealtimeConnectEvent) => void): void
-/** @group PocketBase */declare function onRealtimeDisconnectRequest(handler: (e: core.RealtimeDisconnectEvent) => void): void
-/** @group PocketBase */declare function onRecordAfterAuthRefreshRequest(handler: (e: core.RecordAuthRefreshEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterAuthWithOAuth2Request(handler: (e: core.RecordAuthWithOAuth2Event) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterAuthWithPasswordRequest(handler: (e: core.RecordAuthWithPasswordEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterConfirmEmailChangeRequest(handler: (e: core.RecordConfirmEmailChangeEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterConfirmPasswordResetRequest(handler: (e: core.RecordConfirmPasswordResetEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterConfirmVerificationRequest(handler: (e: core.RecordConfirmVerificationEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterCreateRequest(handler: (e: core.RecordCreateEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterDeleteRequest(handler: (e: core.RecordDeleteEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterRequestEmailChangeRequest(handler: (e: core.RecordRequestEmailChangeEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterRequestPasswordResetRequest(handler: (e: core.RecordRequestPasswordResetEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterRequestVerificationRequest(handler: (e: core.RecordRequestVerificationEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterUnlinkExternalAuthRequest(handler: (e: core.RecordUnlinkExternalAuthEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAfterUpdateRequest(handler: (e: core.RecordUpdateEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordAuthRequest(handler: (e: core.RecordAuthEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeAuthRefreshRequest(handler: (e: core.RecordAuthRefreshEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeAuthWithOAuth2Request(handler: (e: core.RecordAuthWithOAuth2Event) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeAuthWithPasswordRequest(handler: (e: core.RecordAuthWithPasswordEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeConfirmEmailChangeRequest(handler: (e: core.RecordConfirmEmailChangeEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeConfirmPasswordResetRequest(handler: (e: core.RecordConfirmPasswordResetEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeConfirmVerificationRequest(handler: (e: core.RecordConfirmVerificationEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeCreateRequest(handler: (e: core.RecordCreateEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeDeleteRequest(handler: (e: core.RecordDeleteEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeRequestEmailChangeRequest(handler: (e: core.RecordRequestEmailChangeEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeRequestPasswordResetRequest(handler: (e: core.RecordRequestPasswordResetEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeRequestVerificationRequest(handler: (e: core.RecordRequestVerificationEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeUnlinkExternalAuthRequest(handler: (e: core.RecordUnlinkExternalAuthEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordBeforeUpdateRequest(handler: (e: core.RecordUpdateEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordListExternalAuthsRequest(handler: (e: core.RecordListExternalAuthsEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordViewRequest(handler: (e: core.RecordViewEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onRecordsListRequest(handler: (e: core.RecordsListEvent) => void, ...tags: string[]): void
-/** @group PocketBase */declare function onSettingsAfterUpdateRequest(handler: (e: core.SettingsUpdateEvent) => void): void
-/** @group PocketBase */declare function onSettingsBeforeUpdateRequest(handler: (e: core.SettingsUpdateEvent) => void): void
-/** @group PocketBase */declare function onSettingsListRequest(handler: (e: core.SettingsListEvent) => void): void
+/** @group PocketBase */declare function onBackupCreate(handler: (e: core.BackupEvent) => void): void
+/** @group PocketBase */declare function onBackupRestore(handler: (e: core.BackupEvent) => void): void
+/** @group PocketBase */declare function onBatchRequest(handler: (e: core.BatchRequestEvent) => void): void
+/** @group PocketBase */declare function onBootstrap(handler: (e: core.BootstrapEvent) => void): void
+/** @group PocketBase */declare function onCollectionAfterCreateError(handler: (e: core.CollectionErrorEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionAfterCreateSuccess(handler: (e: core.CollectionEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionAfterDeleteError(handler: (e: core.CollectionErrorEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionAfterDeleteSuccess(handler: (e: core.CollectionEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionAfterUpdateError(handler: (e: core.CollectionErrorEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionAfterUpdateSuccess(handler: (e: core.CollectionEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionCreate(handler: (e: core.CollectionEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionCreateExecute(handler: (e: core.CollectionEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionCreateRequest(handler: (e: core.CollectionRequestEvent) => void): void
+/** @group PocketBase */declare function onCollectionDelete(handler: (e: core.CollectionEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionDeleteExecute(handler: (e: core.CollectionEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionDeleteRequest(handler: (e: core.CollectionRequestEvent) => void): void
+/** @group PocketBase */declare function onCollectionUpdate(handler: (e: core.CollectionEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionUpdateExecute(handler: (e: core.CollectionEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionUpdateRequest(handler: (e: core.CollectionRequestEvent) => void): void
+/** @group PocketBase */declare function onCollectionValidate(handler: (e: core.CollectionEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onCollectionViewRequest(handler: (e: core.CollectionRequestEvent) => void): void
+/** @group PocketBase */declare function onCollectionsImportRequest(handler: (e: core.CollectionsImportRequestEvent) => void): void
+/** @group PocketBase */declare function onCollectionsListRequest(handler: (e: core.CollectionsListRequestEvent) => void): void
+/** @group PocketBase */declare function onFileDownloadRequest(handler: (e: core.FileDownloadRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onFileTokenRequest(handler: (e: core.FileTokenRequestEvent) => void): void
+/** @group PocketBase */declare function onMailerRecordAuthAlertSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onMailerRecordEmailChangeSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onMailerRecordOTPSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onMailerRecordPasswordResetSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onMailerRecordVerificationSend(handler: (e: core.MailerRecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onMailerSend(handler: (e: core.MailerEvent) => void): void
+/** @group PocketBase */declare function onModelAfterCreateError(handler: (e: core.ModelErrorEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelAfterCreateSuccess(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelAfterDeleteError(handler: (e: core.ModelErrorEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelAfterDeleteSuccess(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelAfterUpdateError(handler: (e: core.ModelErrorEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelAfterUpdateSuccess(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelCreate(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelCreateExecute(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelDelete(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelDeleteExecute(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelUpdate(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelUpdateExecute(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onModelValidate(handler: (e: core.ModelEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRealtimeConnectRequest(handler: (e: core.RealtimeConnectRequestEvent) => void): void
+/** @group PocketBase */declare function onRealtimeMessageSend(handler: (e: core.RealtimeMessageEvent) => void): void
+/** @group PocketBase */declare function onRealtimeSubscribeRequest(handler: (e: core.RealtimeSubscribeRequestEvent) => void): void
+/** @group PocketBase */declare function onRecordAfterCreateError(handler: (e: core.RecordErrorEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordAfterCreateSuccess(handler: (e: core.RecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordAfterDeleteError(handler: (e: core.RecordErrorEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordAfterDeleteSuccess(handler: (e: core.RecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordAfterUpdateError(handler: (e: core.RecordErrorEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordAfterUpdateSuccess(handler: (e: core.RecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordAuthRefreshRequest(handler: (e: core.RecordAuthRefreshRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordAuthRequest(handler: (e: core.RecordAuthRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordAuthWithOAuth2Request(handler: (e: core.RecordAuthWithOAuth2RequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordAuthWithOTPRequest(handler: (e: core.RecordAuthWithOTPRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordAuthWithPasswordRequest(handler: (e: core.RecordAuthWithPasswordRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordConfirmEmailChangeRequest(handler: (e: core.RecordConfirmEmailChangeRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordConfirmPasswordResetRequest(handler: (e: core.RecordConfirmPasswordResetRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordConfirmVerificationRequest(handler: (e: core.RecordConfirmVerificationRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordCreate(handler: (e: core.RecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordCreateExecute(handler: (e: core.RecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordCreateRequest(handler: (e: core.RecordRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordDelete(handler: (e: core.RecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordDeleteExecute(handler: (e: core.RecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordDeleteRequest(handler: (e: core.RecordRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordEnrich(handler: (e: core.RecordEnrichEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordRequestEmailChangeRequest(handler: (e: core.RecordRequestEmailChangeRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordRequestOTPRequest(handler: (e: core.RecordCreateOTPRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordRequestPasswordResetRequest(handler: (e: core.RecordRequestPasswordResetRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordRequestVerificationRequest(handler: (e: core.RecordRequestVerificationRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordUpdate(handler: (e: core.RecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordUpdateExecute(handler: (e: core.RecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordUpdateRequest(handler: (e: core.RecordRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordValidate(handler: (e: core.RecordEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordViewRequest(handler: (e: core.RecordRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onRecordsListRequest(handler: (e: core.RecordsListRequestEvent) => void, ...tags: string[]): void
+/** @group PocketBase */declare function onSettingsListRequest(handler: (e: core.SettingsListRequestEvent) => void): void
+/** @group PocketBase */declare function onSettingsReload(handler: (e: core.SettingsReloadEvent) => void): void
+/** @group PocketBase */declare function onSettingsUpdateRequest(handler: (e: core.SettingsUpdateRequestEvent) => void): void
/** @group PocketBase */declare function onTerminate(handler: (e: core.TerminateEvent) => void): void
type _TygojaDict = { [key:string | number | symbol]: any; }
type _TygojaAny = any
@@ -1148,9 +1201,9 @@ type _TygojaAny = any
* functionality. The design is Unix-like, although the error handling is
* Go-like; failing calls return values of type error rather than error numbers.
* Often, more information is available within the error. For example,
- * if a call that takes a file name fails, such as Open or Stat, the error
+ * if a call that takes a file name fails, such as [Open] or [Stat], the error
* will include the failing file name when printed and will be of type
- * *PathError, which may be unpacked for more information.
+ * [*PathError], which may be unpacked for more information.
*
* The os interface is intended to be uniform across all operating systems.
* Features not generally available appear in the system-specific package syscall.
@@ -1182,22 +1235,26 @@ type _TygojaAny = any
* fmt.Printf("read %d bytes: %q\n", count, data[:count])
* ```
*
- * Note: The maximum number of concurrent operations on a File may be limited by
- * the OS or the system. The number should be high, but exceeding it may degrade
- * performance or cause other issues.
+ * # Concurrency
+ *
+ * The methods of [File] correspond to file system operations. All are
+ * safe for concurrent use. The maximum number of concurrent
+ * operations on a File may be limited by the OS or the system. The
+ * number should be high, but exceeding it may degrade performance or
+ * cause other issues.
*/
namespace os {
interface readdirMode extends Number{}
interface File {
/**
* Readdir reads the contents of the directory associated with file and
- * returns a slice of up to n FileInfo values, as would be returned
- * by Lstat, in directory order. Subsequent calls on the same file will yield
+ * returns a slice of up to n [FileInfo] values, as would be returned
+ * by [Lstat], in directory order. Subsequent calls on the same file will yield
* further FileInfos.
*
* If n > 0, Readdir returns at most n FileInfo structures. In this case, if
* Readdir returns an empty slice, it will return a non-nil error
- * explaining why. At the end of a directory, the error is io.EOF.
+ * explaining why. At the end of a directory, the error is [io.EOF].
*
* If n <= 0, Readdir returns all the FileInfo from the directory in
* a single slice. In this case, if Readdir succeeds (reads all
@@ -1219,7 +1276,7 @@ namespace os {
*
* If n > 0, Readdirnames returns at most n names. In this case, if
* Readdirnames returns an empty slice, it will return a non-nil error
- * explaining why. At the end of a directory, the error is io.EOF.
+ * explaining why. At the end of a directory, the error is [io.EOF].
*
* If n <= 0, Readdirnames returns all the names from the directory in
* a single slice. In this case, if Readdirnames succeeds (reads all
@@ -1232,18 +1289,18 @@ namespace os {
}
/**
* A DirEntry is an entry read from a directory
- * (using the ReadDir function or a File's ReadDir method).
+ * (using the [ReadDir] function or a [File.ReadDir] method).
*/
interface DirEntry extends fs.DirEntry{}
interface File {
/**
* ReadDir reads the contents of the directory associated with the file f
- * and returns a slice of DirEntry values in directory order.
+ * and returns a slice of [DirEntry] values in directory order.
* Subsequent calls on the same file will yield later DirEntry records in the directory.
*
* If n > 0, ReadDir returns at most n DirEntry records.
* In this case, if ReadDir returns an empty slice, it will return an error explaining why.
- * At the end of a directory, the error is io.EOF.
+ * At the end of a directory, the error is [io.EOF].
*
* If n <= 0, ReadDir returns all the DirEntry records remaining in the directory.
* When it succeeds, it returns a nil error (not io.EOF).
@@ -1260,6 +1317,28 @@ namespace os {
*/
(name: string): Array
}
+ interface copyFS {
+ /**
+ * CopyFS copies the file system fsys into the directory dir,
+ * creating dir if necessary.
+ *
+ * Files are created with mode 0o666 plus any execute permissions
+ * from the source, and directories are created with mode 0o777
+ * (before umask).
+ *
+ * CopyFS will not overwrite existing files. If a file name in fsys
+ * already exists in the destination, CopyFS will return an error
+ * such that errors.Is(err, fs.ErrExist) will be true.
+ *
+ * Symbolic links in fsys are not supported. A *PathError with Err set
+ * to ErrInvalid is returned when copying from a symbolic link.
+ *
+ * Symbolic links in dir are followed.
+ *
+ * Copying stops at and returns the first error encountered.
+ */
+ (dir: string, fsys: fs.FS): void
+ }
/**
* Auxiliary information if the File describes a directory
*/
@@ -1268,7 +1347,7 @@ namespace os {
interface expand {
/**
* Expand replaces ${var} or $var in the string based on the mapping function.
- * For example, os.ExpandEnv(s) is equivalent to os.Expand(s, os.Getenv).
+ * For example, [os.ExpandEnv](s) is equivalent to [os.Expand](s, [os.Getenv]).
*/
(s: string, mapping: (_arg0: string) => string): string
}
@@ -1284,7 +1363,7 @@ namespace os {
/**
* Getenv retrieves the value of the environment variable named by the key.
* It returns the value, which will be empty if the variable is not present.
- * To distinguish between an empty value and an unset value, use LookupEnv.
+ * To distinguish between an empty value and an unset value, use [LookupEnv].
*/
(key: string): string
}
@@ -1353,7 +1432,7 @@ namespace os {
}
interface newSyscallError {
/**
- * NewSyscallError returns, as an error, a new SyscallError
+ * NewSyscallError returns, as an error, a new [SyscallError]
* with the given system call name and error details.
* As a convenience, if err is nil, NewSyscallError returns nil.
*/
@@ -1361,53 +1440,55 @@ namespace os {
}
interface isExist {
/**
- * IsExist returns a boolean indicating whether the error is known to report
- * that a file or directory already exists. It is satisfied by ErrExist as
+ * IsExist returns a boolean indicating whether its argument is known to report
+ * that a file or directory already exists. It is satisfied by [ErrExist] as
* well as some syscall errors.
*
- * This function predates errors.Is. It only supports errors returned by
+ * This function predates [errors.Is]. It only supports errors returned by
* the os package. New code should use errors.Is(err, fs.ErrExist).
*/
(err: Error): boolean
}
interface isNotExist {
/**
- * IsNotExist returns a boolean indicating whether the error is known to
+ * IsNotExist returns a boolean indicating whether its argument is known to
* report that a file or directory does not exist. It is satisfied by
- * ErrNotExist as well as some syscall errors.
+ * [ErrNotExist] as well as some syscall errors.
*
- * This function predates errors.Is. It only supports errors returned by
+ * This function predates [errors.Is]. It only supports errors returned by
* the os package. New code should use errors.Is(err, fs.ErrNotExist).
*/
(err: Error): boolean
}
interface isPermission {
/**
- * IsPermission returns a boolean indicating whether the error is known to
- * report that permission is denied. It is satisfied by ErrPermission as well
+ * IsPermission returns a boolean indicating whether its argument is known to
+ * report that permission is denied. It is satisfied by [ErrPermission] as well
* as some syscall errors.
*
- * This function predates errors.Is. It only supports errors returned by
+ * This function predates [errors.Is]. It only supports errors returned by
* the os package. New code should use errors.Is(err, fs.ErrPermission).
*/
(err: Error): boolean
}
interface isTimeout {
/**
- * IsTimeout returns a boolean indicating whether the error is known
+ * IsTimeout returns a boolean indicating whether its argument is known
* to report that a timeout occurred.
*
- * This function predates errors.Is, and the notion of whether an
+ * This function predates [errors.Is], and the notion of whether an
* error indicates a timeout can be ambiguous. For example, the Unix
* error EWOULDBLOCK sometimes indicates a timeout and sometimes does not.
* New code should use errors.Is with a value appropriate to the call
- * returning the error, such as os.ErrDeadlineExceeded.
+ * returning the error, such as [os.ErrDeadlineExceeded].
*/
(err: Error): boolean
}
interface syscallErrorType extends syscall.Errno{}
+ interface processMode extends Number{}
+ interface processStatus extends Number{}
/**
- * Process stores the information about a process created by StartProcess.
+ * Process stores the information about a process created by [StartProcess].
*/
interface Process {
pid: number
@@ -1473,7 +1554,7 @@ namespace os {
/**
* FindProcess looks for a running process by its pid.
*
- * The Process it returns can be used to obtain information
+ * The [Process] it returns can be used to obtain information
* about the underlying operating system process.
*
* On Unix systems, FindProcess always succeeds and returns a Process
@@ -1486,32 +1567,32 @@ namespace os {
interface startProcess {
/**
* StartProcess starts a new process with the program, arguments and attributes
- * specified by name, argv and attr. The argv slice will become os.Args in the
+ * specified by name, argv and attr. The argv slice will become [os.Args] in the
* new process, so it normally starts with the program name.
*
* If the calling goroutine has locked the operating system thread
- * with runtime.LockOSThread and modified any inheritable OS-level
+ * with [runtime.LockOSThread] and modified any inheritable OS-level
* thread state (for example, Linux or Plan 9 name spaces), the new
* process will inherit the caller's thread state.
*
- * StartProcess is a low-level interface. The os/exec package provides
+ * StartProcess is a low-level interface. The [os/exec] package provides
* higher-level interfaces.
*
- * If there is an error, it will be of type *PathError.
+ * If there is an error, it will be of type [*PathError].
*/
(name: string, argv: Array, attr: ProcAttr): (Process)
}
interface Process {
/**
- * Release releases any resources associated with the Process p,
+ * Release releases any resources associated with the [Process] p,
* rendering it unusable in the future.
- * Release only needs to be called if Wait is not.
+ * Release only needs to be called if [Process.Wait] is not.
*/
release(): void
}
interface Process {
/**
- * Kill causes the Process to exit immediately. Kill does not wait until
+ * Kill causes the [Process] to exit immediately. Kill does not wait until
* the Process has actually exited. This only kills the Process itself,
* not any other processes it may have started.
*/
@@ -1519,7 +1600,7 @@ namespace os {
}
interface Process {
/**
- * Wait waits for the Process to exit, and then returns a
+ * Wait waits for the [Process] to exit, and then returns a
* ProcessState describing its status and an error, if any.
* Wait releases any resources associated with the Process.
* On most operating systems, the Process must be a child
@@ -1529,8 +1610,8 @@ namespace os {
}
interface Process {
/**
- * Signal sends a signal to the Process.
- * Sending Interrupt on Windows is not implemented.
+ * Signal sends a signal to the [Process].
+ * Sending [Interrupt] on Windows is not implemented.
*/
signal(sig: Signal): void
}
@@ -1565,7 +1646,7 @@ namespace os {
/**
* Sys returns system-dependent exit information about
* the process. Convert it to the appropriate underlying
- * type, such as syscall.WaitStatus on Unix, to access its contents.
+ * type, such as [syscall.WaitStatus] on Unix, to access its contents.
*/
sys(): any
}
@@ -1573,7 +1654,7 @@ namespace os {
/**
* SysUsage returns system-dependent resource usage information about
* the exited process. Convert it to the appropriate underlying
- * type, such as *syscall.Rusage on Unix, to access its contents.
+ * type, such as [*syscall.Rusage] on Unix, to access its contents.
* (On Unix, *syscall.Rusage matches struct rusage as defined in the
* getrusage(2) manual page.)
*/
@@ -1607,7 +1688,7 @@ namespace os {
* pointing to the correct executable. If a symlink was used to start
* the process, depending on the operating system, the result might
* be the symlink or the path it pointed to. If a stable result is
- * needed, path/filepath.EvalSymlinks might help.
+ * needed, [path/filepath.EvalSymlinks] might help.
*
* Executable returns an absolute path unless an error occurred.
*
@@ -1619,6 +1700,8 @@ namespace os {
interface File {
/**
* Name returns the name of the file as presented to Open.
+ *
+ * It is safe to call Name after [Close].
*/
name(): string
}
@@ -1679,8 +1762,8 @@ namespace os {
* than ReadFrom. This is used to permit ReadFrom to call io.Copy
* without leading to a recursive call to ReadFrom.
*/
- type _subezgYh = noReadFrom&File
- interface fileWithoutReadFrom extends _subezgYh {
+ type _subrrRcn = noReadFrom&File
+ interface fileWithoutReadFrom extends _subrrRcn {
}
interface File {
/**
@@ -1724,8 +1807,8 @@ namespace os {
* than WriteTo. This is used to permit WriteTo to call io.Copy
* without leading to a recursive call to WriteTo.
*/
- type _subJsbVf = noWriteTo&File
- interface fileWithoutWriteTo extends _subJsbVf {
+ type _subSJIke = noWriteTo&File
+ interface fileWithoutWriteTo extends _subSJIke {
}
interface File {
/**
@@ -1771,7 +1854,7 @@ namespace os {
interface create {
/**
* Create creates or truncates the named file. If the file already exists,
- * it is truncated. If the file does not exist, it is created with mode 0666
+ * it is truncated. If the file does not exist, it is created with mode 0o666
* (before umask). If successful, methods on the returned File can
* be used for I/O; the associated file descriptor has mode O_RDWR.
* If there is an error, it will be of type *PathError.
@@ -1884,11 +1967,11 @@ namespace os {
* On Unix, the mode's permission bits, ModeSetuid, ModeSetgid, and
* ModeSticky are used.
*
- * On Windows, only the 0200 bit (owner writable) of mode is used; it
+ * On Windows, only the 0o200 bit (owner writable) of mode is used; it
* controls whether the file's read-only attribute is set or cleared.
* The other bits are currently unused. For compatibility with Go 1.12
- * and earlier, use a non-zero mode. Use mode 0400 for a read-only
- * file and 0600 for a readable+writable file.
+ * and earlier, use a non-zero mode. Use mode 0o400 for a read-only
+ * file and 0o600 for a readable+writable file.
*
* On Plan 9, the mode's permission bits, ModeAppend, ModeExclusive,
* and ModeTemporary are used.
@@ -2022,9 +2105,9 @@ namespace os {
}
interface File {
/**
- * Close closes the File, rendering it unusable for I/O.
- * On files that support SetDeadline, any pending I/O operations will
- * be canceled and return immediately with an ErrClosed error.
+ * Close closes the [File], rendering it unusable for I/O.
+ * On files that support [File.SetDeadline], any pending I/O operations will
+ * be canceled and return immediately with an [ErrClosed] error.
* Close will return an error if it has already been called.
*/
close(): void
@@ -2034,9 +2117,9 @@ namespace os {
* Chown changes the numeric uid and gid of the named file.
* If the file is a symbolic link, it changes the uid and gid of the link's target.
* A uid or gid of -1 means to not change that value.
- * If there is an error, it will be of type *PathError.
+ * If there is an error, it will be of type [*PathError].
*
- * On Windows or Plan 9, Chown always returns the syscall.EWINDOWS or
+ * On Windows or Plan 9, Chown always returns the [syscall.EWINDOWS] or
* EPLAN9 error, wrapped in *PathError.
*/
(name: string, uid: number, gid: number): void
@@ -2045,9 +2128,9 @@ namespace os {
/**
* Lchown changes the numeric uid and gid of the named file.
* If the file is a symbolic link, it changes the uid and gid of the link itself.
- * If there is an error, it will be of type *PathError.
+ * If there is an error, it will be of type [*PathError].
*
- * On Windows, it always returns the syscall.EWINDOWS error, wrapped
+ * On Windows, it always returns the [syscall.EWINDOWS] error, wrapped
* in *PathError.
*/
(name: string, uid: number, gid: number): void
@@ -2055,9 +2138,9 @@ namespace os {
interface File {
/**
* Chown changes the numeric uid and gid of the named file.
- * If there is an error, it will be of type *PathError.
+ * If there is an error, it will be of type [*PathError].
*
- * On Windows, it always returns the syscall.EWINDOWS error, wrapped
+ * On Windows, it always returns the [syscall.EWINDOWS] error, wrapped
* in *PathError.
*/
chown(uid: number, gid: number): void
@@ -2066,7 +2149,7 @@ namespace os {
/**
* Truncate changes the size of the file.
* It does not change the I/O offset.
- * If there is an error, it will be of type *PathError.
+ * If there is an error, it will be of type [*PathError].
*/
truncate(size: number): void
}
@@ -2082,11 +2165,11 @@ namespace os {
/**
* Chtimes changes the access and modification times of the named
* file, similar to the Unix utime() or utimes() functions.
- * A zero time.Time value will leave the corresponding file time unchanged.
+ * A zero [time.Time] value will leave the corresponding file time unchanged.
*
* The underlying filesystem may truncate or round the values to a
* less precise time unit.
- * If there is an error, it will be of type *PathError.
+ * If there is an error, it will be of type [*PathError].
*/
(name: string, atime: time.Time, mtime: time.Time): void
}
@@ -2094,7 +2177,7 @@ namespace os {
/**
* Chdir changes the current working directory to the file,
* which must be a directory.
- * If there is an error, it will be of type *PathError.
+ * If there is an error, it will be of type [*PathError].
*/
chdir(): void
}
@@ -2111,11 +2194,11 @@ namespace os {
* Fd returns the integer Unix file descriptor referencing the open file.
* If f is closed, the file descriptor becomes invalid.
* If f is garbage collected, a finalizer may close the file descriptor,
- * making it invalid; see runtime.SetFinalizer for more information on when
- * a finalizer might be run. On Unix systems this will cause the SetDeadline
+ * making it invalid; see [runtime.SetFinalizer] for more information on when
+ * a finalizer might be run. On Unix systems this will cause the [File.SetDeadline]
* methods to stop working.
* Because file descriptors can be reused, the returned file descriptor may
- * only be closed through the Close method of f, or by its finalizer during
+ * only be closed through the [File.Close] method of f, or by its finalizer during
* garbage collection. Otherwise, during garbage collection the finalizer
* may close an unrelated file descriptor with the same (reused) number.
*
@@ -2216,7 +2299,7 @@ namespace os {
* It removes everything it can but returns the first error
* it encounters. If the path does not exist, RemoveAll
* returns nil (no error).
- * If there is an error, it will be of type *PathError.
+ * If there is an error, it will be of type [*PathError].
*/
(path: string): void
}
@@ -2269,7 +2352,7 @@ namespace os {
/**
* Getgroups returns a list of the numeric ids of groups that the caller belongs to.
*
- * On Windows, it returns syscall.EWINDOWS. See the os/user package
+ * On Windows, it returns [syscall.EWINDOWS]. See the [os/user] package
* for a possible alternative.
*/
(): Array
@@ -2300,17 +2383,17 @@ namespace os {
}
interface stat {
/**
- * Stat returns a FileInfo describing the named file.
- * If there is an error, it will be of type *PathError.
+ * Stat returns a [FileInfo] describing the named file.
+ * If there is an error, it will be of type [*PathError].
*/
(name: string): FileInfo
}
interface lstat {
/**
- * Lstat returns a FileInfo describing the named file.
+ * Lstat returns a [FileInfo] describing the named file.
* If the file is a symbolic link, the returned FileInfo
* describes the symbolic link. Lstat makes no attempt to follow the link.
- * If there is an error, it will be of type *PathError.
+ * If there is an error, it will be of type [*PathError].
*
* On Windows, if the file is a reparse point that is a surrogate for another
* named entity (such as a symbolic link or mounted folder), the returned
@@ -2320,8 +2403,8 @@ namespace os {
}
interface File {
/**
- * Stat returns the FileInfo structure describing file.
- * If there is an error, it will be of type *PathError.
+ * Stat returns the [FileInfo] structure describing file.
+ * If there is an error, it will be of type [*PathError].
*/
stat(): FileInfo
}
@@ -2337,7 +2420,8 @@ namespace os {
* opens the file for reading and writing, and returns the resulting file.
* The filename is generated by taking pattern and adding a random string to the end.
* If pattern includes a "*", the random string replaces the last "*".
- * If dir is the empty string, CreateTemp uses the default directory for temporary files, as returned by TempDir.
+ * The file is created with mode 0o600 (before umask).
+ * If dir is the empty string, CreateTemp uses the default directory for temporary files, as returned by [TempDir].
* Multiple programs or goroutines calling CreateTemp simultaneously will not choose the same file.
* The caller can use the file's Name method to find the pathname of the file.
* It is the caller's responsibility to remove the file when it is no longer needed.
@@ -2350,6 +2434,7 @@ namespace os {
* and returns the pathname of the new directory.
* The new directory's name is generated by adding a random string to the end of pattern.
* If pattern includes a "*", the random string replaces the last "*" instead.
+ * The directory is created with mode 0o700 (before umask).
* If dir is the empty string, MkdirTemp uses the default directory for temporary files, as returned by TempDir.
* Multiple programs or goroutines calling MkdirTemp simultaneously will not choose the same directory.
* It is the caller's responsibility to remove the directory when it is no longer needed.
@@ -2364,12 +2449,14 @@ namespace os {
}
/**
* File represents an open file descriptor.
+ *
+ * The methods of File are safe for concurrent use.
*/
- type _subzNURo = file
- interface File extends _subzNURo {
+ type _subcHuOE = file
+ interface File extends _subcHuOE {
}
/**
- * A FileInfo describes a file and is returned by Stat and Lstat.
+ * A FileInfo describes a file and is returned by [Stat] and [Lstat].
*/
interface FileInfo extends fs.FileInfo{}
/**
@@ -2377,7 +2464,7 @@ namespace os {
* The bits have the same definition on all systems, so that
* information about files can be moved from one system
* to another portably. Not all bits apply to all systems.
- * The only required bit is ModeDir for directories.
+ * The only required bit is [ModeDir] for directories.
*/
interface FileMode extends fs.FileMode{}
interface fileStat {
@@ -2392,7 +2479,7 @@ namespace os {
* For example, on Unix this means that the device and inode fields
* of the two underlying structures are identical; on other systems
* the decision may be based on the path names.
- * SameFile only applies to results returned by this package's Stat.
+ * SameFile only applies to results returned by this package's [Stat].
* It returns false in other cases.
*/
(fi1: FileInfo, fi2: FileInfo): boolean
@@ -2470,14 +2557,6 @@ namespace filepath {
*/
(pattern: string): Array
}
- /**
- * A lazybuf is a lazily constructed path buffer.
- * It supports append, reading previously appended bytes,
- * and retrieving the final string. It does not allocate a buffer
- * to hold the output until that output diverges from s.
- */
- interface lazybuf {
- }
interface clean {
/**
* Clean returns the shortest path name equivalent to path
@@ -2535,6 +2614,19 @@ namespace filepath {
*/
(path: string): boolean
}
+ interface localize {
+ /**
+ * Localize converts a slash-separated path into an operating system path.
+ * The input path must be a valid path as reported by [io/fs.ValidPath].
+ *
+ * Localize returns an error if the path cannot be represented by the operating system.
+ * For example, the path a\b is rejected on Windows, on which \ is a separator
+ * character and cannot be part of a filename.
+ *
+ * The path returned by Localize will always be local, as reported by IsLocal.
+ */
+ (path: string): string
+ }
interface toSlash {
/**
* ToSlash returns the result of replacing each separator character
@@ -2548,6 +2640,9 @@ namespace filepath {
* FromSlash returns the result of replacing each slash ('/') character
* in path with a separator character. Multiple slashes are replaced
* by multiple separators.
+ *
+ * See also the Localize function, which converts a slash-separated path
+ * as used by the io/fs package to an operating system path.
*/
(path: string): string
}
@@ -2601,6 +2696,12 @@ namespace filepath {
*/
(path: string): string
}
+ interface isAbs {
+ /**
+ * IsAbs reports whether the path is absolute.
+ */
+ (path: string): boolean
+ }
interface abs {
/**
* Abs returns an absolute representation of path.
@@ -2733,12 +2834,6 @@ namespace filepath {
*/
(path: string): string
}
- interface isAbs {
- /**
- * IsAbs reports whether the path is absolute.
- */
- (path: string): boolean
- }
interface hasPrefix {
/**
* HasPrefix exists for historical compatibility and should not be used.
@@ -2761,7 +2856,7 @@ namespace filepath {
* pipelines, or redirections typically done by shells. The package
* behaves more like C's "exec" family of functions. To expand glob
* patterns, either call the shell directly, taking care to escape any
- * dangerous input, or use the path/filepath package's Glob function.
+ * dangerous input, or use the [path/filepath] package's Glob function.
* To expand environment variables, use package os's ExpandEnv.
*
* Note that the examples in this package assume a Unix system.
@@ -2770,7 +2865,7 @@ namespace filepath {
*
* # Executables in the current directory
*
- * The functions Command and LookPath look for a program
+ * The functions [Command] and [LookPath] look for a program
* in the directories listed in the current path, following the
* conventions of the host operating system.
* Operating systems have for decades included the current
@@ -2781,10 +2876,10 @@ namespace filepath {
*
* To avoid those security problems, as of Go 1.19, this package will not resolve a program
* using an implicit or explicit path entry relative to the current directory.
- * That is, if you run exec.LookPath("go"), it will not successfully return
+ * That is, if you run [LookPath]("go"), it will not successfully return
* ./go on Unix nor .\go.exe on Windows, no matter how the path is configured.
* Instead, if the usual path algorithms would result in that answer,
- * these functions return an error err satisfying errors.Is(err, ErrDot).
+ * these functions return an error err satisfying [errors.Is](err, [ErrDot]).
*
* For example, consider these two program snippets:
*
@@ -2849,12 +2944,12 @@ namespace filepath {
namespace exec {
interface command {
/**
- * Command returns the Cmd struct to execute the named program with
+ * Command returns the [Cmd] struct to execute the named program with
* the given arguments.
*
* It sets only the Path and Args in the returned structure.
*
- * If name contains no path separators, Command uses LookPath to
+ * If name contains no path separators, Command uses [LookPath] to
* resolve name to a complete path if possible. Otherwise it uses name
* directly as Path.
*
@@ -2876,471 +2971,6 @@ namespace exec {
}
}
-namespace security {
- interface s256Challenge {
- /**
- * S256Challenge creates base64 encoded sha256 challenge string derived from code.
- * The padding of the result base64 string is stripped per [RFC 7636].
- *
- * [RFC 7636]: https://datatracker.ietf.org/doc/html/rfc7636#section-4.2
- */
- (code: string): string
- }
- interface md5 {
- /**
- * MD5 creates md5 hash from the provided plain text.
- */
- (text: string): string
- }
- interface sha256 {
- /**
- * SHA256 creates sha256 hash as defined in FIPS 180-4 from the provided text.
- */
- (text: string): string
- }
- interface sha512 {
- /**
- * SHA512 creates sha512 hash as defined in FIPS 180-4 from the provided text.
- */
- (text: string): string
- }
- interface hs256 {
- /**
- * HS256 creates a HMAC hash with sha256 digest algorithm.
- */
- (text: string, secret: string): string
- }
- interface hs512 {
- /**
- * HS512 creates a HMAC hash with sha512 digest algorithm.
- */
- (text: string, secret: string): string
- }
- interface equal {
- /**
- * Equal compares two hash strings for equality without leaking timing information.
- */
- (hash1: string, hash2: string): boolean
- }
- // @ts-ignore
- import crand = rand
- interface encrypt {
- /**
- * Encrypt encrypts "data" with the specified "key" (must be valid 32 char AES key).
- *
- * This method uses AES-256-GCM block cypher mode.
- */
- (data: string|Array, key: string): string
- }
- interface decrypt {
- /**
- * Decrypt decrypts encrypted text with key (must be valid 32 chars AES key).
- *
- * This method uses AES-256-GCM block cypher mode.
- */
- (cipherText: string, key: string): string|Array
- }
- interface parseUnverifiedJWT {
- /**
- * ParseUnverifiedJWT parses JWT and returns its claims
- * but DOES NOT verify the signature.
- *
- * It verifies only the exp, iat and nbf claims.
- */
- (token: string): jwt.MapClaims
- }
- interface parseJWT {
- /**
- * ParseJWT verifies and parses JWT and returns its claims.
- */
- (token: string, verificationKey: string): jwt.MapClaims
- }
- interface newJWT {
- /**
- * NewJWT generates and returns new HS256 signed JWT.
- */
- (payload: jwt.MapClaims, signingKey: string, secondsDuration: number): string
- }
- interface newToken {
- /**
- * Deprecated:
- * Consider replacing with NewJWT().
- *
- * NewToken is a legacy alias for NewJWT that generates a HS256 signed JWT.
- */
- (payload: jwt.MapClaims, signingKey: string, secondsDuration: number): string
- }
- // @ts-ignore
- import cryptoRand = rand
- // @ts-ignore
- import mathRand = rand
- interface randomString {
- /**
- * RandomString generates a cryptographically random string with the specified length.
- *
- * The generated string matches [A-Za-z0-9]+ and it's transparent to URL-encoding.
- */
- (length: number): string
- }
- interface randomStringWithAlphabet {
- /**
- * RandomStringWithAlphabet generates a cryptographically random string
- * with the specified length and characters set.
- *
- * It panics if for some reason rand.Int returns a non-nil error.
- */
- (length: number, alphabet: string): string
- }
- interface pseudorandomString {
- /**
- * PseudorandomString generates a pseudorandom string with the specified length.
- *
- * The generated string matches [A-Za-z0-9]+ and it's transparent to URL-encoding.
- *
- * For a cryptographically random string (but a little bit slower) use RandomString instead.
- */
- (length: number): string
- }
- interface pseudorandomStringWithAlphabet {
- /**
- * PseudorandomStringWithAlphabet generates a pseudorandom string
- * with the specified length and characters set.
- *
 - * For a cryptographically random string (but a little bit slower) use RandomStringWithAlphabet instead.
- */
- (length: number, alphabet: string): string
- }
-}
-
-namespace filesystem {
- /**
- * FileReader defines an interface for a file resource reader.
- */
- interface FileReader {
- [key:string]: any;
- open(): io.ReadSeekCloser
- }
- /**
- * File defines a single file [io.ReadSeekCloser] resource.
- *
- * The file could be from a local path, multipart/form-data header, etc.
- */
- interface File {
- reader: FileReader
- name: string
- originalName: string
- size: number
- }
- interface newFileFromPath {
- /**
- * NewFileFromPath creates a new File instance from the provided local file path.
- */
- (path: string): (File)
- }
- interface newFileFromBytes {
- /**
- * NewFileFromBytes creates a new File instance from the provided byte slice.
- */
- (b: string|Array, name: string): (File)
- }
- interface newFileFromMultipart {
- /**
- * NewFileFromMultipart creates a new File from the provided multipart header.
- */
- (mh: multipart.FileHeader): (File)
- }
- interface newFileFromUrl {
- /**
- * NewFileFromUrl creates a new File from the provided url by
 - * downloading the resource and loading it as a BytesReader.
- *
- * Example
- *
- * ```
- * ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
- * defer cancel()
- *
- * file, err := filesystem.NewFileFromUrl(ctx, "https://example.com/image.png")
- * ```
- */
- (ctx: context.Context, url: string): (File)
- }
- /**
- * MultipartReader defines a FileReader from [multipart.FileHeader].
- */
- interface MultipartReader {
- header?: multipart.FileHeader
- }
- interface MultipartReader {
- /**
- * Open implements the [filesystem.FileReader] interface.
- */
- open(): io.ReadSeekCloser
- }
- /**
- * PathReader defines a FileReader from a local file path.
- */
- interface PathReader {
- path: string
- }
- interface PathReader {
- /**
- * Open implements the [filesystem.FileReader] interface.
- */
- open(): io.ReadSeekCloser
- }
- /**
- * BytesReader defines a FileReader from bytes content.
- */
- interface BytesReader {
- bytes: string|Array
- }
- interface BytesReader {
- /**
- * Open implements the [filesystem.FileReader] interface.
- */
- open(): io.ReadSeekCloser
- }
- type _subRtcDW = bytes.Reader
- interface bytesReadSeekCloser extends _subRtcDW {
- }
- interface bytesReadSeekCloser {
- /**
- * Close implements the [io.ReadSeekCloser] interface.
- */
- close(): void
- }
- interface System {
- }
- interface newS3 {
- /**
- * NewS3 initializes an S3 filesystem instance.
- *
- * NB! Make sure to call `Close()` after you are done working with it.
- */
- (bucketName: string, region: string, endpoint: string, accessKey: string, secretKey: string, s3ForcePathStyle: boolean): (System)
- }
- interface newLocal {
- /**
- * NewLocal initializes a new local filesystem instance.
- *
- * NB! Make sure to call `Close()` after you are done working with it.
- */
- (dirPath: string): (System)
- }
- interface System {
- /**
- * SetContext assigns the specified context to the current filesystem.
- */
- setContext(ctx: context.Context): void
- }
- interface System {
- /**
- * Close releases any resources used for the related filesystem.
- */
- close(): void
- }
- interface System {
- /**
- * Exists checks if file with fileKey path exists or not.
- */
- exists(fileKey: string): boolean
- }
- interface System {
- /**
- * Attributes returns the attributes for the file with fileKey path.
- */
- attributes(fileKey: string): (blob.Attributes)
- }
- interface System {
- /**
- * GetFile returns a file content reader for the given fileKey.
- *
- * NB! Make sure to call `Close()` after you are done working with it.
- */
- getFile(fileKey: string): (blob.Reader)
- }
- interface System {
- /**
- * Copy copies the file stored at srcKey to dstKey.
- *
- * If dstKey file already exists, it is overwritten.
- */
- copy(srcKey: string, dstKey: string): void
- }
- interface System {
- /**
- * List returns a flat list with info for all files under the specified prefix.
- */
- list(prefix: string): Array<(blob.ListObject | undefined)>
- }
- interface System {
- /**
- * Upload writes content into the fileKey location.
- */
- upload(content: string|Array, fileKey: string): void
- }
- interface System {
- /**
- * UploadFile uploads the provided multipart file to the fileKey location.
- */
- uploadFile(file: File, fileKey: string): void
- }
- interface System {
- /**
- * UploadMultipart uploads the provided multipart file to the fileKey location.
- */
- uploadMultipart(fh: multipart.FileHeader, fileKey: string): void
- }
- interface System {
- /**
- * Delete deletes stored file at fileKey location.
- */
- delete(fileKey: string): void
- }
- interface System {
- /**
- * DeletePrefix deletes everything starting with the specified prefix.
- */
- deletePrefix(prefix: string): Array
- }
- interface System {
- /**
- * Serve serves the file at fileKey location to an HTTP response.
- *
- * If the `download` query parameter is used the file will be always served for
 - * download regardless of its type (aka. with "Content-Disposition: attachment").
- */
- serve(res: http.ResponseWriter, req: http.Request, fileKey: string, name: string): void
- }
- interface System {
- /**
- * CreateThumb creates a new thumb image for the file at originalKey location.
- * The new thumb file is stored at thumbKey location.
- *
- * thumbSize is in the format:
- * - 0xH (eg. 0x100) - resize to H height preserving the aspect ratio
- * - Wx0 (eg. 300x0) - resize to W width preserving the aspect ratio
- * - WxH (eg. 300x100) - resize and crop to WxH viewbox (from center)
- * - WxHt (eg. 300x100t) - resize and crop to WxH viewbox (from top)
- * - WxHb (eg. 300x100b) - resize and crop to WxH viewbox (from bottom)
- * - WxHf (eg. 300x100f) - fit inside a WxH viewbox (without cropping)
- */
- createThumb(originalKey: string, thumbKey: string, thumbSize: string): void
- }
- // @ts-ignore
- import v4 = signer
- // @ts-ignore
- import smithyhttp = http
- interface ignoredHeadersKey {
- }
-}
-
-/**
- * Package template is a thin wrapper around the standard html/template
- * and text/template packages that implements a convenient registry to
- * load and cache templates on the fly concurrently.
- *
- * It was created to assist the JSVM plugin HTML rendering, but could be used in other Go code.
- *
- * Example:
- *
- * ```
- * registry := template.NewRegistry()
- *
- * html1, err := registry.LoadFiles(
 - * // the files set will be parsed only once and then cached
- * "layout.html",
- * "content.html",
- * ).Render(map[string]any{"name": "John"})
- *
- * html2, err := registry.LoadFiles(
- * // reuse the already parsed and cached files set
- * "layout.html",
- * "content.html",
- * ).Render(map[string]any{"name": "Jane"})
- * ```
- */
-namespace template {
- interface newRegistry {
- /**
- * NewRegistry creates and initializes a new templates registry with
- * some defaults (eg. global "raw" template function for unescaped HTML).
- *
- * Use the Registry.Load* methods to load templates into the registry.
- */
- (): (Registry)
- }
- /**
- * Registry defines a templates registry that is safe to be used by multiple goroutines.
- *
- * Use the Registry.Load* methods to load templates into the registry.
- */
- interface Registry {
- }
- interface Registry {
- /**
- * AddFuncs registers new global template functions.
- *
- * The key of each map entry is the function name that will be used in the templates.
- * If a function with the map entry name already exists it will be replaced with the new one.
- *
- * The value of each map entry is a function that must have either a
- * single return value, or two return values of which the second has type error.
- *
- * Example:
- *
- * r.AddFuncs(map[string]any{
- * ```
- * "toUpper": func(str string) string {
 - * return strings.ToUpper(str)
- * },
- * ...
- * ```
- * })
- */
- addFuncs(funcs: _TygojaDict): (Registry)
- }
- interface Registry {
- /**
- * LoadFiles caches (if not already) the specified filenames set as a
- * single template and returns a ready to use Renderer instance.
- *
- * There must be at least 1 filename specified.
- */
- loadFiles(...filenames: string[]): (Renderer)
- }
- interface Registry {
- /**
- * LoadString caches (if not already) the specified inline string as a
- * single template and returns a ready to use Renderer instance.
- */
- loadString(text: string): (Renderer)
- }
- interface Registry {
- /**
- * LoadFS caches (if not already) the specified fs and globPatterns
- * pair as single template and returns a ready to use Renderer instance.
- *
- * There must be at least 1 file matching the provided globPattern(s)
 - * (note that most file names serve as glob patterns matching themselves).
- */
- loadFS(fsys: fs.FS, ...globPatterns: string[]): (Renderer)
- }
- /**
- * Renderer defines a single parsed template.
- */
- interface Renderer {
- }
- interface Renderer {
- /**
- * Render executes the template with the specified data as the dot object
- * and returns the result as plain string.
- */
- render(data: any): string
- }
-}
-
/**
* Package validation provides configurable and extensible rules for validating data of various types.
*/
@@ -3359,25 +2989,6 @@ namespace ozzo_validation {
}
}
-namespace middleware {
- interface bodyLimit {
- /**
- * BodyLimit returns a BodyLimit middleware.
- *
- * BodyLimit middleware sets the maximum allowed size for a request body, if the size exceeds the configured limit, it
- * sends "413 - Request Entity Too Large" response. The BodyLimit is determined based on both `Content-Length` request
- * header and actual content read, which makes it super secure.
- */
- (limitBytes: number): echo.MiddlewareFunc
- }
- interface gzip {
- /**
- * Gzip returns a middleware which compresses HTTP response using gzip compression scheme.
- */
- (): echo.MiddlewareFunc
- }
-}
-
/**
* Package dbx provides a set of DB-agnostic and easy-to-use query building methods for relational databases.
*/
@@ -3714,14 +3325,14 @@ namespace dbx {
/**
* MssqlBuilder is the builder for SQL Server databases.
*/
- type _subrFKDD = BaseBuilder
- interface MssqlBuilder extends _subrFKDD {
+ type _subtdfax = BaseBuilder
+ interface MssqlBuilder extends _subtdfax {
}
/**
* MssqlQueryBuilder is the query builder for SQL Server databases.
*/
- type _submHtvV = BaseQueryBuilder
- interface MssqlQueryBuilder extends _submHtvV {
+ type _sublQypF = BaseQueryBuilder
+ interface MssqlQueryBuilder extends _sublQypF {
}
interface newMssqlBuilder {
/**
@@ -3792,8 +3403,8 @@ namespace dbx {
/**
* MysqlBuilder is the builder for MySQL databases.
*/
- type _subIVLoN = BaseBuilder
- interface MysqlBuilder extends _subIVLoN {
+ type _subtKrsG = BaseBuilder
+ interface MysqlBuilder extends _subtKrsG {
}
interface newMysqlBuilder {
/**
@@ -3868,14 +3479,14 @@ namespace dbx {
/**
* OciBuilder is the builder for Oracle databases.
*/
- type _subfiTVV = BaseBuilder
- interface OciBuilder extends _subfiTVV {
+ type _subTevke = BaseBuilder
+ interface OciBuilder extends _subTevke {
}
/**
* OciQueryBuilder is the query builder for Oracle databases.
*/
- type _subrSBRI = BaseQueryBuilder
- interface OciQueryBuilder extends _subrSBRI {
+ type _subFRAPn = BaseQueryBuilder
+ interface OciQueryBuilder extends _subFRAPn {
}
interface newOciBuilder {
/**
@@ -3938,8 +3549,8 @@ namespace dbx {
/**
* PgsqlBuilder is the builder for PostgreSQL databases.
*/
- type _subtFJti = BaseBuilder
- interface PgsqlBuilder extends _subtFJti {
+ type _subKKaBH = BaseBuilder
+ interface PgsqlBuilder extends _subKKaBH {
}
interface newPgsqlBuilder {
/**
@@ -4006,8 +3617,8 @@ namespace dbx {
/**
* SqliteBuilder is the builder for SQLite databases.
*/
- type _subrBNop = BaseBuilder
- interface SqliteBuilder extends _subrBNop {
+ type _subRyJlz = BaseBuilder
+ interface SqliteBuilder extends _subRyJlz {
}
interface newSqliteBuilder {
/**
@@ -4106,8 +3717,8 @@ namespace dbx {
/**
* StandardBuilder is the builder that is used by DB for an unknown driver.
*/
- type _subesQFA = BaseBuilder
- interface StandardBuilder extends _subesQFA {
+ type _subkIcuU = BaseBuilder
+ interface StandardBuilder extends _subkIcuU {
}
interface newStandardBuilder {
/**
@@ -4173,8 +3784,8 @@ namespace dbx {
* DB enhances sql.DB by providing a set of DB-agnostic query building methods.
* DB allows easier query building and population of data into Go variables.
*/
- type _subkWabA = Builder
- interface DB extends _subkWabA {
+ type _subExmlQ = Builder
+ interface DB extends _subExmlQ {
/**
* FieldMapper maps struct fields to DB columns. Defaults to DefaultFieldMapFunc.
*/
@@ -4978,8 +4589,8 @@ namespace dbx {
* Rows enhances sql.Rows by providing additional data query methods.
* Rows can be obtained by calling Query.Rows(). It is mainly used to populate data row by row.
*/
- type _subrMzGi = sql.Rows
- interface Rows extends _subrMzGi {
+ type _subSZGyU = sql.Rows
+ interface Rows extends _subSZGyU {
}
interface Rows {
/**
@@ -5337,8 +4948,8 @@ namespace dbx {
}): string }
interface structInfo {
}
- type _subSCEkW = structInfo
- interface structValue extends _subSCEkW {
+ type _subOCBls = structInfo
+ interface structValue extends _subOCBls {
}
interface fieldInfo {
}
@@ -5377,8 +4988,8 @@ namespace dbx {
/**
* Tx enhances sql.Tx with additional querying methods.
*/
- type _subXvIkD = Builder
- interface Tx extends _subXvIkD {
+ type _subWmBdO = Builder
+ interface Tx extends _subWmBdO {
}
interface Tx {
/**
@@ -5394,57 +5005,403 @@ namespace dbx {
}
}
-/**
- * Package tokens implements various user and admin tokens generation methods.
- */
-namespace tokens {
- interface newAdminAuthToken {
+namespace security {
+ interface s256Challenge {
/**
- * NewAdminAuthToken generates and returns a new admin authentication token.
+ * S256Challenge creates base64 encoded sha256 challenge string derived from code.
+ * The padding of the result base64 string is stripped per [RFC 7636].
+ *
+ * [RFC 7636]: https://datatracker.ietf.org/doc/html/rfc7636#section-4.2
*/
- (app: CoreApp, admin: models.Admin): string
+ (code: string): string
}
- interface newAdminResetPasswordToken {
+ interface md5 {
/**
- * NewAdminResetPasswordToken generates and returns a new admin password reset request token.
+ * MD5 creates md5 hash from the provided plain text.
*/
- (app: CoreApp, admin: models.Admin): string
+ (text: string): string
}
- interface newAdminFileToken {
+ interface sha256 {
/**
- * NewAdminFileToken generates and returns a new admin private file access token.
+ * SHA256 creates sha256 hash as defined in FIPS 180-4 from the provided text.
*/
- (app: CoreApp, admin: models.Admin): string
+ (text: string): string
}
- interface newRecordAuthToken {
+ interface sha512 {
/**
- * NewRecordAuthToken generates and returns a new auth record authentication token.
+ * SHA512 creates sha512 hash as defined in FIPS 180-4 from the provided text.
*/
- (app: CoreApp, record: models.Record): string
+ (text: string): string
}
- interface newRecordVerifyToken {
+ interface hs256 {
/**
- * NewRecordVerifyToken generates and returns a new record verification token.
+ * HS256 creates a HMAC hash with sha256 digest algorithm.
*/
- (app: CoreApp, record: models.Record): string
+ (text: string, secret: string): string
}
- interface newRecordResetPasswordToken {
+ interface hs512 {
/**
- * NewRecordResetPasswordToken generates and returns a new auth record password reset request token.
+ * HS512 creates a HMAC hash with sha512 digest algorithm.
*/
- (app: CoreApp, record: models.Record): string
+ (text: string, secret: string): string
}
- interface newRecordChangeEmailToken {
+ interface equal {
/**
- * NewRecordChangeEmailToken generates and returns a new auth record change email request token.
+ * Equal compares two hash strings for equality without leaking timing information.
*/
- (app: CoreApp, record: models.Record, newEmail: string): string
+ (hash1: string, hash2: string): boolean
}
- interface newRecordFileToken {
+ // @ts-ignore
+ import crand = rand
+ interface encrypt {
/**
- * NewRecordFileToken generates and returns a new record private file access token.
+ * Encrypt encrypts "data" with the specified "key" (must be valid 32 char AES key).
+ *
+ * This method uses AES-256-GCM block cipher mode.
*/
- (app: CoreApp, record: models.Record): string
+ (data: string|Array, key: string): string
+ }
+ interface decrypt {
+ /**
+ * Decrypt decrypts encrypted text with key (must be valid 32 chars AES key).
+ *
+ * This method uses AES-256-GCM block cipher mode.
+ */
+ (cipherText: string, key: string): string|Array
+ }
+ interface parseUnverifiedJWT {
+ /**
+ * ParseUnverifiedJWT parses JWT and returns its claims
+ * but DOES NOT verify the signature.
+ *
+ * It verifies only the exp, iat and nbf claims.
+ */
+ (token: string): jwt.MapClaims
+ }
+ interface parseJWT {
+ /**
+ * ParseJWT verifies and parses JWT and returns its claims.
+ */
+ (token: string, verificationKey: string): jwt.MapClaims
+ }
+ interface newJWT {
+ /**
+ * NewJWT generates and returns new HS256 signed JWT.
+ */
+ (payload: jwt.MapClaims, signingKey: string, duration: time.Duration): string
+ }
+ // @ts-ignore
+ import cryptoRand = rand
+ // @ts-ignore
+ import mathRand = rand
+ interface randomString {
+ /**
+ * RandomString generates a cryptographically random string with the specified length.
+ *
+ * The generated string matches [A-Za-z0-9]+ and it's transparent to URL-encoding.
+ */
+ (length: number): string
+ }
+ interface randomStringWithAlphabet {
+ /**
+ * RandomStringWithAlphabet generates a cryptographically random string
+ * with the specified length and characters set.
+ *
+ * It panics if for some reason rand.Int returns a non-nil error.
+ */
+ (length: number, alphabet: string): string
+ }
+ interface pseudorandomString {
+ /**
+ * PseudorandomString generates a pseudorandom string with the specified length.
+ *
+ * The generated string matches [A-Za-z0-9]+ and it's transparent to URL-encoding.
+ *
+ * For a cryptographically random string (but a little bit slower) use RandomString instead.
+ */
+ (length: number): string
+ }
+ interface pseudorandomStringWithAlphabet {
+ /**
+ * PseudorandomStringWithAlphabet generates a pseudorandom string
+ * with the specified length and characters set.
+ *
+ * For a cryptographically random string (but a little bit slower) use RandomStringWithAlphabet instead.
+ */
+ (length: number, alphabet: string): string
+ }
+ interface randomStringByRegex {
+ /**
+ * RandomStringByRegex generates a random string matching the regex pattern.
+ * If optFlags is not set, fallbacks to [syntax.Perl].
+ *
+ * NB! While the source of the randomness comes from [crypto/rand] this method
+ * is not recommended to be used on its own in critical secure contexts because
+ * the generated length could vary too much on the used pattern and may not be
+ * as secure as simply calling [security.RandomString].
+ * If you still insist on using it for such purposes, consider at least
+ * a large enough minimum length for the generated string, e.g. `[a-z0-9]{30}`.
+ *
+ * This function is inspired by github.com/pipe01/revregexp, github.com/lucasjones/reggen and other similar packages.
+ */
+ (pattern: string, ...optFlags: syntax.Flags[]): string
+ }
+}
+
+namespace filesystem {
+ /**
+ * FileReader defines an interface for a file resource reader.
+ */
+ interface FileReader {
+ [key:string]: any;
+ open(): io.ReadSeekCloser
+ }
+ /**
+ * File defines a single file [io.ReadSeekCloser] resource.
+ *
+ * The file could be from a local path, multipart/form-data header, etc.
+ */
+ interface File {
+ reader: FileReader
+ name: string
+ originalName: string
+ size: number
+ }
+ interface File {
+ /**
+ * AsMap implements [core.mapExtractor] and returns a value suitable
+ * to be used in an API rule expression.
+ */
+ asMap(): _TygojaDict
+ }
+ interface newFileFromPath {
+ /**
+ * NewFileFromPath creates a new File instance from the provided local file path.
+ */
+ (path: string): (File)
+ }
+ interface newFileFromBytes {
+ /**
+ * NewFileFromBytes creates a new File instance from the provided byte slice.
+ */
+ (b: string|Array, name: string): (File)
+ }
+ interface newFileFromMultipart {
+ /**
+ * NewFileFromMultipart creates a new File from the provided multipart header.
+ */
+ (mh: multipart.FileHeader): (File)
+ }
+ interface newFileFromURL {
+ /**
+ * NewFileFromURL creates a new File from the provided url by
+ * downloading the resource and loading it as a BytesReader.
+ *
+ * Example
+ *
+ * ```
+ * ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+ * defer cancel()
+ *
+ * file, err := filesystem.NewFileFromURL(ctx, "https://example.com/image.png")
+ * ```
+ */
+ (ctx: context.Context, url: string): (File)
+ }
+ /**
+ * MultipartReader defines a FileReader from [multipart.FileHeader].
+ */
+ interface MultipartReader {
+ header?: multipart.FileHeader
+ }
+ interface MultipartReader {
+ /**
+ * Open implements the [filesystem.FileReader] interface.
+ */
+ open(): io.ReadSeekCloser
+ }
+ /**
+ * PathReader defines a FileReader from a local file path.
+ */
+ interface PathReader {
+ path: string
+ }
+ interface PathReader {
+ /**
+ * Open implements the [filesystem.FileReader] interface.
+ */
+ open(): io.ReadSeekCloser
+ }
+ /**
+ * BytesReader defines a FileReader from bytes content.
+ */
+ interface BytesReader {
+ bytes: string|Array
+ }
+ interface BytesReader {
+ /**
+ * Open implements the [filesystem.FileReader] interface.
+ */
+ open(): io.ReadSeekCloser
+ }
+ type _subteRxd = bytes.Reader
+ interface bytesReadSeekCloser extends _subteRxd {
+ }
+ interface bytesReadSeekCloser {
+ /**
+ * Close implements the [io.ReadSeekCloser] interface.
+ */
+ close(): void
+ }
+ interface System {
+ }
+ interface newS3 {
+ /**
+ * NewS3 initializes an S3 filesystem instance.
+ *
+ * NB! Make sure to call `Close()` after you are done working with it.
+ */
+ (bucketName: string, region: string, endpoint: string, accessKey: string, secretKey: string, s3ForcePathStyle: boolean): (System)
+ }
+ interface newLocal {
+ /**
+ * NewLocal initializes a new local filesystem instance.
+ *
+ * NB! Make sure to call `Close()` after you are done working with it.
+ */
+ (dirPath: string): (System)
+ }
+ interface System {
+ /**
+ * SetContext assigns the specified context to the current filesystem.
+ */
+ setContext(ctx: context.Context): void
+ }
+ interface System {
+ /**
+ * Close releases any resources used for the related filesystem.
+ */
+ close(): void
+ }
+ interface System {
+ /**
+ * Exists checks if file with fileKey path exists or not.
+ *
+ * If the file doesn't exist returns false and ErrNotFound.
+ */
+ exists(fileKey: string): boolean
+ }
+ interface System {
+ /**
+ * Attributes returns the attributes for the file with fileKey path.
+ *
+ * If the file doesn't exist it returns ErrNotFound.
+ */
+ attributes(fileKey: string): (blob.Attributes)
+ }
+ interface System {
+ /**
+ * GetFile returns a file content reader for the given fileKey.
+ *
+ * NB! Make sure to call Close() on the file after you are done working with it.
+ *
+ * If the file doesn't exist returns ErrNotFound.
+ */
+ getFile(fileKey: string): (blob.Reader)
+ }
+ interface System {
+ /**
+ * Copy copies the file stored at srcKey to dstKey.
+ *
+ * If srcKey file doesn't exist, it returns ErrNotFound.
+ *
+ * If dstKey file already exists, it is overwritten.
+ */
+ copy(srcKey: string, dstKey: string): void
+ }
+ interface System {
+ /**
+ * List returns a flat list with info for all files under the specified prefix.
+ */
+ list(prefix: string): Array<(blob.ListObject | undefined)>
+ }
+ interface System {
+ /**
+ * Upload writes content into the fileKey location.
+ */
+ upload(content: string|Array, fileKey: string): void
+ }
+ interface System {
+ /**
+ * UploadFile uploads the provided File to the fileKey location.
+ */
+ uploadFile(file: File, fileKey: string): void
+ }
+ interface System {
+ /**
+ * UploadMultipart uploads the provided multipart file to the fileKey location.
+ */
+ uploadMultipart(fh: multipart.FileHeader, fileKey: string): void
+ }
+ interface System {
+ /**
+ * Delete deletes stored file at fileKey location.
+ *
+ * If the file doesn't exist returns ErrNotFound.
+ */
+ delete(fileKey: string): void
+ }
+ interface System {
+ /**
+ * DeletePrefix deletes everything starting with the specified prefix.
+ *
+ * The prefix could be subpath (ex. "/a/b/") or filename prefix (ex. "/a/b/file_").
+ */
+ deletePrefix(prefix: string): Array
+ }
+ interface System {
+ /**
+ * Checks if the provided dir prefix doesn't have any files.
+ *
+ * A trailing slash will be appended to a non-empty dir string argument
+ * to ensure that the checked prefix is a "directory".
+ *
+ * Returns "false" in case the dir has at least one file, otherwise - "true".
+ */
+ isEmptyDir(dir: string): boolean
+ }
+ interface System {
+ /**
+ * Serve serves the file at fileKey location to an HTTP response.
+ *
+ * If the `download` query parameter is used the file will be always served for
+ * download no matter of its type (aka. with "Content-Disposition: attachment").
+ *
+ * Internally this method uses [http.ServeContent] so Range requests,
+ * If-Match, If-Unmodified-Since, etc. headers are handled transparently.
+ */
+ serve(res: http.ResponseWriter, req: http.Request, fileKey: string, name: string): void
+ }
+ interface System {
+ /**
+ * CreateThumb creates a new thumb image for the file at originalKey location.
+ * The new thumb file is stored at thumbKey location.
+ *
+ * thumbSize is in the format:
+ * - 0xH (eg. 0x100) - resize to H height preserving the aspect ratio
+ * - Wx0 (eg. 300x0) - resize to W width preserving the aspect ratio
+ * - WxH (eg. 300x100) - resize and crop to WxH viewbox (from center)
+ * - WxHt (eg. 300x100t) - resize and crop to WxH viewbox (from top)
+ * - WxHb (eg. 300x100b) - resize and crop to WxH viewbox (from bottom)
+ * - WxHf (eg. 300x100f) - fit inside a WxH viewbox (without cropping)
+ */
+ createThumb(originalKey: string, thumbKey: string, thumbSize: string): void
+ }
+ // @ts-ignore
+ import v4 = signer
+ // @ts-ignore
+ import smithyhttp = http
+ interface ignoredHeadersKey {
}
}
@@ -5453,203 +5410,146 @@ namespace tokens {
* emails like forgotten password, verification, etc.
*/
namespace mails {
- interface sendAdminPasswordReset {
+ interface sendRecordAuthAlert {
/**
- * SendAdminPasswordReset sends a password reset request email to the specified admin.
+ * SendRecordAuthAlert sends a new device login alert to the specified auth record.
*/
- (app: CoreApp, admin: models.Admin): void
+ (app: CoreApp, authRecord: core.Record): void
+ }
+ interface sendRecordOTP {
+ /**
+ * SendRecordOTP sends OTP email to the specified auth record.
+ */
+ (app: CoreApp, authRecord: core.Record, otpId: string, pass: string): void
}
interface sendRecordPasswordReset {
/**
- * SendRecordPasswordReset sends a password reset request email to the specified user.
+ * SendRecordPasswordReset sends a password reset request email to the specified auth record.
*/
- (app: CoreApp, authRecord: models.Record): void
+ (app: CoreApp, authRecord: core.Record): void
}
interface sendRecordVerification {
/**
- * SendRecordVerification sends a verification request email to the specified user.
+ * SendRecordVerification sends a verification request email to the specified auth record.
*/
- (app: CoreApp, authRecord: models.Record): void
+ (app: CoreApp, authRecord: core.Record): void
}
interface sendRecordChangeEmail {
/**
- * SendRecordChangeEmail sends a change email confirmation email to the specified user.
+ * SendRecordChangeEmail sends a change email confirmation email to the specified auth record.
*/
- (app: CoreApp, record: models.Record, newEmail: string): void
+ (app: CoreApp, authRecord: core.Record, newEmail: string): void
}
}
/**
- * Package models implements various services used for request data
- * validation and applying changes to existing DB models through the app Dao.
+ * Package template is a thin wrapper around the standard html/template
+ * and text/template packages that implements a convenient registry to
+ * load and cache templates on the fly concurrently.
+ *
+ * It was created to assist the JSVM plugin HTML rendering, but could be used in other Go code.
+ *
+ * Example:
+ *
+ * ```
+ * registry := template.NewRegistry()
+ *
+ * html1, err := registry.LoadFiles(
+ * // the files set will be parsed only once and then cached
+ * "layout.html",
+ * "content.html",
+ * ).Render(map[string]any{"name": "John"})
+ *
+ * html2, err := registry.LoadFiles(
+ * // reuse the already parsed and cached files set
+ * "layout.html",
+ * "content.html",
+ * ).Render(map[string]any{"name": "Jane"})
+ * ```
*/
+namespace template {
+ interface newRegistry {
+ /**
+ * NewRegistry creates and initializes a new templates registry with
+ * some defaults (eg. global "raw" template function for unescaped HTML).
+ *
+ * Use the Registry.Load* methods to load templates into the registry.
+ */
+ (): (Registry)
+ }
+ /**
+ * Registry defines a templates registry that is safe to be used by multiple goroutines.
+ *
+ * Use the Registry.Load* methods to load templates into the registry.
+ */
+ interface Registry {
+ }
+ interface Registry {
+ /**
+ * AddFuncs registers new global template functions.
+ *
+ * The key of each map entry is the function name that will be used in the templates.
+ * If a function with the map entry name already exists it will be replaced with the new one.
+ *
+ * The value of each map entry is a function that must have either a
+ * single return value, or two return values of which the second has type error.
+ *
+ * Example:
+ *
+ * ```
+ * r.AddFuncs(map[string]any{
+ * "toUpper": func(str string) string {
+ * return strings.ToUpper(str)
+ * },
+ * ...
+ * })
+ * ```
+ */
+ addFuncs(funcs: _TygojaDict): (Registry)
+ }
+ interface Registry {
+ /**
+ * LoadFiles caches (if not already) the specified filenames set as a
+ * single template and returns a ready to use Renderer instance.
+ *
+ * There must be at least 1 filename specified.
+ */
+ loadFiles(...filenames: string[]): (Renderer)
+ }
+ interface Registry {
+ /**
+ * LoadString caches (if not already) the specified inline string as a
+ * single template and returns a ready to use Renderer instance.
+ */
+ loadString(text: string): (Renderer)
+ }
+ interface Registry {
+ /**
+ * LoadFS caches (if not already) the specified fs and globPatterns
+ * pair as single template and returns a ready to use Renderer instance.
+ *
+ * There must be at least 1 file matching the provided globPattern(s)
+ * (note that most file names serve as glob patterns matching themselves).
+ */
+ loadFS(fsys: fs.FS, ...globPatterns: string[]): (Renderer)
+ }
+ /**
+ * Renderer defines a single parsed template.
+ */
+ interface Renderer {
+ }
+ interface Renderer {
+ /**
+ * Render executes the template with the specified data as the dot object
+ * and returns the result as plain string.
+ */
+ render(data: any): string
+ }
+}
+
namespace forms {
// @ts-ignore
import validation = ozzo_validation
- /**
- * AdminLogin is an admin email/pass login form.
- */
- interface AdminLogin {
- identity: string
- password: string
- }
- interface newAdminLogin {
- /**
- * NewAdminLogin creates a new [AdminLogin] form initialized with
- * the provided [CoreApp] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp): (AdminLogin)
- }
- interface AdminLogin {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface AdminLogin {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface AdminLogin {
- /**
- * Submit validates and submits the admin form.
- * On success returns the authorized admin model.
- *
- * You can optionally provide a list of InterceptorFunc to
- * further modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): (models.Admin)
- }
- /**
- * AdminPasswordResetConfirm is an admin password reset confirmation form.
- */
- interface AdminPasswordResetConfirm {
- token: string
- password: string
- passwordConfirm: string
- }
- interface newAdminPasswordResetConfirm {
- /**
- * NewAdminPasswordResetConfirm creates a new [AdminPasswordResetConfirm]
- * form initialized with from the provided [CoreApp] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp): (AdminPasswordResetConfirm)
- }
- interface AdminPasswordResetConfirm {
- /**
- * SetDao replaces the form Dao instance with the provided one.
- *
- * This is useful if you want to use a specific transaction Dao instance
- * instead of the default app.Dao().
- */
- setDao(dao: daos.Dao): void
- }
- interface AdminPasswordResetConfirm {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface AdminPasswordResetConfirm {
- /**
- * Submit validates and submits the admin password reset confirmation form.
- * On success returns the updated admin model associated to `form.Token`.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): (models.Admin)
- }
- /**
- * AdminPasswordResetRequest is an admin password reset request form.
- */
- interface AdminPasswordResetRequest {
- email: string
- }
- interface newAdminPasswordResetRequest {
- /**
- * NewAdminPasswordResetRequest creates a new [AdminPasswordResetRequest]
- * form initialized with from the provided [CoreApp] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp): (AdminPasswordResetRequest)
- }
- interface AdminPasswordResetRequest {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface AdminPasswordResetRequest {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- *
- * This method doesn't verify that admin with `form.Email` exists (this is done on Submit).
- */
- validate(): void
- }
- interface AdminPasswordResetRequest {
- /**
- * Submit validates and submits the form.
- * On success sends a password reset email to the `form.Email` admin.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): void
- }
- /**
- * AdminUpsert is a [models.Admin] upsert (create/update) form.
- */
- interface AdminUpsert {
- id: string
- avatar: number
- email: string
- password: string
- passwordConfirm: string
- }
- interface newAdminUpsert {
- /**
- * NewAdminUpsert creates a new [AdminUpsert] form with initializer
- * config created from the provided [CoreApp] and [models.Admin] instances
- * (for create you could pass a pointer to an empty Admin - `&models.Admin{}`).
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp, admin: models.Admin): (AdminUpsert)
- }
- interface AdminUpsert {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface AdminUpsert {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface AdminUpsert {
- /**
- * Submit validates the form and upserts the form admin model.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): void
- }
/**
* AppleClientSecretCreate is a form struct to generate a new Apple Client Secret.
*
@@ -5700,744 +5600,98 @@ namespace forms {
*/
submit(): string
}
- /**
- * BackupCreate is a request form for creating a new app backup.
- */
- interface BackupCreate {
- name: string
- }
- interface newBackupCreate {
- /**
- * NewBackupCreate creates new BackupCreate request form.
- */
- (app: CoreApp): (BackupCreate)
- }
- interface BackupCreate {
- /**
- * SetContext replaces the default form context with the provided one.
- */
- setContext(ctx: context.Context): void
- }
- interface BackupCreate {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface BackupCreate {
- /**
- * Submit validates the form and creates the app backup.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before creating the backup.
- */
- submit(...interceptors: InterceptorFunc[]): void
- }
- /**
- * BackupUpload is a request form for uploading a new app backup.
- */
- interface BackupUpload {
- file?: filesystem.File
- }
- interface newBackupUpload {
- /**
- * NewBackupUpload creates new BackupUpload request form.
- */
- (app: CoreApp): (BackupUpload)
- }
- interface BackupUpload {
- /**
- * SetContext replaces the default form upload context with the provided one.
- */
- setContext(ctx: context.Context): void
- }
- interface BackupUpload {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface BackupUpload {
- /**
- * Submit validates the form and upload the backup file.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before uploading the backup.
- */
- submit(...interceptors: InterceptorFunc[]): void
- }
- /**
- * InterceptorNextFunc is a interceptor handler function.
- * Usually used in combination with InterceptorFunc.
- */
- interface InterceptorNextFunc {(t: T): void }
- /**
- * InterceptorFunc defines a single interceptor function that
- * will execute the provided next func handler.
- */
- interface InterceptorFunc {(next: InterceptorNextFunc): InterceptorNextFunc }
- /**
- * CollectionUpsert is a [models.Collection] upsert (create/update) form.
- */
- interface CollectionUpsert {
- id: string
- type: string
- name: string
- system: boolean
- schema: schema.Schema
- indexes: types.JsonArray
- listRule?: string
- viewRule?: string
- createRule?: string
- updateRule?: string
- deleteRule?: string
- options: types.JsonMap
- }
- interface newCollectionUpsert {
- /**
- * NewCollectionUpsert creates a new [CollectionUpsert] form with initializer
- * config created from the provided [CoreApp] and [models.Collection] instances
- * (for create you could pass a pointer to an empty Collection - `&models.Collection{}`).
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp, collection: models.Collection): (CollectionUpsert)
- }
- interface CollectionUpsert {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface CollectionUpsert {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface CollectionUpsert {
- /**
- * Submit validates the form and upserts the form's Collection model.
- *
- * On success the related record table schema will be auto updated.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): void
- }
- /**
- * CollectionsImport is a form model to bulk import
- * (create, replace and delete) collections from a user provided list.
- */
- interface CollectionsImport {
- collections: Array<(models.Collection | undefined)>
- deleteMissing: boolean
- }
- interface newCollectionsImport {
- /**
- * NewCollectionsImport creates a new [CollectionsImport] form with
- * initialized with from the provided [CoreApp] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp): (CollectionsImport)
- }
- interface CollectionsImport {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface CollectionsImport {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface CollectionsImport {
- /**
- * Submit applies the import, aka.:
- * - imports the form collections (create or replace)
- * - sync the collection changes with their related records table
- * - ensures the integrity of the imported structure (aka. run validations for each collection)
- * - if [form.DeleteMissing] is set, deletes all local collections that are not found in the imports list
- *
- * All operations are wrapped in a single transaction that are
- * rollbacked on the first encountered error.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc>[]): void
- }
- /**
- * RealtimeSubscribe is a realtime subscriptions request form.
- */
- interface RealtimeSubscribe {
- clientId: string
- subscriptions: Array
- }
- interface newRealtimeSubscribe {
- /**
- * NewRealtimeSubscribe creates new RealtimeSubscribe request form.
- */
- (): (RealtimeSubscribe)
- }
- interface RealtimeSubscribe {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- /**
- * RecordEmailChangeConfirm is an auth record email change confirmation form.
- */
- interface RecordEmailChangeConfirm {
- token: string
- password: string
- }
- interface newRecordEmailChangeConfirm {
- /**
- * NewRecordEmailChangeConfirm creates a new [RecordEmailChangeConfirm] form
- * initialized with from the provided [CoreApp] and [models.Collection] instances.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp, collection: models.Collection): (RecordEmailChangeConfirm)
- }
- interface RecordEmailChangeConfirm {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface RecordEmailChangeConfirm {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface RecordEmailChangeConfirm {
- /**
- * Submit validates and submits the auth record email change confirmation form.
- * On success returns the updated auth record associated to `form.Token`.
- *
- * You can optionally provide a list of InterceptorFunc to
- * further modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): (models.Record)
- }
- /**
- * RecordEmailChangeRequest is an auth record email change request form.
- */
- interface RecordEmailChangeRequest {
- newEmail: string
- }
- interface newRecordEmailChangeRequest {
- /**
- * NewRecordEmailChangeRequest creates a new [RecordEmailChangeRequest] form
- * initialized with from the provided [CoreApp] and [models.Record] instances.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp, record: models.Record): (RecordEmailChangeRequest)
- }
- interface RecordEmailChangeRequest {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface RecordEmailChangeRequest {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface RecordEmailChangeRequest {
- /**
- * Submit validates and sends the change email request.
- *
- * You can optionally provide a list of InterceptorFunc to
- * further modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): void
- }
- /**
- * RecordOAuth2LoginData defines the OA
- */
- interface RecordOAuth2LoginData {
- externalAuth?: models.ExternalAuth
- record?: models.Record
- oAuth2User?: auth.AuthUser
- providerClient: auth.Provider
- }
- /**
- * BeforeOAuth2RecordCreateFunc defines a callback function that will
- * be called before OAuth2 new Record creation.
- */
- interface BeforeOAuth2RecordCreateFunc {(createForm: RecordUpsert, authRecord: models.Record, authUser: auth.AuthUser): void }
- /**
- * RecordOAuth2Login is an auth record OAuth2 login form.
- */
- interface RecordOAuth2Login {
- /**
- * The name of the OAuth2 client provider (eg. "google")
- */
- provider: string
- /**
- * The authorization code returned from the initial request.
- */
- code: string
- /**
- * The optional PKCE code verifier as part of the code_challenge sent with the initial request.
- */
- codeVerifier: string
- /**
- * The redirect url sent with the initial request.
- */
- redirectUrl: string
- /**
- * Additional data that will be used for creating a new auth record
- * if an existing OAuth2 account doesn't exist.
- */
- createData: _TygojaDict
- }
- interface newRecordOAuth2Login {
- /**
- * NewRecordOAuth2Login creates a new [RecordOAuth2Login] form with
- * initialized with from the provided [CoreApp] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp, collection: models.Collection, optAuthRecord: models.Record): (RecordOAuth2Login)
- }
- interface RecordOAuth2Login {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface RecordOAuth2Login {
- /**
- * SetBeforeNewRecordCreateFunc sets a before OAuth2 record create callback handler.
- */
- setBeforeNewRecordCreateFunc(f: BeforeOAuth2RecordCreateFunc): void
- }
- interface RecordOAuth2Login {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface RecordOAuth2Login {
- /**
- * Submit validates and submits the form.
- *
- * If an auth record doesn't exist, it will make an attempt to create it
- * based on the fetched OAuth2 profile data via a local [RecordUpsert] form.
- * You can intercept/modify the Record create form with [form.SetBeforeNewRecordCreateFunc()].
- *
- * You can also optionally provide a list of InterceptorFunc to
- * further modify the form behavior before persisting it.
- *
- * On success returns the authorized record model and the fetched provider's data.
- */
- submit(...interceptors: InterceptorFunc[]): [(models.Record), (auth.AuthUser)]
- }
- /**
- * RecordPasswordLogin is record username/email + password login form.
- */
- interface RecordPasswordLogin {
- identity: string
- password: string
- }
- interface newRecordPasswordLogin {
- /**
- * NewRecordPasswordLogin creates a new [RecordPasswordLogin] form initialized
- * with from the provided [CoreApp] and [models.Collection] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp, collection: models.Collection): (RecordPasswordLogin)
- }
- interface RecordPasswordLogin {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface RecordPasswordLogin {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface RecordPasswordLogin {
- /**
- * Submit validates and submits the form.
- * On success returns the authorized record model.
- *
- * You can optionally provide a list of InterceptorFunc to
- * further modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): (models.Record)
- }
- /**
- * RecordPasswordResetConfirm is an auth record password reset confirmation form.
- */
- interface RecordPasswordResetConfirm {
- token: string
- password: string
- passwordConfirm: string
- }
- interface newRecordPasswordResetConfirm {
- /**
- * NewRecordPasswordResetConfirm creates a new [RecordPasswordResetConfirm]
- * form initialized with from the provided [CoreApp] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp, collection: models.Collection): (RecordPasswordResetConfirm)
- }
- interface RecordPasswordResetConfirm {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface RecordPasswordResetConfirm {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface RecordPasswordResetConfirm {
- /**
- * Submit validates and submits the form.
- * On success returns the updated auth record associated to `form.Token`.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): (models.Record)
- }
- /**
- * RecordPasswordResetRequest is an auth record reset password request form.
- */
- interface RecordPasswordResetRequest {
- email: string
- }
- interface newRecordPasswordResetRequest {
- /**
- * NewRecordPasswordResetRequest creates a new [RecordPasswordResetRequest]
- * form initialized with from the provided [CoreApp] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp, collection: models.Collection): (RecordPasswordResetRequest)
- }
- interface RecordPasswordResetRequest {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface RecordPasswordResetRequest {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- *
- * This method doesn't check whether auth record with `form.Email` exists (this is done on Submit).
- */
- validate(): void
- }
- interface RecordPasswordResetRequest {
- /**
- * Submit validates and submits the form.
- * On success, sends a password reset email to the `form.Email` auth record.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): void
- }
- /**
- * RecordUpsert is a [models.Record] upsert (create/update) form.
- */
interface RecordUpsert {
/**
- * base model fields
+ * extra password fields
*/
- id: string
- /**
- * auth collection fields
- * ---
- */
- username: string
- email: string
- emailVisibility: boolean
- verified: boolean
password: string
passwordConfirm: string
oldPassword: string
}
interface newRecordUpsert {
/**
- * NewRecordUpsert creates a new [RecordUpsert] form with initializer
- * config created from the provided [CoreApp] and [models.Record] instances
- * (for create you could pass a pointer to an empty Record - models.NewRecord(collection)).
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
+ * NewRecordUpsert creates a new [RecordUpsert] form from the provided [CoreApp] and [core.Record] instances
+ * (for create you could pass a pointer to an empty Record - core.NewRecord(collection)).
*/
- (app: CoreApp, record: models.Record): (RecordUpsert)
+ (app: CoreApp, record: core.Record): (RecordUpsert)
}
interface RecordUpsert {
/**
- * Data returns the loaded form's data.
+ * SetContext assigns ctx as context of the current form.
*/
- data(): _TygojaDict
+ setContext(ctx: context.Context): void
}
interface RecordUpsert {
/**
- * SetFullManageAccess sets the manageAccess bool flag of the current
- * form to enable/disable directly changing some system record fields
- * (often used with auth collection records).
+ * SetApp replaces the current form app instance.
+ *
+ * This could be used for example if you want to change at later stage
+ * before submission to change from regular -> transactional app instance.
*/
- setFullManageAccess(fullManageAccess: boolean): void
+ setApp(app: CoreApp): void
}
interface RecordUpsert {
/**
- * SetDao replaces the default form Dao instance with the provided one.
+ * SetRecord replaces the current form record instance.
*/
- setDao(dao: daos.Dao): void
+ setRecord(record: core.Record): void
}
interface RecordUpsert {
/**
- * LoadRequest extracts the json or multipart/form-data request data
- * and lods it into the form.
- *
- * File upload is supported only via multipart/form-data.
+ * ResetAccess resets the form access level to the accessLevelDefault.
*/
- loadRequest(r: http.Request, keyPrefix: string): void
+ resetAccess(): void
}
interface RecordUpsert {
/**
- * FilesToUpload returns the parsed request files ready for upload.
+ * GrantManagerAccess updates the form access level to "manager" allowing
+ * directly changing some system record fields (often used with auth collection records).
*/
- filesToUpload(): _TygojaDict
+ grantManagerAccess(): void
}
interface RecordUpsert {
/**
- * FilesToUpload returns the parsed request filenames ready to be deleted.
+ * GrantSuperuserAccess updates the form access level to "superuser" allowing
+ * directly changing all system record fields, including those marked as "Hidden".
*/
- filesToDelete(): Array
+ grantSuperuserAccess(): void
}
interface RecordUpsert {
/**
- * AddFiles adds the provided file(s) to the specified file field.
- *
- * If the file field is a SINGLE-value file field (aka. "Max Select = 1"),
- * then the newly added file will REPLACE the existing one.
- * In this case if you pass more than 1 files only the first one will be assigned.
- *
- * If the file field is a MULTI-value file field (aka. "Max Select > 1"),
- * then the newly added file(s) will be APPENDED to the existing one(s).
- *
- * Example
- *
- * ```
- * f1, _ := filesystem.NewFileFromPath("/path/to/file1.txt")
- * f2, _ := filesystem.NewFileFromPath("/path/to/file2.txt")
- * form.AddFiles("documents", f1, f2)
- * ```
+ * HasManageAccess reports whether the form has "manager" or "superuser" level access.
*/
- addFiles(key: string, ...files: (filesystem.File | undefined)[]): void
+ hasManageAccess(): boolean
}
interface RecordUpsert {
/**
- * RemoveFiles removes a single or multiple file from the specified file field.
- *
- * NB! If filesToDelete is not set it will remove all existing files
- * assigned to the file field (including those assigned with AddFiles)!
- *
- * Example
- *
- * ```
- * // mark only only 2 files for removal
- * form.RemoveFiles("documents", "file1_aw4bdrvws6.txt", "file2_xwbs36bafv.txt")
- *
- * // mark all "documents" files for removal
- * form.RemoveFiles("documents")
- * ```
+ * Load loads the provided data into the form and the related record.
*/
- removeFiles(key: string, ...toDelete: string[]): void
+ load(data: _TygojaDict): void
}
interface RecordUpsert {
/**
- * LoadData loads and normalizes the provided regular record data fields into the form.
+ * @todo consider removing and executing the Create API rule without dummy insert.
+ *
+ * DrySubmit performs a temp form submit within a transaction and reverts it at the end.
+ * For actual record persistence, check the [RecordUpsert.Submit()] method.
+ *
+ * This method doesn't perform validations, handle file uploads/deletes or trigger app save events!
*/
- loadData(requestData: _TygojaDict): void
+ drySubmit(callback: (txApp: CoreApp, drySavedRecord: core.Record) => void): void
}
interface RecordUpsert {
/**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ * Submit validates the form specific validations and attempts to save the form record.
*/
- validate(): void
- }
- interface RecordUpsert {
- validateAndFill(): void
- }
- interface RecordUpsert {
- /**
- * DrySubmit performs a form submit within a transaction and reverts it.
- * For actual record persistence, check the `form.Submit()` method.
- *
- * This method doesn't handle file uploads/deletes or trigger any app events!
- */
- drySubmit(callback: (txDao: daos.Dao) => void): void
- }
- interface RecordUpsert {
- /**
- * Submit validates the form and upserts the form Record model.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): void
- }
- /**
- * RecordVerificationConfirm is an auth record email verification confirmation form.
- */
- interface RecordVerificationConfirm {
- token: string
- }
- interface newRecordVerificationConfirm {
- /**
- * NewRecordVerificationConfirm creates a new [RecordVerificationConfirm]
- * form initialized with from the provided [CoreApp] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp, collection: models.Collection): (RecordVerificationConfirm)
- }
- interface RecordVerificationConfirm {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface RecordVerificationConfirm {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface RecordVerificationConfirm {
- /**
- * Submit validates and submits the form.
- * On success returns the verified auth record associated to `form.Token`.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): (models.Record)
- }
- /**
- * RecordVerificationRequest is an auth record email verification request form.
- */
- interface RecordVerificationRequest {
- email: string
- }
- interface newRecordVerificationRequest {
- /**
- * NewRecordVerificationRequest creates a new [RecordVerificationRequest]
- * form initialized with from the provided [CoreApp] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp, collection: models.Collection): (RecordVerificationRequest)
- }
- interface RecordVerificationRequest {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface RecordVerificationRequest {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- *
- * // This method doesn't verify that auth record with `form.Email` exists (this is done on Submit).
- */
- validate(): void
- }
- interface RecordVerificationRequest {
- /**
- * Submit validates and sends a verification request email
- * to the `form.Email` auth record.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): void
- }
- /**
- * SettingsUpsert is a [settings.Settings] upsert (create/update) form.
- */
- type _subxZPsK = settings.Settings
- interface SettingsUpsert extends _subxZPsK {
- }
- interface newSettingsUpsert {
- /**
- * NewSettingsUpsert creates a new [SettingsUpsert] form with initializer
- * config created from the provided [CoreApp] instance.
- *
- * If you want to submit the form as part of a transaction,
- * you can change the default Dao via [SetDao()].
- */
- (app: CoreApp): (SettingsUpsert)
- }
- interface SettingsUpsert {
- /**
- * SetDao replaces the default form Dao instance with the provided one.
- */
- setDao(dao: daos.Dao): void
- }
- interface SettingsUpsert {
- /**
- * Validate makes the form validatable by implementing [validation.Validatable] interface.
- */
- validate(): void
- }
- interface SettingsUpsert {
- /**
- * Submit validates the form and upserts the loaded settings.
- *
- * On success the app settings will be refreshed with the form ones.
- *
- * You can optionally provide a list of InterceptorFunc to further
- * modify the form behavior before persisting it.
- */
- submit(...interceptors: InterceptorFunc[]): void
+ submit(): void
}
/**
* TestEmailSend is a email template test request form.
*/
interface TestEmailSend {
- template: string
email: string
+ template: string
+ collection: string // optional, fallbacks to _superusers
}
interface newTestEmailSend {
/**
@@ -6486,230 +5740,470 @@ namespace forms {
}
}
-/**
- * Package apis implements the default PocketBase api services and middlewares.
- */
namespace apis {
- interface adminApi {
- }
- // @ts-ignore
- import validation = ozzo_validation
- /**
- * ApiError defines the struct for a basic api error response.
- */
- interface ApiError {
- code: number
- message: string
- data: _TygojaDict
- }
- interface ApiError {
+ interface newApiError {
/**
- * Error makes it compatible with the `error` interface.
+ * NewApiError is an alias for [router.NewApiError].
*/
- error(): string
- }
- interface ApiError {
- /**
- * RawData returns the unformatted error data (could be an internal error, text, etc.)
- */
- rawData(): any
- }
- interface newNotFoundError {
- /**
- * NewNotFoundError creates and returns 404 `ApiError`.
- */
- (message: string, data: any): (ApiError)
+ (status: number, message: string, errData: any): (router.ApiError)
}
interface newBadRequestError {
/**
- * NewBadRequestError creates and returns 400 `ApiError`.
+ * NewBadRequestError is an alias for [router.NewBadRequestError].
*/
- (message: string, data: any): (ApiError)
+ (message: string, errData: any): (router.ApiError)
+ }
+ interface newNotFoundError {
+ /**
+ * NewNotFoundError is an alias for [router.NewNotFoundError].
+ */
+ (message: string, errData: any): (router.ApiError)
}
interface newForbiddenError {
/**
- * NewForbiddenError creates and returns 403 `ApiError`.
+ * NewForbiddenError is an alias for [router.NewForbiddenError].
*/
- (message: string, data: any): (ApiError)
+ (message: string, errData: any): (router.ApiError)
}
interface newUnauthorizedError {
/**
- * NewUnauthorizedError creates and returns 401 `ApiError`.
+ * NewUnauthorizedError is an alias for [router.NewUnauthorizedError].
*/
- (message: string, data: any): (ApiError)
+ (message: string, errData: any): (router.ApiError)
}
- interface newApiError {
+ interface newTooManyRequestsError {
/**
- * NewApiError creates and returns new normalized `ApiError` instance.
+ * NewTooManyRequestsError is an alias for [router.NewTooManyRequestsError].
*/
- (status: number, message: string, data: any): (ApiError)
+ (message: string, errData: any): (router.ApiError)
}
- interface backupApi {
- }
- interface initApi {
+ interface newInternalServerError {
/**
- * InitApi creates a configured echo instance with registered
- * system and app specific routes and middlewares.
+ * NewInternalServerError is an alias for [router.NewInternalServerError].
*/
- (app: CoreApp): (echo.Echo)
+ (message: string, errData: any): (router.ApiError)
}
- interface staticDirectoryHandler {
+ interface backupFileInfo {
+ modified: types.DateTime
+ key: string
+ size: number
+ }
+ // @ts-ignore
+ import validation = ozzo_validation
+ interface backupCreateForm {
+ name: string
+ }
+ interface backupUploadForm {
+ file?: filesystem.File
+ }
+ interface newRouter {
/**
- * StaticDirectoryHandler is similar to `echo.StaticDirectoryHandler`
- * but without the directory redirect which conflicts with RemoveTrailingSlash middleware.
+ * NewRouter returns a new router instance loaded with the default app middlewares and api routes.
+ */
+ (app: CoreApp): (router.Router)
+ }
+ interface wrapStdHandler {
+ /**
+ * WrapStdHandler wraps Go [http.Handler] into a PocketBase handler func.
+ */
+ (h: http.Handler): hook.HandlerFunc
+ }
+ interface wrapStdMiddleware {
+ /**
+ * WrapStdMiddleware wraps Go [func(http.Handler) http.Handle] into a PocketBase middleware func.
+ */
+ (m: (_arg0: http.Handler) => http.Handler): hook.HandlerFunc
+ }
+ interface mustSubFS {
+ /**
+ * MustSubFS returns an [fs.FS] corresponding to the subtree rooted at fsys's dir.
+ *
+ * This is similar to [fs.Sub] but panics on failure.
+ */
+ (fsys: fs.FS, dir: string): fs.FS
+ }
+ interface _static {
+ /**
+ * Static is a handler function to serve static directory content from fsys.
*
* If a file resource is missing and indexFallback is set, the request
- * will be forwarded to the base index.html (useful also for SPA).
+ * will be forwarded to the base index.html (useful for SPA with pretty urls).
*
- * @see https://github.com/labstack/echo/issues/2211
+ * NB! Expects the route to have a "{path...}" wildcard parameter.
+ *
+ * Special redirects:
+ * ```
+ * - if "path" is a file that ends in index.html, it is redirected to its non-index.html version (eg. /test/index.html -> /test/)
+ * - if "path" is a directory that has index.html, the index.html file is rendered,
+ * otherwise if missing - returns 404 or fallback to the root index.html if indexFallback is set
+ * ```
+ *
+ * Example:
+ *
+ * ```
+ * fsys := os.DirFS("./pb_public")
+ * router.GET("/files/{path...}", apis.Static(fsys, false))
+ * ```
*/
- (fileSystem: fs.FS, indexFallback: boolean): echo.HandlerFunc
+ (fsys: fs.FS, indexFallback: boolean): hook.HandlerFunc
}
- interface collectionApi {
+ interface findUploadedFiles {
+ /**
+ * FindUploadedFiles extracts all form files of "key" from a http request
+ * and returns a slice with filesystem.File instances (if any).
+ */
+ (r: http.Request, key: string): Array<(filesystem.File | undefined)>
+ }
+ interface HandleFunc {(e: core.RequestEvent): void }
+ interface BatchActionHandlerFunc {(app: CoreApp, ir: core.InternalRequest, params: _TygojaDict, next: () => void): HandleFunc }
+ interface BatchRequestResult {
+ body: any
+ status: number
+ }
+ interface batchRequestsForm {
+ requests: Array<(core.InternalRequest | undefined)>
+ }
+ interface batchProcessor {
+ }
+ interface batchProcessor {
+ process(batch: Array<(core.InternalRequest | undefined)>, timeout: time.Duration): void
+ }
+ interface BatchResponseError {
+ }
+ interface BatchResponseError {
+ error(): string
+ }
+ interface BatchResponseError {
+ code(): string
+ }
+ interface BatchResponseError {
+ resolve(errData: _TygojaDict): any
+ }
+ interface BatchResponseError {
+ marshalJSON(): string|Array
+ }
+ interface collectionsImportForm {
+ collections: Array<_TygojaDict>
+ deleteMissing: boolean
}
interface fileApi {
}
- interface healthApi {
- }
- interface healthCheckResponse {
- message: string
- code: number
- data: {
- canBackup: boolean
- }
- }
- interface logsApi {
- }
interface requireGuestOnly {
/**
* RequireGuestOnly middleware requires a request to NOT have a valid
* Authorization header.
*
- * This middleware is the opposite of [apis.RequireAdminOrRecordAuth()].
+ * This middleware is the opposite of [apis.RequireAuth()].
*/
- (): echo.MiddlewareFunc
+ (): (hook.Handler)
}
- interface requireRecordAuth {
+ interface requireAuth {
/**
- * RequireRecordAuth middleware requires a request to have
- * a valid record auth Authorization header.
+ * RequireAuth middleware requires a request to have a valid record Authorization header.
*
* The auth record could be from any collection.
- *
- * You can further filter the allowed record auth collections by
- * specifying their names.
+ * You can further filter the allowed record auth collections by specifying their names.
*
* Example:
*
* ```
- * apis.RequireRecordAuth()
+ * apis.RequireAuth() // any auth collection
+ * apis.RequireAuth("_superusers", "users") // only the listed auth collections
* ```
- *
- * Or:
- *
- * ```
- * apis.RequireRecordAuth("users", "supervisors")
- * ```
- *
- * To restrict the auth record only to the loaded context collection,
- * use [apis.RequireSameContextRecordAuth()] instead.
*/
- (...optCollectionNames: string[]): echo.MiddlewareFunc
+ (...optCollectionNames: string[]): (hook.Handler)
}
- interface requireSameContextRecordAuth {
+ interface requireSuperuserAuth {
/**
- * RequireSameContextRecordAuth middleware requires a request to have
- * a valid record Authorization header.
- *
- * The auth record must be from the same collection already loaded in the context.
+ * RequireSuperuserAuth middleware requires a request to have
+ * a valid superuser Authorization header.
*/
- (): echo.MiddlewareFunc
+ (): (hook.Handler)
}
- interface requireAdminAuth {
+ interface requireSuperuserAuthOnlyIfAny {
/**
- * RequireAdminAuth middleware requires a request to have
- * a valid admin Authorization header.
+ * RequireSuperuserAuthOnlyIfAny middleware requires a request to have
+ * a valid superuser Authorization header ONLY if the application has
+ * at least 1 existing superuser.
*/
- (): echo.MiddlewareFunc
+ (): (hook.Handler)
}
- interface requireAdminAuthOnlyIfAny {
+ interface requireSuperuserOrOwnerAuth {
/**
- * RequireAdminAuthOnlyIfAny middleware requires a request to have
- * a valid admin Authorization header ONLY if the application has
- * at least 1 existing Admin model.
- */
- (app: CoreApp): echo.MiddlewareFunc
- }
- interface requireAdminOrRecordAuth {
- /**
- * RequireAdminOrRecordAuth middleware requires a request to have
- * a valid admin or record Authorization header set.
+ * RequireSuperuserOrOwnerAuth middleware requires a request to have
+ * a valid superuser or regular record owner Authorization header set.
*
- * You can further filter the allowed auth record collections by providing their names.
- *
- * This middleware is the opposite of [apis.RequireGuestOnly()].
- */
- (...optCollectionNames: string[]): echo.MiddlewareFunc
- }
- interface requireAdminOrOwnerAuth {
- /**
- * RequireAdminOrOwnerAuth middleware requires a request to have
- * a valid admin or auth record owner Authorization header set.
- *
- * This middleware is similar to [apis.RequireAdminOrRecordAuth()] but
+ * This middleware is similar to [apis.RequireAuth()] but
* for the auth record token expects to have the same id as the path
- * parameter ownerIdParam (default to "id" if empty).
+ * parameter ownerIdPathParam (default to "id" if empty).
*/
- (ownerIdParam: string): echo.MiddlewareFunc
+ (ownerIdPathParam: string): (hook.Handler)
}
- interface loadAuthContext {
+ interface requireSameCollectionContextAuth {
/**
- * LoadAuthContext middleware reads the Authorization request header
- * and loads the token related record or admin instance into the
- * request's context.
- *
- * This middleware is expected to be already registered by default for all routes.
+ * RequireSameCollectionContextAuth middleware requires a request to have
+ * a valid record Authorization header and the auth record's collection to
+	 * match the one from the route path parameter (default to "collection" if collectionPathParam is empty).
*/
- (app: CoreApp): echo.MiddlewareFunc
+ (collectionPathParam: string): (hook.Handler)
}
- interface loadCollectionContext {
+ interface skipSuccessActivityLog {
/**
- * LoadCollectionContext middleware finds the collection with related
- * path identifier and loads it into the request context.
- *
- * Set optCollectionTypes to further filter the found collection by its type.
+ * SkipSuccessActivityLog is a helper middleware that instructs the global
+ * activity logger to log only requests that have failed/returned an error.
*/
- (app: CoreApp, ...optCollectionTypes: string[]): echo.MiddlewareFunc
+ (): (hook.Handler)
}
- interface activityLogger {
+ interface bodyLimit {
/**
- * ActivityLogger middleware takes care to save the request information
- * into the logs database.
+ * BodyLimit returns a middleware function that changes the default request body size limit.
*
- * The middleware does nothing if the app logs retention period is zero
- * (aka. app.Settings().Logs.MaxDays = 0).
+ * Note that in order to have effect this middleware should be registered
+	 * before other middlewares that read the request body.
+ *
+ * If limitBytes <= 0, no limit is applied.
+ *
+ * Otherwise, if the request body size exceeds the configured limitBytes,
+ * it sends 413 error response.
*/
- (app: CoreApp): echo.MiddlewareFunc
+ (limitBytes: number): (hook.Handler)
}
- interface realtimeApi {
+ type _subWJQoF = io.ReadCloser
+ interface limitedReader extends _subWJQoF {
+ }
+ interface limitedReader {
+ read(b: string|Array): number
+ }
+ interface limitedReader {
+ reread(): void
+ }
+ /**
+ * CORSConfig defines the config for CORS middleware.
+ */
+ interface CORSConfig {
+ /**
+ * AllowOrigins determines the value of the Access-Control-Allow-Origin
+ * response header. This header defines a list of origins that may access the
+ * resource. The wildcard characters '*' and '?' are supported and are
+ * converted to regex fragments '.*' and '.' accordingly.
+ *
+ * Security: use extreme caution when handling the origin, and carefully
+ * validate any logic. Remember that attackers may register hostile domain names.
+ * See https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html
+ *
+ * Optional. Default value []string{"*"}.
+ *
+ * See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Origin
+ */
+ allowOrigins: Array
+ /**
+ * AllowOriginFunc is a custom function to validate the origin. It takes the
+ * origin as an argument and returns true if allowed or false otherwise. If
+ * an error is returned, it is returned by the handler. If this option is
+ * set, AllowOrigins is ignored.
+ *
+ * Security: use extreme caution when handling the origin, and carefully
+ * validate any logic. Remember that attackers may register hostile domain names.
+ * See https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html
+ *
+ * Optional.
+ */
+ allowOriginFunc: (origin: string) => boolean
+ /**
+ * AllowMethods determines the value of the Access-Control-Allow-Methods
+	 * response header. This header specifies the list of methods allowed when
+ * accessing the resource. This is used in response to a preflight request.
+ *
+ * Optional. Default value DefaultCORSConfig.AllowMethods.
+ *
+ * See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Methods
+ */
+ allowMethods: Array
+ /**
+ * AllowHeaders determines the value of the Access-Control-Allow-Headers
+ * response header. This header is used in response to a preflight request to
+ * indicate which HTTP headers can be used when making the actual request.
+ *
+ * Optional. Default value []string{}.
+ *
+ * See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Headers
+ */
+ allowHeaders: Array
+ /**
+ * AllowCredentials determines the value of the
+ * Access-Control-Allow-Credentials response header. This header indicates
+ * whether or not the response to the request can be exposed when the
+ * credentials mode (Request.credentials) is true. When used as part of a
+ * response to a preflight request, this indicates whether or not the actual
+ * request can be made using credentials. See also
+ * [MDN: Access-Control-Allow-Credentials].
+ *
+ * Optional. Default value false, in which case the header is not set.
+ *
+ * Security: avoid using `AllowCredentials = true` with `AllowOrigins = *`.
+ * See "Exploiting CORS misconfigurations for Bitcoins and bounties",
+ * https://blog.portswigger.net/2016/10/exploiting-cors-misconfigurations-for.html
+ *
+ * See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Allow-Credentials
+ */
+ allowCredentials: boolean
+ /**
+ * UnsafeWildcardOriginWithAllowCredentials UNSAFE/INSECURE: allows wildcard '*' origin to be used with AllowCredentials
+ * flag. In that case we consider any origin allowed and send it back to the client with `Access-Control-Allow-Origin` header.
+ *
+ * This is INSECURE and potentially leads to [cross-origin](https://portswigger.net/research/exploiting-cors-misconfigurations-for-bitcoins-and-bounties)
+ * attacks. See: https://github.com/labstack/echo/issues/2400 for discussion on the subject.
+ *
+ * Optional. Default value is false.
+ */
+ unsafeWildcardOriginWithAllowCredentials: boolean
+ /**
+ * ExposeHeaders determines the value of Access-Control-Expose-Headers, which
+ * defines a list of headers that clients are allowed to access.
+ *
+ * Optional. Default value []string{}, in which case the header is not set.
+ *
+	 * See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Expose-Headers
+ */
+ exposeHeaders: Array
+ /**
+ * MaxAge determines the value of the Access-Control-Max-Age response header.
+ * This header indicates how long (in seconds) the results of a preflight
+ * request can be cached.
+ * The header is set only if MaxAge != 0, negative value sends "0" which instructs browsers not to cache that response.
+ *
+ * Optional. Default value 0 - meaning header is not sent.
+ *
+ * See also: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Max-Age
+ */
+ maxAge: number
+ }
+ interface corsWithConfig {
+ /**
+ * CORSWithConfig returns a CORS middleware with config.
+ */
+ (config: CORSConfig): hook.HandlerFunc
+ }
+ /**
+ * GzipConfig defines the config for Gzip middleware.
+ */
+ interface GzipConfig {
+ /**
+ * Gzip compression level.
+ * Optional. Default value -1.
+ */
+ level: number
+ /**
+ * Length threshold before gzip compression is applied.
+ * Optional. Default value 0.
+ *
+ * Most of the time you will not need to change the default. Compressing
+ * a short response might increase the transmitted data because of the
+ * gzip format overhead. Compressing the response will also consume CPU
+ * and time on the server and the client (for decompressing). Depending on
+ * your use case such a threshold might be useful.
+ *
+ * See also:
+ * https://webmasters.stackexchange.com/questions/31750/what-is-recommended-minimum-object-size-for-gzip-performance-benefits
+ */
+ minLength: number
+ }
+ interface gzip {
+ /**
+ * Gzip returns a middleware which compresses HTTP response using gzip compression scheme.
+ */
+ (): hook.HandlerFunc
+ }
+ interface gzipWithConfig {
+ /**
+ * GzipWithConfig returns a middleware which compresses HTTP response using gzip compression scheme.
+ */
+ (config: GzipConfig): hook.HandlerFunc
+ }
+ type _subpcIaI = http.ResponseWriter&io.Writer
+ interface gzipResponseWriter extends _subpcIaI {
+ }
+ interface gzipResponseWriter {
+ writeHeader(code: number): void
+ }
+ interface gzipResponseWriter {
+ write(b: string|Array): number
+ }
+ interface gzipResponseWriter {
+ flush(): void
+ }
+ interface gzipResponseWriter {
+ hijack(): [net.Conn, (bufio.ReadWriter)]
+ }
+ interface gzipResponseWriter {
+ push(target: string, opts: http.PushOptions): void
+ }
+ interface gzipResponseWriter {
+ readFrom(r: io.Reader): number
+ }
+ interface gzipResponseWriter {
+ unwrap(): http.ResponseWriter
+ }
+ type _subaqIgM = sync.RWMutex
+ interface rateLimiter extends _subaqIgM {
+ }
+ type _subPpcNi = sync.Mutex
+ interface fixedWindow extends _subPpcNi {
+ }
+ interface realtimeSubscribeForm {
+ clientId: string
+ subscriptions: Array
}
/**
* recordData represents the broadcasted record subscrition message data.
*/
interface recordData {
- record: any // map or models.Record
+ record: any // map or core.Record
action: string
}
interface getter {
[key:string]: any;
get(_arg0: string): any
}
- interface recordAuthApi {
+ interface EmailChangeConfirmForm {
+ token: string
+ password: string
+ }
+ interface emailChangeRequestForm {
+ newEmail: string
+ }
+ interface impersonateForm {
+ /**
+ * Duration is the optional custom token duration in seconds.
+ */
+ duration: number
+ }
+ interface otpResponse {
+ enabled: boolean
+ duration: number // in seconds
+ }
+ interface mfaResponse {
+ enabled: boolean
+ duration: number // in seconds
+ }
+ interface passwordResponse {
+ identityFields: Array
+ enabled: boolean
+ }
+ interface oauth2Response {
+ providers: Array
+ enabled: boolean
}
interface providerInfo {
name: string
displayName: string
state: string
+ authURL: string
+ /**
+ * @todo
+ * deprecated: use AuthURL instead
+ * AuthUrl will be removed after dropping v0.22 support
+ */
authUrl: string
/**
* technically could be omitted if the provider doesn't support PKCE,
@@ -6719,32 +6213,95 @@ namespace apis {
codeChallenge: string
codeChallengeMethod: string
}
- interface oauth2EventMessage {
+ interface authMethodsResponse {
+ password: passwordResponse
+ oAuth2: oauth2Response
+ mfa: mfaResponse
+ otp: otpResponse
+ /**
+ * legacy fields
+ * @todo remove after dropping v0.22 support
+ */
+ authProviders: Array
+ usernamePassword: boolean
+ emailPassword: boolean
+ }
+ interface createOTPForm {
+ email: string
+ }
+ interface recordConfirmPasswordResetForm {
+ token: string
+ password: string
+ passwordConfirm: string
+ }
+ interface recordRequestPasswordResetForm {
+ email: string
+ }
+ interface recordConfirmVerificationForm {
+ token: string
+ }
+ interface recordRequestVerificationForm {
+ email: string
+ }
+ interface recordOAuth2LoginForm {
+ /**
+ * Additional data that will be used for creating a new auth record
+ * if an existing OAuth2 account doesn't exist.
+ */
+ createData: _TygojaDict
+ /**
+ * The name of the OAuth2 client provider (eg. "google")
+ */
+ provider: string
+ /**
+ * The authorization code returned from the initial request.
+ */
+ code: string
+ /**
+ * The optional PKCE code verifier as part of the code_challenge sent with the initial request.
+ */
+ codeVerifier: string
+ /**
+ * The redirect url sent with the initial request.
+ */
+ redirectURL: string
+ /**
+ * @todo
+ * deprecated: use RedirectURL instead
+ * RedirectUrl will be removed after dropping v0.22 support
+ */
+ redirectUrl: string
+ }
+ interface oauth2RedirectData {
state: string
code: string
error: string
}
- interface recordApi {
+ interface authWithOTPForm {
+ otpId: string
+ password: string
}
- interface requestData {
+ interface authWithPasswordForm {
+ identity: string
+ password: string
/**
- * Deprecated: Use RequestInfo instead.
+ * IdentityField specifies the field to use to search for the identity
+ * (leave it empty for "auto" detection).
*/
- (c: echo.Context): (models.RequestInfo)
- }
- interface requestInfo {
- /**
- * RequestInfo exports cached common request data fields
- * (query, body, logged auth state, etc.) from the provided context.
- */
- (c: echo.Context): (models.RequestInfo)
+ identityField: string
}
interface recordAuthResponse {
/**
- * RecordAuthResponse writes standardised json record auth response
+ * RecordAuthResponse writes standardized json record auth response
* into the specified request context.
+ *
+	 * The authMethod argument specifies the name of the current authentication method (eg. password, oauth2, etc.)
+	 * that is used primarily as an auth identifier during MFA and for login alerts.
+ *
+ * Set authMethod to empty string if you want to ignore the MFA checks and the login alerts
+ * (can be also adjusted additionally via the OnRecordAuthRequest hook).
*/
- (app: CoreApp, c: echo.Context, authRecord: models.Record, meta: any, ...finalizers: ((token: string) => void)[]): void
+ (e: core.RequestEvent, authRecord: core.Record, authMethod: string, meta: any): void
}
interface enrichRecord {
/**
@@ -6752,10 +6309,10 @@ namespace apis {
* ```
* - expands relations (if defaultExpands and/or ?expand query param is set)
* - ensures that the emails of the auth record and its expanded auth relations
- * are visible only for the current logged admin, record owner or record with manage access
+ * are visible only for the current logged superuser, record owner or record with manage access
* ```
*/
- (c: echo.Context, dao: daos.Dao, record: models.Record, ...defaultExpands: string[]): void
+ (e: core.RequestEvent, record: core.Record, ...defaultExpands: string[]): void
}
interface enrichRecords {
/**
@@ -6763,10 +6320,14 @@ namespace apis {
* ```
* - expands relations (if defaultExpands and/or ?expand query param is set)
* - ensures that the emails of the auth records and their expanded auth relations
- * are visible only for the current logged admin, record owner or record with manage access
+ * are visible only for the current logged superuser, record owner or record with manage access
* ```
+ *
+ * Note: Expects all records to be from the same collection!
*/
- (c: echo.Context, dao: daos.Dao, records: Array<(models.Record | undefined)>, ...defaultExpands: string[]): void
+ (e: core.RequestEvent, records: Array<(core.Record | undefined)>, ...defaultExpands: string[]): void
+ }
+ interface iterator {
}
/**
* ServeConfig defines a configuration struct for apis.Serve().
@@ -6777,11 +6338,18 @@ namespace apis {
*/
showStartBanner: boolean
/**
- * HttpAddr is the TCP address to listen for the HTTP server (eg. `127.0.0.1:80`).
+ * DashboardPath specifies the route path to the superusers dashboard interface
+ * (default to "/_/{path...}").
+ *
+ * Note: Must include the "{path...}" wildcard parameter.
+ */
+ dashboardPath: string
+ /**
+ * HttpAddr is the TCP address to listen for the HTTP server (eg. "127.0.0.1:80").
*/
httpAddr: string
/**
- * HttpsAddr is the TCP address to listen for the HTTPS server (eg. `127.0.0.1:443`).
+ * HttpsAddr is the TCP address to listen for the HTTPS server (eg. "127.0.0.1:443").
*/
httpsAddr: string
/**
@@ -6814,31 +6382,24 @@ namespace apis {
* })
* ```
*/
- (app: CoreApp, config: ServeConfig): (http.Server)
+ (app: CoreApp, config: ServeConfig): void
}
- interface migrationsConnection {
- db?: dbx.DB
- migrationsList: migrate.MigrationsList
+ interface serverErrorLogWriter {
}
- interface settingsApi {
+ interface serverErrorLogWriter {
+ write(p: string|Array): number
}
}
namespace pocketbase {
- /**
- * appWrapper serves as a private CoreApp instance wrapper.
- */
- type _subXbotK = CoreApp
- interface appWrapper extends _subXbotK {
- }
/**
* PocketBase defines a PocketBase app launcher.
*
* It implements [CoreApp] via embedding and all of the app interface methods
* could be accessed directly through the instance (eg. PocketBase.DataDir()).
*/
- type _subYRHBu = appWrapper
- interface PocketBase extends _subYRHBu {
+ type _subltdOS = CoreApp
+ interface PocketBase extends _subltdOS {
/**
* RootCmd is the main console command
*/
@@ -6848,23 +6409,25 @@ namespace pocketbase {
* Config is the PocketBase initialization config struct.
*/
interface Config {
+ /**
+ * hide the default console server info on app startup
+ */
+ hideStartBanner: boolean
/**
* optional default values for the console flags
*/
defaultDev: boolean
defaultDataDir: string // if not set, it will fallback to "./pb_data"
defaultEncryptionEnv: string
- /**
- * hide the default console server info on app startup
- */
- hideStartBanner: boolean
/**
* optional DB configurations
*/
dataMaxOpenConns: number // default to core.DefaultDataMaxOpenConns
dataMaxIdleConns: number // default to core.DefaultDataMaxIdleConns
- logsMaxOpenConns: number // default to core.DefaultLogsMaxOpenConns
- logsMaxIdleConns: number // default to core.DefaultLogsMaxIdleConns
+ auxMaxOpenConns: number // default to core.DefaultAuxMaxOpenConns
+ auxMaxIdleConns: number // default to core.DefaultAuxMaxIdleConns
+ queryTimeout: number // default to core.DefaultQueryTimeout (in seconds)
+ dbConnect: core.DBConnectFunc // default to core.dbConnect
}
interface _new {
/**
@@ -6922,159 +6485,149 @@ namespace pocketbase {
}
/**
- * Package io provides basic interfaces to I/O primitives.
- * Its primary job is to wrap existing implementations of such primitives,
- * such as those in package os, into shared public interfaces that
- * abstract the functionality, plus some other related primitives.
+ * Package sync provides basic synchronization primitives such as mutual
+ * exclusion locks. Other than the [Once] and [WaitGroup] types, most are intended
+ * for use by low-level library routines. Higher-level synchronization is
+ * better done via channels and communication.
*
- * Because these interfaces and primitives wrap lower-level operations with
- * various implementations, unless otherwise informed clients should not
- * assume they are safe for parallel execution.
+ * Values containing the types defined in this package should not be copied.
*/
-namespace io {
+namespace sync {
/**
- * Reader is the interface that wraps the basic Read method.
+ * A Mutex is a mutual exclusion lock.
+ * The zero value for a Mutex is an unlocked mutex.
*
- * Read reads up to len(p) bytes into p. It returns the number of bytes
- * read (0 <= n <= len(p)) and any error encountered. Even if Read
- * returns n < len(p), it may use all of p as scratch space during the call.
- * If some data is available but not len(p) bytes, Read conventionally
- * returns what is available instead of waiting for more.
+ * A Mutex must not be copied after first use.
*
- * When Read encounters an error or end-of-file condition after
- * successfully reading n > 0 bytes, it returns the number of
- * bytes read. It may return the (non-nil) error from the same call
- * or return the error (and n == 0) from a subsequent call.
- * An instance of this general case is that a Reader returning
- * a non-zero number of bytes at the end of the input stream may
- * return either err == EOF or err == nil. The next Read should
- * return 0, EOF.
+ * In the terminology of [the Go memory model],
+ * the n'th call to [Mutex.Unlock] “synchronizes before” the m'th call to [Mutex.Lock]
+ * for any n < m.
+ * A successful call to [Mutex.TryLock] is equivalent to a call to Lock.
+ * A failed call to TryLock does not establish any “synchronizes before”
+ * relation at all.
*
- * Callers should always process the n > 0 bytes returned before
- * considering the error err. Doing so correctly handles I/O errors
- * that happen after reading some bytes and also both of the
- * allowed EOF behaviors.
- *
- * If len(p) == 0, Read should always return n == 0. It may return a
- * non-nil error if some error condition is known, such as EOF.
- *
- * Implementations of Read are discouraged from returning a
- * zero byte count with a nil error, except when len(p) == 0.
- * Callers should treat a return of 0 and nil as indicating that
- * nothing happened; in particular it does not indicate EOF.
- *
- * Implementations must not retain p.
+ * [the Go memory model]: https://go.dev/ref/mem
*/
- interface Reader {
- [key:string]: any;
- read(p: string|Array): number
+ interface Mutex {
+ }
+ interface Mutex {
+ /**
+ * Lock locks m.
+ * If the lock is already in use, the calling goroutine
+ * blocks until the mutex is available.
+ */
+ lock(): void
+ }
+ interface Mutex {
+ /**
+ * TryLock tries to lock m and reports whether it succeeded.
+ *
+ * Note that while correct uses of TryLock do exist, they are rare,
+ * and use of TryLock is often a sign of a deeper problem
+ * in a particular use of mutexes.
+ */
+ tryLock(): boolean
+ }
+ interface Mutex {
+ /**
+ * Unlock unlocks m.
+ * It is a run-time error if m is not locked on entry to Unlock.
+ *
+ * A locked [Mutex] is not associated with a particular goroutine.
+ * It is allowed for one goroutine to lock a Mutex and then
+ * arrange for another goroutine to unlock it.
+ */
+ unlock(): void
}
/**
- * Writer is the interface that wraps the basic Write method.
+ * A RWMutex is a reader/writer mutual exclusion lock.
+ * The lock can be held by an arbitrary number of readers or a single writer.
+ * The zero value for a RWMutex is an unlocked mutex.
*
- * Write writes len(p) bytes from p to the underlying data stream.
- * It returns the number of bytes written from p (0 <= n <= len(p))
- * and any error encountered that caused the write to stop early.
- * Write must return a non-nil error if it returns n < len(p).
- * Write must not modify the slice data, even temporarily.
+ * A RWMutex must not be copied after first use.
*
- * Implementations must not retain p.
+ * If any goroutine calls [RWMutex.Lock] while the lock is already held by
+ * one or more readers, concurrent calls to [RWMutex.RLock] will block until
+ * the writer has acquired (and released) the lock, to ensure that
+ * the lock eventually becomes available to the writer.
+ * Note that this prohibits recursive read-locking.
+ *
+ * In the terminology of [the Go memory model],
+ * the n'th call to [RWMutex.Unlock] “synchronizes before” the m'th call to Lock
+ * for any n < m, just as for [Mutex].
+ * For any call to RLock, there exists an n such that
+ * the n'th call to Unlock “synchronizes before” that call to RLock,
+ * and the corresponding call to [RWMutex.RUnlock] “synchronizes before”
+ * the n+1'th call to Lock.
+ *
+ * [the Go memory model]: https://go.dev/ref/mem
*/
- interface Writer {
- [key:string]: any;
- write(p: string|Array): number
+ interface RWMutex {
}
- /**
- * ReadSeekCloser is the interface that groups the basic Read, Seek and Close
- * methods.
- */
- interface ReadSeekCloser {
- [key:string]: any;
- }
-}
-
-/**
- * Package bytes implements functions for the manipulation of byte slices.
- * It is analogous to the facilities of the [strings] package.
- */
-namespace bytes {
- /**
- * A Reader implements the io.Reader, io.ReaderAt, io.WriterTo, io.Seeker,
- * io.ByteScanner, and io.RuneScanner interfaces by reading from
- * a byte slice.
- * Unlike a [Buffer], a Reader is read-only and supports seeking.
- * The zero value for Reader operates like a Reader of an empty slice.
- */
- interface Reader {
- }
- interface Reader {
+ interface RWMutex {
/**
- * Len returns the number of bytes of the unread portion of the
- * slice.
+ * RLock locks rw for reading.
+ *
+ * It should not be used for recursive read locking; a blocked Lock
+ * call excludes new readers from acquiring the lock. See the
+ * documentation on the [RWMutex] type.
*/
- len(): number
+ rLock(): void
}
- interface Reader {
+ interface RWMutex {
/**
- * Size returns the original length of the underlying byte slice.
- * Size is the number of bytes available for reading via [Reader.ReadAt].
- * The result is unaffected by any method calls except [Reader.Reset].
+ * TryRLock tries to lock rw for reading and reports whether it succeeded.
+ *
+ * Note that while correct uses of TryRLock do exist, they are rare,
+ * and use of TryRLock is often a sign of a deeper problem
+ * in a particular use of mutexes.
*/
- size(): number
+ tryRLock(): boolean
}
- interface Reader {
+ interface RWMutex {
/**
- * Read implements the [io.Reader] interface.
+ * RUnlock undoes a single [RWMutex.RLock] call;
+ * it does not affect other simultaneous readers.
+ * It is a run-time error if rw is not locked for reading
+ * on entry to RUnlock.
*/
- read(b: string|Array): number
+ rUnlock(): void
}
- interface Reader {
+ interface RWMutex {
/**
- * ReadAt implements the [io.ReaderAt] interface.
+ * Lock locks rw for writing.
+ * If the lock is already locked for reading or writing,
+ * Lock blocks until the lock is available.
*/
- readAt(b: string|Array, off: number): number
+ lock(): void
}
- interface Reader {
+ interface RWMutex {
/**
- * ReadByte implements the [io.ByteReader] interface.
+ * TryLock tries to lock rw for writing and reports whether it succeeded.
+ *
+ * Note that while correct uses of TryLock do exist, they are rare,
+ * and use of TryLock is often a sign of a deeper problem
+ * in a particular use of mutexes.
*/
- readByte(): number
+ tryLock(): boolean
}
- interface Reader {
+ interface RWMutex {
/**
- * UnreadByte complements [Reader.ReadByte] in implementing the [io.ByteScanner] interface.
+ * Unlock unlocks rw for writing. It is a run-time error if rw is
+ * not locked for writing on entry to Unlock.
+ *
+ * As with Mutexes, a locked [RWMutex] is not associated with a particular
+ * goroutine. One goroutine may [RWMutex.RLock] ([RWMutex.Lock]) a RWMutex and then
+ * arrange for another goroutine to [RWMutex.RUnlock] ([RWMutex.Unlock]) it.
*/
- unreadByte(): void
+ unlock(): void
}
- interface Reader {
+ interface RWMutex {
/**
- * ReadRune implements the [io.RuneReader] interface.
+ * RLocker returns a [Locker] interface that implements
+ * the [Locker.Lock] and [Locker.Unlock] methods by calling rw.RLock and rw.RUnlock.
*/
- readRune(): [number, number]
- }
- interface Reader {
- /**
- * UnreadRune complements [Reader.ReadRune] in implementing the [io.RuneScanner] interface.
- */
- unreadRune(): void
- }
- interface Reader {
- /**
- * Seek implements the [io.Seeker] interface.
- */
- seek(offset: number, whence: number): number
- }
- interface Reader {
- /**
- * WriteTo implements the [io.WriterTo] interface.
- */
- writeTo(w: io.Writer): number
- }
- interface Reader {
- /**
- * Reset resets the [Reader.Reader] to be reading from b.
- */
- reset(b: string|Array): void
+ rLocker(): Locker
}
}
@@ -7093,7 +6646,7 @@ namespace bytes {
* the manuals for the appropriate operating system.
* These calls return err == nil to indicate success; otherwise
* err is an operating system error describing the failure.
- * On most systems, that error has type syscall.Errno.
+ * On most systems, that error has type [Errno].
*
* NOTE: Most of the functions, types, and constants defined in
* this package are also available in the [golang.org/x/sys] package.
@@ -7193,6 +6746,8 @@ namespace syscall {
*/
write(f: (fd: number) => boolean): void
}
+ // @ts-ignore
+ import runtimesyscall = syscall
/**
* An Errno is an unsigned number describing an error condition.
* It implements the error interface. The zero Errno is by convention
@@ -7205,7 +6760,7 @@ namespace syscall {
* }
* ```
*
- * Errno values can be tested against error values using errors.Is.
+ * Errno values can be tested against error values using [errors.Is].
* For example:
*
* ```
@@ -7228,6 +6783,169 @@ namespace syscall {
}
}
+/**
+ * Package io provides basic interfaces to I/O primitives.
+ * Its primary job is to wrap existing implementations of such primitives,
+ * such as those in package os, into shared public interfaces that
+ * abstract the functionality, plus some other related primitives.
+ *
+ * Because these interfaces and primitives wrap lower-level operations with
+ * various implementations, unless otherwise informed clients should not
+ * assume they are safe for parallel execution.
+ */
+namespace io {
+ /**
+ * Reader is the interface that wraps the basic Read method.
+ *
+ * Read reads up to len(p) bytes into p. It returns the number of bytes
+ * read (0 <= n <= len(p)) and any error encountered. Even if Read
+ * returns n < len(p), it may use all of p as scratch space during the call.
+ * If some data is available but not len(p) bytes, Read conventionally
+ * returns what is available instead of waiting for more.
+ *
+ * When Read encounters an error or end-of-file condition after
+ * successfully reading n > 0 bytes, it returns the number of
+ * bytes read. It may return the (non-nil) error from the same call
+ * or return the error (and n == 0) from a subsequent call.
+ * An instance of this general case is that a Reader returning
+ * a non-zero number of bytes at the end of the input stream may
+ * return either err == EOF or err == nil. The next Read should
+ * return 0, EOF.
+ *
+ * Callers should always process the n > 0 bytes returned before
+ * considering the error err. Doing so correctly handles I/O errors
+ * that happen after reading some bytes and also both of the
+ * allowed EOF behaviors.
+ *
+ * If len(p) == 0, Read should always return n == 0. It may return a
+ * non-nil error if some error condition is known, such as EOF.
+ *
+ * Implementations of Read are discouraged from returning a
+ * zero byte count with a nil error, except when len(p) == 0.
+ * Callers should treat a return of 0 and nil as indicating that
+ * nothing happened; in particular it does not indicate EOF.
+ *
+ * Implementations must not retain p.
+ */
+ interface Reader {
+ [key:string]: any;
+ read(p: string|Array): number
+ }
+ /**
+ * Writer is the interface that wraps the basic Write method.
+ *
+ * Write writes len(p) bytes from p to the underlying data stream.
+ * It returns the number of bytes written from p (0 <= n <= len(p))
+ * and any error encountered that caused the write to stop early.
+ * Write must return a non-nil error if it returns n < len(p).
+ * Write must not modify the slice data, even temporarily.
+ *
+ * Implementations must not retain p.
+ */
+ interface Writer {
+ [key:string]: any;
+ write(p: string|Array): number
+ }
+ /**
+ * ReadCloser is the interface that groups the basic Read and Close methods.
+ */
+ interface ReadCloser {
+ [key:string]: any;
+ }
+ /**
+ * ReadSeekCloser is the interface that groups the basic Read, Seek and Close
+ * methods.
+ */
+ interface ReadSeekCloser {
+ [key:string]: any;
+ }
+}
+
+/**
+ * Package bytes implements functions for the manipulation of byte slices.
+ * It is analogous to the facilities of the [strings] package.
+ */
+namespace bytes {
+ /**
+ * A Reader implements the [io.Reader], [io.ReaderAt], [io.WriterTo], [io.Seeker],
+ * [io.ByteScanner], and [io.RuneScanner] interfaces by reading from
+ * a byte slice.
+ * Unlike a [Buffer], a Reader is read-only and supports seeking.
+ * The zero value for Reader operates like a Reader of an empty slice.
+ */
+ interface Reader {
+ }
+ interface Reader {
+ /**
+ * Len returns the number of bytes of the unread portion of the
+ * slice.
+ */
+ len(): number
+ }
+ interface Reader {
+ /**
+ * Size returns the original length of the underlying byte slice.
+ * Size is the number of bytes available for reading via [Reader.ReadAt].
+ * The result is unaffected by any method calls except [Reader.Reset].
+ */
+ size(): number
+ }
+ interface Reader {
+ /**
+ * Read implements the [io.Reader] interface.
+ */
+ read(b: string|Array): number
+ }
+ interface Reader {
+ /**
+ * ReadAt implements the [io.ReaderAt] interface.
+ */
+ readAt(b: string|Array, off: number): number
+ }
+ interface Reader {
+ /**
+ * ReadByte implements the [io.ByteReader] interface.
+ */
+ readByte(): number
+ }
+ interface Reader {
+ /**
+ * UnreadByte complements [Reader.ReadByte] in implementing the [io.ByteScanner] interface.
+ */
+ unreadByte(): void
+ }
+ interface Reader {
+ /**
+ * ReadRune implements the [io.RuneReader] interface.
+ */
+ readRune(): [number, number]
+ }
+ interface Reader {
+ /**
+ * UnreadRune complements [Reader.ReadRune] in implementing the [io.RuneScanner] interface.
+ */
+ unreadRune(): void
+ }
+ interface Reader {
+ /**
+ * Seek implements the [io.Seeker] interface.
+ */
+ seek(offset: number, whence: number): number
+ }
+ interface Reader {
+ /**
+ * WriteTo implements the [io.WriterTo] interface.
+ */
+ writeTo(w: io.Writer): number
+ }
+ interface Reader {
+ /**
+ * Reset resets the [Reader] to be reading from b.
+ */
+ reset(b: string|Array): void
+ }
+}
+
/**
* Package time provides functionality for measuring and displaying time.
*
@@ -7240,7 +6958,7 @@ namespace syscall {
* changes for clock synchronization, and a “monotonic clock,” which is
* not. The general rule is that the wall clock is for telling time and
* the monotonic clock is for measuring time. Rather than split the API,
- * in this package the Time returned by time.Now contains both a wall
+ * in this package the Time returned by [time.Now] contains both a wall
* clock reading and a monotonic clock reading; later time-telling
* operations use the wall clock reading, but later time-measuring
* operations, specifically comparisons and subtractions, use the
@@ -7257,7 +6975,7 @@ namespace syscall {
* elapsed := t.Sub(start)
* ```
*
- * Other idioms, such as time.Since(start), time.Until(deadline), and
+ * Other idioms, such as [time.Since](start), [time.Until](deadline), and
* time.Now().Before(deadline), are similarly robust against wall clock
* resets.
*
@@ -7282,23 +7000,26 @@ namespace syscall {
*
* On some systems the monotonic clock will stop if the computer goes to sleep.
* On such a system, t.Sub(u) may not accurately reflect the actual
- * time that passed between t and u.
+ * time that passed between t and u. The same applies to other functions and
+ * methods that subtract times, such as [Since], [Until], [Before], [After],
+ * [Add], [Sub], [Equal] and [Compare]. In some cases, you may need to strip
+ * the monotonic clock to get accurate results.
*
* Because the monotonic clock reading has no meaning outside
* the current process, the serialized forms generated by t.GobEncode,
* t.MarshalBinary, t.MarshalJSON, and t.MarshalText omit the monotonic
* clock reading, and t.Format provides no format for it. Similarly, the
- * constructors time.Date, time.Parse, time.ParseInLocation, and time.Unix,
+ * constructors [time.Date], [time.Parse], [time.ParseInLocation], and [time.Unix],
* as well as the unmarshalers t.GobDecode, t.UnmarshalBinary.
* t.UnmarshalJSON, and t.UnmarshalText always create times with
* no monotonic clock reading.
*
- * The monotonic clock reading exists only in Time values. It is not
- * a part of Duration values or the Unix times returned by t.Unix and
+ * The monotonic clock reading exists only in [Time] values. It is not
+ * a part of [Duration] values or the Unix times returned by t.Unix and
* friends.
*
* Note that the Go == operator compares not just the time instant but
- * also the Location and the monotonic clock reading. See the
+ * also the [Location] and the monotonic clock reading. See the
* documentation for the Time type for a discussion of equality
* testing for Time values.
*
@@ -7308,10 +7029,11 @@ namespace syscall {
*
* # Timer Resolution
*
- * Timer resolution varies depending on the Go runtime, the operating system
+ * [Timer] resolution varies depending on the Go runtime, the operating system
* and the underlying hardware.
- * On Unix, the resolution is approximately 1ms.
- * On Windows, the default resolution is approximately 16ms, but
+ * On Unix, the resolution is ~1ms.
+ * On Windows version 1803 and newer, the resolution is ~0.5ms.
+ * On older Windows versions, the default resolution is ~16ms, but
* a higher resolution may be requested using [golang.org/x/sys/windows.TimeBeginPeriod].
*/
namespace time {
@@ -7335,7 +7057,7 @@ namespace time {
}
interface Time {
/**
- * GoString implements fmt.GoStringer and formats t to be printed in Go source
+ * GoString implements [fmt.GoStringer] and formats t to be printed in Go source
* code.
*/
goString(): string
@@ -7344,16 +7066,16 @@ namespace time {
/**
* Format returns a textual representation of the time value formatted according
* to the layout defined by the argument. See the documentation for the
- * constant called Layout to see how to represent the layout format.
+ * constant called [Layout] to see how to represent the layout format.
*
- * The executable example for Time.Format demonstrates the working
+ * The executable example for [Time.Format] demonstrates the working
* of the layout string in detail and is a good reference.
*/
format(layout: string): string
}
interface Time {
/**
- * AppendFormat is like Format but appends the textual
+ * AppendFormat is like [Time.Format] but appends the textual
* representation to b and returns the extended buffer.
*/
appendFormat(b: string|Array, layout: string): string|Array
@@ -7363,27 +7085,27 @@ namespace time {
*
* Programs using times should typically store and pass them as values,
* not pointers. That is, time variables and struct fields should be of
- * type time.Time, not *time.Time.
+ * type [time.Time], not *time.Time.
*
* A Time value can be used by multiple goroutines simultaneously except
- * that the methods GobDecode, UnmarshalBinary, UnmarshalJSON and
- * UnmarshalText are not concurrency-safe.
+ * that the methods [Time.GobDecode], [Time.UnmarshalBinary], [Time.UnmarshalJSON] and
+ * [Time.UnmarshalText] are not concurrency-safe.
*
- * Time instants can be compared using the Before, After, and Equal methods.
- * The Sub method subtracts two instants, producing a Duration.
- * The Add method adds a Time and a Duration, producing a Time.
+ * Time instants can be compared using the [Time.Before], [Time.After], and [Time.Equal] methods.
+ * The [Time.Sub] method subtracts two instants, producing a [Duration].
+ * The [Time.Add] method adds a Time and a Duration, producing a Time.
*
* The zero value of type Time is January 1, year 1, 00:00:00.000000000 UTC.
- * As this time is unlikely to come up in practice, the IsZero method gives
+ * As this time is unlikely to come up in practice, the [Time.IsZero] method gives
* a simple way of detecting a time that has not been initialized explicitly.
*
- * Each time has an associated Location. The methods Local, UTC, and In return a
+ * Each time has an associated [Location]. The methods [Time.Local], [Time.UTC], and Time.In return a
* Time with a specific Location. Changing the Location of a Time value with
* these methods does not change the actual instant it represents, only the time
* zone in which to interpret it.
*
- * Representations of a Time value saved by the GobEncode, MarshalBinary,
- * MarshalJSON, and MarshalText methods store the Time.Location's offset, but not
+ * Representations of a Time value saved by the [Time.GobEncode], [Time.MarshalBinary],
+ * [Time.MarshalJSON], and [Time.MarshalText] methods store the [Time.Location]'s offset, but not
* the location name. They therefore lose information about Daylight Saving Time.
*
* In addition to the required “wall clock” reading, a Time may contain an optional
@@ -7579,7 +7301,7 @@ namespace time {
* Round returns the result of rounding d to the nearest multiple of m.
* The rounding behavior for halfway values is to round away from zero.
* If the result exceeds the maximum (or minimum)
- * value that can be stored in a Duration,
+ * value that can be stored in a [Duration],
* Round returns the maximum (or minimum) duration.
* If m <= 0, Round returns d unchanged.
*/
@@ -7588,7 +7310,7 @@ namespace time {
interface Duration {
/**
* Abs returns the absolute value of d.
- * As a special case, math.MinInt64 is converted to math.MaxInt64.
+ * As a special case, [math.MinInt64] is converted to [math.MaxInt64].
*/
abs(): Duration
}
@@ -7601,7 +7323,7 @@ namespace time {
interface Time {
/**
* Sub returns the duration t-u. If the result exceeds the maximum (or minimum)
- * value that can be stored in a Duration, the maximum (or minimum) duration
+ * value that can be stored in a [Duration], the maximum (or minimum) duration
* will be returned.
* To compute t-d for a duration d, use t.Add(-d).
*/
@@ -7742,7 +7464,7 @@ namespace time {
}
interface Time {
/**
- * MarshalJSON implements the json.Marshaler interface.
+ * MarshalJSON implements the [json.Marshaler] interface.
* The time is a quoted string in the RFC 3339 format with sub-second precision.
* If the timestamp cannot be represented as valid RFC 3339
* (e.g., the year is out of range), then an error is reported.
@@ -7751,14 +7473,14 @@ namespace time {
}
interface Time {
/**
- * UnmarshalJSON implements the json.Unmarshaler interface.
+ * UnmarshalJSON implements the [json.Unmarshaler] interface.
* The time must be a quoted string in the RFC 3339 format.
*/
unmarshalJSON(data: string|Array): void
}
interface Time {
/**
- * MarshalText implements the encoding.TextMarshaler interface.
+ * MarshalText implements the [encoding.TextMarshaler] interface.
* The time is formatted in RFC 3339 format with sub-second precision.
* If the timestamp cannot be represented as valid RFC 3339
* (e.g., the year is out of range), then an error is reported.
@@ -7767,7 +7489,7 @@ namespace time {
}
interface Time {
/**
- * UnmarshalText implements the encoding.TextUnmarshaler interface.
+ * UnmarshalText implements the [encoding.TextUnmarshaler] interface.
* The time must be in the RFC 3339 format.
*/
unmarshalText(data: string|Array): void
@@ -7805,6 +7527,169 @@ namespace time {
}
}
+/**
+ * Package context defines the Context type, which carries deadlines,
+ * cancellation signals, and other request-scoped values across API boundaries
+ * and between processes.
+ *
+ * Incoming requests to a server should create a [Context], and outgoing
+ * calls to servers should accept a Context. The chain of function
+ * calls between them must propagate the Context, optionally replacing
+ * it with a derived Context created using [WithCancel], [WithDeadline],
+ * [WithTimeout], or [WithValue]. When a Context is canceled, all
+ * Contexts derived from it are also canceled.
+ *
+ * The [WithCancel], [WithDeadline], and [WithTimeout] functions take a
+ * Context (the parent) and return a derived Context (the child) and a
+ * [CancelFunc]. Calling the CancelFunc cancels the child and its
+ * children, removes the parent's reference to the child, and stops
+ * any associated timers. Failing to call the CancelFunc leaks the
+ * child and its children until the parent is canceled or the timer
+ * fires. The go vet tool checks that CancelFuncs are used on all
+ * control-flow paths.
+ *
+ * The [WithCancelCause] function returns a [CancelCauseFunc], which
+ * takes an error and records it as the cancellation cause. Calling
+ * [Cause] on the canceled context or any of its children retrieves
+ * the cause. If no cause is specified, Cause(ctx) returns the same
+ * value as ctx.Err().
+ *
+ * Programs that use Contexts should follow these rules to keep interfaces
+ * consistent across packages and enable static analysis tools to check context
+ * propagation:
+ *
+ * Do not store Contexts inside a struct type; instead, pass a Context
+ * explicitly to each function that needs it. The Context should be the first
+ * parameter, typically named ctx:
+ *
+ * ```
+ * func DoSomething(ctx context.Context, arg Arg) error {
+ * // ... use ctx ...
+ * }
+ * ```
+ *
+ * Do not pass a nil [Context], even if a function permits it. Pass [context.TODO]
+ * if you are unsure about which Context to use.
+ *
+ * Use context Values only for request-scoped data that transits processes and
+ * APIs, not for passing optional parameters to functions.
+ *
+ * The same Context may be passed to functions running in different goroutines;
+ * Contexts are safe for simultaneous use by multiple goroutines.
+ *
+ * See https://blog.golang.org/context for example code for a server that uses
+ * Contexts.
+ */
+namespace context {
+ /**
+ * A Context carries a deadline, a cancellation signal, and other values across
+ * API boundaries.
+ *
+ * Context's methods may be called by multiple goroutines simultaneously.
+ */
+ interface Context {
+ [key:string]: any;
+ /**
+ * Deadline returns the time when work done on behalf of this context
+ * should be canceled. Deadline returns ok==false when no deadline is
+ * set. Successive calls to Deadline return the same results.
+ */
+ deadline(): [time.Time, boolean]
+ /**
+ * Done returns a channel that's closed when work done on behalf of this
+ * context should be canceled. Done may return nil if this context can
+ * never be canceled. Successive calls to Done return the same value.
+ * The close of the Done channel may happen asynchronously,
+ * after the cancel function returns.
+ *
+ * WithCancel arranges for Done to be closed when cancel is called;
+ * WithDeadline arranges for Done to be closed when the deadline
+ * expires; WithTimeout arranges for Done to be closed when the timeout
+ * elapses.
+ *
+ * Done is provided for use in select statements:
+ *
+ * // Stream generates values with DoSomething and sends them to out
+ * // until DoSomething returns an error or ctx.Done is closed.
+ * func Stream(ctx context.Context, out chan<- Value) error {
+ * for {
+ * v, err := DoSomething(ctx)
+ * if err != nil {
+ * return err
+ * }
+ * select {
+ * case <-ctx.Done():
+ * return ctx.Err()
+ * case out <- v:
+ * }
+ * }
+ * }
+ *
+ * See https://blog.golang.org/pipelines for more examples of how to use
+ * a Done channel for cancellation.
+ */
+ done(): undefined
+ /**
+ * If Done is not yet closed, Err returns nil.
+ * If Done is closed, Err returns a non-nil error explaining why:
+ * Canceled if the context was canceled
+ * or DeadlineExceeded if the context's deadline passed.
+ * After Err returns a non-nil error, successive calls to Err return the same error.
+ */
+ err(): void
+ /**
+ * Value returns the value associated with this context for key, or nil
+ * if no value is associated with key. Successive calls to Value with
+ * the same key returns the same result.
+ *
+ * Use context values only for request-scoped data that transits
+ * processes and API boundaries, not for passing optional parameters to
+ * functions.
+ *
+ * A key identifies a specific value in a Context. Functions that wish
+ * to store values in Context typically allocate a key in a global
+ * variable then use that key as the argument to context.WithValue and
+ * Context.Value. A key can be any type that supports equality;
+ * packages should define keys as an unexported type to avoid
+ * collisions.
+ *
+ * Packages that define a Context key should provide type-safe accessors
+ * for the values stored using that key:
+ *
+ * ```
+ * // Package user defines a User type that's stored in Contexts.
+ * package user
+ *
+ * import "context"
+ *
+ * // User is the type of value stored in the Contexts.
+ * type User struct {...}
+ *
+ * // key is an unexported type for keys defined in this package.
+ * // This prevents collisions with keys defined in other packages.
+ * type key int
+ *
+ * // userKey is the key for user.User values in Contexts. It is
+ * // unexported; clients use user.NewContext and user.FromContext
+ * // instead of using this key directly.
+ * var userKey key
+ *
+ * // NewContext returns a new Context that carries value u.
+ * func NewContext(ctx context.Context, u *User) context.Context {
+ * return context.WithValue(ctx, userKey, u)
+ * }
+ *
+ * // FromContext returns the User value stored in ctx, if any.
+ * func FromContext(ctx context.Context) (*User, bool) {
+ * u, ok := ctx.Value(userKey).(*User)
+ * return u, ok
+ * }
+ * ```
+ */
+ value(key: any): any
+ }
+}
+
/**
* Package fs defines basic interfaces to a file system.
* A file system can be provided by the host operating system
@@ -8005,169 +7890,6 @@ namespace fs {
interface WalkDirFunc {(path: string, d: DirEntry, err: Error): void }
}
-/**
- * Package context defines the Context type, which carries deadlines,
- * cancellation signals, and other request-scoped values across API boundaries
- * and between processes.
- *
- * Incoming requests to a server should create a [Context], and outgoing
- * calls to servers should accept a Context. The chain of function
- * calls between them must propagate the Context, optionally replacing
- * it with a derived Context created using [WithCancel], [WithDeadline],
- * [WithTimeout], or [WithValue]. When a Context is canceled, all
- * Contexts derived from it are also canceled.
- *
- * The [WithCancel], [WithDeadline], and [WithTimeout] functions take a
- * Context (the parent) and return a derived Context (the child) and a
- * [CancelFunc]. Calling the CancelFunc cancels the child and its
- * children, removes the parent's reference to the child, and stops
- * any associated timers. Failing to call the CancelFunc leaks the
- * child and its children until the parent is canceled or the timer
- * fires. The go vet tool checks that CancelFuncs are used on all
- * control-flow paths.
- *
- * The [WithCancelCause] function returns a [CancelCauseFunc], which
- * takes an error and records it as the cancellation cause. Calling
- * [Cause] on the canceled context or any of its children retrieves
- * the cause. If no cause is specified, Cause(ctx) returns the same
- * value as ctx.Err().
- *
- * Programs that use Contexts should follow these rules to keep interfaces
- * consistent across packages and enable static analysis tools to check context
- * propagation:
- *
- * Do not store Contexts inside a struct type; instead, pass a Context
- * explicitly to each function that needs it. The Context should be the first
- * parameter, typically named ctx:
- *
- * ```
- * func DoSomething(ctx context.Context, arg Arg) error {
- * // ... use ctx ...
- * }
- * ```
- *
- * Do not pass a nil [Context], even if a function permits it. Pass [context.TODO]
- * if you are unsure about which Context to use.
- *
- * Use context Values only for request-scoped data that transits processes and
- * APIs, not for passing optional parameters to functions.
- *
- * The same Context may be passed to functions running in different goroutines;
- * Contexts are safe for simultaneous use by multiple goroutines.
- *
- * See https://blog.golang.org/context for example code for a server that uses
- * Contexts.
- */
-namespace context {
- /**
- * A Context carries a deadline, a cancellation signal, and other values across
- * API boundaries.
- *
- * Context's methods may be called by multiple goroutines simultaneously.
- */
- interface Context {
- [key:string]: any;
- /**
- * Deadline returns the time when work done on behalf of this context
- * should be canceled. Deadline returns ok==false when no deadline is
- * set. Successive calls to Deadline return the same results.
- */
- deadline(): [time.Time, boolean]
- /**
- * Done returns a channel that's closed when work done on behalf of this
- * context should be canceled. Done may return nil if this context can
- * never be canceled. Successive calls to Done return the same value.
- * The close of the Done channel may happen asynchronously,
- * after the cancel function returns.
- *
- * WithCancel arranges for Done to be closed when cancel is called;
- * WithDeadline arranges for Done to be closed when the deadline
- * expires; WithTimeout arranges for Done to be closed when the timeout
- * elapses.
- *
- * Done is provided for use in select statements:
- *
- * // Stream generates values with DoSomething and sends them to out
- * // until DoSomething returns an error or ctx.Done is closed.
- * func Stream(ctx context.Context, out chan<- Value) error {
- * for {
- * v, err := DoSomething(ctx)
- * if err != nil {
- * return err
- * }
- * select {
- * case <-ctx.Done():
- * return ctx.Err()
- * case out <- v:
- * }
- * }
- * }
- *
- * See https://blog.golang.org/pipelines for more examples of how to use
- * a Done channel for cancellation.
- */
- done(): undefined
- /**
- * If Done is not yet closed, Err returns nil.
- * If Done is closed, Err returns a non-nil error explaining why:
- * Canceled if the context was canceled
- * or DeadlineExceeded if the context's deadline passed.
- * After Err returns a non-nil error, successive calls to Err return the same error.
- */
- err(): void
- /**
- * Value returns the value associated with this context for key, or nil
- * if no value is associated with key. Successive calls to Value with
- * the same key returns the same result.
- *
- * Use context values only for request-scoped data that transits
- * processes and API boundaries, not for passing optional parameters to
- * functions.
- *
- * A key identifies a specific value in a Context. Functions that wish
- * to store values in Context typically allocate a key in a global
- * variable then use that key as the argument to context.WithValue and
- * Context.Value. A key can be any type that supports equality;
- * packages should define keys as an unexported type to avoid
- * collisions.
- *
- * Packages that define a Context key should provide type-safe accessors
- * for the values stored using that key:
- *
- * ```
- * // Package user defines a User type that's stored in Contexts.
- * package user
- *
- * import "context"
- *
- * // User is the type of value stored in the Contexts.
- * type User struct {...}
- *
- * // key is an unexported type for keys defined in this package.
- * // This prevents collisions with keys defined in other packages.
- * type key int
- *
- * // userKey is the key for user.User values in Contexts. It is
- * // unexported; clients use user.NewContext and user.FromContext
- * // instead of using this key directly.
- * var userKey key
- *
- * // NewContext returns a new Context that carries value u.
- * func NewContext(ctx context.Context, u *User) context.Context {
- * return context.WithValue(ctx, userKey, u)
- * }
- *
- * // FromContext returns the User value stored in ctx, if any.
- * func FromContext(ctx context.Context) (*User, bool) {
- * u, ok := ctx.Value(userKey).(*User)
- * return u, ok
- * }
- * ```
- */
- value(key: any): any
- }
-}
-
/**
* Package sql provides a generic interface around SQL (or SQL-like)
* databases.
@@ -8811,6 +8533,781 @@ namespace sql {
}
}
+/**
+ * Package bufio implements buffered I/O. It wraps an io.Reader or io.Writer
+ * object, creating another object (Reader or Writer) that also implements
+ * the interface but provides buffering and some help for textual I/O.
+ */
+namespace bufio {
+ /**
+ * ReadWriter stores pointers to a [Reader] and a [Writer].
+ * It implements [io.ReadWriter].
+ */
+ type _subxjvIW = Reader&Writer
+ interface ReadWriter extends _subxjvIW {
+ }
+}
+
+/**
+ * Package syntax parses regular expressions into parse trees and compiles
+ * parse trees into programs. Most clients of regular expressions will use the
+ * facilities of package [regexp] (such as [regexp.Compile] and [regexp.Match]) instead of this package.
+ *
+ * # Syntax
+ *
+ * The regular expression syntax understood by this package when parsing with the [Perl] flag is as follows.
+ * Parts of the syntax can be disabled by passing alternate flags to [Parse].
+ *
+ * Single characters:
+ *
+ * ```
+ * . any character, possibly including newline (flag s=true)
+ * [xyz] character class
+ * [^xyz] negated character class
+ * \d Perl character class
+ * \D negated Perl character class
+ * [[:alpha:]] ASCII character class
+ * [[:^alpha:]] negated ASCII character class
+ * \pN Unicode character class (one-letter name)
+ * \p{Greek} Unicode character class
+ * \PN negated Unicode character class (one-letter name)
+ * \P{Greek} negated Unicode character class
+ * ```
+ *
+ * Composites:
+ *
+ * ```
+ * xy x followed by y
+ * x|y x or y (prefer x)
+ * ```
+ *
+ * Repetitions:
+ *
+ * ```
+ * x* zero or more x, prefer more
+ * x+ one or more x, prefer more
+ * x? zero or one x, prefer one
+ * x{n,m} n or n+1 or ... or m x, prefer more
+ * x{n,} n or more x, prefer more
+ * x{n} exactly n x
+ * x*? zero or more x, prefer fewer
+ * x+? one or more x, prefer fewer
+ * x?? zero or one x, prefer zero
+ * x{n,m}? n or n+1 or ... or m x, prefer fewer
+ * x{n,}? n or more x, prefer fewer
+ * x{n}? exactly n x
+ * ```
+ *
+ * Implementation restriction: The counting forms x{n,m}, x{n,}, and x{n}
+ * reject forms that create a minimum or maximum repetition count above 1000.
+ * Unlimited repetitions are not subject to this restriction.
+ *
+ * Grouping:
+ *
+ * ```
+ * (re) numbered capturing group (submatch)
+ * (?P<name>re) named & numbered capturing group (submatch)
+ * (?<name>re) named & numbered capturing group (submatch)
+ * (?:re) non-capturing group
+ * (?flags) set flags within current group; non-capturing
+ * (?flags:re) set flags during re; non-capturing
+ *
+ * Flag syntax is xyz (set) or -xyz (clear) or xy-z (set xy, clear z). The flags are:
+ *
+ * i case-insensitive (default false)
+ * m multi-line mode: ^ and $ match begin/end line in addition to begin/end text (default false)
+ * s let . match \n (default false)
+ * U ungreedy: swap meaning of x* and x*?, x+ and x+?, etc (default false)
+ * ```
+ *
+ * Empty strings:
+ *
+ * ```
+ * ^ at beginning of text or line (flag m=true)
+ * $ at end of text (like \z not \Z) or line (flag m=true)
+ * \A at beginning of text
+ * \b at ASCII word boundary (\w on one side and \W, \A, or \z on the other)
+ * \B not at ASCII word boundary
+ * \z at end of text
+ * ```
+ *
+ * Escape sequences:
+ *
+ * ```
+ * \a bell (== \007)
+ * \f form feed (== \014)
+ * \t horizontal tab (== \011)
+ * \n newline (== \012)
+ * \r carriage return (== \015)
+ * \v vertical tab character (== \013)
+ * \* literal *, for any punctuation character *
+ * \123 octal character code (up to three digits)
+ * \x7F hex character code (exactly two digits)
+ * \x{10FFFF} hex character code
+ * \Q...\E literal text ... even if ... has punctuation
+ * ```
+ *
+ * Character class elements:
+ *
+ * ```
+ * x single character
+ * A-Z character range (inclusive)
+ * \d Perl character class
+ * [:foo:] ASCII character class foo
+ * \p{Foo} Unicode character class Foo
+ * \pF Unicode character class F (one-letter name)
+ * ```
+ *
+ * Named character classes as character class elements:
+ *
+ * ```
+ * [\d] digits (== \d)
+ * [^\d] not digits (== \D)
+ * [\D] not digits (== \D)
+ * [^\D] not not digits (== \d)
+ * [[:name:]] named ASCII class inside character class (== [:name:])
+ * [^[:name:]] named ASCII class inside negated character class (== [:^name:])
+ * [\p{Name}] named Unicode property inside character class (== \p{Name})
+ * [^\p{Name}] named Unicode property inside negated character class (== \P{Name})
+ * ```
+ *
+ * Perl character classes (all ASCII-only):
+ *
+ * ```
+ * \d digits (== [0-9])
+ * \D not digits (== [^0-9])
+ * \s whitespace (== [\t\n\f\r ])
+ * \S not whitespace (== [^\t\n\f\r ])
+ * \w word characters (== [0-9A-Za-z_])
+ * \W not word characters (== [^0-9A-Za-z_])
+ * ```
+ *
+ * ASCII character classes:
+ *
+ * ```
+ * [[:alnum:]] alphanumeric (== [0-9A-Za-z])
+ * [[:alpha:]] alphabetic (== [A-Za-z])
+ * [[:ascii:]] ASCII (== [\x00-\x7F])
+ * [[:blank:]] blank (== [\t ])
+ * [[:cntrl:]] control (== [\x00-\x1F\x7F])
+ * [[:digit:]] digits (== [0-9])
+ * [[:graph:]] graphical (== [!-~] == [A-Za-z0-9!"#$%&'()*+,\-./:;<=>?@[\\\]^_`{|}~])
+ * [[:lower:]] lower case (== [a-z])
+ * [[:print:]] printable (== [ -~] == [ [:graph:]])
+ * [[:punct:]] punctuation (== [!-/:-@[-`{-~])
+ * [[:space:]] whitespace (== [\t\n\v\f\r ])
+ * [[:upper:]] upper case (== [A-Z])
+ * [[:word:]] word characters (== [0-9A-Za-z_])
+ * [[:xdigit:]] hex digit (== [0-9A-Fa-f])
+ * ```
+ *
+ * Unicode character classes are those in [unicode.Categories] and [unicode.Scripts].
+ */
+namespace syntax {
+ /**
+ * Flags control the behavior of the parser and record information about regexp context.
+ */
+ interface Flags extends Number{}
+}
+
+/**
+ * Package exec runs external commands. It wraps os.StartProcess to make it
+ * easier to remap stdin and stdout, connect I/O with pipes, and do other
+ * adjustments.
+ *
+ * Unlike the "system" library call from C and other languages, the
+ * os/exec package intentionally does not invoke the system shell and
+ * does not expand any glob patterns or handle other expansions,
+ * pipelines, or redirections typically done by shells. The package
+ * behaves more like C's "exec" family of functions. To expand glob
+ * patterns, either call the shell directly, taking care to escape any
+ * dangerous input, or use the [path/filepath] package's Glob function.
+ * To expand environment variables, use package os's ExpandEnv.
+ *
+ * Note that the examples in this package assume a Unix system.
+ * They may not run on Windows, and they do not run in the Go Playground
+ * used by golang.org and godoc.org.
+ *
+ * # Executables in the current directory
+ *
+ * The functions [Command] and [LookPath] look for a program
+ * in the directories listed in the current path, following the
+ * conventions of the host operating system.
+ * Operating systems have for decades included the current
+ * directory in this search, sometimes implicitly and sometimes
+ * configured explicitly that way by default.
+ * Modern practice is that including the current directory
+ * is usually unexpected and often leads to security problems.
+ *
+ * To avoid those security problems, as of Go 1.19, this package will not resolve a program
+ * using an implicit or explicit path entry relative to the current directory.
+ * That is, if you run [LookPath]("go"), it will not successfully return
+ * ./go on Unix nor .\go.exe on Windows, no matter how the path is configured.
+ * Instead, if the usual path algorithms would result in that answer,
+ * these functions return an error err satisfying [errors.Is](err, [ErrDot]).
+ *
+ * For example, consider these two program snippets:
+ *
+ * ```
+ * path, err := exec.LookPath("prog")
+ * if err != nil {
+ * log.Fatal(err)
+ * }
+ * use(path)
+ * ```
+ *
+ * and
+ *
+ * ```
+ * cmd := exec.Command("prog")
+ * if err := cmd.Run(); err != nil {
+ * log.Fatal(err)
+ * }
+ * ```
+ *
+ * These will not find and run ./prog or .\prog.exe,
+ * no matter how the current path is configured.
+ *
+ * Code that always wants to run a program from the current directory
+ * can be rewritten to say "./prog" instead of "prog".
+ *
+ * Code that insists on including results from relative path entries
+ * can instead override the error using an errors.Is check:
+ *
+ * ```
+ * path, err := exec.LookPath("prog")
+ * if errors.Is(err, exec.ErrDot) {
+ * err = nil
+ * }
+ * if err != nil {
+ * log.Fatal(err)
+ * }
+ * use(path)
+ * ```
+ *
+ * and
+ *
+ * ```
+ * cmd := exec.Command("prog")
+ * if errors.Is(cmd.Err, exec.ErrDot) {
+ * cmd.Err = nil
+ * }
+ * if err := cmd.Run(); err != nil {
+ * log.Fatal(err)
+ * }
+ * ```
+ *
+ * Setting the environment variable GODEBUG=execerrdot=0
+ * disables generation of ErrDot entirely, temporarily restoring the pre-Go 1.19
+ * behavior for programs that are unable to apply more targeted fixes.
+ * A future version of Go may remove support for this variable.
+ *
+ * Before adding such overrides, make sure you understand the
+ * security implications of doing so.
+ * See https://go.dev/blog/path-security for more information.
+ */
+namespace exec {
+ /**
+ * Cmd represents an external command being prepared or run.
+ *
+ * A Cmd cannot be reused after calling its [Cmd.Run], [Cmd.Output] or [Cmd.CombinedOutput]
+ * methods.
+ */
+ interface Cmd {
+ /**
+ * Path is the path of the command to run.
+ *
+ * This is the only field that must be set to a non-zero
+ * value. If Path is relative, it is evaluated relative
+ * to Dir.
+ */
+ path: string
+ /**
+ * Args holds command line arguments, including the command as Args[0].
+ * If the Args field is empty or nil, Run uses {Path}.
+ *
+ * In typical use, both Path and Args are set by calling Command.
+ */
+ args: Array<string>
+ /**
+ * Env specifies the environment of the process.
+ * Each entry is of the form "key=value".
+ * If Env is nil, the new process uses the current process's
+ * environment.
+ * If Env contains duplicate environment keys, only the last
+ * value in the slice for each duplicate key is used.
+ * As a special case on Windows, SYSTEMROOT is always added if
+ * missing and not explicitly set to the empty string.
+ */
+ env: Array<string>
+ /**
+ * Dir specifies the working directory of the command.
+ * If Dir is the empty string, Run runs the command in the
+ * calling process's current directory.
+ */
+ dir: string
+ /**
+ * Stdin specifies the process's standard input.
+ *
+ * If Stdin is nil, the process reads from the null device (os.DevNull).
+ *
+ * If Stdin is an *os.File, the process's standard input is connected
+ * directly to that file.
+ *
+ * Otherwise, during the execution of the command a separate
+ * goroutine reads from Stdin and delivers that data to the command
+ * over a pipe. In this case, Wait does not complete until the goroutine
+ * stops copying, either because it has reached the end of Stdin
+ * (EOF or a read error), or because writing to the pipe returned an error,
+ * or because a nonzero WaitDelay was set and expired.
+ */
+ stdin: io.Reader
+ /**
+ * Stdout and Stderr specify the process's standard output and error.
+ *
+ * If either is nil, Run connects the corresponding file descriptor
+ * to the null device (os.DevNull).
+ *
+ * If either is an *os.File, the corresponding output from the process
+ * is connected directly to that file.
+ *
+ * Otherwise, during the execution of the command a separate goroutine
+ * reads from the process over a pipe and delivers that data to the
+ * corresponding Writer. In this case, Wait does not complete until the
+ * goroutine reaches EOF or encounters an error or a nonzero WaitDelay
+ * expires.
+ *
+ * If Stdout and Stderr are the same writer, and have a type that can
+ * be compared with ==, at most one goroutine at a time will call Write.
+ */
+ stdout: io.Writer
+ stderr: io.Writer
+ /**
+ * ExtraFiles specifies additional open files to be inherited by the
+ * new process. It does not include standard input, standard output, or
+ * standard error. If non-nil, entry i becomes file descriptor 3+i.
+ *
+ * ExtraFiles is not supported on Windows.
+ */
+ extraFiles: Array<(os.File | undefined)>
+ /**
+ * SysProcAttr holds optional, operating system-specific attributes.
+ * Run passes it to os.StartProcess as the os.ProcAttr's Sys field.
+ */
+ sysProcAttr?: syscall.SysProcAttr
+ /**
+ * Process is the underlying process, once started.
+ */
+ process?: os.Process
+ /**
+ * ProcessState contains information about an exited process.
+ * If the process was started successfully, Wait or Run will
+ * populate its ProcessState when the command completes.
+ */
+ processState?: os.ProcessState
+ err: Error // LookPath error, if any.
+ /**
+ * If Cancel is non-nil, the command must have been created with
+ * CommandContext and Cancel will be called when the command's
+ * Context is done. By default, CommandContext sets Cancel to
+ * call the Kill method on the command's Process.
+ *
+ * Typically a custom Cancel will send a signal to the command's
+ * Process, but it may instead take other actions to initiate cancellation,
+ * such as closing a stdin or stdout pipe or sending a shutdown request on a
+ * network socket.
+ *
+ * If the command exits with a success status after Cancel is
+ * called, and Cancel does not return an error equivalent to
+ * os.ErrProcessDone, then Wait and similar methods will return a non-nil
+ * error: either an error wrapping the one returned by Cancel,
+ * or the error from the Context.
+ * (If the command exits with a non-success status, or Cancel
+ * returns an error that wraps os.ErrProcessDone, Wait and similar methods
+ * continue to return the command's usual exit status.)
+ *
+ * If Cancel is set to nil, nothing will happen immediately when the command's
+ * Context is done, but a nonzero WaitDelay will still take effect. That may
+ * be useful, for example, to work around deadlocks in commands that do not
+ * support shutdown signals but are expected to always finish quickly.
+ *
+ * Cancel will not be called if Start returns a non-nil error.
+ */
+ cancel: () => void
+ /**
+ * If WaitDelay is non-zero, it bounds the time spent waiting on two sources
+ * of unexpected delay in Wait: a child process that fails to exit after the
+ * associated Context is canceled, and a child process that exits but leaves
+ * its I/O pipes unclosed.
+ *
+ * The WaitDelay timer starts when either the associated Context is done or a
+ * call to Wait observes that the child process has exited, whichever occurs
+ * first. When the delay has elapsed, the command shuts down the child process
+ * and/or its I/O pipes.
+ *
+ * If the child process has failed to exit — perhaps because it ignored or
+ * failed to receive a shutdown signal from a Cancel function, or because no
+ * Cancel function was set — then it will be terminated using os.Process.Kill.
+ *
+ * Then, if the I/O pipes communicating with the child process are still open,
+ * those pipes are closed in order to unblock any goroutines currently blocked
+ * on Read or Write calls.
+ *
+ * If pipes are closed due to WaitDelay, no Cancel call has occurred,
+ * and the command has otherwise exited with a successful status, Wait and
+ * similar methods will return ErrWaitDelay instead of nil.
+ *
+ * If WaitDelay is zero (the default), I/O pipes will be read until EOF,
+ * which might not occur until orphaned subprocesses of the command have
+ * also closed their descriptors for the pipes.
+ */
+ waitDelay: time.Duration
+ }
+ interface Cmd {
+ /**
+ * String returns a human-readable description of c.
+ * It is intended only for debugging.
+ * In particular, it is not suitable for use as input to a shell.
+ * The output of String may vary across Go releases.
+ */
+ string(): string
+ }
+ interface Cmd {
+ /**
+ * Run starts the specified command and waits for it to complete.
+ *
+ * The returned error is nil if the command runs, has no problems
+ * copying stdin, stdout, and stderr, and exits with a zero exit
+ * status.
+ *
+ * If the command starts but does not complete successfully, the error is of
+ * type [*ExitError]. Other error types may be returned for other situations.
+ *
+ * If the calling goroutine has locked the operating system thread
+ * with [runtime.LockOSThread] and modified any inheritable OS-level
+ * thread state (for example, Linux or Plan 9 name spaces), the new
+ * process will inherit the caller's thread state.
+ */
+ run(): void
+ }
+ interface Cmd {
+ /**
+ * Start starts the specified command but does not wait for it to complete.
+ *
+ * If Start returns successfully, the c.Process field will be set.
+ *
+ * After a successful call to Start the [Cmd.Wait] method must be called in
+ * order to release associated system resources.
+ */
+ start(): void
+ }
+ interface Cmd {
+ /**
+ * Wait waits for the command to exit and waits for any copying to
+ * stdin or copying from stdout or stderr to complete.
+ *
+ * The command must have been started by [Cmd.Start].
+ *
+ * The returned error is nil if the command runs, has no problems
+ * copying stdin, stdout, and stderr, and exits with a zero exit
+ * status.
+ *
+ * If the command fails to run or doesn't complete successfully, the
+ * error is of type [*ExitError]. Other error types may be
+ * returned for I/O problems.
+ *
+ * If any of c.Stdin, c.Stdout or c.Stderr are not an [*os.File], Wait also waits
+ * for the respective I/O loop copying to or from the process to complete.
+ *
+ * Wait releases any resources associated with the [Cmd].
+ */
+ wait(): void
+ }
+ interface Cmd {
+ /**
+ * Output runs the command and returns its standard output.
+ * Any returned error will usually be of type [*ExitError].
+ * If c.Stderr was nil, Output populates [ExitError.Stderr].
+ */
+ output(): string|Array<number>
+ }
+ interface Cmd {
+ /**
+ * CombinedOutput runs the command and returns its combined standard
+ * output and standard error.
+ */
+ combinedOutput(): string|Array<number>
+ }
+ interface Cmd {
+ /**
+ * StdinPipe returns a pipe that will be connected to the command's
+ * standard input when the command starts.
+ * The pipe will be closed automatically after [Cmd.Wait] sees the command exit.
+ * A caller need only call Close to force the pipe to close sooner.
+ * For example, if the command being run will not exit until standard input
+ * is closed, the caller must close the pipe.
+ */
+ stdinPipe(): io.WriteCloser
+ }
+ interface Cmd {
+ /**
+ * StdoutPipe returns a pipe that will be connected to the command's
+ * standard output when the command starts.
+ *
+ * [Cmd.Wait] will close the pipe after seeing the command exit, so most callers
+ * need not close the pipe themselves. It is thus incorrect to call Wait
+ * before all reads from the pipe have completed.
+ * For the same reason, it is incorrect to call [Cmd.Run] when using StdoutPipe.
+ * See the example for idiomatic usage.
+ */
+ stdoutPipe(): io.ReadCloser
+ }
+ interface Cmd {
+ /**
+ * StderrPipe returns a pipe that will be connected to the command's
+ * standard error when the command starts.
+ *
+ * [Cmd.Wait] will close the pipe after seeing the command exit, so most callers
+ * need not close the pipe themselves. It is thus incorrect to call Wait
+ * before all reads from the pipe have completed.
+ * For the same reason, it is incorrect to use [Cmd.Run] when using StderrPipe.
+ * See the StdoutPipe example for idiomatic usage.
+ */
+ stderrPipe(): io.ReadCloser
+ }
+ interface Cmd {
+ /**
+ * Environ returns a copy of the environment in which the command would be run
+ * as it is currently configured.
+ */
+ environ(): Array<string>
+ }
+}
+
+/**
+ * Package net provides a portable interface for network I/O, including
+ * TCP/IP, UDP, domain name resolution, and Unix domain sockets.
+ *
+ * Although the package provides access to low-level networking
+ * primitives, most clients will need only the basic interface provided
+ * by the [Dial], [Listen], and Accept functions and the associated
+ * [Conn] and [Listener] interfaces. The crypto/tls package uses
+ * the same interfaces and similar Dial and Listen functions.
+ *
+ * The Dial function connects to a server:
+ *
+ * ```
+ * conn, err := net.Dial("tcp", "golang.org:80")
+ * if err != nil {
+ * // handle error
+ * }
+ * fmt.Fprintf(conn, "GET / HTTP/1.0\r\n\r\n")
+ * status, err := bufio.NewReader(conn).ReadString('\n')
+ * // ...
+ * ```
+ *
+ * The Listen function creates servers:
+ *
+ * ```
+ * ln, err := net.Listen("tcp", ":8080")
+ * if err != nil {
+ * // handle error
+ * }
+ * for {
+ * conn, err := ln.Accept()
+ * if err != nil {
+ * // handle error
+ * }
+ * go handleConnection(conn)
+ * }
+ * ```
+ *
+ * # Name Resolution
+ *
+ * The method for resolving domain names, whether indirectly with functions like Dial
+ * or directly with functions like [LookupHost] and [LookupAddr], varies by operating system.
+ *
+ * On Unix systems, the resolver has two options for resolving names.
+ * It can use a pure Go resolver that sends DNS requests directly to the servers
+ * listed in /etc/resolv.conf, or it can use a cgo-based resolver that calls C
+ * library routines such as getaddrinfo and getnameinfo.
+ *
+ * On Unix the pure Go resolver is preferred over the cgo resolver, because a blocked DNS
+ * request consumes only a goroutine, while a blocked C call consumes an operating system thread.
+ * When cgo is available, the cgo-based resolver is used instead under a variety of
+ * conditions: on systems that do not let programs make direct DNS requests (OS X),
+ * when the LOCALDOMAIN environment variable is present (even if empty),
+ * when the RES_OPTIONS or HOSTALIASES environment variable is non-empty,
+ * when the ASR_CONFIG environment variable is non-empty (OpenBSD only),
+ * when /etc/resolv.conf or /etc/nsswitch.conf specify the use of features that the
+ * Go resolver does not implement.
+ *
+ * On all systems (except Plan 9), when the cgo resolver is being used
+ * this package applies a concurrent cgo lookup limit to prevent the system
+ * from running out of system threads. Currently, it is limited to 500 concurrent lookups.
+ *
+ * The resolver decision can be overridden by setting the netdns value of the
+ * GODEBUG environment variable (see package runtime) to go or cgo, as in:
+ *
+ * ```
+ * export GODEBUG=netdns=go # force pure Go resolver
+ * export GODEBUG=netdns=cgo # force native resolver (cgo, win32)
+ * ```
+ *
+ * The decision can also be forced while building the Go source tree
+ * by setting the netgo or netcgo build tag.
+ *
+ * A numeric netdns setting, as in GODEBUG=netdns=1, causes the resolver
+ * to print debugging information about its decisions.
+ * To force a particular resolver while also printing debugging information,
+ * join the two settings by a plus sign, as in GODEBUG=netdns=go+1.
+ *
+ * The Go resolver will send an EDNS0 additional header with a DNS request,
+ * to signal a willingness to accept a larger DNS packet size.
+ * This can reportedly cause sporadic failures with the DNS server run
+ * by some modems and routers. Setting GODEBUG=netedns0=0 will disable
+ * sending the additional header.
+ *
+ * On macOS, if Go code that uses the net package is built with
+ * -buildmode=c-archive, linking the resulting archive into a C program
+ * requires passing -lresolv when linking the C code.
+ *
+ * On Plan 9, the resolver always accesses /net/cs and /net/dns.
+ *
+ * On Windows, in Go 1.18.x and earlier, the resolver always used C
+ * library functions, such as GetAddrInfo and DnsQuery.
+ */
+namespace net {
+ /**
+ * Conn is a generic stream-oriented network connection.
+ *
+ * Multiple goroutines may invoke methods on a Conn simultaneously.
+ */
+ interface Conn {
+ [key:string]: any;
+ /**
+ * Read reads data from the connection.
+ * Read can be made to time out and return an error after a fixed
+ * time limit; see SetDeadline and SetReadDeadline.
+ */
+ read(b: string|Array<number>): number
+ /**
+ * Write writes data to the connection.
+ * Write can be made to time out and return an error after a fixed
+ * time limit; see SetDeadline and SetWriteDeadline.
+ */
+ write(b: string|Array<number>): number
+ /**
+ * Close closes the connection.
+ * Any blocked Read or Write operations will be unblocked and return errors.
+ */
+ close(): void
+ /**
+ * LocalAddr returns the local network address, if known.
+ */
+ localAddr(): Addr
+ /**
+ * RemoteAddr returns the remote network address, if known.
+ */
+ remoteAddr(): Addr
+ /**
+ * SetDeadline sets the read and write deadlines associated
+ * with the connection. It is equivalent to calling both
+ * SetReadDeadline and SetWriteDeadline.
+ *
+ * A deadline is an absolute time after which I/O operations
+ * fail instead of blocking. The deadline applies to all future
+ * and pending I/O, not just the immediately following call to
+ * Read or Write. After a deadline has been exceeded, the
+ * connection can be refreshed by setting a deadline in the future.
+ *
+ * If the deadline is exceeded a call to Read or Write or to other
+ * I/O methods will return an error that wraps os.ErrDeadlineExceeded.
+ * This can be tested using errors.Is(err, os.ErrDeadlineExceeded).
+ * The error's Timeout method will return true, but note that there
+ * are other possible errors for which the Timeout method will
+ * return true even if the deadline has not been exceeded.
+ *
+ * An idle timeout can be implemented by repeatedly extending
+ * the deadline after successful Read or Write calls.
+ *
+ * A zero value for t means I/O operations will not time out.
+ */
+ setDeadline(t: time.Time): void
+ /**
+ * SetReadDeadline sets the deadline for future Read calls
+ * and any currently-blocked Read call.
+ * A zero value for t means Read will not time out.
+ */
+ setReadDeadline(t: time.Time): void
+ /**
+ * SetWriteDeadline sets the deadline for future Write calls
+ * and any currently-blocked Write call.
+ * Even if write times out, it may return n > 0, indicating that
+ * some of the data was successfully written.
+ * A zero value for t means Write will not time out.
+ */
+ setWriteDeadline(t: time.Time): void
+ }
+}
+
+/**
+ * Package jwt is a Go implementation of JSON Web Tokens: http://self-issued.info/docs/draft-jones-json-web-token.html
+ *
+ * See README.md for more info.
+ */
+namespace jwt {
+ /**
+ * MapClaims is a claims type that uses the map[string]interface{} for JSON decoding.
+ * This is the default claims type if you don't supply one
+ */
+ interface MapClaims extends _TygojaDict{}
+ interface MapClaims {
+ /**
+ * VerifyAudience Compares the aud claim against cmp.
+ * If required is false, this method will return true if the value matches or is unset
+ */
+ verifyAudience(cmp: string, req: boolean): boolean
+ }
+ interface MapClaims {
+ /**
+ * VerifyExpiresAt compares the exp claim against cmp (cmp <= exp).
+ * If req is false, it will return true, if exp is unset.
+ */
+ verifyExpiresAt(cmp: number, req: boolean): boolean
+ }
+ interface MapClaims {
+ /**
+ * VerifyIssuedAt compares the exp claim against cmp (cmp >= iat).
+ * If req is false, it will return true, if iat is unset.
+ */
+ verifyIssuedAt(cmp: number, req: boolean): boolean
+ }
+ interface MapClaims {
+ /**
+ * VerifyNotBefore compares the nbf claim against cmp (cmp >= nbf).
+ * If req is false, it will return true, if nbf is unset.
+ */
+ verifyNotBefore(cmp: number, req: boolean): boolean
+ }
+ interface MapClaims {
+ /**
+ * VerifyIssuer compares the iss claim against cmp.
+ * If required is false, this method will return true if the value matches or is unset
+ */
+ verifyIssuer(cmp: string, req: boolean): boolean
+ }
+ interface MapClaims {
+ /**
+ * Valid validates time based claims "exp, iat, nbf".
+ * There is no accounting for clock skew.
+ * As well, if any of the above claims are not in the token, it will still
+ * be considered a valid claim.
+ */
+ valid(): void
+ }
+}
+
/**
* Package multipart implements MIME multipart parsing, as defined in RFC
* 2046.
@@ -8823,8 +9320,8 @@ namespace sql {
* To protect against malicious inputs, this package sets limits on the size
* of the MIME data it processes.
*
- * Reader.NextPart and Reader.NextRawPart limit the number of headers in a
- * part to 10000 and Reader.ReadForm limits the total number of headers in all
+ * [Reader.NextPart] and [Reader.NextRawPart] limit the number of headers in a
+ * part to 10000 and [Reader.ReadForm] limits the total number of headers in all
* FileHeaders to 10000.
* These limits may be adjusted with the GODEBUG=multipartmaxheaders=
* setting.
@@ -8833,11 +9330,6 @@ namespace sql {
* This limit may be adjusted with the GODEBUG=multipartmaxparts=
* setting.
*/
-/**
- * Copyright 2023 The Go Authors. All rights reserved.
- * Use of this source code is governed by a BSD-style
- * license that can be found in the LICENSE file.
- */
namespace multipart {
/**
* A FileHeader describes a file part of a multipart request.
@@ -8849,7 +9341,7 @@ namespace multipart {
}
interface FileHeader {
/**
- * Open opens and returns the FileHeader's associated File.
+ * Open opens and returns the [FileHeader]'s associated File.
*/
open(): File
}
@@ -8977,6 +9469,22 @@ namespace multipart {
namespace http {
// @ts-ignore
import mathrand = rand
+ /**
+ * PushOptions describes options for [Pusher.Push].
+ */
+ interface PushOptions {
+ /**
+ * Method specifies the HTTP method for the promised request.
+ * If set, it must be "GET" or "HEAD". Empty means "GET".
+ */
+ method: string
+ /**
+ * Header specifies additional promised request headers. This cannot
+ * include HTTP/2 pseudo header fields like ":path" and ":scheme",
+ * which will be added automatically.
+ */
+ header: Header
+ }
// @ts-ignore
import urlpkg = url
/**
@@ -9222,6 +9730,11 @@ namespace http {
* redirects.
*/
response?: Response
+ /**
+ * Pattern is the [ServeMux] pattern that matched the request.
+ * It is empty if the request was not matched against a pattern.
+ */
+ pattern: string
}
interface Request {
/**
@@ -9258,6 +9771,8 @@ namespace http {
* Clone returns a deep copy of r with its context changed to ctx.
* The provided ctx must be non-nil.
*
+ * Clone only makes a shallow copy of the Body field.
+ *
* For an outgoing client request, the context controls the entire
* lifetime of a request and its response: obtaining a connection,
* sending the request, and reading the response headers and body.
@@ -9283,6 +9798,13 @@ namespace http {
*/
cookies(): Array<(Cookie | undefined)>
}
+ interface Request {
+ /**
+ * CookiesNamed parses and returns the named HTTP cookies sent with the request
+ * or an empty slice if none matched.
+ */
+ cookiesNamed(name: string): Array<(Cookie | undefined)>
+ }
interface Request {
/**
* Cookie returns the named cookie provided in the request or
@@ -9466,6 +9988,36 @@ namespace http {
*/
setPathValue(name: string, value: string): void
}
+ /**
+ * A Handler responds to an HTTP request.
+ *
+ * [Handler.ServeHTTP] should write reply headers and data to the [ResponseWriter]
+ * and then return. Returning signals that the request is finished; it
+ * is not valid to use the [ResponseWriter] or read from the
+ * [Request.Body] after or concurrently with the completion of the
+ * ServeHTTP call.
+ *
+ * Depending on the HTTP client software, HTTP protocol version, and
+ * any intermediaries between the client and the Go server, it may not
+ * be possible to read from the [Request.Body] after writing to the
+ * [ResponseWriter]. Cautious handlers should read the [Request.Body]
+ * first, and then reply.
+ *
+ * Except for reading the body, handlers should not modify the
+ * provided Request.
+ *
+ * If ServeHTTP panics, the server (the caller of ServeHTTP) assumes
+ * that the effect of the panic was isolated to the active request.
+ * It recovers the panic, logs a stack trace to the server error log,
+ * and either closes the network connection or sends an HTTP/2
+ * RST_STREAM, depending on the HTTP protocol. To abort a handler so
+ * the client sees an interrupted response but the server doesn't log
+ * an error, panic with the value [ErrAbortHandler].
+ */
+ interface Handler {
+ [key:string]: any;
+ serveHTTP(_arg0: ResponseWriter, _arg1: Request): void
+ }
/**
* A ResponseWriter interface is used by an HTTP handler to
* construct an HTTP response.
@@ -9543,674 +10095,6 @@ namespace http {
*/
writeHeader(statusCode: number): void
}
- /**
- * A Server defines parameters for running an HTTP server.
- * The zero value for Server is a valid configuration.
- */
- interface Server {
- /**
- * Addr optionally specifies the TCP address for the server to listen on,
- * in the form "host:port". If empty, ":http" (port 80) is used.
- * The service names are defined in RFC 6335 and assigned by IANA.
- * See net.Dial for details of the address format.
- */
- addr: string
- handler: Handler // handler to invoke, http.DefaultServeMux if nil
- /**
- * DisableGeneralOptionsHandler, if true, passes "OPTIONS *" requests to the Handler,
- * otherwise responds with 200 OK and Content-Length: 0.
- */
- disableGeneralOptionsHandler: boolean
- /**
- * TLSConfig optionally provides a TLS configuration for use
- * by ServeTLS and ListenAndServeTLS. Note that this value is
- * cloned by ServeTLS and ListenAndServeTLS, so it's not
- * possible to modify the configuration with methods like
- * tls.Config.SetSessionTicketKeys. To use
- * SetSessionTicketKeys, use Server.Serve with a TLS Listener
- * instead.
- */
- tlsConfig?: any
- /**
- * ReadTimeout is the maximum duration for reading the entire
- * request, including the body. A zero or negative value means
- * there will be no timeout.
- *
- * Because ReadTimeout does not let Handlers make per-request
- * decisions on each request body's acceptable deadline or
- * upload rate, most users will prefer to use
- * ReadHeaderTimeout. It is valid to use them both.
- */
- readTimeout: time.Duration
- /**
- * ReadHeaderTimeout is the amount of time allowed to read
- * request headers. The connection's read deadline is reset
- * after reading the headers and the Handler can decide what
- * is considered too slow for the body. If ReadHeaderTimeout
- * is zero, the value of ReadTimeout is used. If both are
- * zero, there is no timeout.
- */
- readHeaderTimeout: time.Duration
- /**
- * WriteTimeout is the maximum duration before timing out
- * writes of the response. It is reset whenever a new
- * request's header is read. Like ReadTimeout, it does not
- * let Handlers make decisions on a per-request basis.
- * A zero or negative value means there will be no timeout.
- */
- writeTimeout: time.Duration
- /**
- * IdleTimeout is the maximum amount of time to wait for the
- * next request when keep-alives are enabled. If IdleTimeout
- * is zero, the value of ReadTimeout is used. If both are
- * zero, there is no timeout.
- */
- idleTimeout: time.Duration
- /**
- * MaxHeaderBytes controls the maximum number of bytes the
- * server will read parsing the request header's keys and
- * values, including the request line. It does not limit the
- * size of the request body.
- * If zero, DefaultMaxHeaderBytes is used.
- */
- maxHeaderBytes: number
- /**
- * TLSNextProto optionally specifies a function to take over
- * ownership of the provided TLS connection when an ALPN
- * protocol upgrade has occurred. The map key is the protocol
- * name negotiated. The Handler argument should be used to
- * handle HTTP requests and will initialize the Request's TLS
- * and RemoteAddr if not already set. The connection is
- * automatically closed when the function returns.
- * If TLSNextProto is not nil, HTTP/2 support is not enabled
- * automatically.
- */
- tlsNextProto: _TygojaDict
- /**
- * ConnState specifies an optional callback function that is
- * called when a client connection changes state. See the
- * ConnState type and associated constants for details.
- */
- connState: (_arg0: net.Conn, _arg1: ConnState) => void
- /**
- * ErrorLog specifies an optional logger for errors accepting
- * connections, unexpected behavior from handlers, and
- * underlying FileSystem errors.
- * If nil, logging is done via the log package's standard logger.
- */
- errorLog?: any
- /**
- * BaseContext optionally specifies a function that returns
- * the base context for incoming requests on this server.
- * The provided Listener is the specific Listener that's
- * about to start accepting requests.
- * If BaseContext is nil, the default is context.Background().
- * If non-nil, it must return a non-nil context.
- */
- baseContext: (_arg0: net.Listener) => context.Context
- /**
- * ConnContext optionally specifies a function that modifies
- * the context used for a new connection c. The provided ctx
- * is derived from the base context and has a ServerContextKey
- * value.
- */
- connContext: (ctx: context.Context, c: net.Conn) => context.Context
- }
- interface Server {
- /**
- * Close immediately closes all active net.Listeners and any
- * connections in state [StateNew], [StateActive], or [StateIdle]. For a
- * graceful shutdown, use [Server.Shutdown].
- *
- * Close does not attempt to close (and does not even know about)
- * any hijacked connections, such as WebSockets.
- *
- * Close returns any error returned from closing the [Server]'s
- * underlying Listener(s).
- */
- close(): void
- }
- interface Server {
- /**
- * Shutdown gracefully shuts down the server without interrupting any
- * active connections. Shutdown works by first closing all open
- * listeners, then closing all idle connections, and then waiting
- * indefinitely for connections to return to idle and then shut down.
- * If the provided context expires before the shutdown is complete,
- * Shutdown returns the context's error, otherwise it returns any
- * error returned from closing the [Server]'s underlying Listener(s).
- *
- * When Shutdown is called, [Serve], [ListenAndServe], and
- * [ListenAndServeTLS] immediately return [ErrServerClosed]. Make sure the
- * program doesn't exit and waits instead for Shutdown to return.
- *
- * Shutdown does not attempt to close nor wait for hijacked
- * connections such as WebSockets. The caller of Shutdown should
- * separately notify such long-lived connections of shutdown and wait
- * for them to close, if desired. See [Server.RegisterOnShutdown] for a way to
- * register shutdown notification functions.
- *
- * Once Shutdown has been called on a server, it may not be reused;
- * future calls to methods such as Serve will return ErrServerClosed.
- */
- shutdown(ctx: context.Context): void
- }
- interface Server {
- /**
- * RegisterOnShutdown registers a function to call on [Server.Shutdown].
- * This can be used to gracefully shutdown connections that have
- * undergone ALPN protocol upgrade or that have been hijacked.
- * This function should start protocol-specific graceful shutdown,
- * but should not wait for shutdown to complete.
- */
- registerOnShutdown(f: () => void): void
- }
- interface Server {
- /**
- * ListenAndServe listens on the TCP network address srv.Addr and then
- * calls [Serve] to handle requests on incoming connections.
- * Accepted connections are configured to enable TCP keep-alives.
- *
- * If srv.Addr is blank, ":http" is used.
- *
- * ListenAndServe always returns a non-nil error. After [Server.Shutdown] or [Server.Close],
- * the returned error is [ErrServerClosed].
- */
- listenAndServe(): void
- }
- interface Server {
- /**
- * Serve accepts incoming connections on the Listener l, creating a
- * new service goroutine for each. The service goroutines read requests and
- * then call srv.Handler to reply to them.
- *
- * HTTP/2 support is only enabled if the Listener returns [*tls.Conn]
- * connections and they were configured with "h2" in the TLS
- * Config.NextProtos.
- *
- * Serve always returns a non-nil error and closes l.
- * After [Server.Shutdown] or [Server.Close], the returned error is [ErrServerClosed].
- */
- serve(l: net.Listener): void
- }
- interface Server {
- /**
- * ServeTLS accepts incoming connections on the Listener l, creating a
- * new service goroutine for each. The service goroutines perform TLS
- * setup and then read requests, calling srv.Handler to reply to them.
- *
- * Files containing a certificate and matching private key for the
- * server must be provided if neither the [Server]'s
- * TLSConfig.Certificates nor TLSConfig.GetCertificate are populated.
- * If the certificate is signed by a certificate authority, the
- * certFile should be the concatenation of the server's certificate,
- * any intermediates, and the CA's certificate.
- *
- * ServeTLS always returns a non-nil error. After [Server.Shutdown] or [Server.Close], the
- * returned error is [ErrServerClosed].
- */
- serveTLS(l: net.Listener, certFile: string, keyFile: string): void
- }
- interface Server {
- /**
- * SetKeepAlivesEnabled controls whether HTTP keep-alives are enabled.
- * By default, keep-alives are always enabled. Only very
- * resource-constrained environments or servers in the process of
- * shutting down should disable them.
- */
- setKeepAlivesEnabled(v: boolean): void
- }
- interface Server {
- /**
- * ListenAndServeTLS listens on the TCP network address srv.Addr and
- * then calls [ServeTLS] to handle requests on incoming TLS connections.
- * Accepted connections are configured to enable TCP keep-alives.
- *
- * Filenames containing a certificate and matching private key for the
- * server must be provided if neither the [Server]'s TLSConfig.Certificates
- * nor TLSConfig.GetCertificate are populated. If the certificate is
- * signed by a certificate authority, the certFile should be the
- * concatenation of the server's certificate, any intermediates, and
- * the CA's certificate.
- *
- * If srv.Addr is blank, ":https" is used.
- *
- * ListenAndServeTLS always returns a non-nil error. After [Server.Shutdown] or
- * [Server.Close], the returned error is [ErrServerClosed].
- */
- listenAndServeTLS(certFile: string, keyFile: string): void
- }
-}
-
-/**
- * Package exec runs external commands. It wraps os.StartProcess to make it
- * easier to remap stdin and stdout, connect I/O with pipes, and do other
- * adjustments.
- *
- * Unlike the "system" library call from C and other languages, the
- * os/exec package intentionally does not invoke the system shell and
- * does not expand any glob patterns or handle other expansions,
- * pipelines, or redirections typically done by shells. The package
- * behaves more like C's "exec" family of functions. To expand glob
- * patterns, either call the shell directly, taking care to escape any
- * dangerous input, or use the path/filepath package's Glob function.
- * To expand environment variables, use package os's ExpandEnv.
- *
- * Note that the examples in this package assume a Unix system.
- * They may not run on Windows, and they do not run in the Go Playground
- * used by golang.org and godoc.org.
- *
- * # Executables in the current directory
- *
- * The functions Command and LookPath look for a program
- * in the directories listed in the current path, following the
- * conventions of the host operating system.
- * Operating systems have for decades included the current
- * directory in this search, sometimes implicitly and sometimes
- * configured explicitly that way by default.
- * Modern practice is that including the current directory
- * is usually unexpected and often leads to security problems.
- *
- * To avoid those security problems, as of Go 1.19, this package will not resolve a program
- * using an implicit or explicit path entry relative to the current directory.
- * That is, if you run exec.LookPath("go"), it will not successfully return
- * ./go on Unix nor .\go.exe on Windows, no matter how the path is configured.
- * Instead, if the usual path algorithms would result in that answer,
- * these functions return an error err satisfying errors.Is(err, ErrDot).
- *
- * For example, consider these two program snippets:
- *
- * ```
- * path, err := exec.LookPath("prog")
- * if err != nil {
- * log.Fatal(err)
- * }
- * use(path)
- * ```
- *
- * and
- *
- * ```
- * cmd := exec.Command("prog")
- * if err := cmd.Run(); err != nil {
- * log.Fatal(err)
- * }
- * ```
- *
- * These will not find and run ./prog or .\prog.exe,
- * no matter how the current path is configured.
- *
- * Code that always wants to run a program from the current directory
- * can be rewritten to say "./prog" instead of "prog".
- *
- * Code that insists on including results from relative path entries
- * can instead override the error using an errors.Is check:
- *
- * ```
- * path, err := exec.LookPath("prog")
- * if errors.Is(err, exec.ErrDot) {
- * err = nil
- * }
- * if err != nil {
- * log.Fatal(err)
- * }
- * use(path)
- * ```
- *
- * and
- *
- * ```
- * cmd := exec.Command("prog")
- * if errors.Is(cmd.Err, exec.ErrDot) {
- * cmd.Err = nil
- * }
- * if err := cmd.Run(); err != nil {
- * log.Fatal(err)
- * }
- * ```
- *
- * Setting the environment variable GODEBUG=execerrdot=0
- * disables generation of ErrDot entirely, temporarily restoring the pre-Go 1.19
- * behavior for programs that are unable to apply more targeted fixes.
- * A future version of Go may remove support for this variable.
- *
- * Before adding such overrides, make sure you understand the
- * security implications of doing so.
- * See https://go.dev/blog/path-security for more information.
- */
-namespace exec {
- /**
- * Cmd represents an external command being prepared or run.
- *
- * A Cmd cannot be reused after calling its Run, Output or CombinedOutput
- * methods.
- */
- interface Cmd {
- /**
- * Path is the path of the command to run.
- *
- * This is the only field that must be set to a non-zero
- * value. If Path is relative, it is evaluated relative
- * to Dir.
- */
- path: string
- /**
- * Args holds command line arguments, including the command as Args[0].
- * If the Args field is empty or nil, Run uses {Path}.
- *
- * In typical use, both Path and Args are set by calling Command.
- */
- args: Array
- /**
- * Env specifies the environment of the process.
- * Each entry is of the form "key=value".
- * If Env is nil, the new process uses the current process's
- * environment.
- * If Env contains duplicate environment keys, only the last
- * value in the slice for each duplicate key is used.
- * As a special case on Windows, SYSTEMROOT is always added if
- * missing and not explicitly set to the empty string.
- */
- env: Array
- /**
- * Dir specifies the working directory of the command.
- * If Dir is the empty string, Run runs the command in the
- * calling process's current directory.
- */
- dir: string
- /**
- * Stdin specifies the process's standard input.
- *
- * If Stdin is nil, the process reads from the null device (os.DevNull).
- *
- * If Stdin is an *os.File, the process's standard input is connected
- * directly to that file.
- *
- * Otherwise, during the execution of the command a separate
- * goroutine reads from Stdin and delivers that data to the command
- * over a pipe. In this case, Wait does not complete until the goroutine
- * stops copying, either because it has reached the end of Stdin
- * (EOF or a read error), or because writing to the pipe returned an error,
- * or because a nonzero WaitDelay was set and expired.
- */
- stdin: io.Reader
- /**
- * Stdout and Stderr specify the process's standard output and error.
- *
- * If either is nil, Run connects the corresponding file descriptor
- * to the null device (os.DevNull).
- *
- * If either is an *os.File, the corresponding output from the process
- * is connected directly to that file.
- *
- * Otherwise, during the execution of the command a separate goroutine
- * reads from the process over a pipe and delivers that data to the
- * corresponding Writer. In this case, Wait does not complete until the
- * goroutine reaches EOF or encounters an error or a nonzero WaitDelay
- * expires.
- *
- * If Stdout and Stderr are the same writer, and have a type that can
- * be compared with ==, at most one goroutine at a time will call Write.
- */
- stdout: io.Writer
- stderr: io.Writer
- /**
- * ExtraFiles specifies additional open files to be inherited by the
- * new process. It does not include standard input, standard output, or
- * standard error. If non-nil, entry i becomes file descriptor 3+i.
- *
- * ExtraFiles is not supported on Windows.
- */
- extraFiles: Array<(os.File | undefined)>
- /**
- * SysProcAttr holds optional, operating system-specific attributes.
- * Run passes it to os.StartProcess as the os.ProcAttr's Sys field.
- */
- sysProcAttr?: syscall.SysProcAttr
- /**
- * Process is the underlying process, once started.
- */
- process?: os.Process
- /**
- * ProcessState contains information about an exited process.
- * If the process was started successfully, Wait or Run will
- * populate its ProcessState when the command completes.
- */
- processState?: os.ProcessState
- err: Error // LookPath error, if any.
- /**
- * If Cancel is non-nil, the command must have been created with
- * CommandContext and Cancel will be called when the command's
- * Context is done. By default, CommandContext sets Cancel to
- * call the Kill method on the command's Process.
- *
- * Typically a custom Cancel will send a signal to the command's
- * Process, but it may instead take other actions to initiate cancellation,
- * such as closing a stdin or stdout pipe or sending a shutdown request on a
- * network socket.
- *
- * If the command exits with a success status after Cancel is
- * called, and Cancel does not return an error equivalent to
- * os.ErrProcessDone, then Wait and similar methods will return a non-nil
- * error: either an error wrapping the one returned by Cancel,
- * or the error from the Context.
- * (If the command exits with a non-success status, or Cancel
- * returns an error that wraps os.ErrProcessDone, Wait and similar methods
- * continue to return the command's usual exit status.)
- *
- * If Cancel is set to nil, nothing will happen immediately when the command's
- * Context is done, but a nonzero WaitDelay will still take effect. That may
- * be useful, for example, to work around deadlocks in commands that do not
- * support shutdown signals but are expected to always finish quickly.
- *
- * Cancel will not be called if Start returns a non-nil error.
- */
- cancel: () => void
- /**
- * If WaitDelay is non-zero, it bounds the time spent waiting on two sources
- * of unexpected delay in Wait: a child process that fails to exit after the
- * associated Context is canceled, and a child process that exits but leaves
- * its I/O pipes unclosed.
- *
- * The WaitDelay timer starts when either the associated Context is done or a
- * call to Wait observes that the child process has exited, whichever occurs
- * first. When the delay has elapsed, the command shuts down the child process
- * and/or its I/O pipes.
- *
- * If the child process has failed to exit — perhaps because it ignored or
- * failed to receive a shutdown signal from a Cancel function, or because no
- * Cancel function was set — then it will be terminated using os.Process.Kill.
- *
- * Then, if the I/O pipes communicating with the child process are still open,
- * those pipes are closed in order to unblock any goroutines currently blocked
- * on Read or Write calls.
- *
- * If pipes are closed due to WaitDelay, no Cancel call has occurred,
- * and the command has otherwise exited with a successful status, Wait and
- * similar methods will return ErrWaitDelay instead of nil.
- *
- * If WaitDelay is zero (the default), I/O pipes will be read until EOF,
- * which might not occur until orphaned subprocesses of the command have
- * also closed their descriptors for the pipes.
- */
- waitDelay: time.Duration
- }
- interface Cmd {
- /**
- * String returns a human-readable description of c.
- * It is intended only for debugging.
- * In particular, it is not suitable for use as input to a shell.
- * The output of String may vary across Go releases.
- */
- string(): string
- }
- interface Cmd {
- /**
- * Run starts the specified command and waits for it to complete.
- *
- * The returned error is nil if the command runs, has no problems
- * copying stdin, stdout, and stderr, and exits with a zero exit
- * status.
- *
- * If the command starts but does not complete successfully, the error is of
- * type *ExitError. Other error types may be returned for other situations.
- *
- * If the calling goroutine has locked the operating system thread
- * with runtime.LockOSThread and modified any inheritable OS-level
- * thread state (for example, Linux or Plan 9 name spaces), the new
- * process will inherit the caller's thread state.
- */
- run(): void
- }
- interface Cmd {
- /**
- * Start starts the specified command but does not wait for it to complete.
- *
- * If Start returns successfully, the c.Process field will be set.
- *
- * After a successful call to Start the Wait method must be called in
- * order to release associated system resources.
- */
- start(): void
- }
- interface Cmd {
- /**
- * Wait waits for the command to exit and waits for any copying to
- * stdin or copying from stdout or stderr to complete.
- *
- * The command must have been started by Start.
- *
- * The returned error is nil if the command runs, has no problems
- * copying stdin, stdout, and stderr, and exits with a zero exit
- * status.
- *
- * If the command fails to run or doesn't complete successfully, the
- * error is of type *ExitError. Other error types may be
- * returned for I/O problems.
- *
- * If any of c.Stdin, c.Stdout or c.Stderr are not an *os.File, Wait also waits
- * for the respective I/O loop copying to or from the process to complete.
- *
- * Wait releases any resources associated with the Cmd.
- */
- wait(): void
- }
- interface Cmd {
- /**
- * Output runs the command and returns its standard output.
- * Any returned error will usually be of type *ExitError.
- * If c.Stderr was nil, Output populates ExitError.Stderr.
- */
- output(): string|Array
- }
- interface Cmd {
- /**
- * CombinedOutput runs the command and returns its combined standard
- * output and standard error.
- */
- combinedOutput(): string|Array
- }
- interface Cmd {
- /**
- * StdinPipe returns a pipe that will be connected to the command's
- * standard input when the command starts.
- * The pipe will be closed automatically after Wait sees the command exit.
- * A caller need only call Close to force the pipe to close sooner.
- * For example, if the command being run will not exit until standard input
- * is closed, the caller must close the pipe.
- */
- stdinPipe(): io.WriteCloser
- }
- interface Cmd {
- /**
- * StdoutPipe returns a pipe that will be connected to the command's
- * standard output when the command starts.
- *
- * Wait will close the pipe after seeing the command exit, so most callers
- * need not close the pipe themselves. It is thus incorrect to call Wait
- * before all reads from the pipe have completed.
- * For the same reason, it is incorrect to call Run when using StdoutPipe.
- * See the example for idiomatic usage.
- */
- stdoutPipe(): io.ReadCloser
- }
- interface Cmd {
- /**
- * StderrPipe returns a pipe that will be connected to the command's
- * standard error when the command starts.
- *
- * Wait will close the pipe after seeing the command exit, so most callers
- * need not close the pipe themselves. It is thus incorrect to call Wait
- * before all reads from the pipe have completed.
- * For the same reason, it is incorrect to use Run when using StderrPipe.
- * See the StdoutPipe example for idiomatic usage.
- */
- stderrPipe(): io.ReadCloser
- }
- interface Cmd {
- /**
- * Environ returns a copy of the environment in which the command would be run
- * as it is currently configured.
- */
- environ(): Array
- }
-}
-
-/**
- * Package jwt is a Go implementation of JSON Web Tokens: http://self-issued.info/docs/draft-jones-json-web-token.html
- *
- * See README.md for more info.
- */
-namespace jwt {
- /**
- * MapClaims is a claims type that uses the map[string]interface{} for JSON decoding.
- * This is the default claims type if you don't supply one
- */
- interface MapClaims extends _TygojaDict{}
- interface MapClaims {
- /**
- * VerifyAudience Compares the aud claim against cmp.
- * If required is false, this method will return true if the value matches or is unset
- */
- verifyAudience(cmp: string, req: boolean): boolean
- }
- interface MapClaims {
- /**
- * VerifyExpiresAt compares the exp claim against cmp (cmp <= exp).
- * If req is false, it will return true, if exp is unset.
- */
- verifyExpiresAt(cmp: number, req: boolean): boolean
- }
- interface MapClaims {
- /**
- * VerifyIssuedAt compares the exp claim against cmp (cmp >= iat).
- * If req is false, it will return true, if iat is unset.
- */
- verifyIssuedAt(cmp: number, req: boolean): boolean
- }
- interface MapClaims {
- /**
- * VerifyNotBefore compares the nbf claim against cmp (cmp >= nbf).
- * If req is false, it will return true, if nbf is unset.
- */
- verifyNotBefore(cmp: number, req: boolean): boolean
- }
- interface MapClaims {
- /**
- * VerifyIssuer compares the iss claim against cmp.
- * If required is false, this method will return true if the value matches or is unset
- */
- verifyIssuer(cmp: string, req: boolean): boolean
- }
- interface MapClaims {
- /**
- * Valid validates time based claims "exp, iat, nbf".
- * There is no accounting for clock skew.
- * As well, if any of the above claims are not in the token, it will still
- * be considered a valid claim.
- */
- valid(): void
- }
}
/**
@@ -10450,360 +10334,1923 @@ namespace blob {
*/
namespace types {
/**
- * JsonArray defines a slice that is safe for json and db read/write.
+ * DateTime represents a [time.Time] instance in UTC that is wrapped
+ * and serialized using the app default date layout.
*/
- interface JsonArray extends Array{}
- interface JsonArray {
+ interface DateTime {
+ }
+ interface DateTime {
+ /**
+ * Time returns the internal [time.Time] instance.
+ */
+ time(): time.Time
+ }
+ interface DateTime {
+ /**
+ * Add returns a new DateTime based on the current DateTime + the specified duration.
+ */
+ add(duration: time.Duration): DateTime
+ }
+ interface DateTime {
+ /**
+ * Sub returns a [time.Duration] by subtracting the specified DateTime from the current one.
+ *
+ * If the result exceeds the maximum (or minimum) value that can be stored in a [time.Duration],
+ * the maximum (or minimum) duration will be returned.
+ */
+ sub(u: DateTime): time.Duration
+ }
+ interface DateTime {
+ /**
+ * AddDate returns a new DateTime based on the current one + duration.
+ *
+ * It follows the same rules as [time.AddDate].
+ */
+ addDate(years: number, months: number, days: number): DateTime
+ }
+ interface DateTime {
+ /**
+ * After reports whether the current DateTime instance is after u.
+ */
+ after(u: DateTime): boolean
+ }
+ interface DateTime {
+ /**
+ * Before reports whether the current DateTime instance is before u.
+ */
+ before(u: DateTime): boolean
+ }
+ interface DateTime {
+ /**
+ * Compare compares the current DateTime instance with u.
+ * If the current instance is before u, it returns -1.
+ * If the current instance is after u, it returns +1.
+ * If they're the same, it returns 0.
+ */
+ compare(u: DateTime): number
+ }
+ interface DateTime {
+ /**
+ * Equal reports whether the current DateTime and u represent the same time instant.
+ * Two DateTime can be equal even if they are in different locations.
+ * For example, 6:00 +0200 and 4:00 UTC are Equal.
+ */
+ equal(u: DateTime): boolean
+ }
+ interface DateTime {
+ /**
+ * Unix returns the current DateTime as a Unix time, aka.
+ * the number of seconds elapsed since January 1, 1970 UTC.
+ */
+ unix(): number
+ }
+ interface DateTime {
+ /**
+ * IsZero checks whether the current DateTime instance has zero time value.
+ */
+ isZero(): boolean
+ }
+ interface DateTime {
+ /**
+ * String serializes the current DateTime instance into a formatted
+ * UTC date string.
+ *
+ * The zero value is serialized to an empty string.
+ */
+ string(): string
+ }
+ interface DateTime {
/**
* MarshalJSON implements the [json.Marshaler] interface.
*/
marshalJSON(): string|Array
}
- interface JsonArray {
- /**
- * Value implements the [driver.Valuer] interface.
- */
- value(): any
- }
- interface JsonArray {
- /**
- * Scan implements [sql.Scanner] interface to scan the provided value
- * into the current JsonArray[T] instance.
- */
- scan(value: any): void
- }
- /**
- * JsonMap defines a map that is safe for json and db read/write.
- */
- interface JsonMap extends _TygojaDict{}
- interface JsonMap {
- /**
- * MarshalJSON implements the [json.Marshaler] interface.
- */
- marshalJSON(): string|Array
- }
- interface JsonMap {
- /**
- * Get retrieves a single value from the current JsonMap.
- *
- * This helper was added primarily to assist the goja integration since custom map types
- * don't have direct access to the map keys (https://pkg.go.dev/github.com/dop251/goja#hdr-Maps_with_methods).
- */
- get(key: string): any
- }
- interface JsonMap {
- /**
- * Set sets a single value in the current JsonMap.
- *
- * This helper was added primarily to assist the goja integration since custom map types
- * don't have direct access to the map keys (https://pkg.go.dev/github.com/dop251/goja#hdr-Maps_with_methods).
- */
- set(key: string, value: any): void
- }
- interface JsonMap {
- /**
- * Value implements the [driver.Valuer] interface.
- */
- value(): any
- }
- interface JsonMap {
- /**
- * Scan implements [sql.Scanner] interface to scan the provided value
- * into the current `JsonMap` instance.
- */
- scan(value: any): void
- }
-}
-
-/**
- * Package schema implements custom Schema and SchemaField datatypes
- * for handling the Collection schema definitions.
- */
-namespace schema {
- // @ts-ignore
- import validation = ozzo_validation
- /**
- * Schema defines a dynamic db schema as a slice of `SchemaField`s.
- */
- interface Schema {
- }
- interface Schema {
- /**
- * Fields returns the registered schema fields.
- */
- fields(): Array<(SchemaField | undefined)>
- }
- interface Schema {
- /**
- * InitFieldsOptions calls `InitOptions()` for all schema fields.
- */
- initFieldsOptions(): void
- }
- interface Schema {
- /**
- * Clone creates a deep clone of the current schema.
- */
- clone(): (Schema)
- }
- interface Schema {
- /**
- * AsMap returns a map with all registered schema field.
- * The returned map is indexed with each field name.
- */
- asMap(): _TygojaDict
- }
- interface Schema {
- /**
- * GetFieldById returns a single field by its id.
- */
- getFieldById(id: string): (SchemaField)
- }
- interface Schema {
- /**
- * GetFieldByName returns a single field by its name.
- */
- getFieldByName(name: string): (SchemaField)
- }
- interface Schema {
- /**
- * RemoveField removes a single schema field by its id.
- *
- * This method does nothing if field with `id` doesn't exist.
- */
- removeField(id: string): void
- }
- interface Schema {
- /**
- * AddField registers the provided newField to the current schema.
- *
- * If field with `newField.Id` already exist, the existing field is
- * replaced with the new one.
- *
- * Otherwise the new field is appended to the other schema fields.
- */
- addField(newField: SchemaField): void
- }
- interface Schema {
- /**
- * Validate makes Schema validatable by implementing [validation.Validatable] interface.
- *
- * Internally calls each individual field's validator and additionally
- * checks for invalid renamed fields and field name duplications.
- */
- validate(): void
- }
- interface Schema {
- /**
- * MarshalJSON implements the [json.Marshaler] interface.
- */
- marshalJSON(): string|Array
- }
- interface Schema {
+ interface DateTime {
/**
* UnmarshalJSON implements the [json.Unmarshaler] interface.
- *
- * On success, all schema field options are auto initialized.
*/
- unmarshalJSON(data: string|Array): void
+ unmarshalJSON(b: string|Array): void
}
- interface Schema {
+ interface DateTime {
/**
* Value implements the [driver.Valuer] interface.
*/
value(): any
}
- interface Schema {
+ interface DateTime {
/**
* Scan implements [sql.Scanner] interface to scan the provided value
- * into the current Schema instance.
+ * into the current DateTime instance.
*/
scan(value: any): void
}
}
-/**
- * Package models implements all PocketBase DB models and DTOs.
- */
-namespace models {
- type _subxyKhr = BaseModel
- interface Admin extends _subxyKhr {
- avatar: number
- email: string
- tokenKey: string
- passwordHash: string
- lastResetSentAt: types.DateTime
- }
- interface Admin {
+namespace hook {
+ /**
+ * HandlerFunc defines a hook handler function.
+ */
+ interface HandlerFunc {(e: T): void }
+ /**
+ * Handler defines a single Hook handler.
+ * Multiple handlers can share the same id.
+ * If Id is not explicitly set it will be autogenerated by Hook.Add and Hook.AddHandler.
+ */
+ interface Handler {
/**
- * TableName returns the Admin model SQL table name.
- */
- tableName(): string
- }
- interface Admin {
- /**
- * ValidatePassword validates a plain password against the model's password.
- */
- validatePassword(password: string): boolean
- }
- interface Admin {
- /**
- * SetPassword sets cryptographically secure string to `model.Password`.
+ * Func defines the handler function to execute.
*
- * Additionally this method also resets the LastResetSentAt and the TokenKey fields.
+ * Note that users need to call e.Next() in order to proceed with
+ * the execution of the hook chain.
*/
- setPassword(password: string): void
- }
- interface Admin {
+ func: HandlerFunc
/**
- * RefreshTokenKey generates and sets new random token key.
+ * Id is the unique identifier of the handler.
+ *
+ * It could be used later to remove the handler from a hook via [Hook.Remove].
+ *
+ * If missing, an autogenerated value will be assigned when adding
+ * the handler to a hook.
*/
- refreshTokenKey(): void
+ id: string
+ /**
+ * Priority allows changing the default exec priority of the handler
+	 * within a hook.
+ *
+ * If 0, the handler will be executed in the same order it was registered.
+ */
+ priority: number
+ }
+}
+
+namespace router {
+ // @ts-ignore
+ import validation = ozzo_validation
+ /**
+ * ApiError defines the struct for a basic api error response.
+ */
+ interface ApiError {
+ data: _TygojaDict
+ message: string
+ status: number
+ }
+ interface ApiError {
+ /**
+ * Error makes it compatible with the `error` interface.
+ */
+ error(): string
+ }
+ interface ApiError {
+ /**
+ * RawData returns the unformatted error data (could be an internal error, text, etc.)
+ */
+ rawData(): any
+ }
+ interface ApiError {
+ /**
+ * Is reports whether the current ApiError wraps the target.
+ */
+ is(target: Error): boolean
+ }
+ /**
+ * Router defines a thin wrapper around the standard Go [http.ServeMux] by
+ * adding support for routing sub-groups, middlewares and other common utils.
+ *
+ * Example:
+ *
+ * ```
+ * r := NewRouter[*MyEvent](eventFactory)
+ *
+ * // middlewares
+ * r.BindFunc(m1, m2)
+ *
+ * // routes
+ * r.GET("/test", handler1)
+ *
+ * // sub-routers/groups
+ * api := r.Group("/api")
+ * api.GET("/admins", handler2)
+ *
+ * // generate a http.ServeMux instance based on the router configurations
+ * mux, _ := r.BuildMux()
+ *
+ * http.ListenAndServe("localhost:8090", mux)
+ * ```
+ */
+ type _subjGLEK = RouterGroup
+ interface Router extends _subjGLEK {
+ }
+ interface Router {
+ /**
+ * BuildMux constructs a new mux [http.Handler] instance from the current router configurations.
+ */
+ buildMux(): http.Handler
+ }
+}
+
+/**
+ * Package core is the backbone of PocketBase.
+ *
+ * It defines the main PocketBase App interface and its base implementation.
+ */
+namespace core {
+ /**
+ * App defines the main PocketBase app interface.
+ *
+ * Note that the interface is not intended to be implemented manually by users
+ * and instead they should use core.BaseApp (either directly or as embedded field in a custom struct).
+ *
+ * This interface exists to make testing easier and to allow users to
+ * create common and pluggable helpers and methods that doesn't rely
+ * on a specific wrapped app struct (hence the large interface size).
+ */
+ interface App {
+ [key:string]: any;
+ /**
+ * UnsafeWithoutHooks returns a shallow copy of the current app WITHOUT any registered hooks.
+ *
+ * NB! Note that using the returned app instance may cause data integrity errors
+ * since the Record validations and data normalizations (including files uploads)
+ * rely on the app hooks to work.
+ */
+ unsafeWithoutHooks(): App
+ /**
+ * Logger returns the default app logger.
+ *
+ * If the application is not bootstrapped yet, fallbacks to slog.Default().
+ */
+ logger(): (slog.Logger)
+ /**
+ * IsBootstrapped checks if the application was initialized
+ * (aka. whether Bootstrap() was called).
+ */
+ isBootstrapped(): boolean
+ /**
+ * IsTransactional checks if the current app instance is part of a transaction.
+ */
+ isTransactional(): boolean
+ /**
+ * Bootstrap initializes the application
+ * (aka. create data dir, open db connections, load settings, etc.).
+ *
+ * It will call ResetBootstrapState() if the application was already bootstrapped.
+ */
+ bootstrap(): void
+ /**
+ * ResetBootstrapState releases the initialized core app resources
+ * (closing db connections, stopping cron ticker, etc.).
+ */
+ resetBootstrapState(): void
+ /**
+ * DataDir returns the app data directory path.
+ */
+ dataDir(): string
+ /**
+ * EncryptionEnv returns the name of the app secret env key
+ * (currently used primarily for optional settings encryption but this may change in the future).
+ */
+ encryptionEnv(): string
+ /**
+ * IsDev returns whether the app is in dev mode.
+ *
+ * When enabled logs, executed sql statements, etc. are printed to the stderr.
+ */
+ isDev(): boolean
+ /**
+ * Settings returns the loaded app settings.
+ */
+ settings(): (Settings)
+ /**
+ * Store returns the app runtime store.
+ */
+ store(): (store.Store)
+ /**
+ * Cron returns the app cron instance.
+ */
+ cron(): (cron.Cron)
+ /**
+ * SubscriptionsBroker returns the app realtime subscriptions broker instance.
+ */
+ subscriptionsBroker(): (subscriptions.Broker)
+ /**
+ * NewMailClient creates and returns a new SMTP or Sendmail client
+ * based on the current app settings.
+ */
+ newMailClient(): mailer.Mailer
+ /**
+ * NewFilesystem creates a new local or S3 filesystem instance
+ * for managing regular app files (ex. record uploads)
+ * based on the current app settings.
+ *
+ * NB! Make sure to call Close() on the returned result
+ * after you are done working with it.
+ */
+ newFilesystem(): (filesystem.System)
+ /**
+	 * NewBackupsFilesystem creates a new local or S3 filesystem instance
+ * for managing app backups based on the current app settings.
+ *
+ * NB! Make sure to call Close() on the returned result
+ * after you are done working with it.
+ */
+ newBackupsFilesystem(): (filesystem.System)
+ /**
+ * ReloadSettings reinitializes and reloads the stored application settings.
+ */
+ reloadSettings(): void
+ /**
+ * CreateBackup creates a new backup of the current app pb_data directory.
+ *
+ * Backups can be stored on S3 if it is configured in app.Settings().Backups.
+ *
+ * Please refer to the godoc of the specific CoreApp implementation
+ * for details on the backup procedures.
+ */
+ createBackup(ctx: context.Context, name: string): void
+ /**
+ * RestoreBackup restores the backup with the specified name and restarts
+ * the current running application process.
+ *
+	 * To safely perform the restore it is recommended to have free disk space
+ * for at least 2x the size of the restored pb_data backup.
+ *
+ * Please refer to the godoc of the specific CoreApp implementation
+ * for details on the restore procedures.
+ *
+ * NB! This feature is experimental and currently is expected to work only on UNIX based systems.
+ */
+ restoreBackup(ctx: context.Context, name: string): void
+ /**
+ * Restart restarts (aka. replaces) the current running application process.
+ *
+ * NB! It relies on execve which is supported only on UNIX based systems.
+ */
+ restart(): void
+ /**
+ * RunSystemMigrations applies all new migrations registered in the [core.SystemMigrations] list.
+ */
+ runSystemMigrations(): void
+ /**
+ * RunAppMigrations applies all new migrations registered in the [CoreAppMigrations] list.
+ */
+ runAppMigrations(): void
+ /**
+ * RunAllMigrations applies all system and app migrations
+ * (aka. from both [core.SystemMigrations] and [CoreAppMigrations]).
+ */
+ runAllMigrations(): void
+ /**
+ * DB returns the default app data db instance (pb_data/data.db).
+ */
+ db(): dbx.Builder
+ /**
+ * NonconcurrentDB returns the nonconcurrent app data db instance (pb_data/data.db).
+ *
+ * The returned db instance is limited only to a single open connection,
+ * meaning that it can process only 1 db operation at a time (other operations will be queued up).
+ *
+ * This method is used mainly internally and in the tests to execute write
+ * (save/delete) db operations as it helps with minimizing the SQLITE_BUSY errors.
+ *
+ * For the majority of cases you would want to use the regular DB() method
+ * since it allows concurrent db read operations.
+ *
+	 * In a transaction the DB() and NonconcurrentDB() refer to the same *dbx.TX instance.
+ */
+ nonconcurrentDB(): dbx.Builder
+ /**
+ * AuxDB returns the default app auxiliary db instance (pb_data/aux.db).
+ */
+ auxDB(): dbx.Builder
+ /**
+	 * AuxNonconcurrentDB returns the nonconcurrent app auxiliary db instance (pb_data/aux.db).
+ *
+ * The returned db instance is limited only to a single open connection,
+ * meaning that it can process only 1 db operation at a time (other operations will be queued up).
+ *
+ * This method is used mainly internally and in the tests to execute write
+ * (save/delete) db operations as it helps with minimizing the SQLITE_BUSY errors.
+ *
+ * For the majority of cases you would want to use the regular DB() method
+ * since it allows concurrent db read operations.
+ *
+	 * In a transaction the AuxDB() and AuxNonconcurrentDB() refer to the same *dbx.TX instance.
+ */
+ auxNonconcurrentDB(): dbx.Builder
+ /**
+ * HasTable checks if a table (or view) with the provided name exists (case insensitive).
+ */
+ hasTable(tableName: string): boolean
+ /**
+ * TableColumns returns all column names of a single table by its name.
+ */
+ tableColumns(tableName: string): Array
+ /**
+ * TableInfo returns the "table_info" pragma result for the specified table.
+ */
+ tableInfo(tableName: string): Array<(TableInfoRow | undefined)>
+ /**
+ * TableIndexes returns a name grouped map with all non empty index of the specified table.
+ *
+ * Note: This method doesn't return an error on nonexisting table.
+ */
+ tableIndexes(tableName: string): _TygojaDict
+ /**
+ * DeleteTable drops the specified table.
+ *
+ * This method is a no-op if a table with the provided name doesn't exist.
+ *
+ * NB! Be aware that this method is vulnerable to SQL injection and the
+ * "tableName" argument must come only from trusted input!
+ */
+ deleteTable(tableName: string): void
+ /**
+ * DeleteView drops the specified view name.
+ *
+ * This method is a no-op if a view with the provided name doesn't exist.
+ *
+ * NB! Be aware that this method is vulnerable to SQL injection and the
+ * "name" argument must come only from trusted input!
+ */
+ deleteView(name: string): void
+ /**
+ * SaveView creates (or updates already existing) persistent SQL view.
+ *
+ * NB! Be aware that this method is vulnerable to SQL injection and the
+ * "selectQuery" argument must come only from trusted input!
+ */
+ saveView(name: string, selectQuery: string): void
+ /**
+ * CreateViewFields creates a new FieldsList from the provided select query.
+ *
+ * There are some caveats:
+ * - The select query must have an "id" column.
+ * - Wildcard ("*") columns are not supported to avoid accidentally leaking sensitive data.
+ */
+ createViewFields(selectQuery: string): FieldsList
+ /**
+ * FindRecordByViewFile returns the original Record of the provided view collection file.
+ */
+ findRecordByViewFile(viewCollectionModelOrIdentifier: any, fileFieldName: string, filename: string): (Record)
+ /**
+ * Vacuum executes VACUUM on the current app.DB() instance
+ * in order to reclaim unused data db disk space.
+ */
+ vacuum(): void
+ /**
+ * AuxVacuum executes VACUUM on the current app.AuxDB() instance
+ * in order to reclaim unused auxiliary db disk space.
+ */
+ auxVacuum(): void
+ /**
+ * ModelQuery creates a new preconfigured select app.DB() query with preset
+ * SELECT, FROM and other common fields based on the provided model.
+ */
+ modelQuery(model: Model): (dbx.SelectQuery)
+ /**
+ * AuxModelQuery creates a new preconfigured select app.AuxDB() query with preset
+ * SELECT, FROM and other common fields based on the provided model.
+ */
+ auxModelQuery(model: Model): (dbx.SelectQuery)
+ /**
+ * Delete deletes the specified model from the regular app database.
+ */
+ delete(model: Model): void
+ /**
+ * Delete deletes the specified model from the regular app database
+ * (the context could be used to limit the query execution).
+ */
+ deleteWithContext(ctx: context.Context, model: Model): void
+ /**
+ * AuxDelete deletes the specified model from the auxiliary database.
+ */
+ auxDelete(model: Model): void
+ /**
+ * AuxDeleteWithContext deletes the specified model from the auxiliary database
+ * (the context could be used to limit the query execution).
+ */
+ auxDeleteWithContext(ctx: context.Context, model: Model): void
+ /**
+ * Save validates and saves the specified model into the regular app database.
+ *
+ * If you don't want to run validations, use [App.SaveNoValidate()].
+ */
+ save(model: Model): void
+ /**
+ * SaveWithContext is the same as [App.Save()] but allows specifying a context to limit the db execution.
+ *
+ * If you don't want to run validations, use [App.SaveNoValidateWithContext()].
+ */
+ saveWithContext(ctx: context.Context, model: Model): void
+ /**
+ * SaveNoValidate saves the specified model into the regular app database without performing validations.
+ *
+ * If you want to also run validations before persisting, use [App.Save()].
+ */
+ saveNoValidate(model: Model): void
+ /**
+ * SaveNoValidateWithContext is the same as [App.SaveNoValidate()]
+ * but allows specifying a context to limit the db execution.
+ *
+ * If you want to also run validations before persisting, use [App.SaveWithContext()].
+ */
+ saveNoValidateWithContext(ctx: context.Context, model: Model): void
+ /**
+ * AuxSave validates and saves the specified model into the auxiliary app database.
+ *
+ * If you don't want to run validations, use [App.AuxSaveNoValidate()].
+ */
+ auxSave(model: Model): void
+ /**
+ * AuxSaveWithContext is the same as [App.AuxSave()] but allows specifying a context to limit the db execution.
+ *
+ * If you don't want to run validations, use [App.AuxSaveNoValidateWithContext()].
+ */
+ auxSaveWithContext(ctx: context.Context, model: Model): void
+ /**
+ * AuxSaveNoValidate saves the specified model into the auxiliary app database without performing validations.
+ *
+ * If you want to also run validations before persisting, use [App.AuxSave()].
+ */
+ auxSaveNoValidate(model: Model): void
+ /**
+ * AuxSaveNoValidateWithContext is the same as [App.AuxSaveNoValidate()]
+ * but allows specifying a context to limit the db execution.
+ *
+ * If you want to also run validations before persisting, use [App.AuxSaveWithContext()].
+ */
+ auxSaveNoValidateWithContext(ctx: context.Context, model: Model): void
+ /**
+ * Validate triggers the OnModelValidate hook for the specified model.
+ */
+ validate(model: Model): void
+ /**
+ * ValidateWithContext is the same as Validate but allows specifying the ModelEvent context.
+ */
+ validateWithContext(ctx: context.Context, model: Model): void
+ /**
+ * RunInTransaction wraps fn into a transaction for the regular app database.
+ *
+ * It is safe to nest RunInTransaction calls as long as you use the callback's txApp.
+ */
+ runInTransaction(fn: (txApp: App) => void): void
+ /**
+ * AuxRunInTransaction wraps fn into a transaction for the auxiliary app database.
+ *
+ * It is safe to nest RunInTransaction calls as long as you use the callback's txApp.
+ */
+ auxRunInTransaction(fn: (txApp: App) => void): void
+ /**
+ * LogQuery returns a new Log select query.
+ */
+ logQuery(): (dbx.SelectQuery)
+ /**
+ * FindLogById finds a single Log entry by its id.
+ */
+ findLogById(id: string): (Log)
+ /**
+	 * LogsStats returns the total number of logs grouped per time period, matching the provided expression.
+ */
+ logsStats(expr: dbx.Expression): Array<(LogsStatsItem | undefined)>
+ /**
+	 * DeleteOldLogs deletes all logs that are created before createdBefore.
+ */
+ deleteOldLogs(createdBefore: time.Time): void
+ /**
+ * CollectionQuery returns a new Collection select query.
+ */
+ collectionQuery(): (dbx.SelectQuery)
+ /**
+ * FindCollections finds all collections by the given type(s).
+ *
+ * If collectionTypes is not set, it returns all collections.
+ *
+ * Example:
+ *
+ * ```
+ * app.FindAllCollections() // all collections
+ * app.FindAllCollections("auth", "view") // only auth and view collections
+ * ```
+ */
+ findAllCollections(...collectionTypes: string[]): Array<(Collection | undefined)>
+ /**
+ * ReloadCachedCollections fetches all collections and caches them into the app store.
+ */
+ reloadCachedCollections(): void
+ /**
+	 * FindCollectionByNameOrId finds a single collection by its name (case insensitive) or id.
+ */
+ findCollectionByNameOrId(nameOrId: string): (Collection)
+ /**
+ * FindCachedCollectionByNameOrId is similar to [App.FindCollectionByNameOrId]
+ * but retrieves the Collection from the app cache instead of making a db call.
+ *
+ * NB! This method is suitable for read-only Collection operations.
+ *
+ * Returns [sql.ErrNoRows] if no Collection is found for consistency
+ * with the [App.FindCollectionByNameOrId] method.
+ *
+ * If you plan making changes to the returned Collection model,
+ * use [App.FindCollectionByNameOrId] instead.
+ *
+ * Caveats:
+ *
+ * ```
+ * - The returned Collection should be used only for read-only operations.
+ * Avoid directly modifying the returned cached Collection as it will affect
+ * the global cached value even if you don't persist the changes in the database!
+ * - If you are updating a Collection in a transaction and then call this method before commit,
+	 *   it'll return the cached Collection state and not the one from the uncommitted transaction.
+ * - The cache is automatically updated on collections db change (create/update/delete).
+ * To manually reload the cache you can call [App.ReloadCachedCollections()]
+ * ```
+ */
+ findCachedCollectionByNameOrId(nameOrId: string): (Collection)
+ /**
+ * IsCollectionNameUnique checks that there is no existing collection
+ * with the provided name (case insensitive!).
+ *
+ * Note: case insensitive check because the name is used also as
+ * table name for the records.
+ */
+ isCollectionNameUnique(name: string, ...excludeIds: string[]): boolean
+ /**
+ * FindCollectionReferences returns information for all relation
+ * fields referencing the provided collection.
+ *
+ * If the provided collection has reference to itself then it will be
+ * also included in the result. To exclude it, pass the collection id
+ * as the excludeIds argument.
+ */
+ findCollectionReferences(collection: Collection, ...excludeIds: string[]): _TygojaDict
+ /**
+ * TruncateCollection deletes all records associated with the provided collection.
+ *
+ * The truncate operation is executed in a single transaction,
+ * aka. either everything is deleted or none.
+ *
+ * Note that this method will also trigger the records related
+ * cascade and file delete actions.
+ */
+ truncateCollection(collection: Collection): void
+ /**
+ * ImportCollections imports the provided collections data in a single transaction.
+ *
+ * For existing matching collections, the imported data is unmarshaled on top of the existing model.
+ *
+ * NB! If deleteMissing is true, ALL NON-SYSTEM COLLECTIONS AND SCHEMA FIELDS,
+ * that are not present in the imported configuration, WILL BE DELETED
+ * (this includes their related records data).
+ */
+ importCollections(toImport: Array<_TygojaDict>, deleteMissing: boolean): void
+ /**
+ * ImportCollectionsByMarshaledJSON is the same as [ImportCollections]
+ * but accept marshaled json array as import data (usually used for the autogenerated snapshots).
+ */
+ importCollectionsByMarshaledJSON(rawSliceOfMaps: string|Array, deleteMissing: boolean): void
+ /**
+ * SyncRecordTableSchema compares the two provided collections
+ * and applies the necessary related record table changes.
+ *
+ * If oldCollection is null, then only newCollection is used to create the record table.
+ *
+ * This method is automatically invoked as part of a collection create/update/delete operation.
+ */
+ syncRecordTableSchema(newCollection: Collection, oldCollection: Collection): void
+ /**
+ * FindAllExternalAuthsByRecord returns all ExternalAuth models
+ * linked to the provided auth record.
+ */
+ findAllExternalAuthsByRecord(authRecord: Record): Array<(ExternalAuth | undefined)>
+ /**
+ * FindAllExternalAuthsByCollection returns all ExternalAuth models
+ * linked to the provided auth collection.
+ */
+ findAllExternalAuthsByCollection(collection: Collection): Array<(ExternalAuth | undefined)>
+ /**
+ * FindFirstExternalAuthByExpr returns the first available (the most recent created)
+ * ExternalAuth model that satisfies the non-nil expression.
+ */
+ findFirstExternalAuthByExpr(expr: dbx.Expression): (ExternalAuth)
+ /**
+ * FindAllMFAsByRecord returns all MFA models linked to the provided auth record.
+ */
+ findAllMFAsByRecord(authRecord: Record): Array<(MFA | undefined)>
+ /**
+ * FindAllMFAsByCollection returns all MFA models linked to the provided collection.
+ */
+ findAllMFAsByCollection(collection: Collection): Array<(MFA | undefined)>
+ /**
+	 * FindMFAById returns a single MFA model by its id.
+ */
+ findMFAById(id: string): (MFA)
+ /**
+ * DeleteAllMFAsByRecord deletes all MFA models associated with the provided record.
+ *
+ * Returns a combined error with the failed deletes.
+ */
+ deleteAllMFAsByRecord(authRecord: Record): void
+ /**
+ * DeleteExpiredMFAs deletes the expired MFAs for all auth collections.
+ */
+ deleteExpiredMFAs(): void
+ /**
+ * FindAllOTPsByRecord returns all OTP models linked to the provided auth record.
+ */
+ findAllOTPsByRecord(authRecord: Record): Array<(OTP | undefined)>
+ /**
+ * FindAllOTPsByCollection returns all OTP models linked to the provided collection.
+ */
+ findAllOTPsByCollection(collection: Collection): Array<(OTP | undefined)>
+ /**
+	 * FindOTPById returns a single OTP model by its id.
+ */
+ findOTPById(id: string): (OTP)
+ /**
+ * DeleteAllOTPsByRecord deletes all OTP models associated with the provided record.
+ *
+ * Returns a combined error with the failed deletes.
+ */
+ deleteAllOTPsByRecord(authRecord: Record): void
+ /**
+ * DeleteExpiredOTPs deletes the expired OTPs for all auth collections.
+ */
+ deleteExpiredOTPs(): void
+ /**
+ * FindAllAuthOriginsByRecord returns all AuthOrigin models linked to the provided auth record (in DESC order).
+ */
+ findAllAuthOriginsByRecord(authRecord: Record): Array<(AuthOrigin | undefined)>
+ /**
+ * FindAllAuthOriginsByCollection returns all AuthOrigin models linked to the provided collection (in DESC order).
+ */
+ findAllAuthOriginsByCollection(collection: Collection): Array<(AuthOrigin | undefined)>
+ /**
+ * FindAuthOriginById returns a single AuthOrigin model by its id.
+ */
+ findAuthOriginById(id: string): (AuthOrigin)
+ /**
+ * FindAuthOriginByRecordAndFingerprint returns a single AuthOrigin model
+ * by its authRecord relation and fingerprint.
+ */
+ findAuthOriginByRecordAndFingerprint(authRecord: Record, fingerprint: string): (AuthOrigin)
+ /**
+ * DeleteAllAuthOriginsByRecord deletes all AuthOrigin models associated with the provided record.
+ *
+ * Returns a combined error with the failed deletes.
+ */
+ deleteAllAuthOriginsByRecord(authRecord: Record): void
+ /**
+ * RecordQuery returns a new Record select query from a collection model, id or name.
+ *
+ * In case a collection id or name is provided and that collection doesn't
+ * actually exists, the generated query will be created with a cancelled context
+ * and will fail once an executor (Row(), One(), All(), etc.) is called.
+ */
+ recordQuery(collectionModelOrIdentifier: any): (dbx.SelectQuery)
+ /**
+ * FindRecordById finds the Record model by its id.
+ */
+ findRecordById(collectionModelOrIdentifier: any, recordId: string, ...optFilters: ((q: dbx.SelectQuery) => void)[]): (Record)
+ /**
+ * FindRecordsByIds finds all records by the specified ids.
+ * If no records are found, returns an empty slice.
+ */
+ findRecordsByIds(collectionModelOrIdentifier: any, recordIds: Array, ...optFilters: ((q: dbx.SelectQuery) => void)[]): Array<(Record | undefined)>
+ /**
+ * FindAllRecords finds all records matching specified db expressions.
+ *
+ * Returns all collection records if no expression is provided.
+ *
+ * Returns an empty slice if no records are found.
+ *
+ * Example:
+ *
+ * ```
+ * // no extra expressions
+ * app.FindAllRecords("example")
+ *
+ * // with extra expressions
+ * expr1 := dbx.HashExp{"email": "test@example.com"}
+ * expr2 := dbx.NewExp("LOWER(username) = {:username}", dbx.Params{"username": "test"})
+ * app.FindAllRecords("example", expr1, expr2)
+ * ```
+ */
+ findAllRecords(collectionModelOrIdentifier: any, ...exprs: dbx.Expression[]): Array<(Record | undefined)>
+ /**
+ * FindFirstRecordByData returns the first found record matching
+ * the provided key-value pair.
+ */
+ findFirstRecordByData(collectionModelOrIdentifier: any, key: string, value: any): (Record)
+ /**
+ * FindRecordsByFilter returns limit number of records matching the
+ * provided string filter.
+ *
+ * NB! Use the last "params" argument to bind untrusted user variables!
+ *
+ * The filter argument is optional and can be empty string to target
+ * all available records.
+ *
+ * The sort argument is optional and can be empty string OR the same format
+ * used in the web APIs, ex. "-created,title".
+ *
+ * If the limit argument is <= 0, no limit is applied to the query and
+ * all matching records are returned.
+ *
+ * Returns an empty slice if no records are found.
+ *
+ * Example:
+ *
+ * ```
+ * app.FindRecordsByFilter(
+ * "posts",
+ * "title ~ {:title} && visible = {:visible}",
+ * "-created",
+ * 10,
+ * 0,
+ * dbx.Params{"title": "lorem ipsum", "visible": true}
+ * )
+ * ```
+ */
+ findRecordsByFilter(collectionModelOrIdentifier: any, filter: string, sort: string, limit: number, offset: number, ...params: dbx.Params[]): Array<(Record | undefined)>
+ /**
+ * FindFirstRecordByFilter returns the first available record matching the provided filter (if any).
+ *
+ * NB! Use the last params argument to bind untrusted user variables!
+ *
+ * Returns sql.ErrNoRows if no record is found.
+ *
+ * Example:
+ *
+ * ```
+ * app.FindFirstRecordByFilter("posts", "")
+ * app.FindFirstRecordByFilter("posts", "slug={:slug} && status='public'", dbx.Params{"slug": "test"})
+ * ```
+ */
+ findFirstRecordByFilter(collectionModelOrIdentifier: any, filter: string, ...params: dbx.Params[]): (Record)
+ /**
+ * CountRecords returns the total number of records in a collection.
+ */
+ countRecords(collectionModelOrIdentifier: any, ...exprs: dbx.Expression[]): number
+ /**
+ * FindAuthRecordByToken finds the auth record associated with the provided JWT
+ * (auth, file, verifyEmail, changeEmail, passwordReset types).
+ *
+ * Optionally specify a list of validTypes to check tokens only from those types.
+ *
+ * Returns an error if the JWT is invalid, expired or not associated to an auth collection record.
+ */
+ findAuthRecordByToken(token: string, ...validTypes: string[]): (Record)
+ /**
+ * FindAuthRecordByEmail finds the auth record associated with the provided email.
+ *
+ * Returns an error if it is not an auth collection or the record is not found.
+ */
+ findAuthRecordByEmail(collectionModelOrIdentifier: any, email: string): (Record)
+ /**
+ * CanAccessRecord checks if a record is allowed to be accessed by the
+ * specified requestInfo and accessRule.
+ *
+ * Rule and db checks are ignored in case requestInfo.AuthRecord is a superuser.
+ *
+ * The returned error indicate that something unexpected happened during
+ * the check (eg. invalid rule or db query error).
+ *
+ * The method always return false on invalid rule or db query error.
+ *
+ * Example:
+ *
+ * ```
+ * requestInfo, _ := e.RequestInfo()
+ * record, _ := app.FindRecordById("example", "RECORD_ID")
+ * rule := types.Pointer("@request.auth.id != '' || status = 'public'")
+ * // ... or use one of the record collection's rule, eg. record.Collection().ViewRule
+ *
+ * if ok, _ := app.CanAccessRecord(record, requestInfo, rule); ok { ... }
+ * ```
+ */
+ canAccessRecord(record: Record, requestInfo: RequestInfo, accessRule: string): boolean
+ /**
+ * ExpandRecord expands the relations of a single Record model.
+ *
+ * If optFetchFunc is not set, then a default function will be used
+ * that returns all relation records.
+ *
+ * Returns a map with the failed expand parameters and their errors.
+ */
+ expandRecord(record: Record, expands: Array, optFetchFunc: ExpandFetchFunc): _TygojaDict
+ /**
+ * ExpandRecords expands the relations of the provided Record models list.
+ *
+ * If optFetchFunc is not set, then a default function will be used
+ * that returns all relation records.
+ *
+ * Returns a map with the failed expand parameters and their errors.
+ */
+ expandRecords(records: Array<(Record | undefined)>, expands: Array, optFetchFunc: ExpandFetchFunc): _TygojaDict
+ /**
+ * OnBootstrap hook is triggered on initializing the main application
+ * resources (db, app settings, etc).
+ */
+ onBootstrap(): (hook.Hook)
+ /**
+ * OnServe hook is triggered on when the app web server is started
+ * (after starting the tcp listener but before initializing the blocking serve task),
+ * allowing you to adjust its options and attach new routes or middlewares.
+ */
+ onServe(): (hook.Hook)
+ /**
+ * OnTerminate hook is triggered when the app is in the process
+ * of being terminated (ex. on SIGTERM signal).
+ */
+ onTerminate(): (hook.Hook)
+ /**
+ * OnBackupCreate hook is triggered on each [App.CreateBackup] call.
+ */
+ onBackupCreate(): (hook.Hook)
+ /**
+ * OnBackupRestore hook is triggered before app backup restore (aka. [App.RestoreBackup] call).
+ *
+ * Note that by default on success the application is restarted and the after state of the hook is ignored.
+ */
+ onBackupRestore(): (hook.Hook)
+ /**
+ * OnModelValidate is triggered every time when a model is being validated
+ * (e.g. triggered by App.Validate() or App.Save()).
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelValidate(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelCreate is triggered every time when a new model is being created
+ * (e.g. triggered by App.Save()).
+ *
+ * Operations BEFORE the e.Next() execute before the model validation
+ * and the INSERT DB statement.
+ *
+ * Operations AFTER the e.Next() execute after the model validation
+ * and the INSERT DB statement.
+ *
+	 * Note that successful execution doesn't guarantee that the model
+ * is persisted in the database since its wrapping transaction may
+ * not have been committed yet.
+	 * If you want to listen to only the actual persisted events, you can
+ * bind to [OnModelAfterCreateSuccess] or [OnModelAfterCreateError] hooks.
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelCreate(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelCreateExecute is triggered after successful Model validation
+ * and right before the model INSERT DB statement execution.
+ *
+ * Usually it is triggered as part of the App.Save() in the following firing order:
+ * OnModelCreate {
+ * ```
+ * -> OnModelValidate (skipped with App.SaveNoValidate())
+ * -> OnModelCreateExecute
+ * ```
+ * }
+ *
+ * Note that successful execution doesn't guarantee that the model
+ * is persisted in the database since its wrapping transaction may not
+ * have been committed yet.
+ * If you want to listen to only the actual persisted events,
+ * you can bind to [OnModelAfterCreateSuccess] or [OnModelAfterCreateError] hooks.
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelCreateExecute(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelAfterCreateSuccess is triggered after each successful
+ * Model DB create persistence.
+ *
+ * Note that when a Model is persisted as part of a transaction,
+ * this hook is triggered AFTER the transaction has been committed.
+ * This hook is NOT triggered in case the transaction rollbacks
+ * (aka. when the model wasn't persisted).
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelAfterCreateSuccess(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelAfterCreateError is triggered after each failed
+ * Model DB create persistence.
+ * Note that when a Model is persisted as part of a transaction,
+ * this hook is triggered in one of the following cases:
+ * ```
+ * - immediately after App.Save() failure
+ * - on transaction rollback
+ * ```
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelAfterCreateError(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelUpdate is triggered every time when a model is being updated
+ * (e.g. triggered by App.Save()).
+ *
+ * Operations BEFORE the e.Next() execute before the model validation
+ * and the UPDATE DB statement.
+ *
+ * Operations AFTER the e.Next() execute after the model validation
+ * and the UPDATE DB statement.
+ *
+ * Note that successful execution doesn't guarantee that the model
+ * is persisted in the database since its wrapping transaction may
+ * not have been committed yet.
+ * If you want to listen to only the actual persisted events, you can
+ * bind to [OnModelAfterUpdateSuccess] or [OnModelAfterUpdateError] hooks.
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelUpdate(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelUpdateExecute is triggered after successful Model validation
+ * and right before the model UPDATE DB statement execution.
+ *
+ * Usually it is triggered as part of the App.Save() in the following firing order:
+ * OnModelUpdate {
+ * ```
+ * -> OnModelValidate (skipped with App.SaveNoValidate())
+ * -> OnModelUpdateExecute
+ * ```
+ * }
+ *
+ * Note that successful execution doesn't guarantee that the model
+ * is persisted in the database since its wrapping transaction may not
+ * have been committed yet.
+ * If you want to listen to only the actual persisted events,
+ * you can bind to [OnModelAfterUpdateSuccess] or [OnModelAfterUpdateError] hooks.
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelUpdateExecute(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelAfterUpdateSuccess is triggered after each successful
+ * Model DB update persistence.
+ *
+ * Note that when a Model is persisted as part of a transaction,
+ * this hook is triggered AFTER the transaction has been committed.
+ * This hook is NOT triggered in case the transaction rollbacks
+ * (aka. when the model changes weren't persisted).
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelAfterUpdateSuccess(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelAfterUpdateError is triggered after each failed
+ * Model DB update persistence.
+ *
+ * Note that when a Model is persisted as part of a transaction,
+ * this hook is triggered in one of the following cases:
+ * ```
+ * - immediately after App.Save() failure
+ * - on transaction rollback
+ * ```
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelAfterUpdateError(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelDelete is triggered every time when a model is being deleted
+ * (e.g. triggered by App.Delete()).
+ *
+ * Note that successful execution doesn't guarantee that the model
+ * is deleted from the database since its wrapping transaction may
+ * not have been committed yet.
+ * If you want to listen to only the actual persisted deleted events, you can
+ * bind to [OnModelAfterDeleteSuccess] or [OnModelAfterDeleteError] hooks.
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelDelete(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelDeleteExecute is triggered right before the model
+ * DELETE DB statement execution.
+ *
+ * Usually it is triggered as part of the App.Delete() in the following firing order:
+ * OnModelDelete {
+ * ```
+ * -> (internal delete checks)
+ * -> OnModelDeleteExecute
+ * ```
+ * }
+ *
+ * Note that successful execution doesn't guarantee that the model
+ * is deleted from the database since its wrapping transaction may
+ * not have been committed yet.
+ * If you want to listen to only the actual persisted deleted events, you can
+ * bind to [OnModelAfterDeleteSuccess] or [OnModelAfterDeleteError] hooks.
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelDeleteExecute(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelAfterDeleteSuccess is triggered after each successful
+ * Model DB delete persistence.
+ *
+ * Note that when a Model is deleted as part of a transaction,
+ * this hook is triggered AFTER the transaction has been committed.
+ * This hook is NOT triggered in case the transaction rollbacks
+ * (aka. when the model delete wasn't persisted).
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelAfterDeleteSuccess(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnModelAfterDeleteError is triggered after each failed
+ * Model DB delete persistence.
+ *
+ * Note that when a Model is deleted as part of a transaction,
+ * this hook is triggered in one of the following cases:
+ * ```
+ * - immediately after App.Delete() failure
+ * - on transaction rollback
+ * ```
+ *
+ * For convenience, if you want to listen to only the Record models
+ * events without doing manual type assertion, you can attach to the OnRecord* proxy hooks.
+ *
+ * If the optional "tags" list (Collection id/name, Model table name, etc.) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onModelAfterDeleteError(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordEnrich is triggered every time when a record is enriched
+ * (during realtime message serialization, as part of the builtin Record
+ * responses, or when [apis.EnrichRecord] is invoked).
+ *
+ * It could be used for example to redact/hide or add computed temp
+ * Record model props only for the specific request info. For example:
+ *
+ * app.OnRecordEnrich("posts").BindFunc(func(e *core.RecordEnrichEvent) {
+ * ```
+ * // hide one or more fields
+ * e.Record.Hide("role")
+ *
+ * // add new custom field for registered users
+ * if e.RequestInfo.Auth != nil && e.RequestInfo.Auth.Collection().Name == "users" {
+ * e.Record.WithCustomData(true) // for security requires explicitly allowing it
+ * e.Record.Set("computedScore", e.Record.GetInt("score") * e.RequestInfo.Auth.GetInt("baseScore"))
+ * }
+ *
+ * return e.Next()
+ * ```
+ * })
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordEnrich(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordValidate is a proxy Record model hook for [OnModelValidate].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordValidate(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordCreate is a proxy Record model hook for [OnModelCreate].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordCreate(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordCreateExecute is a proxy Record model hook for [OnModelCreateExecute].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordCreateExecute(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordAfterCreateSuccess is a proxy Record model hook for [OnModelAfterCreateSuccess].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterCreateSuccess(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordAfterCreateError is a proxy Record model hook for [OnModelAfterCreateError].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterCreateError(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordUpdate is a proxy Record model hook for [OnModelUpdate].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordUpdate(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordUpdateExecute is a proxy Record model hook for [OnModelUpdateExecute].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordUpdateExecute(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordAfterUpdateSuccess is a proxy Record model hook for [OnModelAfterUpdateSuccess].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterUpdateSuccess(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordAfterUpdateError is a proxy Record model hook for [OnModelAfterUpdateError].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterUpdateError(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordDelete is a proxy Record model hook for [OnModelDelete].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordDelete(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordDeleteExecute is a proxy Record model hook for [OnModelDeleteExecute].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordDeleteExecute(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordAfterDeleteSuccess is a proxy Record model hook for [OnModelAfterDeleteSuccess].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterDeleteSuccess(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordAfterDeleteError is a proxy Record model hook for [OnModelAfterDeleteError].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterDeleteError(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionValidate is a proxy Collection model hook for [OnModelValidate].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionValidate(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionCreate is a proxy Collection model hook for [OnModelCreate].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionCreate(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionCreateExecute is a proxy Collection model hook for [OnModelCreateExecute].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionCreateExecute(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionAfterCreateSuccess is a proxy Collection model hook for [OnModelAfterCreateSuccess].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionAfterCreateSuccess(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionAfterCreateError is a proxy Collection model hook for [OnModelAfterCreateError].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionAfterCreateError(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionUpdate is a proxy Collection model hook for [OnModelUpdate].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionUpdate(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionUpdateExecute is a proxy Collection model hook for [OnModelUpdateExecute].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionUpdateExecute(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionAfterUpdateSuccess is a proxy Collection model hook for [OnModelAfterUpdateSuccess].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionAfterUpdateSuccess(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionAfterUpdateError is a proxy Collection model hook for [OnModelAfterUpdateError].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionAfterUpdateError(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionDelete is a proxy Collection model hook for [OnModelDelete].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionDelete(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionDeleteExecute is a proxy Collection model hook for [OnModelDeleteExecute].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionDeleteExecute(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionAfterDeleteSuccess is a proxy Collection model hook for [OnModelAfterDeleteSuccess].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionAfterDeleteSuccess(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionAfterDeleteError is a proxy Collection model hook for [OnModelAfterDeleteError].
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onCollectionAfterDeleteError(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnMailerSend hook is triggered every time when a new email is
+ * being sent using the App.NewMailClient() instance.
+ *
+ * It allows intercepting the email message or to use a custom mailer client.
+ */
+ onMailerSend(): (hook.Hook)
+ /**
+ * OnMailerRecordAuthAlertSend hook is triggered when
+ * sending a new device login auth alert email, allowing you to
+ * intercept and customize the email message that is being sent.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerRecordAuthAlertSend(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnMailerRecordPasswordResetSend hook is triggered when
+ * sending a password reset email to an auth record, allowing
+ * you to intercept and customize the email message that is being sent.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerRecordPasswordResetSend(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnMailerRecordVerificationSend hook is triggered when
+ * sending a verification email to an auth record, allowing
+ * you to intercept and customize the email message that is being sent.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerRecordVerificationSend(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnMailerRecordEmailChangeSend hook is triggered when sending a
+ * confirmation new address email to an auth record, allowing
+ * you to intercept and customize the email message that is being sent.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerRecordEmailChangeSend(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnMailerRecordOTPSend hook is triggered when sending an OTP email
+ * to an auth record, allowing you to intercept and customize the
+ * email message that is being sent.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerRecordOTPSend(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRealtimeConnectRequest hook is triggered when establishing the SSE client connection.
+ *
+ * Any execution after [e.Next()] of a hook handler happens after the client disconnects.
+ */
+ onRealtimeConnectRequest(): (hook.Hook)
+ /**
+ * OnRealtimeMessageSend hook is triggered when sending an SSE message to a client.
+ */
+ onRealtimeMessageSend(): (hook.Hook)
+ /**
+ * OnRealtimeSubscribeRequest hook is triggered when updating the
+ * client subscriptions, allowing you to further validate and
+ * modify the submitted change.
+ */
+ onRealtimeSubscribeRequest(): (hook.Hook)
+ /**
+ * OnSettingsListRequest hook is triggered on each API Settings list request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ */
+ onSettingsListRequest(): (hook.Hook)
+ /**
+ * OnSettingsUpdateRequest hook is triggered on each API Settings update request.
+ *
+ * Could be used to additionally validate the request data or
+ * implement completely different persistence behavior.
+ */
+ onSettingsUpdateRequest(): (hook.Hook)
+ /**
+ * OnSettingsReload hook is triggered every time when the App.Settings()
+ * is being replaced with a new state.
+ *
+ * Calling App.Settings() after e.Next() should return the new state.
+ */
+ onSettingsReload(): (hook.Hook)
+ /**
+ * OnFileDownloadRequest hook is triggered before each API File download request.
+ *
+ * Could be used to validate or modify the file response before
+ * returning it to the client.
+ */
+ onFileDownloadRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnFileTokenRequest hook is triggered on each file token API request.
+ */
+ onFileTokenRequest(): (hook.Hook)
+ /**
+ * OnRecordAuthRequest hook is triggered on each successful API
+ * record authentication request (sign-in, token refresh, etc.).
+ *
+ * Could be used to additionally validate or modify the authenticated
+ * record data and token.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAuthRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordAuthWithPasswordRequest hook is triggered on each
+ * Record auth with password API request.
+ *
+ * RecordAuthWithPasswordRequestEvent.Record could be nil if no
+ * matching identity is found, allowing you to manually locate a different
+ * Record model (by reassigning [RecordAuthWithPasswordRequestEvent.Record]).
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAuthWithPasswordRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordAuthWithOAuth2Request hook is triggered on each Record
+ * OAuth2 sign-in/sign-up API request (after token exchange and before external provider linking).
+ *
+ * If the [RecordAuthWithOAuth2RequestEvent.Record] is not set, then the OAuth2
+ * request will try to create a new auth Record.
+ *
+ * To assign or link a different existing record model you can
+ * change the [RecordAuthWithOAuth2RequestEvent.Record] field.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAuthWithOAuth2Request(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordAuthRefreshRequest hook is triggered on each Record
+ * auth refresh API request (right before generating a new auth token).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different auth refresh behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAuthRefreshRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordRequestPasswordResetRequest hook is triggered on
+ * each Record request password reset API request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different password reset behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordRequestPasswordResetRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordConfirmPasswordResetRequest hook is triggered on
+ * each Record confirm password reset API request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordConfirmPasswordResetRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordRequestVerificationRequest hook is triggered on
+ * each Record request verification API request.
+ *
+ * Could be used to additionally validate the loaded request data or implement
+ * completely different verification behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordRequestVerificationRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordConfirmVerificationRequest hook is triggered on each
+ * Record confirm verification API request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordConfirmVerificationRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordRequestEmailChangeRequest hook is triggered on each
+ * Record request email change API request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different request email change behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordRequestEmailChangeRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordConfirmEmailChangeRequest hook is triggered on each
+ * Record confirm email change API request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordConfirmEmailChangeRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordRequestOTPRequest hook is triggered on each Record
+ * request OTP API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordRequestOTPRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordAuthWithOTPRequest hook is triggered on each Record
+ * auth with OTP API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAuthWithOTPRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordsListRequest hook is triggered on each API Records list request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordsListRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordViewRequest hook is triggered on each API Record view request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordViewRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordCreateRequest hook is triggered on each API Record create request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordCreateRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordUpdateRequest hook is triggered on each API Record update request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordUpdateRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnRecordDeleteRequest hook is triggered on each API Record delete request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different delete behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordDeleteRequest(...tags: string[]): (hook.TaggedHook)
+ /**
+ * OnCollectionsListRequest hook is triggered on each API Collections list request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ */
+ onCollectionsListRequest(): (hook.Hook)
+ /**
+ * OnCollectionViewRequest hook is triggered on each API Collection view request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ */
+ onCollectionViewRequest(): (hook.Hook)
+ /**
+ * OnCollectionCreateRequest hook is triggered on each API Collection create request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ */
+ onCollectionCreateRequest(): (hook.Hook)
+ /**
+ * OnCollectionUpdateRequest hook is triggered on each API Collection update request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ */
+ onCollectionUpdateRequest(): (hook.Hook)
+ /**
+ * OnCollectionDeleteRequest hook is triggered on each API Collection delete request.
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different delete behavior.
+ */
+ onCollectionDeleteRequest(): (hook.Hook)
+ /**
+ * OnCollectionsImportRequest hook is triggered on each API
+ * collections import request.
+ *
+ * Could be used to additionally validate the imported collections or
+ * to implement completely different import behavior.
+ */
+ onCollectionsImportRequest(): (hook.Hook)
+ /**
+ * OnBatchRequest hook is triggered on each API batch request.
+ *
+ * Could be used to additionally validate or modify the submitted batch requests.
+ */
+ onBatchRequest(): (hook.Hook)
}
// @ts-ignore
import validation = ozzo_validation
- type _subUyFbk = BaseModel
- interface Collection extends _subUyFbk {
- name: string
- type: string
- system: boolean
- schema: schema.Schema
- indexes: types.JsonArray
+ /**
+ * DBConnectFunc defines a database connection initialization function.
+ */
+ interface DBConnectFunc {(dbPath: string): (dbx.DB) }
+ /**
+ * RequestEvent defines the PocketBase router handler event.
+ */
+ type _subwWwMU = router.Event
+ interface RequestEvent extends _subwWwMU {
+ app: App
+ auth?: Record
+ }
+ interface RequestEvent {
/**
- * rules
+ * RealIP returns the "real" IP address from the configured trusted proxy headers.
+ *
+ * If Settings.TrustedProxy is not configured or the found IP is empty,
+ * it falls back to e.RemoteIP().
+ *
+ * NB!
+ * Be careful when used in a security critical context as it relies on
+ * the trusted proxy to be properly configured and your app to be accessible only through it.
+ * If you are not sure, use e.RemoteIP().
*/
- listRule?: string
- viewRule?: string
- createRule?: string
- updateRule?: string
- deleteRule?: string
- options: types.JsonMap
+ realIP(): string
}
- interface Collection {
+ interface RequestEvent {
/**
- * TableName returns the Collection model SQL table name.
+ * HasSuperuserAuth checks whether the current RequestEvent has superuser authentication loaded.
*/
- tableName(): string
+ hasSuperuserAuth(): boolean
}
- interface Collection {
+ interface RequestEvent {
/**
- * BaseFilesPath returns the storage dir path used by the collection.
+ * RequestInfo parses the current request into RequestInfo instance.
+ *
+ * Note that the returned result is cached to avoid copying the request data multiple times
+ * but the auth state and other common store items are always refreshed in case they were changed by another handler.
*/
- baseFilesPath(): string
+ requestInfo(): (RequestInfo)
}
- interface Collection {
+ interface InternalRequest {
/**
- * IsBase checks if the current collection has "base" type.
+ * note: for uploading files the value must be either *filesystem.File or []*filesystem.File
*/
- isBase(): boolean
+ body: _TygojaDict
+ headers: _TygojaDict
+ method: string
+ url: string
}
- interface Collection {
- /**
- * IsAuth checks if the current collection has "auth" type.
- */
- isAuth(): boolean
+ interface InternalRequest {
+ validate(): void
}
- interface Collection {
- /**
- * IsView checks if the current collection has "view" type.
- */
- isView(): boolean
- }
- interface Collection {
- /**
- * MarshalJSON implements the [json.Marshaler] interface.
- */
- marshalJSON(): string|Array
- }
- interface Collection {
- /**
- * BaseOptions decodes the current collection options and returns them
- * as new [CollectionBaseOptions] instance.
- */
- baseOptions(): CollectionBaseOptions
- }
- interface Collection {
- /**
- * AuthOptions decodes the current collection options and returns them
- * as new [CollectionAuthOptions] instance.
- */
- authOptions(): CollectionAuthOptions
- }
- interface Collection {
- /**
- * ViewOptions decodes the current collection options and returns them
- * as new [CollectionViewOptions] instance.
- */
- viewOptions(): CollectionViewOptions
- }
- interface Collection {
- /**
- * NormalizeOptions updates the current collection options with a
- * new normalized state based on the collection type.
- */
- normalizeOptions(): void
- }
- interface Collection {
- /**
- * DecodeOptions decodes the current collection options into the
- * provided "result" (must be a pointer).
- */
- decodeOptions(result: any): void
- }
- interface Collection {
- /**
- * SetOptions normalizes and unmarshals the specified options into m.Options.
- */
- setOptions(typedOptions: any): void
- }
- type _subeDKbD = BaseModel
- interface ExternalAuth extends _subeDKbD {
- collectionId: string
- recordId: string
- provider: string
- providerId: string
- }
- interface ExternalAuth {
- tableName(): string
- }
- type _subkdarp = BaseModel
- interface Record extends _subkdarp {
+ type _subxDBbH = BaseModel
+ interface Record extends _subxDBbH {
}
interface Record {
/**
- * TableName returns the table name associated to the current Record model.
- */
- tableName(): string
- }
- interface Record {
- /**
- * Collection returns the Collection model associated to the current Record model.
+ * Collection returns the Collection model associated with the current Record model.
+ *
+ * NB! The returned collection is only for read purposes and it shouldn't be modified
+ * because it could have unintended side-effects on other Record models from the same collection.
*/
collection(): (Collection)
}
interface Record {
/**
- * OriginalCopy returns a copy of the current record model populated
- * with its ORIGINAL data state (aka. the initially loaded) and
- * everything else reset to the defaults.
+ * TableName returns the table name associated with the current Record model.
*/
- originalCopy(): (Record)
+ tableName(): string
}
interface Record {
/**
- * CleanCopy returns a copy of the current record model populated only
- * with its LATEST data state and everything else reset to the defaults.
+ * PostScan implements the [dbx.PostScanner] interface.
+ *
+ * It essentially refreshes/updates the current Record original state
+ * as if the model was fetched from the databases for the first time.
+ *
+ * Or in other words, it means that m.Original().FieldsData() will have
+ * the same values as m.Record().FieldsData().
*/
- cleanCopy(): (Record)
+ postScan(): void
}
interface Record {
/**
- * Expand returns a shallow copy of the current Record model expand data.
+ * HookTags returns the hook tags associated with the current record.
+ */
+ hookTags(): Array
+ }
+ interface Record {
+ /**
+ * BaseFilesPath returns the storage dir path used by the record.
+ */
+ baseFilesPath(): string
+ }
+ interface Record {
+ /**
+ * Original returns a shallow copy of the current record model populated
+ * with its ORIGINAL db data state (aka. right after PostScan())
+ * and everything else reset to the defaults.
+ *
+ * If record was created using NewRecord() the original will be always
+ * a blank record (until PostScan() is invoked).
+ */
+ original(): (Record)
+ }
+ interface Record {
+ /**
+ * Fresh returns a shallow copy of the current record model populated
+ * with its LATEST data state and everything else reset to the defaults
+ * (aka. no expand, no unknown fields and with default visibility flags).
+ */
+ fresh(): (Record)
+ }
+ interface Record {
+ /**
+ * Clone returns a shallow copy of the current record model with all of
+ * its collection and unknown fields data, expand and flags copied.
+ *
+ * use [Record.Fresh()] instead if you want a copy with only the latest
+ * collection fields data and everything else reset to the defaults.
+ */
+ clone(): (Record)
+ }
+ interface Record {
+ /**
+ * Expand returns a shallow copy of the current Record model expand data (if any).
*/
expand(): _TygojaDict
}
interface Record {
/**
- * SetExpand shallow copies the provided data to the current Record model's expand.
+ * SetExpand replaces the current Record's expand with the provided expand arg data (shallow copied).
*/
setExpand(expand: _TygojaDict): void
}
@@ -10820,46 +12267,88 @@ namespace models {
}
interface Record {
/**
- * SchemaData returns a shallow copy ONLY of the defined record schema fields data.
+ * FieldsData returns a shallow copy ONLY of the collection's fields record's data.
*/
- schemaData(): _TygojaDict
+ fieldsData(): _TygojaDict
}
interface Record {
/**
- * UnknownData returns a shallow copy ONLY of the unknown record fields data,
- * aka. fields that are neither one of the base and special system ones,
- * nor defined by the collection schema.
+ * CustomData returns a shallow copy ONLY of the custom record fields data,
+ * aka. fields that are neither defined by the collection, nor special system ones.
+ *
+ * Note that custom fields prefixed with "@pbInternal" are always skipped.
*/
- unknownData(): _TygojaDict
+ customData(): _TygojaDict
+ }
+ interface Record {
+ /**
+ * WithCustomData toggles the export/serialization of custom data fields
+ * (false by default).
+ */
+ withCustomData(state: boolean): (Record)
}
interface Record {
/**
* IgnoreEmailVisibility toggles the flag to ignore the auth record email visibility check.
*/
- ignoreEmailVisibility(state: boolean): void
+ ignoreEmailVisibility(state: boolean): (Record)
}
interface Record {
/**
- * WithUnknownData toggles the export/serialization of unknown data fields
- * (false by default).
+ * IgnoreUnchangedFields toggles the flag to ignore the unchanged fields
+ * from the DB export for the UPDATE SQL query.
+ *
+ * This could be used if you want to save only the record fields that you've changed
+ * without overwriting other untouched fields in case of a concurrent update.
*/
- withUnknownData(state: boolean): void
+ ignoreUnchangedFields(state: boolean): (Record)
}
interface Record {
/**
- * Set sets the provided key-value data pair for the current Record model.
+ * SetRaw sets the provided key-value data pair into the current Record
+ * model directly as it is WITHOUT NORMALIZATIONS.
+ *
+ * See also [Record.Set].
+ */
+ setRaw(key: string, value: any): void
+ }
+ interface Record {
+ /**
+ * SetIfFieldExists sets the provided key-value data pair into the current Record model
+ * ONLY if key is existing Collection field name/modifier.
+ *
+ * This method does nothing if key is not a known Collection field name/modifier.
+ *
+ * On success returns the matched Field, otherwise - nil.
+ *
+ * To set any key-value, including custom/unknown fields, use the [Record.Set] method.
+ */
+ setIfFieldExists(key: string, value: any): Field
+ }
+ interface Record {
+ /**
+ * Set sets the provided key-value data pair into the current Record model.
*
* If the record collection has field with name matching the provided "key",
- * the value will be further normalized according to the field rules.
+ * the value will be further normalized according to the field setter(s).
*/
set(key: string, value: any): void
}
+ interface Record {
+ getRaw(key: string): any
+ }
interface Record {
/**
* Get returns a normalized single record model data value for "key".
*/
get(key: string): any
}
+ interface Record {
+ /**
+ * Load bulk loads the provided data into the current Record model.
+ */
+ load(data: _TygojaDict): void
+ }
interface Record {
/**
* GetBool returns the data value for "key" as a bool.
@@ -10884,12 +12373,6 @@ namespace models {
*/
getFloat(key: string): number
}
- interface Record {
- /**
- * GetTime returns the data value for "key" as a [time.Time] instance.
- */
- getTime(key: string): time.Time
- }
interface Record {
/**
* GetDateTime returns the data value for "key" as a DateTime instance.
@@ -10898,10 +12381,43 @@ namespace models {
}
interface Record {
/**
- * GetStringSlice returns the data value for "key" as a slice of unique strings.
+ * GetStringSlice returns the data value for "key" as a slice of non-zero unique strings.
*/
getStringSlice(key: string): Array
}
+ interface Record {
+ /**
+ * GetUploadedFiles returns the uploaded files for the provided "file" field key,
+ * (aka. the current [*filesystem.File] values) so that you can apply further
+ * validations or modifications (including changing the file name or content before persisting).
+ *
+ * Example:
+ *
+ * ```
+ * files := record.GetUploadedFiles("documents")
+ * for _, f := range files {
+ * f.Name = "doc_" + f.Name // add a prefix to each file name
+ * }
+ * app.Save(record) // the files are pointers so the applied changes will transparently reflect on the record value
+ * ```
+ */
+ getUploadedFiles(key: string): Array<(filesystem.File | undefined)>
+ }
+ interface Record {
+ /**
+ * Retrieves the "key" json field value and unmarshals it into "result".
+ *
+ * Example
+ *
+ * ```
+ * result := struct {
+ * FirstName string `json:"first_name"`
+ * }{}
+ * err := m.UnmarshalJSONField("my_field_name", &result)
+ * ```
+ */
+ unmarshalJSONField(key: string, result: any): void
+ }
interface Record {
/**
* ExpandedOne retrieves a single relation Record from the already
@@ -10926,52 +12442,41 @@ namespace models {
*/
expandedAll(relField: string): Array<(Record | undefined)>
}
- interface Record {
- /**
- * Retrieves the "key" json field value and unmarshals it into "result".
- *
- * Example
- *
- * ```
- * result := struct {
- * FirstName string `json:"first_name"`
- * }{}
- * err := m.UnmarshalJSONField("my_field_name", &result)
- * ```
- */
- unmarshalJSONField(key: string, result: any): void
- }
- interface Record {
- /**
- * BaseFilesPath returns the storage dir path used by the record.
- */
- baseFilesPath(): string
- }
interface Record {
/**
* FindFileFieldByFile returns the first file type field for which
* any of the record's data contains the provided filename.
*/
- findFileFieldByFile(filename: string): (schema.SchemaField)
+ findFileFieldByFile(filename: string): (FileField)
}
interface Record {
/**
- * Load bulk loads the provided data into the current Record model.
+ * DBExport implements the [DBExporter] interface and returns a key-value
+ * map with the data to be persisted when saving the Record in the database.
*/
- load(data: _TygojaDict): void
+ dbExport(app: App): _TygojaDict
}
interface Record {
/**
- * ColumnValueMap implements [ColumnValueMapper] interface.
+ * Hide hides the specified fields from the public safe serialization of the record.
*/
- columnValueMap(): _TygojaDict
+ hide(...fieldNames: string[]): (Record)
+ }
+ interface Record {
+ /**
+ * Unhide forces the specified fields to be included in the public safe serialization
+ * of the record (even when the collection field itself is marked as hidden).
+ */
+ unhide(...fieldNames: string[]): (Record)
}
interface Record {
/**
* PublicExport exports only the record fields that are safe to be public.
*
+ * To export unknown data fields you need to set record.WithCustomData(true).
+ *
* For auth records, to force the export of the email field you need to set
- * `m.IgnoreEmailVisibility(true)`.
+ * record.IgnoreEmailVisibility(true).
*/
publicExport(): _TygojaDict
}
@@ -10991,753 +12496,143 @@ namespace models {
}
interface Record {
/**
- * ReplaceModifers returns a new map with applied modifier
+ * ReplaceModifiers returns a new map with applied modifier
* values based on the current record and the specified data.
*
* The resolved modifier keys will be removed.
*
* Multiple modifiers will be applied one after another,
- * while reusing the previous base key value result (eg. 1; -5; +2 => -2).
+ * while reusing the previous base key value result (ex. 1; -5; +2 => -2).
+ *
+ * Note that because Go doesn't guarantee the iteration order of maps,
+ * shorter keys are explicitly applied first for a more consistent and reproducible behavior.
*
* Example usage:
*
* ```
- * newData := record.ReplaceModifers(data)
- * // record: {"field": 10}
- * // data: {"field+": 5}
- * // newData: {"field": 15}
+ * newData := record.ReplaceModifiers(data)
+ * // record: {"field": 10}
+ * // data: {"field+": 5}
+ * // result: {"field": 15}
* ```
*/
- replaceModifers(data: _TygojaDict): _TygojaDict
+ replaceModifiers(data: _TygojaDict): _TygojaDict
}
interface Record {
/**
- * Username returns the "username" auth record data value.
- */
- username(): string
- }
- interface Record {
- /**
- * SetUsername sets the "username" auth record data value.
- *
- * This method doesn't check whether the provided value is a valid username.
- *
- * Returns an error if the record is not from an auth collection.
- */
- setUsername(username: string): void
- }
- interface Record {
- /**
- * Email returns the "email" auth record data value.
+ * Email returns the "email" record field value (usually available with Auth collections).
*/
email(): string
}
interface Record {
/**
- * SetEmail sets the "email" auth record data value.
- *
- * This method doesn't check whether the provided value is a valid email.
- *
- * Returns an error if the record is not from an auth collection.
+ * SetEmail sets the "email" record field value (usually available with Auth collections).
*/
setEmail(email: string): void
}
interface Record {
/**
- * Verified returns the "emailVisibility" auth record data value.
+ * EmailVisibility returns the "emailVisibility" record field value (usually available with Auth collections).
*/
emailVisibility(): boolean
}
interface Record {
/**
- * SetEmailVisibility sets the "emailVisibility" auth record data value.
- *
- * Returns an error if the record is not from an auth collection.
+ * SetEmailVisibility sets the "emailVisibility" record field value (usually available with Auth collections).
*/
setEmailVisibility(visible: boolean): void
}
interface Record {
/**
- * Verified returns the "verified" auth record data value.
+ * Verified returns the "verified" record field value (usually available with Auth collections).
*/
verified(): boolean
}
interface Record {
/**
- * SetVerified sets the "verified" auth record data value.
- *
- * Returns an error if the record is not from an auth collection.
+ * SetVerified sets the "verified" record field value (usually available with Auth collections).
*/
setVerified(verified: boolean): void
}
interface Record {
/**
- * TokenKey returns the "tokenKey" auth record data value.
+ * TokenKey returns the "tokenKey" record field value (usually available with Auth collections).
*/
tokenKey(): string
}
interface Record {
/**
- * SetTokenKey sets the "tokenKey" auth record data value.
- *
- * Returns an error if the record is not from an auth collection.
+ * SetTokenKey sets the "tokenKey" record field value (usually available with Auth collections).
*/
setTokenKey(key: string): void
}
interface Record {
/**
- * RefreshTokenKey generates and sets new random auth record "tokenKey".
- *
- * Returns an error if the record is not from an auth collection.
+ * RefreshTokenKey generates and sets a new random auth record "tokenKey".
*/
refreshTokenKey(): void
}
interface Record {
/**
- * LastResetSentAt returns the "lastResentSentAt" auth record data value.
+ * SetPassword sets the "password" record field value (usually available with Auth collections).
*/
- lastResetSentAt(): types.DateTime
+ setPassword(password: string): void
}
interface Record {
/**
- * SetLastResetSentAt sets the "lastResentSentAt" auth record data value.
+ * ValidatePassword validates a plain password against the "password" record field.
*
- * Returns an error if the record is not from an auth collection.
- */
- setLastResetSentAt(dateTime: types.DateTime): void
- }
- interface Record {
- /**
- * LastVerificationSentAt returns the "lastVerificationSentAt" auth record data value.
- */
- lastVerificationSentAt(): types.DateTime
- }
- interface Record {
- /**
- * SetLastVerificationSentAt sets an "lastVerificationSentAt" auth record data value.
- *
- * Returns an error if the record is not from an auth collection.
- */
- setLastVerificationSentAt(dateTime: types.DateTime): void
- }
- interface Record {
- /**
- * PasswordHash returns the "passwordHash" auth record data value.
- */
- passwordHash(): string
- }
- interface Record {
- /**
- * ValidatePassword validates a plain password against the auth record password.
- *
- * Returns false if the password is incorrect or record is not from an auth collection.
+ * Returns false if the password is incorrect.
*/
validatePassword(password: string): boolean
}
interface Record {
/**
- * SetPassword sets cryptographically secure string to the auth record "password" field.
- * This method also resets the "lastResetSentAt" and the "tokenKey" fields.
+ * IsSuperuser returns whether the current record is a superuser, aka.
+ * whether the record is from the _superusers collection.
+ */
+ isSuperuser(): boolean
+ }
+ interface Record {
+ /**
+ * NewStaticAuthToken generates and returns a new static record authentication token.
*
- * Returns an error if the record is not from an auth collection or
- * an empty password is provided.
- */
- setPassword(password: string): void
- }
- /**
- * RequestInfo defines a HTTP request data struct, usually used
- * as part of the `@request.*` filter resolver.
- */
- interface RequestInfo {
- context: string
- query: _TygojaDict
- data: _TygojaDict
- headers: _TygojaDict
- authRecord?: Record
- admin?: Admin
- method: string
- }
- interface RequestInfo {
- /**
- * HasModifierDataKeys loosely checks if the current struct has any modifier Data keys.
- */
- hasModifierDataKeys(): boolean
- }
-}
-
-/**
- * Package echo implements high performance, minimalist Go web framework.
- *
- * Example:
- *
- * ```
- * package main
- *
- * import (
- * "github.com/labstack/echo/v5"
- * "github.com/labstack/echo/v5/middleware"
- * "log"
- * "net/http"
- * )
- *
- * // Handler
- * func hello(c echo.Context) error {
- * return c.String(http.StatusOK, "Hello, World!")
- * }
- *
- * func main() {
- * // Echo instance
- * e := echo.New()
- *
- * // Middleware
- * e.Use(middleware.Logger())
- * e.Use(middleware.Recover())
- *
- * // Routes
- * e.GET("/", hello)
- *
- * // Start server
- * if err := e.Start(":8080"); err != http.ErrServerClosed {
- * log.Fatal(err)
- * }
- * }
- * ```
- *
- * Learn more at https://echo.labstack.com
- */
-namespace echo {
- /**
- * Context represents the context of the current HTTP request. It holds request and
- * response objects, path, path parameters, data and registered handler.
- */
- interface Context {
- [key:string]: any;
- /**
- * Request returns `*http.Request`.
- */
- request(): (http.Request)
- /**
- * SetRequest sets `*http.Request`.
- */
- setRequest(r: http.Request): void
- /**
- * SetResponse sets `*Response`.
- */
- setResponse(r: Response): void
- /**
- * Response returns `*Response`.
- */
- response(): (Response)
- /**
- * IsTLS returns true if HTTP connection is TLS otherwise false.
- */
- isTLS(): boolean
- /**
- * IsWebSocket returns true if HTTP connection is WebSocket otherwise false.
- */
- isWebSocket(): boolean
- /**
- * Scheme returns the HTTP protocol scheme, `http` or `https`.
- */
- scheme(): string
- /**
- * RealIP returns the client's network address based on `X-Forwarded-For`
- * or `X-Real-IP` request header.
- * The behavior can be configured using `Echo#IPExtractor`.
- */
- realIP(): string
- /**
- * RouteInfo returns current request route information. Method, Path, Name and params if they exist for matched route.
- * In case of 404 (route not found) and 405 (method not allowed) RouteInfo returns generic struct for these cases.
- */
- routeInfo(): RouteInfo
- /**
- * Path returns the registered path for the handler.
- */
- path(): string
- /**
- * PathParam returns path parameter by name.
- */
- pathParam(name: string): string
- /**
- * PathParamDefault returns the path parameter or default value for the provided name.
+ * Static auth tokens are similar to the regular auth tokens, but are
+ * non-refreshable and support custom duration.
*
- * Notes for DefaultRouter implementation:
- * Path parameter could be empty for cases like that:
- * * route `/release-:version/bin` and request URL is `/release-/bin`
- * * route `/api/:version/image.jpg` and request URL is `/api//image.jpg`
- * but not when path parameter is last part of route path
- * * route `/download/file.:ext` will not match request `/download/file.`
+ * Zero or negative duration will fallback to the duration from the auth collection settings.
*/
- pathParamDefault(name: string, defaultValue: string): string
- /**
- * PathParams returns path parameter values.
- */
- pathParams(): PathParams
- /**
- * SetPathParams sets path parameters for current request.
- */
- setPathParams(params: PathParams): void
- /**
- * QueryParam returns the query param for the provided name.
- */
- queryParam(name: string): string
- /**
- * QueryParamDefault returns the query param or default value for the provided name.
- */
- queryParamDefault(name: string, defaultValue: string): string
- /**
- * QueryParams returns the query parameters as `url.Values`.
- */
- queryParams(): url.Values
- /**
- * QueryString returns the URL query string.
- */
- queryString(): string
- /**
- * FormValue returns the form field value for the provided name.
- */
- formValue(name: string): string
- /**
- * FormValueDefault returns the form field value or default value for the provided name.
- */
- formValueDefault(name: string, defaultValue: string): string
- /**
- * FormValues returns the form field values as `url.Values`.
- */
- formValues(): url.Values
- /**
- * FormFile returns the multipart form file for the provided name.
- */
- formFile(name: string): (multipart.FileHeader)
- /**
- * MultipartForm returns the multipart form.
- */
- multipartForm(): (multipart.Form)
- /**
- * Cookie returns the named cookie provided in the request.
- */
- cookie(name: string): (http.Cookie)
- /**
- * SetCookie adds a `Set-Cookie` header in HTTP response.
- */
- setCookie(cookie: http.Cookie): void
- /**
- * Cookies returns the HTTP cookies sent with the request.
- */
- cookies(): Array<(http.Cookie | undefined)>
- /**
- * Get retrieves data from the context.
- */
- get(key: string): {
+ newStaticAuthToken(duration: time.Duration): string
}
+ interface Record {
/**
- * Set saves data in the context.
+ * NewAuthToken generates and returns a new record authentication token.
*/
- set(key: string, val: {
- }): void
- /**
- * Bind binds path params, query params and the request body into provided type `i`. The default binder
- * binds body based on Content-Type header.
- */
- bind(i: {
- }): void
- /**
- * Validate validates provided `i`. It is usually called after `Context#Bind()`.
- * Validator must be registered using `Echo#Validator`.
- */
- validate(i: {
- }): void
- /**
- * Render renders a template with data and sends a text/html response with status
- * code. Renderer must be registered using `Echo.Renderer`.
- */
- render(code: number, name: string, data: {
- }): void
- /**
- * HTML sends an HTTP response with status code.
- */
- html(code: number, html: string): void
- /**
- * HTMLBlob sends an HTTP blob response with status code.
- */
- htmlBlob(code: number, b: string|Array