diff --git a/plugins/jsvm/hooks.go b/plugins/jsvm/hooks.go
index 71de62ca..18a3d814 100644
--- a/plugins/jsvm/hooks.go
+++ b/plugins/jsvm/hooks.go
@@ -1,8 +1,11 @@
package jsvm
import (
+ "os"
"path/filepath"
+ "regexp"
"runtime"
+ "strings"
"time"
"github.com/dop251/goja"
@@ -12,8 +15,16 @@ import (
"github.com/dop251/goja_nodejs/require"
"github.com/fatih/color"
"github.com/fsnotify/fsnotify"
- "github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase/core"
+ "github.com/pocketbase/pocketbase/plugins/jsvm/internal/docs/generated"
+)
+
+const (
+ hooksExtension = ".pb.js"
+
+ typesFileName = ".types.d.ts"
+
+ typesReferenceDirective = `/// <reference path="` + typesFileName + `" />`
)
// HooksConfig defines the config options of the JS app hooks plugin.
@@ -51,12 +62,21 @@ func RegisterHooks(app core.App, config HooksConfig) error {
}
// fetch all js hooks sorted by their filename
- files, err := filesContent(p.config.Dir, `^.*\.pb\.js$`)
+ files, err := filesContent(p.config.Dir, `^.*`+regexp.QuoteMeta(hooksExtension)+`$`)
if err != nil {
return err
}
- dbx.HashExp{}.Build(app.DB(), nil)
+ // prepend the types reference directive to empty files
+ for name, content := range files {
+ if len(content) != 0 {
+ continue
+ }
+ path := filepath.Join(p.config.Dir, name)
+ if err := prependToEmptyFile(path, typesReferenceDirective+"\n\n"); err != nil {
+ color.Yellow("Unable to prepend the types reference: %v", err)
+ }
+ }
registry := new(require.Registry) // this can be shared by multiple runtimes
@@ -82,7 +102,7 @@ func RegisterHooks(app core.App, config HooksConfig) error {
if p.config.Watch {
color.Red("Failed to execute %s: %v", file, err)
} else {
- // return err
+ panic(err)
}
}
}
@@ -90,6 +110,18 @@ func RegisterHooks(app core.App, config HooksConfig) error {
loop.Start()
+ app.OnAfterBootstrap().Add(func(e *core.BootstrapEvent) error {
+ // always update the app types on start to ensure that
+ // the user has the latest generated declarations
+ if len(files) > 0 {
+ if err := p.saveTypesFile(); err != nil {
+ color.Yellow("Unable to save app types file: %v", err)
+ }
+ }
+
+ return nil
+ })
+
app.OnTerminate().Add(func(e *core.TerminateEvent) error {
loop.StopNoWait()
@@ -108,45 +140,58 @@ type hooks struct {
config HooksConfig
}
-func (h *hooks) watchFiles() error {
+func (p *hooks) watchFiles() error {
watcher, err := fsnotify.NewWatcher()
if err != nil {
return err
}
- h.app.OnTerminate().Add(func(e *core.TerminateEvent) error {
+ var debounceTimer *time.Timer
+
+ stopDebounceTimer := func() {
+ if debounceTimer != nil {
+ debounceTimer.Stop()
+ }
+ }
+
+ p.app.OnTerminate().Add(func(e *core.TerminateEvent) error {
watcher.Close()
+ stopDebounceTimer()
+
return nil
})
- var debounceTimer *time.Timer
-
// start listening for events.
go func() {
for {
select {
case event, ok := <-watcher.Events:
if !ok {
+ stopDebounceTimer()
return
}
- if debounceTimer != nil {
- debounceTimer.Stop()
+ // skip TS declaration files change
+ if strings.HasSuffix(event.Name, ".d.ts") {
+ continue
}
- debounceTimer = time.AfterFunc(100*time.Millisecond, func() {
+
+ stopDebounceTimer()
+ debounceTimer = time.AfterFunc(50*time.Millisecond, func() {
// app restart is currently not supported on Windows
if runtime.GOOS == "windows" {
color.Yellow("File %s changed, please restart the app", event.Name)
} else {
color.Yellow("File %s changed, restarting...", event.Name)
- if err := h.app.Restart(); err != nil {
+ if err := p.app.Restart(); err != nil {
color.Red("Failed to restart the app:", err)
}
}
})
case err, ok := <-watcher.Errors:
if !ok {
+ stopDebounceTimer()
return
}
color.Red("Watch error:", err)
@@ -155,7 +200,7 @@ func (h *hooks) watchFiles() error {
}()
// add the directory to watch
- err = watcher.Add(h.config.Dir)
+ err = watcher.Add(p.config.Dir)
if err != nil {
watcher.Close()
return err
@@ -163,3 +208,26 @@ func (h *hooks) watchFiles() error {
return nil
}
+
+// saveTypesFile writes the embedded TypeScript declarations into the app hooks dir.
+func (p *hooks) saveTypesFile() error {
+ data, err := generated.Types.ReadFile("types.d.ts")
+ if err != nil {
+ return err
+ }
+
+ return os.WriteFile(filepath.Join(p.config.Dir, typesFileName), data, 0644)
+}
+
+// prependToEmptyFile prepends the specified text to an empty file.
+//
+// If the file is not empty this method does nothing.
+func prependToEmptyFile(path, text string) error {
+ info, err := os.Stat(path)
+
+ if err == nil && info.Size() == 0 {
+ return os.WriteFile(path, []byte(text), 0644)
+ }
+
+ return err
+}
diff --git a/plugins/jsvm/internal/docs/docs.go b/plugins/jsvm/internal/docs/docs.go
new file mode 100644
index 00000000..4dfbb149
--- /dev/null
+++ b/plugins/jsvm/internal/docs/docs.go
@@ -0,0 +1,294 @@
+package main
+
+import (
+ "log"
+ "os"
+ "reflect"
+
+ "github.com/pocketbase/pocketbase/plugins/jsvm"
+ "github.com/pocketbase/tygoja"
+)
+
+const heading = `
+// -------------------------------------------------------------------
+// baseBinds
+// -------------------------------------------------------------------
+
+declare var $app: core.App
+
+interface Record extends models.Record{} // merge
+declare class Record implements models.Record {
+ constructor(collection?: models.Collection, data?: { [key:string]: any })
+}
+
+interface Collection extends models.Collection{} // merge
+declare class Collection implements models.Collection {
+ constructor(data?: Partial<models.Collection>)
+}
+
+interface Admin extends models.Admin{} // merge
+declare class Admin implements models.Admin {
+ constructor(data?: Partial<models.Admin>)
+}
+
+interface Schema extends schema.Schema{} // merge
+declare class Schema implements schema.Schema {
+ constructor(data?: Partial<schema.Schema>)
+}
+
+interface SchemaField extends schema.SchemaField{} // merge
+declare class SchemaField implements schema.SchemaField {
+ constructor(data?: Partial<schema.SchemaField>)
+}
+
+interface Mail extends mailer.Message{} // merge
+declare class Mail implements mailer.Message {
+ constructor(message?: Partial<mailer.Message>)
+}
+
+interface ValidationError extends ozzo_validation.Error{} // merge
+declare class ValidationError implements ozzo_validation.Error {
+ constructor(code?: number, message?: string)
+}
+
+interface Dao extends daos.Dao{} // merge
+declare class Dao implements daos.Dao {
+ constructor(concurrentDB?: dbx.Builder, nonconcurrentDB?: dbx.Builder)
+}
+
+// -------------------------------------------------------------------
+// dbxBinds
+// -------------------------------------------------------------------
+
+declare namespace $dbx {
+ /**
+ * {@inheritDoc dbx.HashExp}
+ */
+ export function hashExp(pairs: { [key:string]: any }): dbx.Expression
+
+ let _in: dbx._in
+ export { _in as in }
+
+ export let exp: dbx.newExp
+ export let not: dbx.not
+ export let and: dbx.and
+ export let or: dbx.or
+ export let notIn: dbx.notIn
+ export let like: dbx.like
+ export let orLike: dbx.orLike
+ export let notLike: dbx.notLike
+ export let orNotLike: dbx.orNotLike
+ export let exists: dbx.exists
+ export let notExists: dbx.notExists
+ export let between: dbx.between
+ export let notBetween: dbx.notBetween
+}
+
+// -------------------------------------------------------------------
+// tokensBinds
+// -------------------------------------------------------------------
+
+declare namespace $tokens {
+ let adminAuthToken: tokens.newAdminAuthToken
+ let adminResetPasswordToken: tokens.newAdminResetPasswordToken
+ let adminFileToken: tokens.newAdminFileToken
+ let recordAuthToken: tokens.newRecordAuthToken
+ let recordVerifyToken: tokens.newRecordVerifyToken
+ let recordResetPasswordToken: tokens.newRecordResetPasswordToken
+ let recordChangeEmailToken: tokens.newRecordChangeEmailToken
+ let recordFileToken: tokens.newRecordFileToken
+}
+
+// -------------------------------------------------------------------
+// securityBinds
+// -------------------------------------------------------------------
+
+declare namespace $security {
+ let randomString: security.randomString
+ let randomStringWithAlphabet: security.randomStringWithAlphabet
+ let pseudorandomString: security.pseudorandomString
+ let pseudorandomStringWithAlphabet: security.pseudorandomStringWithAlphabet
+ let parseUnverifiedToken: security.parseUnverifiedJWT
+ let parseToken: security.parseJWT
+ let createToken: security.newToken
+}
+
+// -------------------------------------------------------------------
+// filesystemBinds
+// -------------------------------------------------------------------
+
+declare namespace $filesystem {
+ let fileFromPath: filesystem.newFileFromPath
+ let fileFromBytes: filesystem.newFileFromBytes
+ let fileFromMultipart: filesystem.newFileFromMultipart
+}
+
+// -------------------------------------------------------------------
+// formsBinds
+// -------------------------------------------------------------------
+
+interface AdminLoginForm extends forms.AdminLogin{} // merge
+declare class AdminLoginForm implements forms.AdminLogin {
+ constructor(app: core.App)
+}
+
+interface AdminPasswordResetConfirmForm extends forms.AdminPasswordResetConfirm{} // merge
+declare class AdminPasswordResetConfirmForm implements forms.AdminPasswordResetConfirm {
+ constructor(app: core.App)
+}
+
+interface AdminPasswordResetRequestForm extends forms.AdminPasswordResetRequest{} // merge
+declare class AdminPasswordResetRequestForm implements forms.AdminPasswordResetRequest {
+ constructor(app: core.App)
+}
+
+interface AdminUpsertForm extends forms.AdminUpsert{} // merge
+declare class AdminUpsertForm implements forms.AdminUpsert {
+ constructor(app: core.App, admin: models.Admin)
+}
+
+interface AppleClientSecretCreateForm extends forms.AppleClientSecretCreate{} // merge
+declare class AppleClientSecretCreateForm implements forms.AppleClientSecretCreate {
+ constructor(app: core.App)
+}
+
+interface CollectionUpsertForm extends forms.CollectionUpsert{} // merge
+declare class CollectionUpsertForm implements forms.CollectionUpsert {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface CollectionsImportForm extends forms.CollectionsImport{} // merge
+declare class CollectionsImportForm implements forms.CollectionsImport {
+ constructor(app: core.App)
+}
+
+interface RealtimeSubscribeForm extends forms.RealtimeSubscribe{} // merge
+declare class RealtimeSubscribeForm implements forms.RealtimeSubscribe {}
+
+interface RecordEmailChangeConfirmForm extends forms.RecordEmailChangeConfirm{} // merge
+declare class RecordEmailChangeConfirmForm implements forms.RecordEmailChangeConfirm {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface RecordEmailChangeRequestForm extends forms.RecordEmailChangeRequest{} // merge
+declare class RecordEmailChangeRequestForm implements forms.RecordEmailChangeRequest {
+ constructor(app: core.App, record: models.Record)
+}
+
+interface RecordOAuth2LoginForm extends forms.RecordOAuth2Login{} // merge
+declare class RecordOAuth2LoginForm implements forms.RecordOAuth2Login {
+ constructor(app: core.App, collection: models.Collection, optAuthRecord?: models.Record)
+}
+
+interface RecordPasswordLoginForm extends forms.RecordPasswordLogin{} // merge
+declare class RecordPasswordLoginForm implements forms.RecordPasswordLogin {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface RecordPasswordResetConfirmForm extends forms.RecordPasswordResetConfirm{} // merge
+declare class RecordPasswordResetConfirmForm implements forms.RecordPasswordResetConfirm {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface RecordPasswordResetRequestForm extends forms.RecordPasswordResetRequest{} // merge
+declare class RecordPasswordResetRequestForm implements forms.RecordPasswordResetRequest {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface RecordUpsertForm extends forms.RecordUpsert{} // merge
+declare class RecordUpsertForm implements forms.RecordUpsert {
+ constructor(app: core.App, record: models.Record)
+}
+
+interface RecordVerificationConfirmForm extends forms.RecordVerificationConfirm{} // merge
+declare class RecordVerificationConfirmForm implements forms.RecordVerificationConfirm {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface RecordVerificationRequestForm extends forms.RecordVerificationRequest{} // merge
+declare class RecordVerificationRequestForm implements forms.RecordVerificationRequest {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface SettingsUpsertForm extends forms.SettingsUpsert{} // merge
+declare class SettingsUpsertForm implements forms.SettingsUpsert {
+ constructor(app: core.App)
+}
+
+interface TestEmailSendForm extends forms.TestEmailSend{} // merge
+declare class TestEmailSendForm implements forms.TestEmailSend {
+ constructor(app: core.App)
+}
+
+interface TestS3FilesystemForm extends forms.TestS3Filesystem{} // merge
+declare class TestS3FilesystemForm implements forms.TestS3Filesystem {
+ constructor(app: core.App)
+}
+
+// -------------------------------------------------------------------
+// apisBinds
+// -------------------------------------------------------------------
+
+interface Route extends echo.Route{} // merge
+declare class Route implements echo.Route {
+ constructor(data?: Partial<echo.Route>)
+}
+
+interface ApiError extends apis.ApiError{} // merge
+declare class ApiError implements apis.ApiError {
+ constructor(status?: number, message?: string, data?: any)
+}
+
+declare namespace $apis {
+ let requireRecordAuth: apis.requireRecordAuth
+ let requireSameContextRecordAuth: apis.requireSameContextRecordAuth
+ let requireAdminAuth: apis.requireAdminAuth
+ let requireAdminAuthOnlyIfAny: apis.requireAdminAuthOnlyIfAny
+ let requireAdminOrRecordAuth: apis.requireAdminOrRecordAuth
+ let requireAdminOrOwnerAuth: apis.requireAdminOrOwnerAuth
+ let activityLogger: apis.activityLogger
+ let requestData: apis.requestData
+ let recordAuthResponse: apis.recordAuthResponse
+ let enrichRecord: apis.enrichRecord
+ let enrichRecords: apis.enrichRecords
+ let notFoundError: apis.newNotFoundError
+ let badRequestError: apis.newBadRequestError
+ let forbiddenError: apis.newForbiddenError
+ let unauthorizedError: apis.newUnauthorizedError
+}
+`
+
+func main() {
+ mapper := &jsvm.FieldMapper{}
+
+ gen := tygoja.New(tygoja.Config{
+ Packages: map[string][]string{
+ "github.com/go-ozzo/ozzo-validation/v4": {"Error"},
+ "github.com/pocketbase/dbx": {"*"},
+ "github.com/pocketbase/pocketbase/tools/security": {"*"},
+ "github.com/pocketbase/pocketbase/tools/filesystem": {"*"},
+ "github.com/pocketbase/pocketbase/tokens": {"*"},
+ "github.com/pocketbase/pocketbase/apis": {"*"},
+ "github.com/pocketbase/pocketbase/forms": {"*"},
+ "github.com/pocketbase/pocketbase/core": {"*"},
+ },
+ FieldNameFormatter: func(s string) string {
+ return mapper.FieldName(nil, reflect.StructField{Name: s})
+ },
+ MethodNameFormatter: func(s string) string {
+ return mapper.MethodName(nil, reflect.Method{Name: s})
+ },
+ Indent: " ", // use only a single space to slightly reduce the size
+ WithPackageFunctions: true,
+ Heading: heading,
+ })
+
+ result, err := gen.Generate() // produces the full .d.ts content as a string
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ if err := os.WriteFile("./generated/types.d.ts", []byte(result), 0644); err != nil {
+ log.Fatal(err)
+ }
+}
diff --git a/plugins/jsvm/internal/docs/generated/embed.go b/plugins/jsvm/internal/docs/generated/embed.go
new file mode 100644
index 00000000..b5b98258
--- /dev/null
+++ b/plugins/jsvm/internal/docs/generated/embed.go
@@ -0,0 +1,7 @@
+package generated
+
+import "embed"
+
+// Types embeds the generated TypeScript declarations file (types.d.ts).
+//go:embed types.d.ts
+var Types embed.FS
diff --git a/plugins/jsvm/internal/docs/generated/types.d.ts b/plugins/jsvm/internal/docs/generated/types.d.ts
new file mode 100644
index 00000000..c43ad73f
--- /dev/null
+++ b/plugins/jsvm/internal/docs/generated/types.d.ts
@@ -0,0 +1,19932 @@
+// GENERATED CODE - DO NOT MODIFY BY HAND
+
+// -------------------------------------------------------------------
+// baseBinds
+// -------------------------------------------------------------------
+
+declare var $app: core.App
+
+interface Record extends models.Record{} // merge
+declare class Record implements models.Record {
+ constructor(collection?: models.Collection, data?: { [key:string]: any })
+}
+
+interface Collection extends models.Collection{} // merge
+declare class Collection implements models.Collection {
+ constructor(data?: Partial<models.Collection>)
+}
+
+interface Admin extends models.Admin{} // merge
+declare class Admin implements models.Admin {
+ constructor(data?: Partial<models.Admin>)
+}
+
+interface Schema extends schema.Schema{} // merge
+declare class Schema implements schema.Schema {
+ constructor(data?: Partial<schema.Schema>)
+}
+
+interface SchemaField extends schema.SchemaField{} // merge
+declare class SchemaField implements schema.SchemaField {
+ constructor(data?: Partial<schema.SchemaField>)
+}
+
+interface Mail extends mailer.Message{} // merge
+declare class Mail implements mailer.Message {
+ constructor(message?: Partial<mailer.Message>)
+}
+
+interface ValidationError extends ozzo_validation.Error{} // merge
+declare class ValidationError implements ozzo_validation.Error {
+ constructor(code?: number, message?: string)
+}
+
+interface Dao extends daos.Dao{} // merge
+declare class Dao implements daos.Dao {
+ constructor(concurrentDB?: dbx.Builder, nonconcurrentDB?: dbx.Builder)
+}
+
+// -------------------------------------------------------------------
+// dbxBinds
+// -------------------------------------------------------------------
+
+declare namespace $dbx {
+ /**
+ * {@inheritDoc dbx.HashExp}
+ */
+ export function hashExp(pairs: { [key:string]: any }): dbx.Expression
+
+ let _in: dbx._in
+ export { _in as in }
+
+ export let exp: dbx.newExp
+ export let not: dbx.not
+ export let and: dbx.and
+ export let or: dbx.or
+ export let notIn: dbx.notIn
+ export let like: dbx.like
+ export let orLike: dbx.orLike
+ export let notLike: dbx.notLike
+ export let orNotLike: dbx.orNotLike
+ export let exists: dbx.exists
+ export let notExists: dbx.notExists
+ export let between: dbx.between
+ export let notBetween: dbx.notBetween
+}
+
+// -------------------------------------------------------------------
+// tokensBinds
+// -------------------------------------------------------------------
+
+declare namespace $tokens {
+ let adminAuthToken: tokens.newAdminAuthToken
+ let adminResetPasswordToken: tokens.newAdminResetPasswordToken
+ let adminFileToken: tokens.newAdminFileToken
+ let recordAuthToken: tokens.newRecordAuthToken
+ let recordVerifyToken: tokens.newRecordVerifyToken
+ let recordResetPasswordToken: tokens.newRecordResetPasswordToken
+ let recordChangeEmailToken: tokens.newRecordChangeEmailToken
+ let recordFileToken: tokens.newRecordFileToken
+}
+
+// -------------------------------------------------------------------
+// securityBinds
+// -------------------------------------------------------------------
+
+declare namespace $security {
+ let randomString: security.randomString
+ let randomStringWithAlphabet: security.randomStringWithAlphabet
+ let pseudorandomString: security.pseudorandomString
+ let pseudorandomStringWithAlphabet: security.pseudorandomStringWithAlphabet
+ let parseUnverifiedToken: security.parseUnverifiedJWT
+ let parseToken: security.parseJWT
+ let createToken: security.newToken
+}
+
+// -------------------------------------------------------------------
+// filesystemBinds
+// -------------------------------------------------------------------
+
+declare namespace $filesystem {
+ let fileFromPath: filesystem.newFileFromPath
+ let fileFromBytes: filesystem.newFileFromBytes
+ let fileFromMultipart: filesystem.newFileFromMultipart
+}
+
+// -------------------------------------------------------------------
+// formsBinds
+// -------------------------------------------------------------------
+
+interface AdminLoginForm extends forms.AdminLogin{} // merge
+declare class AdminLoginForm implements forms.AdminLogin {
+ constructor(app: core.App)
+}
+
+interface AdminPasswordResetConfirmForm extends forms.AdminPasswordResetConfirm{} // merge
+declare class AdminPasswordResetConfirmForm implements forms.AdminPasswordResetConfirm {
+ constructor(app: core.App)
+}
+
+interface AdminPasswordResetRequestForm extends forms.AdminPasswordResetRequest{} // merge
+declare class AdminPasswordResetRequestForm implements forms.AdminPasswordResetRequest {
+ constructor(app: core.App)
+}
+
+interface AdminUpsertForm extends forms.AdminUpsert{} // merge
+declare class AdminUpsertForm implements forms.AdminUpsert {
+ constructor(app: core.App, admin: models.Admin)
+}
+
+interface AppleClientSecretCreateForm extends forms.AppleClientSecretCreate{} // merge
+declare class AppleClientSecretCreateForm implements forms.AppleClientSecretCreate {
+ constructor(app: core.App)
+}
+
+interface CollectionUpsertForm extends forms.CollectionUpsert{} // merge
+declare class CollectionUpsertForm implements forms.CollectionUpsert {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface CollectionsImportForm extends forms.CollectionsImport{} // merge
+declare class CollectionsImportForm implements forms.CollectionsImport {
+ constructor(app: core.App)
+}
+
+interface RealtimeSubscribeForm extends forms.RealtimeSubscribe{} // merge
+declare class RealtimeSubscribeForm implements forms.RealtimeSubscribe {}
+
+interface RecordEmailChangeConfirmForm extends forms.RecordEmailChangeConfirm{} // merge
+declare class RecordEmailChangeConfirmForm implements forms.RecordEmailChangeConfirm {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface RecordEmailChangeRequestForm extends forms.RecordEmailChangeRequest{} // merge
+declare class RecordEmailChangeRequestForm implements forms.RecordEmailChangeRequest {
+ constructor(app: core.App, record: models.Record)
+}
+
+interface RecordOAuth2LoginForm extends forms.RecordOAuth2Login{} // merge
+declare class RecordOAuth2LoginForm implements forms.RecordOAuth2Login {
+ constructor(app: core.App, collection: models.Collection, optAuthRecord?: models.Record)
+}
+
+interface RecordPasswordLoginForm extends forms.RecordPasswordLogin{} // merge
+declare class RecordPasswordLoginForm implements forms.RecordPasswordLogin {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface RecordPasswordResetConfirmForm extends forms.RecordPasswordResetConfirm{} // merge
+declare class RecordPasswordResetConfirmForm implements forms.RecordPasswordResetConfirm {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface RecordPasswordResetRequestForm extends forms.RecordPasswordResetRequest{} // merge
+declare class RecordPasswordResetRequestForm implements forms.RecordPasswordResetRequest {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface RecordUpsertForm extends forms.RecordUpsert{} // merge
+declare class RecordUpsertForm implements forms.RecordUpsert {
+ constructor(app: core.App, record: models.Record)
+}
+
+interface RecordVerificationConfirmForm extends forms.RecordVerificationConfirm{} // merge
+declare class RecordVerificationConfirmForm implements forms.RecordVerificationConfirm {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface RecordVerificationRequestForm extends forms.RecordVerificationRequest{} // merge
+declare class RecordVerificationRequestForm implements forms.RecordVerificationRequest {
+ constructor(app: core.App, collection: models.Collection)
+}
+
+interface SettingsUpsertForm extends forms.SettingsUpsert{} // merge
+declare class SettingsUpsertForm implements forms.SettingsUpsert {
+ constructor(app: core.App)
+}
+
+interface TestEmailSendForm extends forms.TestEmailSend{} // merge
+declare class TestEmailSendForm implements forms.TestEmailSend {
+ constructor(app: core.App)
+}
+
+interface TestS3FilesystemForm extends forms.TestS3Filesystem{} // merge
+declare class TestS3FilesystemForm implements forms.TestS3Filesystem {
+ constructor(app: core.App)
+}
+
+// -------------------------------------------------------------------
+// apisBinds
+// -------------------------------------------------------------------
+
+interface Route extends echo.Route{} // merge
+declare class Route implements echo.Route {
+ constructor(data?: Partial<echo.Route>)
+}
+
+interface ApiError extends apis.ApiError{} // merge
+declare class ApiError implements apis.ApiError {
+ constructor(status?: number, message?: string, data?: any)
+}
+
+declare namespace $apis {
+ let requireRecordAuth: apis.requireRecordAuth
+ let requireSameContextRecordAuth: apis.requireSameContextRecordAuth
+ let requireAdminAuth: apis.requireAdminAuth
+ let requireAdminAuthOnlyIfAny: apis.requireAdminAuthOnlyIfAny
+ let requireAdminOrRecordAuth: apis.requireAdminOrRecordAuth
+ let requireAdminOrOwnerAuth: apis.requireAdminOrOwnerAuth
+ let activityLogger: apis.activityLogger
+ let requestData: apis.requestData
+ let recordAuthResponse: apis.recordAuthResponse
+ let enrichRecord: apis.enrichRecord
+ let enrichRecords: apis.enrichRecords
+ let notFoundError: apis.newNotFoundError
+ let badRequestError: apis.newBadRequestError
+ let forbiddenError: apis.newForbiddenError
+ let unauthorizedError: apis.newUnauthorizedError
+}
+type _TygojaDict = { [key:string | number | symbol]: any; }
+type _TygojaAny = any
+
+/**
+ * Package dbx provides a set of DB-agnostic and easy-to-use query building methods for relational databases.
+ */
+namespace dbx {
+ /**
+ * Builder supports building SQL statements in a DB-agnostic way.
+ * Builder mainly provides two sets of query building methods: those building SELECT statements
+ * and those manipulating DB data or schema (e.g. INSERT statements, CREATE TABLE statements).
+ */
+ interface Builder {
+ /**
+ * NewQuery creates a new Query object with the given SQL statement.
+ * The SQL statement may contain parameter placeholders which can be bound with actual parameter
+ * values before the statement is executed.
+ */
+ newQuery(_arg0: string): (Query | undefined)
+ /**
+ * Select returns a new SelectQuery object that can be used to build a SELECT statement.
+ * The parameters to this method should be the list column names to be selected.
+ * A column name may have an optional alias name. For example, Select("id", "my_name AS name").
+ */
+ select(..._arg0: string[]): (SelectQuery | undefined)
+ /**
+ * ModelQuery returns a new ModelQuery object that can be used to perform model insertion, update, and deletion.
+ * The parameter to this method should be a pointer to the model struct that needs to be inserted, updated, or deleted.
+ */
+ model(_arg0: {
+ }): (ModelQuery | undefined)
+ /**
+ * GeneratePlaceholder generates an anonymous parameter placeholder with the given parameter ID.
+ */
+ generatePlaceholder(_arg0: number): string
+ /**
+ * Quote quotes a string so that it can be embedded in a SQL statement as a string value.
+ */
+ quote(_arg0: string): string
+ /**
+ * QuoteSimpleTableName quotes a simple table name.
+ * A simple table name does not contain any schema prefix.
+ */
+ quoteSimpleTableName(_arg0: string): string
+ /**
+ * QuoteSimpleColumnName quotes a simple column name.
+ * A simple column name does not contain any table prefix.
+ */
+ quoteSimpleColumnName(_arg0: string): string
+ /**
+ * QueryBuilder returns the query builder supporting the current DB.
+ */
+ queryBuilder(): QueryBuilder
+ /**
+ * Insert creates a Query that represents an INSERT SQL statement.
+ * The keys of cols are the column names, while the values of cols are the corresponding column
+ * values to be inserted.
+ */
+ insert(table: string, cols: Params): (Query | undefined)
+ /**
+ * Upsert creates a Query that represents an UPSERT SQL statement.
+ * Upsert inserts a row into the table if the primary key or unique index is not found.
+ * Otherwise it will update the row with the new values.
+ * The keys of cols are the column names, while the values of cols are the corresponding column
+ * values to be inserted.
+ */
+ upsert(table: string, cols: Params, ...constraints: string[]): (Query | undefined)
+ /**
+ * Update creates a Query that represents an UPDATE SQL statement.
+ * The keys of cols are the column names, while the values of cols are the corresponding new column
+ * values. If the "where" expression is nil, the UPDATE SQL statement will have no WHERE clause
+ * (be careful in this case as the SQL statement will update ALL rows in the table).
+ */
+ update(table: string, cols: Params, where: Expression): (Query | undefined)
+ /**
+ * Delete creates a Query that represents a DELETE SQL statement.
+ * If the "where" expression is nil, the DELETE SQL statement will have no WHERE clause
+ * (be careful in this case as the SQL statement will delete ALL rows in the table).
+ */
+ delete(table: string, where: Expression): (Query | undefined)
+ /**
+ * CreateTable creates a Query that represents a CREATE TABLE SQL statement.
+ * The keys of cols are the column names, while the values of cols are the corresponding column types.
+ * The optional "options" parameters will be appended to the generated SQL statement.
+ */
+ createTable(table: string, cols: _TygojaDict, ...options: string[]): (Query | undefined)
+ /**
+ * RenameTable creates a Query that can be used to rename a table.
+ */
+ renameTable(oldName: string): (Query | undefined)
+ /**
+ * DropTable creates a Query that can be used to drop a table.
+ */
+ dropTable(table: string): (Query | undefined)
+ /**
+ * TruncateTable creates a Query that can be used to truncate a table.
+ */
+ truncateTable(table: string): (Query | undefined)
+ /**
+ * AddColumn creates a Query that can be used to add a column to a table.
+ */
+ addColumn(table: string): (Query | undefined)
+ /**
+ * DropColumn creates a Query that can be used to drop a column from a table.
+ */
+ dropColumn(table: string): (Query | undefined)
+ /**
+ * RenameColumn creates a Query that can be used to rename a column in a table.
+ */
+ renameColumn(table: string): (Query | undefined)
+ /**
+ * AlterColumn creates a Query that can be used to change the definition of a table column.
+ */
+ alterColumn(table: string): (Query | undefined)
+ /**
+ * AddPrimaryKey creates a Query that can be used to specify primary key(s) for a table.
+ * The "name" parameter specifies the name of the primary key constraint.
+ */
+ addPrimaryKey(table: string, ...cols: string[]): (Query | undefined)
+ /**
+ * DropPrimaryKey creates a Query that can be used to remove the named primary key constraint from a table.
+ */
+ dropPrimaryKey(table: string): (Query | undefined)
+ /**
+ * AddForeignKey creates a Query that can be used to add a foreign key constraint to a table.
+ * The length of cols and refCols must be the same as they refer to the primary and referential columns.
+ * The optional "options" parameters will be appended to the SQL statement. They can be used to
+ * specify options such as "ON DELETE CASCADE".
+ */
+ addForeignKey(table: string, cols: Array<string>, refTable: string, ...options: string[]): (Query | undefined)
+ /**
+ * DropForeignKey creates a Query that can be used to remove the named foreign key constraint from a table.
+ */
+ dropForeignKey(table: string): (Query | undefined)
+ /**
+ * CreateIndex creates a Query that can be used to create an index for a table.
+ */
+ createIndex(table: string, ...cols: string[]): (Query | undefined)
+ /**
+ * CreateUniqueIndex creates a Query that can be used to create a unique index for a table.
+ */
+ createUniqueIndex(table: string, ...cols: string[]): (Query | undefined)
+ /**
+ * DropIndex creates a Query that can be used to remove the named index from a table.
+ */
+ dropIndex(table: string): (Query | undefined)
+ }
+ /**
+ * BaseBuilder provides a basic implementation of the Builder interface.
+ */
+ interface BaseBuilder {
+ }
+ interface newBaseBuilder {
+ /**
+ * NewBaseBuilder creates a new BaseBuilder instance.
+ */
+ (db: DB, executor: Executor): (BaseBuilder | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * DB returns the DB instance that this builder is associated with.
+ */
+ db(): (DB | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * Executor returns the executor object (a DB instance or a transaction) for executing SQL statements.
+ */
+ executor(): Executor
+ }
+ interface BaseBuilder {
+ /**
+ * NewQuery creates a new Query object with the given SQL statement.
+ * The SQL statement may contain parameter placeholders which can be bound with actual parameter
+ * values before the statement is executed.
+ */
+ newQuery(sql: string): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * GeneratePlaceholder generates an anonymous parameter placeholder with the given parameter ID.
+ */
+ generatePlaceholder(_arg0: number): string
+ }
+ interface BaseBuilder {
+ /**
+ * Quote quotes a string so that it can be embedded in a SQL statement as a string value.
+ */
+ quote(s: string): string
+ }
+ interface BaseBuilder {
+ /**
+ * QuoteSimpleTableName quotes a simple table name.
+ * A simple table name does not contain any schema prefix.
+ */
+ quoteSimpleTableName(s: string): string
+ }
+ interface BaseBuilder {
+ /**
+ * QuoteSimpleColumnName quotes a simple column name.
+ * A simple column name does not contain any table prefix.
+ */
+ quoteSimpleColumnName(s: string): string
+ }
+ interface BaseBuilder {
+ /**
+ * Insert creates a Query that represents an INSERT SQL statement.
+ * The keys of cols are the column names, while the values of cols are the corresponding column
+ * values to be inserted.
+ */
+ insert(table: string, cols: Params): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * Upsert creates a Query that represents an UPSERT SQL statement.
+ * Upsert inserts a row into the table if the primary key or unique index is not found.
+ * Otherwise it will update the row with the new values.
+ * The keys of cols are the column names, while the values of cols are the corresponding column
+ * values to be inserted.
+ */
+ upsert(table: string, cols: Params, ...constraints: string[]): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * Update creates a Query that represents an UPDATE SQL statement.
+ * The keys of cols are the column names, while the values of cols are the corresponding new column
+ * values. If the "where" expression is nil, the UPDATE SQL statement will have no WHERE clause
+ * (be careful in this case as the SQL statement will update ALL rows in the table).
+ */
+ update(table: string, cols: Params, where: Expression): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * Delete creates a Query that represents a DELETE SQL statement.
+ * If the "where" expression is nil, the DELETE SQL statement will have no WHERE clause
+ * (be careful in this case as the SQL statement will delete ALL rows in the table).
+ */
+ delete(table: string, where: Expression): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * CreateTable creates a Query that represents a CREATE TABLE SQL statement.
+ * The keys of cols are the column names, while the values of cols are the corresponding column types.
+ * The optional "options" parameters will be appended to the generated SQL statement.
+ */
+ createTable(table: string, cols: _TygojaDict, ...options: string[]): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * RenameTable creates a Query that can be used to rename a table.
+ */
+ renameTable(oldName: string): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * DropTable creates a Query that can be used to drop a table.
+ */
+ dropTable(table: string): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * TruncateTable creates a Query that can be used to truncate a table.
+ */
+ truncateTable(table: string): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * AddColumn creates a Query that can be used to add a column to a table.
+ */
+ addColumn(table: string): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * DropColumn creates a Query that can be used to drop a column from a table.
+ */
+ dropColumn(table: string): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * RenameColumn creates a Query that can be used to rename a column in a table.
+ */
+ renameColumn(table: string): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * AlterColumn creates a Query that can be used to change the definition of a table column.
+ */
+ alterColumn(table: string): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * AddPrimaryKey creates a Query that can be used to specify primary key(s) for a table.
+ * The "name" parameter specifies the name of the primary key constraint.
+ */
+ addPrimaryKey(table: string, ...cols: string[]): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * DropPrimaryKey creates a Query that can be used to remove the named primary key constraint from a table.
+ */
+ dropPrimaryKey(table: string): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * AddForeignKey creates a Query that can be used to add a foreign key constraint to a table.
+ * The length of cols and refCols must be the same as they refer to the primary and referential columns.
+ * The optional "options" parameters will be appended to the SQL statement. They can be used to
+ * specify options such as "ON DELETE CASCADE".
+ */
+ addForeignKey(table: string, cols: Array, refTable: string, ...options: string[]): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * DropForeignKey creates a Query that can be used to remove the named foreign key constraint from a table.
+ */
+ dropForeignKey(table: string): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * CreateIndex creates a Query that can be used to create an index for a table.
+ */
+ createIndex(table: string, ...cols: string[]): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * CreateUniqueIndex creates a Query that can be used to create a unique index for a table.
+ */
+ createUniqueIndex(table: string, ...cols: string[]): (Query | undefined)
+ }
+ interface BaseBuilder {
+ /**
+ * DropIndex creates a Query that can be used to remove the named index from a table.
+ */
+ dropIndex(table: string): (Query | undefined)
+ }
+ /**
+ * MssqlBuilder is the builder for SQL Server databases.
+ */
+ type _subghATn = BaseBuilder
+ interface MssqlBuilder extends _subghATn {
+ }
+ /**
+ * MssqlQueryBuilder is the query builder for SQL Server databases.
+ */
+ type _sublOvtD = BaseQueryBuilder
+ interface MssqlQueryBuilder extends _sublOvtD {
+ }
+ interface newMssqlBuilder {
+ /**
+ * NewMssqlBuilder creates a new MssqlBuilder instance.
+ */
+ (db: DB, executor: Executor): Builder
+ }
+ interface MssqlBuilder {
+ /**
+ * QueryBuilder returns the query builder supporting the current DB.
+ */
+ queryBuilder(): QueryBuilder
+ }
+ interface MssqlBuilder {
+ /**
+ * Select returns a new SelectQuery object that can be used to build a SELECT statement.
+ * The parameters to this method should be the list column names to be selected.
+ * A column name may have an optional alias name. For example, Select("id", "my_name AS name").
+ */
+ select(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface MssqlBuilder {
+ /**
+ * Model returns a new ModelQuery object that can be used to perform model-based DB operations.
+ * The model passed to this method should be a pointer to a model struct.
+ */
+ model(model: {
+ }): (ModelQuery | undefined)
+ }
+ interface MssqlBuilder {
+ /**
+ * QuoteSimpleTableName quotes a simple table name.
+ * A simple table name does not contain any schema prefix.
+ */
+ quoteSimpleTableName(s: string): string
+ }
+ interface MssqlBuilder {
+ /**
+ * QuoteSimpleColumnName quotes a simple column name.
+ * A simple column name does not contain any table prefix.
+ */
+ quoteSimpleColumnName(s: string): string
+ }
+ interface MssqlBuilder {
+ /**
+ * RenameTable creates a Query that can be used to rename a table.
+ */
+ renameTable(oldName: string): (Query | undefined)
+ }
+ interface MssqlBuilder {
+ /**
+ * RenameColumn creates a Query that can be used to rename a column in a table.
+ */
+ renameColumn(table: string): (Query | undefined)
+ }
+ interface MssqlBuilder {
+ /**
+ * AlterColumn creates a Query that can be used to change the definition of a table column.
+ */
+ alterColumn(table: string): (Query | undefined)
+ }
+ interface MssqlQueryBuilder {
+ /**
+ * BuildOrderByAndLimit generates the ORDER BY and LIMIT clauses.
+ */
+ buildOrderByAndLimit(sql: string, cols: Array, limit: number, offset: number): string
+ }
+ /**
+ * MysqlBuilder is the builder for MySQL databases.
+ */
+ type _subFMzVn = BaseBuilder
+ interface MysqlBuilder extends _subFMzVn {
+ }
+ interface newMysqlBuilder {
+ /**
+ * NewMysqlBuilder creates a new MysqlBuilder instance.
+ */
+ (db: DB, executor: Executor): Builder
+ }
+ interface MysqlBuilder {
+ /**
+ * QueryBuilder returns the query builder supporting the current DB.
+ */
+ queryBuilder(): QueryBuilder
+ }
+ interface MysqlBuilder {
+ /**
+ * Select returns a new SelectQuery object that can be used to build a SELECT statement.
+ * The parameters to this method should be the list column names to be selected.
+ * A column name may have an optional alias name. For example, Select("id", "my_name AS name").
+ */
+ select(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface MysqlBuilder {
+ /**
+ * Model returns a new ModelQuery object that can be used to perform model-based DB operations.
+ * The model passed to this method should be a pointer to a model struct.
+ */
+ model(model: {
+ }): (ModelQuery | undefined)
+ }
+ interface MysqlBuilder {
+ /**
+ * QuoteSimpleTableName quotes a simple table name.
+ * A simple table name does not contain any schema prefix.
+ */
+ quoteSimpleTableName(s: string): string
+ }
+ interface MysqlBuilder {
+ /**
+ * QuoteSimpleColumnName quotes a simple column name.
+ * A simple column name does not contain any table prefix.
+ */
+ quoteSimpleColumnName(s: string): string
+ }
+ interface MysqlBuilder {
+ /**
+ * Upsert creates a Query that represents an UPSERT SQL statement.
+ * Upsert inserts a row into the table if the primary key or unique index is not found.
+ * Otherwise it will update the row with the new values.
+ * The keys of cols are the column names, while the values of cols are the corresponding column
+ * values to be inserted.
+ */
+ upsert(table: string, cols: Params, ...constraints: string[]): (Query | undefined)
+ }
+ interface MysqlBuilder {
+ /**
+ * RenameColumn creates a Query that can be used to rename a column in a table.
+ */
+ renameColumn(table: string): (Query | undefined)
+ }
+ interface MysqlBuilder {
+ /**
+ * DropPrimaryKey creates a Query that can be used to remove the named primary key constraint from a table.
+ */
+ dropPrimaryKey(table: string): (Query | undefined)
+ }
+ interface MysqlBuilder {
+ /**
+ * DropForeignKey creates a Query that can be used to remove the named foreign key constraint from a table.
+ */
+ dropForeignKey(table: string): (Query | undefined)
+ }
+ /**
+ * OciBuilder is the builder for Oracle databases.
+ */
+ type _subxrhjr = BaseBuilder
+ interface OciBuilder extends _subxrhjr {
+ }
+ /**
+ * OciQueryBuilder is the query builder for Oracle databases.
+ */
+ type _subSdjpi = BaseQueryBuilder
+ interface OciQueryBuilder extends _subSdjpi {
+ }
+ interface newOciBuilder {
+ /**
+ * NewOciBuilder creates a new OciBuilder instance.
+ */
+ (db: DB, executor: Executor): Builder
+ }
+ interface OciBuilder {
+ /**
+ * Select returns a new SelectQuery object that can be used to build a SELECT statement.
+ * The parameters to this method should be the list column names to be selected.
+ * A column name may have an optional alias name. For example, Select("id", "my_name AS name").
+ */
+ select(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface OciBuilder {
+ /**
+ * Model returns a new ModelQuery object that can be used to perform model-based DB operations.
+ * The model passed to this method should be a pointer to a model struct.
+ */
+ model(model: {
+ }): (ModelQuery | undefined)
+ }
+ interface OciBuilder {
+ /**
+ * GeneratePlaceholder generates an anonymous parameter placeholder with the given parameter ID.
+ */
+ generatePlaceholder(i: number): string
+ }
+ interface OciBuilder {
+ /**
+ * QueryBuilder returns the query builder supporting the current DB.
+ */
+ queryBuilder(): QueryBuilder
+ }
+ interface OciBuilder {
+ /**
+ * DropIndex creates a Query that can be used to remove the named index from a table.
+ */
+ dropIndex(table: string): (Query | undefined)
+ }
+ interface OciBuilder {
+ /**
+ * RenameTable creates a Query that can be used to rename a table.
+ */
+ renameTable(oldName: string): (Query | undefined)
+ }
+ interface OciBuilder {
+ /**
+ * AlterColumn creates a Query that can be used to change the definition of a table column.
+ */
+ alterColumn(table: string): (Query | undefined)
+ }
+ interface OciQueryBuilder {
+ /**
+ * BuildOrderByAndLimit generates the ORDER BY and LIMIT clauses.
+ */
+ buildOrderByAndLimit(sql: string, cols: Array, limit: number, offset: number): string
+ }
+ /**
+ * PgsqlBuilder is the builder for PostgreSQL databases.
+ */
+ type _subTeHer = BaseBuilder
+ interface PgsqlBuilder extends _subTeHer {
+ }
+ interface newPgsqlBuilder {
+ /**
+ * NewPgsqlBuilder creates a new PgsqlBuilder instance.
+ */
+ (db: DB, executor: Executor): Builder
+ }
+ interface PgsqlBuilder {
+ /**
+ * Select returns a new SelectQuery object that can be used to build a SELECT statement.
+ * The parameters to this method should be the list column names to be selected.
+ * A column name may have an optional alias name. For example, Select("id", "my_name AS name").
+ */
+ select(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface PgsqlBuilder {
+ /**
+ * Model returns a new ModelQuery object that can be used to perform model-based DB operations.
+ * The model passed to this method should be a pointer to a model struct.
+ */
+ model(model: {
+ }): (ModelQuery | undefined)
+ }
+ interface PgsqlBuilder {
+ /**
+ * GeneratePlaceholder generates an anonymous parameter placeholder with the given parameter ID.
+ */
+ generatePlaceholder(i: number): string
+ }
+ interface PgsqlBuilder {
+ /**
+ * QueryBuilder returns the query builder supporting the current DB.
+ */
+ queryBuilder(): QueryBuilder
+ }
+ interface PgsqlBuilder {
+ /**
+ * Upsert creates a Query that represents an UPSERT SQL statement.
+ * Upsert inserts a row into the table if the primary key or unique index is not found.
+ * Otherwise it will update the row with the new values.
+ * The keys of cols are the column names, while the values of cols are the corresponding column
+ * values to be inserted.
+ */
+ upsert(table: string, cols: Params, ...constraints: string[]): (Query | undefined)
+ }
+ interface PgsqlBuilder {
+ /**
+ * DropIndex creates a Query that can be used to remove the named index from a table.
+ */
+ dropIndex(table: string): (Query | undefined)
+ }
+ interface PgsqlBuilder {
+ /**
+ * RenameTable creates a Query that can be used to rename a table.
+ */
+ renameTable(oldName: string): (Query | undefined)
+ }
+ interface PgsqlBuilder {
+ /**
+ * AlterColumn creates a Query that can be used to change the definition of a table column.
+ */
+ alterColumn(table: string): (Query | undefined)
+ }
+ /**
+ * SqliteBuilder is the builder for SQLite databases.
+ */
+ type _sublaNCj = BaseBuilder
+ interface SqliteBuilder extends _sublaNCj {
+ }
+ interface newSqliteBuilder {
+ /**
+ * NewSqliteBuilder creates a new SqliteBuilder instance.
+ */
+ (db: DB, executor: Executor): Builder
+ }
+ interface SqliteBuilder {
+ /**
+ * QueryBuilder returns the query builder supporting the current DB.
+ */
+ queryBuilder(): QueryBuilder
+ }
+ interface SqliteBuilder {
+ /**
+ * Select returns a new SelectQuery object that can be used to build a SELECT statement.
+ * The parameters to this method should be the list column names to be selected.
+ * A column name may have an optional alias name. For example, Select("id", "my_name AS name").
+ */
+ select(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface SqliteBuilder {
+ /**
+ * Model returns a new ModelQuery object that can be used to perform model-based DB operations.
+ * The model passed to this method should be a pointer to a model struct.
+ */
+ model(model: {
+ }): (ModelQuery | undefined)
+ }
+ interface SqliteBuilder {
+ /**
+ * QuoteSimpleTableName quotes a simple table name.
+ * A simple table name does not contain any schema prefix.
+ */
+ quoteSimpleTableName(s: string): string
+ }
+ interface SqliteBuilder {
+ /**
+ * QuoteSimpleColumnName quotes a simple column name.
+ * A simple column name does not contain any table prefix.
+ */
+ quoteSimpleColumnName(s: string): string
+ }
+ interface SqliteBuilder {
+ /**
+ * DropIndex creates a Query that can be used to remove the named index from a table.
+ */
+ dropIndex(table: string): (Query | undefined)
+ }
+ interface SqliteBuilder {
+ /**
+ * TruncateTable creates a Query that can be used to truncate a table.
+ */
+ truncateTable(table: string): (Query | undefined)
+ }
+ interface SqliteBuilder {
+ /**
+ * RenameTable creates a Query that can be used to rename a table.
+ */
+ renameTable(oldName: string): (Query | undefined)
+ }
+ interface SqliteBuilder {
+ /**
+ * AlterColumn creates a Query that can be used to change the definition of a table column.
+ */
+ alterColumn(table: string): (Query | undefined)
+ }
+ interface SqliteBuilder {
+ /**
+ * AddPrimaryKey creates a Query that can be used to specify primary key(s) for a table.
+ * The "name" parameter specifies the name of the primary key constraint.
+ */
+ addPrimaryKey(table: string, ...cols: string[]): (Query | undefined)
+ }
+ interface SqliteBuilder {
+ /**
+ * DropPrimaryKey creates a Query that can be used to remove the named primary key constraint from a table.
+ */
+ dropPrimaryKey(table: string): (Query | undefined)
+ }
+ interface SqliteBuilder {
+ /**
+ * AddForeignKey creates a Query that can be used to add a foreign key constraint to a table.
+ * The length of cols and refCols must be the same as they refer to the primary and referential columns.
+ * The optional "options" parameters will be appended to the SQL statement. They can be used to
+ * specify options such as "ON DELETE CASCADE".
+ */
+ addForeignKey(table: string, cols: Array, refTable: string, ...options: string[]): (Query | undefined)
+ }
+ interface SqliteBuilder {
+ /**
+ * DropForeignKey creates a Query that can be used to remove the named foreign key constraint from a table.
+ */
+ dropForeignKey(table: string): (Query | undefined)
+ }
+ /**
+ * StandardBuilder is the builder that is used by DB for an unknown driver.
+ */
+ type _subiBWgV = BaseBuilder
+ interface StandardBuilder extends _subiBWgV {
+ }
+ interface newStandardBuilder {
+ /**
+ * NewStandardBuilder creates a new StandardBuilder instance.
+ */
+ (db: DB, executor: Executor): Builder
+ }
+ interface StandardBuilder {
+ /**
+ * QueryBuilder returns the query builder supporting the current DB.
+ */
+ queryBuilder(): QueryBuilder
+ }
+ interface StandardBuilder {
+ /**
+ * Select returns a new SelectQuery object that can be used to build a SELECT statement.
+ * The parameters to this method should be the list column names to be selected.
+ * A column name may have an optional alias name. For example, Select("id", "my_name AS name").
+ */
+ select(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface StandardBuilder {
+ /**
+ * Model returns a new ModelQuery object that can be used to perform model-based DB operations.
+ * The model passed to this method should be a pointer to a model struct.
+ */
+ model(model: {
+ }): (ModelQuery | undefined)
+ }
+ /**
+ * LogFunc logs a message for each SQL statement being executed.
+ * This method takes one or multiple parameters. If a single parameter
+ * is provided, it will be treated as the log message. If multiple parameters
+ * are provided, they will be passed to fmt.Sprintf() to generate the log message.
+ */
+ interface LogFunc {(format: string, ...a: {
+ }[]): void }
+ /**
+ * PerfFunc is called when a query finishes execution.
+ * The query execution time is passed to this function so that the DB performance
+ * can be profiled. The "ns" parameter gives the number of nanoseconds that the
+ * SQL statement takes to execute, while the "execute" parameter indicates whether
+ * the SQL statement is executed or queried (usually SELECT statements).
+ */
+ interface PerfFunc {(ns: number, sql: string, execute: boolean): void }
+ /**
+ * QueryLogFunc is called each time when performing a SQL query.
+ * The "t" parameter gives the time that the SQL statement takes to execute,
+ * while rows and err are the result of the query.
+ */
+ interface QueryLogFunc {(ctx: context.Context, t: time.Duration, sql: string, rows: sql.Rows, err: Error): void }
+ /**
+ * ExecLogFunc is called each time when a SQL statement is executed.
+ * The "t" parameter gives the time that the SQL statement takes to execute,
+ * while result and err refer to the result of the execution.
+ */
+ interface ExecLogFunc {(ctx: context.Context, t: time.Duration, sql: string, result: sql.Result, err: Error): void }
+ /**
+ * BuilderFunc creates a Builder instance using the given DB instance and Executor.
+ */
+ interface BuilderFunc {(_arg0: DB, _arg1: Executor): Builder }
+ /**
+ * DB enhances sql.DB by providing a set of DB-agnostic query building methods.
+ * DB allows easier query building and population of data into Go variables.
+ */
+ type _subHZJep = Builder
+ interface DB extends _subHZJep {
+ /**
+ * FieldMapper maps struct fields to DB columns. Defaults to DefaultFieldMapFunc.
+ */
+ fieldMapper: FieldMapFunc
+ /**
+ * TableMapper maps structs to table names. Defaults to GetTableName.
+ */
+ tableMapper: TableMapFunc
+ /**
+ * LogFunc logs the SQL statements being executed. Defaults to nil, meaning no logging.
+ */
+ logFunc: LogFunc
+ /**
+ * PerfFunc logs the SQL execution time. Defaults to nil, meaning no performance profiling.
+ * Deprecated: Please use QueryLogFunc and ExecLogFunc instead.
+ */
+ perfFunc: PerfFunc
+ /**
+ * QueryLogFunc is called each time when performing a SQL query that returns data.
+ */
+ queryLogFunc: QueryLogFunc
+ /**
+ * ExecLogFunc is called each time when a SQL statement is executed.
+ */
+ execLogFunc: ExecLogFunc
+ }
+ /**
+ * Errors represents a list of errors.
+ */
+ interface Errors extends Array{}
+ interface newFromDB {
+ /**
+ * NewFromDB encapsulates an existing database connection.
+ */
+ (sqlDB: sql.DB, driverName: string): (DB | undefined)
+ }
+ interface open {
+ /**
+ * Open opens a database specified by a driver name and data source name (DSN).
+ * Note that Open does not check if DSN is specified correctly. It doesn't try to establish a DB connection either.
+ * Please refer to sql.Open() for more information.
+ */
+ (driverName: string): (DB | undefined)
+ }
+ interface mustOpen {
+ /**
+ * MustOpen opens a database and establishes a connection to it.
+ * Please refer to sql.Open() and sql.Ping() for more information.
+ */
+ (driverName: string): (DB | undefined)
+ }
+ interface DB {
+ /**
+ * Clone makes a shallow copy of DB.
+ */
+ clone(): (DB | undefined)
+ }
+ interface DB {
+ /**
+ * WithContext returns a new instance of DB associated with the given context.
+ */
+ withContext(ctx: context.Context): (DB | undefined)
+ }
+ interface DB {
+ /**
+ * Context returns the context associated with the DB instance.
+ * It returns nil if no context is associated.
+ */
+ context(): context.Context
+ }
+ interface DB {
+ /**
+ * DB returns the sql.DB instance encapsulated by dbx.DB.
+ */
+ db(): (sql.DB | undefined)
+ }
+ interface DB {
+ /**
+ * Close closes the database, releasing any open resources.
+ * It is rare to Close a DB, as the DB handle is meant to be
+ * long-lived and shared between many goroutines.
+ */
+ close(): void
+ }
+ interface DB {
+ /**
+ * Begin starts a transaction.
+ */
+ begin(): (Tx | undefined)
+ }
+ interface DB {
+ /**
+ * BeginTx starts a transaction with the given context and transaction options.
+ */
+ beginTx(ctx: context.Context, opts: sql.TxOptions): (Tx | undefined)
+ }
+ interface DB {
+ /**
+ * Wrap encapsulates an existing transaction.
+ */
+ wrap(sqlTx: sql.Tx): (Tx | undefined)
+ }
+ interface DB {
+ /**
+ * Transactional starts a transaction and executes the given function.
+ * If the function returns an error, the transaction will be rolled back.
+ * Otherwise, the transaction will be committed.
+ */
+ transactional(f: (_arg0: Tx) => void): void
+ }
+ interface DB {
+ /**
+ * TransactionalContext starts a transaction and executes the given function with the given context and transaction options.
+ * If the function returns an error, the transaction will be rolled back.
+ * Otherwise, the transaction will be committed.
+ */
+ transactionalContext(ctx: context.Context, opts: sql.TxOptions, f: (_arg0: Tx) => void): void
+ }
+ interface DB {
+ /**
+ * DriverName returns the name of the DB driver.
+ */
+ driverName(): string
+ }
+ interface DB {
+ /**
+ * QuoteTableName quotes the given table name appropriately.
+ * If the table name contains DB schema prefix, it will be handled accordingly.
+ * This method will do nothing if the table name is already quoted or if it contains parenthesis.
+ */
+ quoteTableName(s: string): string
+ }
+ interface DB {
+ /**
+ * QuoteColumnName quotes the given column name appropriately.
+ * If the column name contains a table name prefix, it will be handled accordingly.
+ * This method will do nothing if the column name is already quoted or if it contains parenthesis.
+ */
+ quoteColumnName(s: string): string
+ }
+ interface Errors {
+ /**
+ * Error returns the error string of Errors.
+ */
+ error(): string
+ }
+ /**
+ * Expression represents a DB expression that can be embedded in a SQL statement.
+ */
+ interface Expression {
+ /**
+ * Build converts an expression into a SQL fragment.
+ * If the expression contains binding parameters, they will be added to the given Params.
+ */
+ build(_arg0: DB, _arg1: Params): string
+ }
+ /**
+ * HashExp represents a hash expression.
+ *
+ * A hash expression is a map whose keys are DB column names which need to be filtered according
+ * to the corresponding values. For example, HashExp{"level": 2, "dept": 10} will generate
+ * the SQL: "level"=2 AND "dept"=10.
+ *
+ * HashExp also handles nil values and slice values. For example, HashExp{"level": []interface{}{1, 2}, "dept": nil}
+ * will generate: "level" IN (1, 2) AND "dept" IS NULL.
+ */
+ interface HashExp extends _TygojaDict{}
+ interface newExp {
+ /**
+ * NewExp generates an expression with the specified SQL fragment and the optional binding parameters.
+ */
+ (e: string, ...params: Params[]): Expression
+ }
+ interface not {
+ /**
+ * Not generates a NOT expression which prefixes "NOT" to the specified expression.
+ */
+ (e: Expression): Expression
+ }
+ interface and {
+ /**
+ * And generates an AND expression which concatenates the given expressions with "AND".
+ */
+ (...exps: Expression[]): Expression
+ }
+ interface or {
+ /**
+ * Or generates an OR expression which concatenates the given expressions with "OR".
+ */
+ (...exps: Expression[]): Expression
+ }
+ interface _in {
+ /**
+ * In generates an IN expression for the specified column and the list of allowed values.
+ * If values is empty, a SQL "0=1" will be generated which represents a false expression.
+ */
+ (col: string, ...values: {
+ }[]): Expression
+ }
+ interface notIn {
+ /**
+ * NotIn generates an NOT IN expression for the specified column and the list of disallowed values.
+ * If values is empty, an empty string will be returned indicating a true expression.
+ */
+ (col: string, ...values: {
+ }[]): Expression
+ }
+ interface like {
+ /**
+ * Like generates a LIKE expression for the specified column and the possible strings that the column should be like.
+ * If multiple values are present, the column should be like *all* of them. For example, Like("name", "key", "word")
+ * will generate a SQL expression: "name" LIKE "%key%" AND "name" LIKE "%word%".
+ *
+ * By default, each value will be surrounded by "%" to enable partial matching. If a value contains special characters
+ * such as "%", "\", "_", they will also be properly escaped.
+ *
+ * You may call Escape() and/or Match() to change the default behavior. For example, Like("name", "key").Match(false, true)
+ * generates "name" LIKE "key%".
+ */
+ (col: string, ...values: string[]): (LikeExp | undefined)
+ }
+ interface notLike {
+ /**
+ * NotLike generates a NOT LIKE expression.
+ * For example, NotLike("name", "key", "word") will generate a SQL expression:
+ * "name" NOT LIKE "%key%" AND "name" NOT LIKE "%word%". Please see Like() for more details.
+ */
+ (col: string, ...values: string[]): (LikeExp | undefined)
+ }
+ interface orLike {
+ /**
+ * OrLike generates an OR LIKE expression.
+ * This is similar to Like() except that the column should be like one of the possible values.
+ * For example, OrLike("name", "key", "word") will generate a SQL expression:
+ * "name" LIKE "%key%" OR "name" LIKE "%word%". Please see Like() for more details.
+ */
+ (col: string, ...values: string[]): (LikeExp | undefined)
+ }
+ interface orNotLike {
+ /**
+ * OrNotLike generates an OR NOT LIKE expression.
+ * For example, OrNotLike("name", "key", "word") will generate a SQL expression:
+ * "name" NOT LIKE "%key%" OR "name" NOT LIKE "%word%". Please see Like() for more details.
+ */
+ (col: string, ...values: string[]): (LikeExp | undefined)
+ }
+ interface exists {
+ /**
+ * Exists generates an EXISTS expression by prefixing "EXISTS" to the given expression.
+ */
+ (exp: Expression): Expression
+ }
+ interface notExists {
+ /**
+ * NotExists generates a NOT EXISTS expression by prefixing "NOT EXISTS" to the given expression.
+ */
+ (exp: Expression): Expression
+ }
+ interface between {
+ /**
+ * Between generates a BETWEEN expression.
+ * For example, Between("age", 10, 30) generates: "age" BETWEEN 10 AND 30
+ */
+ (col: string, from: {
+ }): Expression
+ }
+ interface notBetween {
+ /**
+ * NotBetween generates a NOT BETWEEN expression.
+ * For example, NotBetween("age", 10, 30) generates: "age" NOT BETWEEN 10 AND 30
+ */
+ (col: string, from: {
+ }): Expression
+ }
+ /**
+ * Exp represents an expression with a SQL fragment and a list of optional binding parameters.
+ */
+ interface Exp {
+ }
+ interface Exp {
+ /**
+ * Build converts an expression into a SQL fragment.
+ */
+ build(db: DB, params: Params): string
+ }
+ interface HashExp {
+ /**
+ * Build converts an expression into a SQL fragment.
+ */
+ build(db: DB, params: Params): string
+ }
+ /**
+ * NotExp represents an expression that should prefix "NOT" to a specified expression.
+ */
+ interface NotExp {
+ }
+ interface NotExp {
+ /**
+ * Build converts an expression into a SQL fragment.
+ */
+ build(db: DB, params: Params): string
+ }
+ /**
+ * AndOrExp represents an expression that concatenates multiple expressions using either "AND" or "OR".
+ */
+ interface AndOrExp {
+ }
+ interface AndOrExp {
+ /**
+ * Build converts an expression into a SQL fragment.
+ */
+ build(db: DB, params: Params): string
+ }
+ /**
+ * InExp represents an "IN" or "NOT IN" expression.
+ */
+ interface InExp {
+ }
+ interface InExp {
+ /**
+ * Build converts an expression into a SQL fragment.
+ */
+ build(db: DB, params: Params): string
+ }
+ /**
+ * LikeExp represents a variant of LIKE expressions.
+ */
+ interface LikeExp {
+ /**
+ * Like stores the LIKE operator. It can be "LIKE", "NOT LIKE".
+ * It may also be customized as something like "ILIKE".
+ */
+ like: string
+ }
+ interface LikeExp {
+ /**
+ * Escape specifies how a LIKE expression should be escaped.
+ * Each string at position 2i represents a special character and the string at position 2i+1 is
+ * the corresponding escaped version.
+ */
+ escape(...chars: string[]): (LikeExp | undefined)
+ }
+ interface LikeExp {
+ /**
+ * Match specifies whether to do wildcard matching on the left and/or right of given strings.
+ */
+ match(left: boolean): (LikeExp | undefined)
+ }
+ interface LikeExp {
+ /**
+ * Build converts an expression into a SQL fragment.
+ */
+ build(db: DB, params: Params): string
+ }
+ /**
+ * ExistsExp represents an EXISTS or NOT EXISTS expression.
+ */
+ interface ExistsExp {
+ }
+ interface ExistsExp {
+ /**
+ * Build converts an expression into a SQL fragment.
+ */
+ build(db: DB, params: Params): string
+ }
+ /**
+ * BetweenExp represents a BETWEEN or a NOT BETWEEN expression.
+ */
+ interface BetweenExp {
+ }
+ interface BetweenExp {
+ /**
+ * Build converts an expression into a SQL fragment.
+ */
+ build(db: DB, params: Params): string
+ }
+ interface enclose {
+ /**
+ * Enclose surrounds the provided nonempty expression with parenthesis "()".
+ */
+ (exp: Expression): Expression
+ }
+ /**
+ * EncloseExp represents a parenthesis enclosed expression.
+ */
+ interface EncloseExp {
+ }
+ interface EncloseExp {
+ /**
+ * Build converts an expression into a SQL fragment.
+ */
+ build(db: DB, params: Params): string
+ }
+ /**
+ * TableModel is the interface that should be implemented by models which have unconventional table names.
+ */
+ interface TableModel {
+ tableName(): string
+ }
+ /**
+ * ModelQuery represents a query associated with a struct model.
+ */
+ interface ModelQuery {
+ }
+ interface newModelQuery {
+ (model: {
+ }, fieldMapFunc: FieldMapFunc, db: DB, builder: Builder): (ModelQuery | undefined)
+ }
+ interface ModelQuery {
+ /**
+ * Context returns the context associated with the query.
+ */
+ context(): context.Context
+ }
+ interface ModelQuery {
+ /**
+ * WithContext associates a context with the query.
+ */
+ withContext(ctx: context.Context): (ModelQuery | undefined)
+ }
+ interface ModelQuery {
+ /**
+ * Exclude excludes the specified struct fields from being inserted/updated into the DB table.
+ */
+ exclude(...attrs: string[]): (ModelQuery | undefined)
+ }
+ interface ModelQuery {
+ /**
+ * Insert inserts a row in the table using the struct model associated with this query.
+ *
+ * By default, it inserts *all* public fields into the table, including those nil or empty ones.
+ * You may pass a list of the fields to this method to indicate that only those fields should be inserted.
+ * You may also call Exclude to exclude some fields from being inserted.
+ *
+ * If a model has an empty primary key, it is considered auto-incremental and the corresponding struct
+ * field will be filled with the generated primary key value after a successful insertion.
+ */
+ insert(...attrs: string[]): void
+ }
+ interface ModelQuery {
+ /**
+ * Update updates a row in the table using the struct model associated with this query.
+ * The row being updated has the same primary key as specified by the model.
+ *
+ * By default, it updates *all* public fields in the table, including those nil or empty ones.
+ * You may pass a list of the fields to this method to indicate that only those fields should be updated.
+ * You may also call Exclude to exclude some fields from being updated.
+ */
+ update(...attrs: string[]): void
+ }
+ interface ModelQuery {
+ /**
+ * Delete deletes a row in the table using the primary key specified by the struct model associated with this query.
+ */
+ delete(): void
+ }
+ /**
+ * ExecHookFunc executes before op allowing custom handling like auto fail/retry.
+ */
+ interface ExecHookFunc {(q: Query, op: () => void): void }
+ /**
+ * OneHookFunc executes right before the query populate the row result from One() call (aka. op).
+ */
+ interface OneHookFunc {(q: Query, a: {
+ }, op: (b: {
+ }) => void): void }
+ /**
+ * AllHookFunc executes right before the query populate the row result from All() call (aka. op).
+ */
+ interface AllHookFunc {(q: Query, sliceA: {
+ }, op: (sliceB: {
+ }) => void): void }
+ /**
+ * Params represents a list of parameter values to be bound to a SQL statement.
+ * The map keys are the parameter names while the map values are the corresponding parameter values.
+ */
+ interface Params extends _TygojaDict{}
+ /**
+ * Executor prepares, executes, or queries a SQL statement.
+ */
+ interface Executor {
+ /**
+ * Exec executes a SQL statement
+ */
+ exec(query: string, ...args: {
+ }[]): sql.Result
+ /**
+ * ExecContext executes a SQL statement with the given context
+ */
+ execContext(ctx: context.Context, query: string, ...args: {
+ }[]): sql.Result
+ /**
+ * Query queries a SQL statement
+ */
+ query(query: string, ...args: {
+ }[]): (sql.Rows | undefined)
+ /**
+ * QueryContext queries a SQL statement with the given context
+ */
+ queryContext(ctx: context.Context, query: string, ...args: {
+ }[]): (sql.Rows | undefined)
+ /**
+ * Prepare creates a prepared statement
+ */
+ prepare(query: string): (sql.Stmt | undefined)
+ }
+ /**
+ * Query represents a SQL statement to be executed.
+ */
+ interface Query {
+ /**
+ * FieldMapper maps struct field names to DB column names.
+ */
+ fieldMapper: FieldMapFunc
+ /**
+ * LastError contains the last error (if any) of the query.
+ * LastError is cleared by Execute(), Row(), Rows(), One(), and All().
+ */
+ lastError: Error
+ /**
+ * LogFunc is used to log the SQL statement being executed.
+ */
+ logFunc: LogFunc
+ /**
+ * PerfFunc is used to log the SQL execution time. It is ignored if nil.
+ * Deprecated: Please use QueryLogFunc and ExecLogFunc instead.
+ */
+ perfFunc: PerfFunc
+ /**
+ * QueryLogFunc is called each time when performing a SQL query that returns data.
+ */
+ queryLogFunc: QueryLogFunc
+ /**
+ * ExecLogFunc is called each time when a SQL statement is executed.
+ */
+ execLogFunc: ExecLogFunc
+ }
+ interface newQuery {
+ /**
+ * NewQuery creates a new Query with the given SQL statement.
+ */
+ (db: DB, executor: Executor, sql: string): (Query | undefined)
+ }
+ interface Query {
+ /**
+ * SQL returns the original SQL used to create the query.
+ * The actual SQL (RawSQL) being executed is obtained by replacing the named
+ * parameter placeholders with anonymous ones.
+ */
+ sql(): string
+ }
+ interface Query {
+ /**
+ * Context returns the context associated with the query.
+ */
+ context(): context.Context
+ }
+ interface Query {
+ /**
+ * WithContext associates a context with the query.
+ */
+ withContext(ctx: context.Context): (Query | undefined)
+ }
+ interface Query {
+ /**
+ * WithExecHook associates the provided exec hook function with the query.
+ *
+ * It is called for every Query resolver (Execute(), One(), All(), Row(), Column()),
+ * allowing you to implement auto fail/retry or any other additional handling.
+ */
+ withExecHook(fn: ExecHookFunc): (Query | undefined)
+ }
+ interface Query {
+ /**
+ * WithOneHook associates the provided hook function with the query,
+ * called on q.One(), allowing you to implement custom struct scan based
+ * on the One() argument and/or result.
+ */
+ withOneHook(fn: OneHookFunc): (Query | undefined)
+ }
+ interface Query {
+ /**
+ * WithAllHook associates the provided hook function with the query,
+ * called on q.All(), allowing you to implement custom slice scan based
+ * on the All() argument and/or result.
+ */
+ withAllHook(fn: AllHookFunc): (Query | undefined)
+ }
+ interface Query {
+ /**
+ * Params returns the parameters to be bound to the SQL statement represented by this query.
+ */
+ params(): Params
+ }
+ interface Query {
+ /**
+ * Prepare creates a prepared statement for later queries or executions.
+ * Close() should be called after finishing all queries.
+ */
+ prepare(): (Query | undefined)
+ }
+ interface Query {
+ /**
+ * Close closes the underlying prepared statement.
+ * Close does nothing if the query has not been prepared before.
+ */
+ close(): void
+ }
+ interface Query {
+ /**
+ * Bind sets the parameters that should be bound to the SQL statement.
+ * The parameter placeholders in the SQL statement are in the format of "{:ParamName}".
+ */
+ bind(params: Params): (Query | undefined)
+ }
+ interface Query {
+ /**
+ * Execute executes the SQL statement without retrieving data.
+ */
+ execute(): sql.Result
+ }
+ interface Query {
+ /**
+ * One executes the SQL statement and populates the first row of the result into a struct or NullStringMap.
+ * Refer to Rows.ScanStruct() and Rows.ScanMap() for more details on how to specify
+ * the variable to be populated.
+ * Note that when the query has no rows in the result set, an sql.ErrNoRows will be returned.
+ */
+ one(a: {
+ }): void
+ }
+ interface Query {
+ /**
+ * All executes the SQL statement and populates all the resulting rows into a slice of struct or NullStringMap.
+ * The slice must be given as a pointer. Each slice element must be either a struct or a NullStringMap.
+ * Refer to Rows.ScanStruct() and Rows.ScanMap() for more details on how each slice element can be.
+ * If the query returns no row, the slice will be an empty slice (not nil).
+ */
+ all(slice: {
+ }): void
+ }
+ interface Query {
+ /**
+ * Row executes the SQL statement and populates the first row of the result into a list of variables.
+ * Note that the number of the variables should match to that of the columns in the query result.
+ * Note that when the query has no rows in the result set, an sql.ErrNoRows will be returned.
+ */
+ row(...a: {
+ }[]): void
+ }
+ interface Query {
+ /**
+ * Column executes the SQL statement and populates the first column of the result into a slice.
+ * Note that the parameter must be a pointer to a slice.
+ */
+ column(a: {
+ }): void
+ }
+ interface Query {
+ /**
+ * Rows executes the SQL statement and returns a Rows object to allow retrieving data row by row.
+ */
+ rows(): (Rows | undefined)
+ }
+ /**
+ * QueryBuilder builds different clauses for a SELECT SQL statement.
+ */
+ interface QueryBuilder {
+ /**
+ * BuildSelect generates a SELECT clause from the given selected column names.
+ */
+ buildSelect(cols: Array, distinct: boolean, option: string): string
+ /**
+ * BuildFrom generates a FROM clause from the given tables.
+ */
+ buildFrom(tables: Array): string
+ /**
+ * BuildGroupBy generates a GROUP BY clause from the given group-by columns.
+ */
+ buildGroupBy(cols: Array): string
+ /**
+ * BuildJoin generates a JOIN clause from the given join information.
+ */
+ buildJoin(_arg0: Array, _arg1: Params): string
+ /**
+ * BuildWhere generates a WHERE clause from the given expression.
+ */
+ buildWhere(_arg0: Expression, _arg1: Params): string
+ /**
+ * BuildHaving generates a HAVING clause from the given expression.
+ */
+ buildHaving(_arg0: Expression, _arg1: Params): string
+ /**
+ * BuildOrderByAndLimit generates the ORDER BY and LIMIT clauses.
+ */
+ buildOrderByAndLimit(_arg0: string, _arg1: Array, _arg2: number, _arg3: number): string
+ /**
+ * BuildUnion generates a UNION clause from the given union information.
+ */
+ buildUnion(_arg0: Array, _arg1: Params): string
+ }
+ /**
+ * BaseQueryBuilder provides a basic implementation of QueryBuilder.
+ */
+ interface BaseQueryBuilder {
+ }
+ interface newBaseQueryBuilder {
+ /**
+ * NewBaseQueryBuilder creates a new BaseQueryBuilder instance.
+ */
+ (db: DB): (BaseQueryBuilder | undefined)
+ }
+ interface BaseQueryBuilder {
+ /**
+ * DB returns the DB instance associated with the query builder.
+ */
+ db(): (DB | undefined)
+ }
+ interface BaseQueryBuilder {
+ /**
+ * BuildSelect generates a SELECT clause from the given selected column names.
+ */
+ buildSelect(cols: Array, distinct: boolean, option: string): string
+ }
+ interface BaseQueryBuilder {
+ /**
+ * BuildFrom generates a FROM clause from the given tables.
+ */
+ buildFrom(tables: Array): string
+ }
+ interface BaseQueryBuilder {
+ /**
+ * BuildJoin generates a JOIN clause from the given join information.
+ */
+ buildJoin(joins: Array, params: Params): string
+ }
+ interface BaseQueryBuilder {
+ /**
+ * BuildWhere generates a WHERE clause from the given expression.
+ */
+ buildWhere(e: Expression, params: Params): string
+ }
+ interface BaseQueryBuilder {
+ /**
+ * BuildHaving generates a HAVING clause from the given expression.
+ */
+ buildHaving(e: Expression, params: Params): string
+ }
+ interface BaseQueryBuilder {
+ /**
+ * BuildGroupBy generates a GROUP BY clause from the given group-by columns.
+ */
+ buildGroupBy(cols: Array): string
+ }
+ interface BaseQueryBuilder {
+ /**
+ * BuildOrderByAndLimit generates the ORDER BY and LIMIT clauses.
+ */
+ buildOrderByAndLimit(sql: string, cols: Array, limit: number, offset: number): string
+ }
+ interface BaseQueryBuilder {
+ /**
+ * BuildUnion generates a UNION clause from the given union information.
+ */
+ buildUnion(unions: Array, params: Params): string
+ }
+ interface BaseQueryBuilder {
+ /**
+ * BuildOrderBy generates the ORDER BY clause.
+ */
+ buildOrderBy(cols: Array): string
+ }
+ interface BaseQueryBuilder {
+ /**
+ * BuildLimit generates the LIMIT clause.
+ */
+ buildLimit(limit: number, offset: number): string
+ }
+ /**
+ * VarTypeError indicates a variable type error when trying to populating a variable with DB result.
+ */
+ interface VarTypeError extends String{}
+ interface VarTypeError {
+ /**
+ * Error returns the error message.
+ */
+ error(): string
+ }
+ /**
+ * NullStringMap is a map of sql.NullString that can be used to hold DB query result.
+ * The map keys correspond to the DB column names, while the map values are their corresponding column values.
+ */
+ interface NullStringMap extends _TygojaDict{}
+ /**
+ * Rows enhances sql.Rows by providing additional data query methods.
+ * Rows can be obtained by calling Query.Rows(). It is mainly used to populate data row by row.
+ */
+ type _subyDZHA = sql.Rows
+ interface Rows extends _subyDZHA {
+ }
+ interface Rows {
+ /**
+ * ScanMap populates the current row of data into a NullStringMap.
+ * Note that the NullStringMap must not be nil, or it will panic.
+ * The NullStringMap will be populated using column names as keys and their values as
+ * the corresponding element values.
+ */
+ scanMap(a: NullStringMap): void
+ }
+ interface Rows {
+ /**
+ * ScanStruct populates the current row of data into a struct.
+ * The struct must be given as a pointer.
+ *
+ * ScanStruct associates struct fields with DB table columns through a field mapping function.
+ * It populates a struct field with the data of its associated column.
+ * Note that only exported struct fields will be populated.
+ *
+ * By default, DefaultFieldMapFunc() is used to map struct fields to table columns.
+ * This function separates each word in a field name with an underscore and turns every letter into lower case.
+ * For example, "LastName" is mapped to "last_name", "MyID" is mapped to "my_id", and so on.
+ * To change the default behavior, set DB.FieldMapper with your custom mapping function.
+ * You may also set Query.FieldMapper to change the behavior for particular queries.
+ */
+ scanStruct(a: {
+ }): void
+ }
+ /**
+ * BuildHookFunc defines a callback function that is executed on Query creation.
+ */
+ interface BuildHookFunc {(q: Query): void }
+ /**
+ * SelectQuery represents a DB-agnostic SELECT query.
+ * It can be built into a DB-specific query by calling the Build() method.
+ */
+ interface SelectQuery {
+ /**
+ * FieldMapper maps struct field names to DB column names.
+ */
+ fieldMapper: FieldMapFunc
+ /**
+ * TableMapper maps structs to DB table names.
+ */
+ tableMapper: TableMapFunc
+ }
+ /**
+ * JoinInfo contains the specification for a JOIN clause.
+ */
+ interface JoinInfo {
+ join: string
+ table: string
+ on: Expression
+ }
+ /**
+ * UnionInfo contains the specification for a UNION clause.
+ */
+ interface UnionInfo {
+ all: boolean
+ query?: Query
+ }
+ interface newSelectQuery {
+ /**
+ * NewSelectQuery creates a new SelectQuery instance.
+ */
+ (builder: Builder, db: DB): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * WithBuildHook runs the provided hook function with the query created on Build().
+ */
+ withBuildHook(fn: BuildHookFunc): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Context returns the context associated with the query.
+ */
+ context(): context.Context
+ }
+ interface SelectQuery {
+ /**
+ * WithContext associates a context with the query.
+ */
+ withContext(ctx: context.Context): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Select specifies the columns to be selected.
+ * Column names will be automatically quoted.
+ */
+ select(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * AndSelect adds additional columns to be selected.
+ * Column names will be automatically quoted.
+ */
+ andSelect(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Distinct specifies whether to select columns distinctively.
+ * By default, distinct is false.
+ */
+ distinct(v: boolean): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * SelectOption specifies additional option that should be append to "SELECT".
+ */
+ selectOption(option: string): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * From specifies which tables to select from.
+ * Table names will be automatically quoted.
+ */
+ from(...tables: string[]): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Where specifies the WHERE condition.
+ */
+ where(e: Expression): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * AndWhere concatenates a new WHERE condition with the existing one (if any) using "AND".
+ */
+ andWhere(e: Expression): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * OrWhere concatenates a new WHERE condition with the existing one (if any) using "OR".
+ */
+ orWhere(e: Expression): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Join specifies a JOIN clause.
+ * The "typ" parameter specifies the JOIN type (e.g. "INNER JOIN", "LEFT JOIN").
+ */
+ join(typ: string, table: string, on: Expression): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * InnerJoin specifies an INNER JOIN clause.
+ * This is a shortcut method for Join.
+ */
+ innerJoin(table: string, on: Expression): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * LeftJoin specifies a LEFT JOIN clause.
+ * This is a shortcut method for Join.
+ */
+ leftJoin(table: string, on: Expression): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * RightJoin specifies a RIGHT JOIN clause.
+ * This is a shortcut method for Join.
+ */
+ rightJoin(table: string, on: Expression): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * OrderBy specifies the ORDER BY clause.
+ * Column names will be properly quoted. A column name can contain "ASC" or "DESC" to indicate its ordering direction.
+ */
+ orderBy(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * AndOrderBy appends additional columns to the existing ORDER BY clause.
+ * Column names will be properly quoted. A column name can contain "ASC" or "DESC" to indicate its ordering direction.
+ */
+ andOrderBy(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * GroupBy specifies the GROUP BY clause.
+ * Column names will be properly quoted.
+ */
+ groupBy(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * AndGroupBy appends additional columns to the existing GROUP BY clause.
+ * Column names will be properly quoted.
+ */
+ andGroupBy(...cols: string[]): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Having specifies the HAVING clause.
+ */
+ having(e: Expression): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * AndHaving concatenates a new HAVING condition with the existing one (if any) using "AND".
+ */
+ andHaving(e: Expression): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * OrHaving concatenates a new HAVING condition with the existing one (if any) using "OR".
+ */
+ orHaving(e: Expression): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Union specifies a UNION clause.
+ */
+ union(q: Query): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * UnionAll specifies a UNION ALL clause.
+ */
+ unionAll(q: Query): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Limit specifies the LIMIT clause.
+ * A negative limit means no limit.
+ */
+ limit(limit: number): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Offset specifies the OFFSET clause.
+ * A negative offset means no offset.
+ */
+ offset(offset: number): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Bind specifies the parameter values to be bound to the query.
+ */
+ bind(params: Params): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * AndBind appends additional parameters to be bound to the query.
+ */
+ andBind(params: Params): (SelectQuery | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Build builds the SELECT query and returns an executable Query object.
+ */
+ build(): (Query | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * One executes the SELECT query and populates the first row of the result into the specified variable.
+ *
+ * If the query does not specify a "from" clause, the method will try to infer the name of the table
+ * to be selected from by calling getTableName() which will return either the variable type name
+ * or the TableName() method if the variable implements the TableModel interface.
+ *
+ * Note that when the query has no rows in the result set, an sql.ErrNoRows will be returned.
+ */
+ one(a: {
+ }): void
+ }
+ interface SelectQuery {
+ /**
+ * Model selects the row with the specified primary key and populates the model with the row data.
+ *
+ * The model variable should be a pointer to a struct. If the query does not specify a "from" clause,
+ * it will use the model struct to determine which table to select data from. It will also use the model
+ * to infer the name of the primary key column. Only simple primary key is supported. For composite primary keys,
+ * please use Where() to specify the filtering condition.
+ */
+ model(pk: {
+ }): void
+ }
+ interface SelectQuery {
+ /**
+ * All executes the SELECT query and populates all rows of the result into a slice.
+ *
+ * Note that the slice must be passed in as a pointer.
+ *
+ * If the query does not specify a "from" clause, the method will try to infer the name of the table
+ * to be selected from by calling getTableName() which will return either the type name of the slice elements
+ * or the TableName() method if the slice element implements the TableModel interface.
+ */
+ all(slice: {
+ }): void
+ }
+ interface SelectQuery {
+ /**
+ * Rows builds and executes the SELECT query and returns a Rows object for data retrieval purpose.
+ * This is a shortcut to SelectQuery.Build().Rows()
+ */
+ rows(): (Rows | undefined)
+ }
+ interface SelectQuery {
+ /**
+ * Row builds and executes the SELECT query and populates the first row of the result into the specified variables.
+ * This is a shortcut to SelectQuery.Build().Row()
+ */
+ row(...a: {
+ }[]): void
+ }
+ interface SelectQuery {
+ /**
+ * Column builds and executes the SELECT statement and populates the first column of the result into a slice.
+ * Note that the parameter must be a pointer to a slice.
+ * This is a shortcut to SelectQuery.Build().Column()
+ */
+ column(a: {
+ }): void
+ }
+ /**
+ * QueryInfo represents a debug/info struct with exported SelectQuery fields.
+ */
+ interface QueryInfo {
+ builder: Builder
+ selects: Array
+ distinct: boolean
+ selectOption: string
+ from: Array
+ where: Expression
+ join: Array
+ orderBy: Array
+ groupBy: Array
+ having: Expression
+ union: Array
+ limit: number
+ offset: number
+ params: Params
+ context: context.Context
+ buildHook: BuildHookFunc
+ }
+ interface SelectQuery {
+ /**
+ * Info exports common SelectQuery fields allowing to inspect the
+ * current select query options.
+ */
+ info(): (QueryInfo | undefined)
+ }
+ /**
+ * FieldMapFunc converts a struct field name into a DB column name.
+ */
+ interface FieldMapFunc {(_arg0: string): string }
+ /**
+ * TableMapFunc converts a sample struct into a DB table name.
+ */
+ interface TableMapFunc {(a: {
+ }): string }
+ interface structInfo {
+ }
+ type _subAtcri = structInfo
+ interface structValue extends _subAtcri {
+ }
+ interface fieldInfo {
+ }
+ interface structInfoMapKey {
+ }
+ /**
+ * PostScanner is an optional interface used by ScanStruct.
+ */
+ interface PostScanner {
+ /**
+ * PostScan executes right after the struct has been populated
+ * with the DB values, allowing you to further normalize or validate
+ * the loaded data.
+ */
+ postScan(): void
+ }
+ interface defaultFieldMapFunc {
+ /**
+ * DefaultFieldMapFunc maps a field name to a DB column name.
+ * The mapping rule set by this method is that words in a field name will be separated by underscores
+ * and the name will be turned into lower case. For example, "FirstName" maps to "first_name", and "MyID" becomes "my_id".
+ * See DB.FieldMapper for more details.
+ */
+ (f: string): string
+ }
+ interface getTableName {
+ /**
+ * GetTableName implements the default way of determining the table name corresponding to the given model struct
+ * or slice of structs. To get the actual table name for a model, you should use DB.TableMapFunc() instead.
+ * Do not call this method in a model's TableName() method because it will cause infinite loop.
+ */
+ (a: {
+ }): string
+ }
+ /**
+ * Tx enhances sql.Tx with additional querying methods.
+ */
+ type _subhVSWD = Builder
+ interface Tx extends _subhVSWD {
+ }
+ interface Tx {
+ /**
+ * Commit commits the transaction.
+ */
+ commit(): void
+ }
+ interface Tx {
+ /**
+ * Rollback aborts the transaction.
+ */
+ rollback(): void
+ }
+}
+
+/**
+ * Package validation provides configurable and extensible rules for validating data of various types.
+ */
+namespace ozzo_validation {
+ /**
+ * Error interface represents an validation error
+ */
+ interface Error {
+ error(): string
+ code(): string
+ message(): string
+ setMessage(_arg0: string): Error
+ params(): _TygojaDict
+ setParams(_arg0: _TygojaDict): Error
+ }
+}
+
+namespace security {
+ // @ts-ignore
+ import crand = rand
+ interface s256Challenge {
+ /**
+ * S256Challenge creates base64 encoded sha256 challenge string derived from code.
+ * The padding of the result base64 string is stripped per [RFC 7636].
+ *
+ * [RFC 7636]: https://datatracker.ietf.org/doc/html/rfc7636#section-4.2
+ */
+ (code: string): string
+ }
+ interface encrypt {
+ /**
+ * Encrypt encrypts data with key (must be a valid 32 char AES key).
+ */
+ (data: string, key: string): string
+ }
+ interface decrypt {
+ /**
+ * Decrypt decrypts encrypted text with key (must be a valid 32 char AES key).
+ */
+ (cipherText: string, key: string): string
+ }
+ interface parseUnverifiedJWT {
+ /**
+ * ParseUnverifiedJWT parses JWT token and returns its claims
+ * but DOES NOT verify the signature.
+ *
+ * It verifies only the exp, iat and nbf claims.
+ */
+ (token: string): jwt.MapClaims
+ }
+ interface parseJWT {
+ /**
+ * ParseJWT verifies and parses JWT token and returns its claims.
+ */
+ (token: string, verificationKey: string): jwt.MapClaims
+ }
+ interface newToken {
+ /**
+ * NewToken generates and returns new HS256 signed JWT token.
+ */
+ (payload: jwt.MapClaims, signingKey: string, secondsDuration: number): string
+ }
+ // @ts-ignore
+ import cryptoRand = rand
+ // @ts-ignore
+ import mathRand = rand
+ interface randomString {
+ /**
+ * RandomString generates a cryptographically random string with the specified length.
+ *
+ * The generated string matches [A-Za-z0-9]+ and it's transparent to URL-encoding.
+ */
+ (length: number): string
+ }
+ interface randomStringWithAlphabet {
+ /**
+ * RandomStringWithAlphabet generates a cryptographically random string
+ * with the specified length and characters set.
+ *
+ * It panics if for some reason rand.Int returns a non-nil error.
+ */
+ (length: number, alphabet: string): string
+ }
+ interface pseudorandomString {
+ /**
+ * PseudorandomString generates a pseudorandom string with the specified length.
+ *
+ * The generated string matches [A-Za-z0-9]+ and it's transparent to URL-encoding.
+ *
+ * For a cryptographically random string (but a little bit slower) use RandomString instead.
+ */
+ (length: number): string
+ }
+ interface pseudorandomStringWithAlphabet {
+ /**
+ * PseudorandomStringWithAlphabet generates a pseudorandom string
+ * with the specified length and characters set.
+ *
+ * For a cryptographically random (but a little bit slower) use RandomStringWithAlphabet instead.
+ */
+ (length: number, alphabet: string): string
+ }
+}
+
+namespace filesystem {
+ /**
+ * FileReader defines an interface for a file resource reader.
+ */
+ interface FileReader {
+ open(): io.ReadSeekCloser
+ }
+ /**
+ * File defines a single file [io.ReadSeekCloser] resource.
+ *
+ * The file could be from a local path, multipipart/formdata header, etc.
+ */
+ interface File {
+ name: string
+ originalName: string
+ size: number
+ reader: FileReader
+ }
+ interface newFileFromPath {
+ /**
+ * NewFileFromPath creates a new File instance from the provided local file path.
+ */
+ (path: string): (File | undefined)
+ }
+ interface newFileFromBytes {
+ /**
+ * NewFileFromBytes creates a new File instance from the provided byte slice.
+ */
+ (b: string, name: string): (File | undefined)
+ }
+ interface newFileFromMultipart {
+ /**
+ * NewFileFromMultipart creates a new File instace from the provided multipart header.
+ */
+ (mh: multipart.FileHeader): (File | undefined)
+ }
+ /**
+ * MultipartReader defines a FileReader from [multipart.FileHeader].
+ */
+ interface MultipartReader {
+ header?: multipart.FileHeader
+ }
+ interface MultipartReader {
+ /**
+ * Open implements the [filesystem.FileReader] interface.
+ */
+ open(): io.ReadSeekCloser
+ }
+ /**
+ * PathReader defines a FileReader from a local file path.
+ */
+ interface PathReader {
+ path: string
+ }
+ interface PathReader {
+ /**
+ * Open implements the [filesystem.FileReader] interface.
+ */
+ open(): io.ReadSeekCloser
+ }
+ /**
+ * BytesReader defines a FileReader from bytes content.
+ */
+ interface BytesReader {
+ bytes: string
+ }
+ interface BytesReader {
+ /**
+ * Open implements the [filesystem.FileReader] interface.
+ */
+ open(): io.ReadSeekCloser
+ }
+ type _subEsFsW = bytes.Reader
+ interface bytesReadSeekCloser extends _subEsFsW {
+ }
+ interface bytesReadSeekCloser {
+ /**
+ * Close implements the [io.ReadSeekCloser] interface.
+ */
+ close(): void
+ }
+ interface System {
+ }
+ interface newS3 {
+ /**
+ * NewS3 initializes an S3 filesystem instance.
+ *
+ * NB! Make sure to call `Close()` after you are done working with it.
+ */
+ (bucketName: string, region: string, endpoint: string, accessKey: string, secretKey: string, s3ForcePathStyle: boolean): (System | undefined)
+ }
+ interface newLocal {
+ /**
+ * NewLocal initializes a new local filesystem instance.
+ *
+ * NB! Make sure to call `Close()` after you are done working with it.
+ */
+ (dirPath: string): (System | undefined)
+ }
+ interface System {
+ /**
+ * SetContext assigns the specified context to the current filesystem.
+ */
+ setContext(ctx: context.Context): void
+ }
+ interface System {
+ /**
+ * Close releases any resources used for the related filesystem.
+ */
+ close(): void
+ }
+ interface System {
+ /**
+ * Exists checks if file with fileKey path exists or not.
+ */
+ exists(fileKey: string): boolean
+ }
+ interface System {
+ /**
+ * Attributes returns the attributes for the file with fileKey path.
+ */
+ attributes(fileKey: string): (blob.Attributes | undefined)
+ }
+ interface System {
+ /**
+ * GetFile returns a file content reader for the given fileKey.
+ *
+ * NB! Make sure to call `Close()` after you are done working with it.
+ */
+ getFile(fileKey: string): (blob.Reader | undefined)
+ }
+ interface System {
+ /**
+ * List returns a flat list with info for all files under the specified prefix.
+ */
+ list(prefix: string): Array<(blob.ListObject | undefined)>
+ }
+ interface System {
+ /**
+ * Upload writes content into the fileKey location.
+ *
+ * (content is Go []byte, mapped to string by the types generator.)
+ */
+ upload(content: string, fileKey: string): void
+ }
+ interface System {
+ /**
+ * UploadFile uploads the provided [File] to the fileKey location.
+ */
+ uploadFile(file: File, fileKey: string): void
+ }
+ interface System {
+ /**
+ * UploadMultipart uploads the provided multipart file to the fileKey location.
+ */
+ uploadMultipart(fh: multipart.FileHeader, fileKey: string): void
+ }
+ interface System {
+ /**
+ * Delete deletes stored file at fileKey location.
+ */
+ delete(fileKey: string): void
+ }
+ interface System {
+ /**
+ * DeletePrefix deletes everything starting with the specified prefix.
+ *
+ * Returns a list with the errors of the failed deletions, if any
+ * (mirrors the Go []error return value).
+ */
+ deletePrefix(prefix: string): Array<Error>
+ }
+ interface System {
+ /**
+ * Serve serves the file at fileKey location to an HTTP response.
+ *
+ * (name is presumably the filename used for the served response
+ * headers — verify against the Go [filesystem.System.Serve] docs.)
+ */
+ serve(res: http.ResponseWriter, req: http.Request, fileKey: string, name: string): void
+ }
+ interface System {
+ /**
+ * CreateThumb creates a new thumb image for the file at originalKey location.
+ * The new thumb file is stored at thumbKey location.
+ *
+ * thumbSize is in the format:
+ * - 0xH (eg. 0x100) - resize to H height preserving the aspect ratio
+ * - Wx0 (eg. 300x0) - resize to W width preserving the aspect ratio
+ * - WxH (eg. 300x100) - resize and crop to WxH viewbox (from center)
+ * - WxHt (eg. 300x100t) - resize and crop to WxH viewbox (from top)
+ * - WxHb (eg. 300x100b) - resize and crop to WxH viewbox (from bottom)
+ * - WxHf (eg. 300x100f) - fit inside a WxH viewbox (without cropping)
+ *
+ * NOTE(review): the docs above reference a thumbSize argument that is
+ * missing from this generated signature (possibly dropped when the
+ * generator flattened grouped Go parameters) — verify against the Go
+ * CreateThumb definition.
+ */
+ createThumb(originalKey: string, thumbKey: string): void
+ }
+}
+
+/**
+ * Package core is the backbone of PocketBase.
+ *
+ * It defines the main PocketBase App interface and its base implementation.
+ */
+namespace core {
+ /**
+ * App defines the main PocketBase app interface.
+ */
+ interface App {
+ /**
+ * Deprecated:
+ * This method may get removed in the near future.
+ * It is recommended to access the app db instance from app.Dao().DB() or
+ * if you want more flexibility - app.Dao().ConcurrentDB() and app.Dao().NonconcurrentDB().
+ *
+ * DB returns the default app database instance.
+ */
+ db(): (dbx.DB | undefined)
+ /**
+ * Dao returns the default app Dao instance.
+ *
+ * This Dao could operate only on the tables and models
+ * associated with the default app database. For example,
+ * trying to access the request logs table will result in error.
+ */
+ dao(): (daos.Dao | undefined)
+ /**
+ * Deprecated:
+ * This method may get removed in the near future.
+ * It is recommended to access the logs db instance from app.LogsDao().DB() or
+ * if you want more flexibility - app.LogsDao().ConcurrentDB() and app.LogsDao().NonconcurrentDB().
+ *
+ * LogsDB returns the app logs database instance.
+ */
+ logsDB(): (dbx.DB | undefined)
+ /**
+ * LogsDao returns the app logs Dao instance.
+ *
+ * This Dao could operate only on the tables and models
+ * associated with the logs database. For example, trying to access
+ * the users table from LogsDao will result in error.
+ */
+ logsDao(): (daos.Dao | undefined)
+ /**
+ * DataDir returns the app data directory path.
+ */
+ dataDir(): string
+ /**
+ * EncryptionEnv returns the name of the app secret env key
+ * (used for settings encryption).
+ */
+ encryptionEnv(): string
+ /**
+ * IsDebug returns whether the app is in debug mode
+ * (showing more detailed error logs, executed sql statements, etc.).
+ */
+ isDebug(): boolean
+ /**
+ * Settings returns the loaded app settings.
+ */
+ settings(): (settings.Settings | undefined)
+ /**
+ * Cache returns the app internal cache store.
+ */
+ cache(): (store.Store | undefined)
+ /**
+ * SubscriptionsBroker returns the app realtime subscriptions broker instance.
+ */
+ subscriptionsBroker(): (subscriptions.Broker | undefined)
+ /**
+ * NewMailClient creates and returns a configured app mail client.
+ */
+ newMailClient(): mailer.Mailer
+ /**
+ * NewFilesystem creates and returns a configured filesystem.System instance
+ * for managing regular app files (eg. collection uploads).
+ *
+ * NB! Make sure to call Close() on the returned result
+ * after you are done working with it.
+ */
+ newFilesystem(): (filesystem.System | undefined)
+ /**
+ * NewBackupsFilesystem creates and returns a configured filesystem.System instance
+ * for managing app backups.
+ *
+ * NB! Make sure to call Close() on the returned result
+ * after you are done working with it.
+ */
+ newBackupsFilesystem(): (filesystem.System | undefined)
+ /**
+ * RefreshSettings reinitializes and reloads the stored application settings.
+ */
+ refreshSettings(): void
+ /**
+ * IsBootstrapped checks if the application was initialized
+ * (aka. whether Bootstrap() was called).
+ */
+ isBootstrapped(): boolean
+ /**
+ * Bootstrap takes care for initializing the application
+ * (open db connections, load settings, etc.).
+ *
+ * It will call ResetBootstrapState() if the application was already bootstrapped.
+ */
+ bootstrap(): void
+ /**
+ * ResetBootstrapState takes care for releasing initialized app resources
+ * (eg. closing db connections).
+ */
+ resetBootstrapState(): void
+ /**
+ * CreateBackup creates a new backup of the current app pb_data directory.
+ *
+ * Backups can be stored on S3 if it is configured in app.Settings().Backups.
+ *
+ * Please refer to the godoc of the specific core.App implementation
+ * for details on the backup procedures.
+ */
+ createBackup(ctx: context.Context, name: string): void
+ /**
+ * RestoreBackup restores the backup with the specified name and restarts
+ * the current running application process.
+ *
+ * To safely perform the restore it is recommended to have free disk space
+ * for at least 2x the size of the restored pb_data backup.
+ *
+ * Please refer to the godoc of the specific core.App implementation
+ * for details on the restore procedures.
+ *
+ * NB! This feature is experimental and currently is expected to work only on UNIX based systems.
+ */
+ restoreBackup(ctx: context.Context, name: string): void
+ /**
+ * Restart restarts the current running application process.
+ *
+ * Currently it is relying on execve so it is supported only on UNIX based systems.
+ */
+ restart(): void
+ /**
+ * OnBeforeBootstrap hook is triggered before initializing the main
+ * application resources (eg. before db open and initial settings load).
+ */
+ onBeforeBootstrap(): (hook.Hook | undefined)
+ /**
+ * OnAfterBootstrap hook is triggered after initializing the main
+ * application resources (eg. after db open and initial settings load).
+ */
+ onAfterBootstrap(): (hook.Hook | undefined)
+ /**
+ * OnBeforeServe hook is triggered before serving the internal router (echo),
+ * allowing you to adjust its options and attach new routes or middlewares.
+ */
+ onBeforeServe(): (hook.Hook | undefined)
+ /**
+ * OnBeforeApiError hook is triggered right before sending an error API
+ * response to the client, allowing you to further modify the error data
+ * or to return a completely different API response.
+ */
+ onBeforeApiError(): (hook.Hook | undefined)
+ /**
+ * OnAfterApiError hook is triggered right after sending an error API
+ * response to the client.
+ * It could be used to log the final API error in external services.
+ */
+ onAfterApiError(): (hook.Hook | undefined)
+ /**
+ * OnTerminate hook is triggered when the app is in the process
+ * of being terminated (eg. on SIGTERM signal).
+ */
+ onTerminate(): (hook.Hook | undefined)
+ /**
+ * OnModelBeforeCreate hook is triggered before inserting a new
+ * entry in the DB, allowing you to modify or validate the stored data.
+ *
+ * If the optional "tags" list (table names and/or the Collection id for Record models)
+ * is specified, then all event handlers registered via the created hook
+ * will be triggered and called only if their event data origin matches the tags.
+ */
+ onModelBeforeCreate(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnModelAfterCreate hook is triggered after successfully
+ * inserting a new entry in the DB.
+ *
+ * If the optional "tags" list (table names and/or the Collection id for Record models)
+ * is specified, then all event handlers registered via the created hook
+ * will be triggered and called only if their event data origin matches the tags.
+ */
+ onModelAfterCreate(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnModelBeforeUpdate hook is triggered before updating existing
+ * entry in the DB, allowing you to modify or validate the stored data.
+ *
+ * If the optional "tags" list (table names and/or the Collection id for Record models)
+ * is specified, then all event handlers registered via the created hook
+ * will be triggered and called only if their event data origin matches the tags.
+ */
+ onModelBeforeUpdate(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnModelAfterUpdate hook is triggered after successfully updating
+ * existing entry in the DB.
+ *
+ * If the optional "tags" list (table names and/or the Collection id for Record models)
+ * is specified, then all event handlers registered via the created hook
+ * will be triggered and called only if their event data origin matches the tags.
+ */
+ onModelAfterUpdate(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnModelBeforeDelete hook is triggered before deleting an
+ * existing entry from the DB.
+ *
+ * If the optional "tags" list (table names and/or the Collection id for Record models)
+ * is specified, then all event handlers registered via the created hook
+ * will be triggered and called only if their event data origin matches the tags.
+ */
+ onModelBeforeDelete(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnModelAfterDelete hook is triggered after successfully deleting an
+ * existing entry from the DB.
+ *
+ * If the optional "tags" list (table names and/or the Collection id for Record models)
+ * is specified, then all event handlers registered via the created hook
+ * will be triggered and called only if their event data origin matches the tags.
+ */
+ onModelAfterDelete(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnMailerBeforeAdminResetPasswordSend hook is triggered right
+ * before sending a password reset email to an admin, allowing you
+ * to inspect and customize the email message that is being sent.
+ */
+ onMailerBeforeAdminResetPasswordSend(): (hook.Hook | undefined)
+ /**
+ * OnMailerAfterAdminResetPasswordSend hook is triggered after
+ * admin password reset email was successfully sent.
+ */
+ onMailerAfterAdminResetPasswordSend(): (hook.Hook | undefined)
+ /**
+ * OnMailerBeforeRecordResetPasswordSend hook is triggered right
+ * before sending a password reset email to an auth record, allowing
+ * you to inspect and customize the email message that is being sent.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerBeforeRecordResetPasswordSend(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnMailerAfterRecordResetPasswordSend hook is triggered after
+ * an auth record password reset email was successfully sent.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerAfterRecordResetPasswordSend(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnMailerBeforeRecordVerificationSend hook is triggered right
+ * before sending a verification email to an auth record, allowing
+ * you to inspect and customize the email message that is being sent.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerBeforeRecordVerificationSend(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnMailerAfterRecordVerificationSend hook is triggered after a
+ * verification email was successfully sent to an auth record.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerAfterRecordVerificationSend(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnMailerBeforeRecordChangeEmailSend hook is triggered right before
+ * sending a confirmation new address email to an auth record, allowing
+ * you to inspect and customize the email message that is being sent.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerBeforeRecordChangeEmailSend(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnMailerAfterRecordChangeEmailSend hook is triggered after a
+ * confirmation new address email was successfully sent to an auth record.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onMailerAfterRecordChangeEmailSend(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRealtimeConnectRequest hook is triggered right before establishing
+ * the SSE client connection.
+ */
+ onRealtimeConnectRequest(): (hook.Hook | undefined)
+ /**
+ * OnRealtimeDisconnectRequest hook is triggered on disconnected/interrupted
+ * SSE client connection.
+ */
+ onRealtimeDisconnectRequest(): (hook.Hook | undefined)
+ /**
+ * OnRealtimeBeforeMessageSend hook is triggered right before sending
+ * an SSE message to a client.
+ *
+ * Returning [hook.StopPropagation] will prevent sending the message.
+ * Returning any other non-nil error will close the realtime connection.
+ */
+ onRealtimeBeforeMessageSend(): (hook.Hook | undefined)
+ /**
+ * OnRealtimeAfterMessageSend hook is triggered right after sending
+ * an SSE message to a client.
+ */
+ onRealtimeAfterMessageSend(): (hook.Hook | undefined)
+ /**
+ * OnRealtimeBeforeSubscribeRequest hook is triggered before changing
+ * the client subscriptions, allowing you to further validate and
+ * modify the submitted change.
+ */
+ onRealtimeBeforeSubscribeRequest(): (hook.Hook | undefined)
+ /**
+ * OnRealtimeAfterSubscribeRequest hook is triggered after the client
+ * subscriptions were successfully changed.
+ */
+ onRealtimeAfterSubscribeRequest(): (hook.Hook | undefined)
+ /**
+ * OnSettingsListRequest hook is triggered on each successful
+ * API Settings list request.
+ *
+ * Could be used to validate or modify the response before
+ * returning it to the client.
+ */
+ onSettingsListRequest(): (hook.Hook | undefined)
+ /**
+ * OnSettingsBeforeUpdateRequest hook is triggered before each API
+ * Settings update request (after request data load and before settings persistence).
+ *
+ * Could be used to additionally validate the request data or
+ * implement completely different persistence behavior.
+ */
+ onSettingsBeforeUpdateRequest(): (hook.Hook | undefined)
+ /**
+ * OnSettingsAfterUpdateRequest hook is triggered after each
+ * successful API Settings update request.
+ */
+ onSettingsAfterUpdateRequest(): (hook.Hook | undefined)
+ /**
+ * OnFileDownloadRequest hook is triggered before each API File download request.
+ *
+ * Could be used to validate or modify the file response before
+ * returning it to the client.
+ */
+ onFileDownloadRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnFileBeforeTokenRequest hook is triggered before each file
+ * token API request.
+ *
+ * If no token or model was submitted, e.Model and e.Token will be empty,
+ * allowing you to implement your own custom model file auth implementation.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onFileBeforeTokenRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnFileAfterTokenRequest hook is triggered after each
+ * successful file token API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onFileAfterTokenRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnAdminsListRequest hook is triggered on each API Admins list request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ */
+ onAdminsListRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminViewRequest hook is triggered on each API Admin view request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ */
+ onAdminViewRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminBeforeCreateRequest hook is triggered before each API
+ * Admin create request (after request data load and before model persistence).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ */
+ onAdminBeforeCreateRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminAfterCreateRequest hook is triggered after each
+ * successful API Admin create request.
+ */
+ onAdminAfterCreateRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminBeforeUpdateRequest hook is triggered before each API
+ * Admin update request (after request data load and before model persistence).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ */
+ onAdminBeforeUpdateRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminAfterUpdateRequest hook is triggered after each
+ * successful API Admin update request.
+ */
+ onAdminAfterUpdateRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminBeforeDeleteRequest hook is triggered before each API
+ * Admin delete request (after model load and before actual deletion).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different delete behavior.
+ */
+ onAdminBeforeDeleteRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminAfterDeleteRequest hook is triggered after each
+ * successful API Admin delete request.
+ */
+ onAdminAfterDeleteRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminAuthRequest hook is triggered on each successful API Admin
+ * authentication request (sign-in, token refresh, etc.).
+ *
+ * Could be used to additionally validate or modify the
+ * authenticated admin data and token.
+ */
+ onAdminAuthRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminBeforeAuthWithPasswordRequest hook is triggered before each Admin
+ * auth with password API request (after request data load and before password validation).
+ *
+ * Could be used to implement for example a custom password validation
+ * or to locate a different Admin identity (by assigning [AdminAuthWithPasswordEvent.Admin]).
+ */
+ onAdminBeforeAuthWithPasswordRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminAfterAuthWithPasswordRequest hook is triggered after each
+ * successful Admin auth with password API request.
+ */
+ onAdminAfterAuthWithPasswordRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminBeforeAuthRefreshRequest hook is triggered before each Admin
+ * auth refresh API request (right before generating a new auth token).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different auth refresh behavior.
+ */
+ onAdminBeforeAuthRefreshRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminAfterAuthRefreshRequest hook is triggered after each
+ * successful auth refresh API request (right after generating a new auth token).
+ */
+ onAdminAfterAuthRefreshRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminBeforeRequestPasswordResetRequest hook is triggered before each Admin
+ * request password reset API request (after request data load and before sending the reset email).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different password reset behavior.
+ */
+ onAdminBeforeRequestPasswordResetRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminAfterRequestPasswordResetRequest hook is triggered after each
+ * successful request password reset API request.
+ */
+ onAdminAfterRequestPasswordResetRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminBeforeConfirmPasswordResetRequest hook is triggered before each Admin
+ * confirm password reset API request (after request data load and before persistence).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ */
+ onAdminBeforeConfirmPasswordResetRequest(): (hook.Hook | undefined)
+ /**
+ * OnAdminAfterConfirmPasswordResetRequest hook is triggered after each
+ * successful confirm password reset API request.
+ */
+ onAdminAfterConfirmPasswordResetRequest(): (hook.Hook | undefined)
+ /**
+ * OnRecordAuthRequest hook is triggered on each successful API
+ * record authentication request (sign-in, token refresh, etc.).
+ *
+ * Could be used to additionally validate or modify the authenticated
+ * record data and token.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAuthRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeAuthWithPasswordRequest hook is triggered before each Record
+ * auth with password API request (after request data load and before password validation).
+ *
+ * Could be used to implement for example a custom password validation
+ * or to locate a different Record model (by reassigning [RecordAuthWithPasswordEvent.Record]).
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeAuthWithPasswordRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterAuthWithPasswordRequest hook is triggered after each
+ * successful Record auth with password API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterAuthWithPasswordRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeAuthWithOAuth2Request hook is triggered before each Record
+ * OAuth2 sign-in/sign-up API request (after token exchange and before external provider linking).
+ *
+ * If the [RecordAuthWithOAuth2Event.Record] is not set, then the OAuth2
+ * request will try to create a new auth Record.
+ *
+ * To assign or link a different existing record model you can
+ * change the [RecordAuthWithOAuth2Event.Record] field.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeAuthWithOAuth2Request(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterAuthWithOAuth2Request hook is triggered after each
+ * successful Record OAuth2 API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterAuthWithOAuth2Request(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeAuthRefreshRequest hook is triggered before each Record
+ * auth refresh API request (right before generating a new auth token).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different auth refresh behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeAuthRefreshRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterAuthRefreshRequest hook is triggered after each
+ * successful auth refresh API request (right after generating a new auth token).
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterAuthRefreshRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordListExternalAuthsRequest hook is triggered on each API record external auths list request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordListExternalAuthsRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeUnlinkExternalAuthRequest hook is triggered before each API record
+ * external auth unlink request (after models load and before the actual relation deletion).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different delete behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeUnlinkExternalAuthRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterUnlinkExternalAuthRequest hook is triggered after each
+ * successful API record external auth unlink request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterUnlinkExternalAuthRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeRequestPasswordResetRequest hook is triggered before each Record
+ * request password reset API request (after request data load and before sending the reset email).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different password reset behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeRequestPasswordResetRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterRequestPasswordResetRequest hook is triggered after each
+ * successful request password reset API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterRequestPasswordResetRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeConfirmPasswordResetRequest hook is triggered before each Record
+ * confirm password reset API request (after request data load and before persistence).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeConfirmPasswordResetRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterConfirmPasswordResetRequest hook is triggered after each
+ * successful confirm password reset API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterConfirmPasswordResetRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeRequestVerificationRequest hook is triggered before each Record
+ * request verification API request (after request data load and before sending the verification email).
+ *
+ * Could be used to additionally validate the loaded request data or implement
+ * completely different verification behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeRequestVerificationRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterRequestVerificationRequest hook is triggered after each
+ * successful request verification API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterRequestVerificationRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeConfirmVerificationRequest hook is triggered before each Record
+ * confirm verification API request (after request data load and before persistence).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeConfirmVerificationRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterConfirmVerificationRequest hook is triggered after each
+ * successful confirm verification API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterConfirmVerificationRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeRequestEmailChangeRequest hook is triggered before each Record request email change API request
+ * (after request data load and before sending the email link to confirm the change).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different request email change behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeRequestEmailChangeRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterRequestEmailChangeRequest hook is triggered after each
+ * successful request email change API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterRequestEmailChangeRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeConfirmEmailChangeRequest hook is triggered before each Record
+ * confirm email change API request (after request data load and before persistence).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeConfirmEmailChangeRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterConfirmEmailChangeRequest hook is triggered after each
+ * successful confirm email change API request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterConfirmEmailChangeRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordsListRequest hook is triggered on each API Records list request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordsListRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordViewRequest hook is triggered on each API Record view request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordViewRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeCreateRequest hook is triggered before each API Record
+ * create request (after request data load and before model persistence).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeCreateRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterCreateRequest hook is triggered after each
+ * successful API Record create request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterCreateRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeUpdateRequest hook is triggered before each API Record
+ * update request (after request data load and before model persistence).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeUpdateRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterUpdateRequest hook is triggered after each
+ * successful API Record update request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterUpdateRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordBeforeDeleteRequest hook is triggered before each API Record
+ * delete request (after model load and before actual deletion).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different delete behavior.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordBeforeDeleteRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnRecordAfterDeleteRequest hook is triggered after each
+ * successful API Record delete request.
+ *
+ * If the optional "tags" list (Collection ids or names) is specified,
+ * then all event handlers registered via the created hook will be
+ * triggered and called only if their event data origin matches the tags.
+ */
+ onRecordAfterDeleteRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ /**
+ * OnCollectionsListRequest hook is triggered on each API Collections list request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ */
+ onCollectionsListRequest(): (hook.Hook | undefined)
+ /**
+ * OnCollectionViewRequest hook is triggered on each API Collection view request.
+ *
+ * Could be used to validate or modify the response before returning it to the client.
+ */
+ onCollectionViewRequest(): (hook.Hook | undefined)
+ /**
+ * OnCollectionBeforeCreateRequest hook is triggered before each API Collection
+ * create request (after request data load and before model persistence).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ */
+ onCollectionBeforeCreateRequest(): (hook.Hook | undefined)
+ /**
+ * OnCollectionAfterCreateRequest hook is triggered after each
+ * successful API Collection create request.
+ */
+ onCollectionAfterCreateRequest(): (hook.Hook | undefined)
+ /**
+ * OnCollectionBeforeUpdateRequest hook is triggered before each API Collection
+ * update request (after request data load and before model persistence).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different persistence behavior.
+ */
+ onCollectionBeforeUpdateRequest(): (hook.Hook | undefined)
+ /**
+ * OnCollectionAfterUpdateRequest hook is triggered after each
+ * successful API Collection update request.
+ */
+ onCollectionAfterUpdateRequest(): (hook.Hook | undefined)
+ /**
+ * OnCollectionBeforeDeleteRequest hook is triggered before each API
+ * Collection delete request (after model load and before actual deletion).
+ *
+ * Could be used to additionally validate the request data or implement
+ * completely different delete behavior.
+ */
+ onCollectionBeforeDeleteRequest(): (hook.Hook | undefined)
+ /**
+ * OnCollectionAfterDeleteRequest hook is triggered after each
+ * successful API Collection delete request.
+ */
+ onCollectionAfterDeleteRequest(): (hook.Hook | undefined)
+ /**
+ * OnCollectionsBeforeImportRequest hook is triggered before each API
+ * collections import request (after request data load and before the actual import).
+ *
+ * Could be used to additionally validate the imported collections or
+ * to implement completely different import behavior.
+ */
+ onCollectionsBeforeImportRequest(): (hook.Hook | undefined)
+ /**
+ * OnCollectionsAfterImportRequest hook is triggered after each
+ * successful API collections import request.
+ */
+ onCollectionsAfterImportRequest(): (hook.Hook | undefined)
+ }
+ /**
+ * BaseApp implements core.App and defines the base PocketBase app structure.
+ */
+ interface BaseApp {
+ }
+ /**
+ * BaseAppConfig defines a BaseApp configuration option
+ */
+ interface BaseAppConfig {
+ dataDir: string
+ encryptionEnv: string
+ isDebug: boolean
+ dataMaxOpenConns: number // default to 500
+ dataMaxIdleConns: number // default 20
+ logsMaxOpenConns: number // default to 100
+ logsMaxIdleConns: number // default to 5
+ }
+ interface newBaseApp {
+ /**
+ * NewBaseApp creates and returns a new BaseApp instance
+ * configured with the provided arguments.
+ *
+ * To initialize the app, you need to call `app.Bootstrap()`.
+ */
+ (config: BaseAppConfig): (BaseApp | undefined)
+ }
+ interface BaseApp {
+ /**
+ * IsBootstrapped checks if the application was initialized
+ * (aka. whether Bootstrap() was called).
+ */
+ isBootstrapped(): boolean
+ }
+ interface BaseApp {
+ /**
+ * Bootstrap initializes the application
+ * (aka. create data dir, open db connections, load settings, etc.).
+ *
+ * It will call ResetBootstrapState() if the application was already bootstrapped.
+ */
+ bootstrap(): void
+ }
+ interface BaseApp {
+ /**
+ * ResetBootstrapState takes care for releasing initialized app resources
+ * (eg. closing db connections).
+ */
+ resetBootstrapState(): void
+ }
+ interface BaseApp {
+ /**
+ * Deprecated:
+ * This method may get removed in the near future.
+ * It is recommended to access the db instance from app.Dao().DB() or
+ * if you want more flexibility - app.Dao().ConcurrentDB() and app.Dao().NonconcurrentDB().
+ *
+ * DB returns the default app database instance.
+ */
+ db(): (dbx.DB | undefined)
+ }
+ interface BaseApp {
+ /**
+ * Dao returns the default app Dao instance.
+ */
+ dao(): (daos.Dao | undefined)
+ }
+ interface BaseApp {
+ /**
+ * Deprecated:
+ * This method may get removed in the near future.
+ * It is recommended to access the logs db instance from app.LogsDao().DB() or
+ * if you want more flexibility - app.LogsDao().ConcurrentDB() and app.LogsDao().NonconcurrentDB().
+ *
+ * LogsDB returns the app logs database instance.
+ */
+ logsDB(): (dbx.DB | undefined)
+ }
+ interface BaseApp {
+ /**
+ * LogsDao returns the app logs Dao instance.
+ */
+ logsDao(): (daos.Dao | undefined)
+ }
+ interface BaseApp {
+ /**
+ * DataDir returns the app data directory path.
+ */
+ dataDir(): string
+ }
+ interface BaseApp {
+ /**
+ * EncryptionEnv returns the name of the app secret env key
+ * (used for settings encryption).
+ */
+ encryptionEnv(): string
+ }
+ interface BaseApp {
+ /**
+ * IsDebug returns whether the app is in debug mode
+ * (showing more detailed error logs, executed sql statements, etc.).
+ */
+ isDebug(): boolean
+ }
+ interface BaseApp {
+ /**
+ * Settings returns the loaded app settings.
+ */
+ settings(): (settings.Settings | undefined)
+ }
+ interface BaseApp {
+ /**
+ * Cache returns the app internal cache store.
+ */
+ cache(): (store.Store | undefined)
+ }
+ interface BaseApp {
+ /**
+ * SubscriptionsBroker returns the app realtime subscriptions broker instance.
+ */
+ subscriptionsBroker(): (subscriptions.Broker | undefined)
+ }
+ interface BaseApp {
+ /**
+ * NewMailClient creates and returns a new SMTP or Sendmail client
+ * based on the current app settings.
+ */
+ newMailClient(): mailer.Mailer
+ }
+ interface BaseApp {
+ /**
+ * NewFilesystem creates a new local or S3 filesystem instance
+ * for managing regular app files (eg. collection uploads)
+ * based on the current app settings.
+ *
+ * NB! Make sure to call Close() on the returned result
+ * after you are done working with it.
+ */
+ newFilesystem(): (filesystem.System | undefined)
+ }
+ interface BaseApp {
+ /**
+ * NewFilesystem creates a new local or S3 filesystem instance
+ * for managing app backups based on the current app settings.
+ *
+ * NB! Make sure to call Close() on the returned result
+ * after you are done working with it.
+ */
+ newBackupsFilesystem(): (filesystem.System | undefined)
+ }
+ interface BaseApp {
+ /**
+ * Restart restarts (aka. replaces) the current running application process.
+ *
+ * NB! It relies on execve which is supported only on UNIX based systems.
+ */
+ restart(): void
+ }
+ interface BaseApp {
+ /**
+ * RefreshSettings reinitializes and reloads the stored application settings.
+ */
+ refreshSettings(): void
+ }
+ interface BaseApp {
+ onBeforeBootstrap(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAfterBootstrap(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onBeforeServe(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onBeforeApiError(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAfterApiError(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onTerminate(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onModelBeforeCreate(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onModelAfterCreate(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onModelBeforeUpdate(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onModelAfterUpdate(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onModelBeforeDelete(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onModelAfterDelete(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onMailerBeforeAdminResetPasswordSend(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onMailerAfterAdminResetPasswordSend(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onMailerBeforeRecordResetPasswordSend(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onMailerAfterRecordResetPasswordSend(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onMailerBeforeRecordVerificationSend(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onMailerAfterRecordVerificationSend(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onMailerBeforeRecordChangeEmailSend(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onMailerAfterRecordChangeEmailSend(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRealtimeConnectRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onRealtimeDisconnectRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onRealtimeBeforeMessageSend(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onRealtimeAfterMessageSend(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onRealtimeBeforeSubscribeRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onRealtimeAfterSubscribeRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onSettingsListRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onSettingsBeforeUpdateRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onSettingsAfterUpdateRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onFileDownloadRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onFileBeforeTokenRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onFileAfterTokenRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onAdminsListRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminViewRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminBeforeCreateRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminAfterCreateRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminBeforeUpdateRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminAfterUpdateRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminBeforeDeleteRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminAfterDeleteRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminAuthRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminBeforeAuthWithPasswordRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminAfterAuthWithPasswordRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminBeforeAuthRefreshRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminAfterAuthRefreshRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminBeforeRequestPasswordResetRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminAfterRequestPasswordResetRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminBeforeConfirmPasswordResetRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onAdminAfterConfirmPasswordResetRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onRecordAuthRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeAuthWithPasswordRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterAuthWithPasswordRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeAuthWithOAuth2Request(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterAuthWithOAuth2Request(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeAuthRefreshRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterAuthRefreshRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeRequestPasswordResetRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterRequestPasswordResetRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeConfirmPasswordResetRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterConfirmPasswordResetRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeRequestVerificationRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterRequestVerificationRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeConfirmVerificationRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterConfirmVerificationRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeRequestEmailChangeRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterRequestEmailChangeRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeConfirmEmailChangeRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterConfirmEmailChangeRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordListExternalAuthsRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeUnlinkExternalAuthRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterUnlinkExternalAuthRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordsListRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordViewRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeCreateRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterCreateRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeUpdateRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterUpdateRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordBeforeDeleteRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onRecordAfterDeleteRequest(...tags: string[]): (hook.TaggedHook | undefined)
+ }
+ interface BaseApp {
+ onCollectionsListRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onCollectionViewRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onCollectionBeforeCreateRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onCollectionAfterCreateRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onCollectionBeforeUpdateRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onCollectionAfterUpdateRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onCollectionBeforeDeleteRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onCollectionAfterDeleteRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onCollectionsBeforeImportRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+ onCollectionsAfterImportRequest(): (hook.Hook | undefined)
+ }
+ interface BaseApp {
+  /**
+  * CreateBackup creates a new backup of the current app pb_data directory.
+  *
+  * If name is empty, it will be autogenerated.
+  * If a backup with the same name exists, the new backup file will replace it.
+  *
+  * The backup is executed within a transaction, meaning that new writes
+  * will be temporarily "blocked" until the backup file is generated.
+  *
+  * To safely perform the backup, it is recommended to have free disk space
+  * for at least 2x the size of the pb_data directory.
+  *
+  * By default backups are stored in pb_data/backups
+  * (the backups directory itself is excluded from the generated backup).
+  *
+  * When using S3 storage for the uploaded collection files, you have to
+  * take care manually to backup those since they are not part of the pb_data.
+  *
+  * Backups can be stored on S3 if it is configured in app.Settings().Backups.
+  */
+  createBackup(ctx: context.Context, name: string): void
+ }
+ interface BaseApp {
+  /**
+  * RestoreBackup restores the backup with the specified name and restarts
+  * the current running application process.
+  *
+  * NB! This feature is experimental and currently is expected to work only on UNIX based systems.
+  *
+  * To safely perform the restore it is recommended to have free disk space
+  * for at least 2x the size of the restored pb_data backup.
+  *
+  * The performed steps are:
+  *
+  * 1. Download the backup with the specified name in a temp location
+  * ```
+  * (this is in case of S3; otherwise it creates a temp copy of the zip)
+  * ```
+  *
+  * 2. Extract the backup in a temp directory inside the app "pb_data"
+  * ```
+  * (eg. "pb_data/.pb_temp_to_delete/pb_restore").
+  * ```
+  *
+  * 3. Move the current app "pb_data" content (excluding the local backups and the special temp dir)
+  * ```
+  * under another temp sub dir that will be deleted on the next app start up
+  * (eg. "pb_data/.pb_temp_to_delete/old_pb_data").
+  * This is because on some environments it may not be allowed
+  * to delete the currently open "pb_data" files.
+  * ```
+  *
+  * 4. Move the extracted dir content to the app "pb_data".
+  *
+  * 5. Restart the app (on successful app bootstrap it will also remove the old pb_data).
+  *
+  * If a failure occurs during the restore process the dir changes are reverted.
+  * If for whatever reason the revert is not possible, it panics.
+  */
+  restoreBackup(ctx: context.Context, name: string): void
+ }
+ interface BaseModelEvent {
+ model: models.Model
+ }
+ interface BaseModelEvent {
+ tags(): Array
+ }
+ interface BaseCollectionEvent {
+ collection?: models.Collection
+ }
+ interface BaseCollectionEvent {
+ tags(): Array
+ }
+ interface BootstrapEvent {
+ app: App
+ }
+ interface TerminateEvent {
+ app: App
+ }
+ interface ServeEvent {
+ app: App
+ router?: echo.Echo
+ server?: http.Server
+ certManager?: autocert.Manager
+ }
+ interface ApiErrorEvent {
+ httpContext: echo.Context
+ error: Error
+ }
+ type _subGlANq = BaseModelEvent
+ interface ModelEvent extends _subGlANq {
+ dao?: daos.Dao
+ }
+ type _subkbHcT = BaseCollectionEvent
+ interface MailerRecordEvent extends _subkbHcT {
+ mailClient: mailer.Mailer
+ message?: mailer.Message
+ record?: models.Record
+ meta: _TygojaDict
+ }
+ interface MailerAdminEvent {
+ mailClient: mailer.Mailer
+ message?: mailer.Message
+ admin?: models.Admin
+ meta: _TygojaDict
+ }
+ interface RealtimeConnectEvent {
+ httpContext: echo.Context
+ client: subscriptions.Client
+ }
+ interface RealtimeDisconnectEvent {
+ httpContext: echo.Context
+ client: subscriptions.Client
+ }
+ interface RealtimeMessageEvent {
+ httpContext: echo.Context
+ client: subscriptions.Client
+ message?: subscriptions.Message
+ }
+ interface RealtimeSubscribeEvent {
+ httpContext: echo.Context
+ client: subscriptions.Client
+ subscriptions: Array
+ }
+ interface SettingsListEvent {
+ httpContext: echo.Context
+ redactedSettings?: settings.Settings
+ }
+ interface SettingsUpdateEvent {
+ httpContext: echo.Context
+ oldSettings?: settings.Settings
+ newSettings?: settings.Settings
+ }
+ type _subNcWJv = BaseCollectionEvent
+ interface RecordsListEvent extends _subNcWJv {
+ httpContext: echo.Context
+ records: Array<(models.Record | undefined)>
+ result?: search.Result
+ }
+ type _subCJBQk = BaseCollectionEvent
+ interface RecordViewEvent extends _subCJBQk {
+ httpContext: echo.Context
+ record?: models.Record
+ }
+ type _subJBniA = BaseCollectionEvent
+ interface RecordCreateEvent extends _subJBniA {
+ httpContext: echo.Context
+ record?: models.Record
+ uploadedFiles: _TygojaDict
+ }
+ type _subMthGJ = BaseCollectionEvent
+ interface RecordUpdateEvent extends _subMthGJ {
+ httpContext: echo.Context
+ record?: models.Record
+ uploadedFiles: _TygojaDict
+ }
+ type _subULZLD = BaseCollectionEvent
+ interface RecordDeleteEvent extends _subULZLD {
+ httpContext: echo.Context
+ record?: models.Record
+ }
+ type _subKjryZ = BaseCollectionEvent
+ interface RecordAuthEvent extends _subKjryZ {
+ httpContext: echo.Context
+ record?: models.Record
+ token: string
+ meta: any
+ }
+ type _subFfWSw = BaseCollectionEvent
+ interface RecordAuthWithPasswordEvent extends _subFfWSw {
+ httpContext: echo.Context
+ record?: models.Record
+ identity: string
+ password: string
+ }
+ type _subxSsWU = BaseCollectionEvent
+ interface RecordAuthWithOAuth2Event extends _subxSsWU {
+ httpContext: echo.Context
+ providerName: string
+ providerClient: auth.Provider
+ record?: models.Record
+ oAuth2User?: auth.AuthUser
+ isNewRecord: boolean
+ }
+ type _subBVSdi = BaseCollectionEvent
+ interface RecordAuthRefreshEvent extends _subBVSdi {
+ httpContext: echo.Context
+ record?: models.Record
+ }
+ type _subyAMtt = BaseCollectionEvent
+ interface RecordRequestPasswordResetEvent extends _subyAMtt {
+ httpContext: echo.Context
+ record?: models.Record
+ }
+ type _subcSmvE = BaseCollectionEvent
+ interface RecordConfirmPasswordResetEvent extends _subcSmvE {
+ httpContext: echo.Context
+ record?: models.Record
+ }
+ type _subyHoPB = BaseCollectionEvent
+ interface RecordRequestVerificationEvent extends _subyHoPB {
+ httpContext: echo.Context
+ record?: models.Record
+ }
+ type _subVgeAd = BaseCollectionEvent
+ interface RecordConfirmVerificationEvent extends _subVgeAd {
+ httpContext: echo.Context
+ record?: models.Record
+ }
+ type _subtfyfe = BaseCollectionEvent
+ interface RecordRequestEmailChangeEvent extends _subtfyfe {
+ httpContext: echo.Context
+ record?: models.Record
+ }
+ type _subVMgDd = BaseCollectionEvent
+ interface RecordConfirmEmailChangeEvent extends _subVMgDd {
+ httpContext: echo.Context
+ record?: models.Record
+ }
+ type _submNZyi = BaseCollectionEvent
+ interface RecordListExternalAuthsEvent extends _submNZyi {
+ httpContext: echo.Context
+ record?: models.Record
+ externalAuths: Array<(models.ExternalAuth | undefined)>
+ }
+ type _subgeAsX = BaseCollectionEvent
+ interface RecordUnlinkExternalAuthEvent extends _subgeAsX {
+ httpContext: echo.Context
+ record?: models.Record
+ externalAuth?: models.ExternalAuth
+ }
+ interface AdminsListEvent {
+ httpContext: echo.Context
+ admins: Array<(models.Admin | undefined)>
+ result?: search.Result
+ }
+ interface AdminViewEvent {
+ httpContext: echo.Context
+ admin?: models.Admin
+ }
+ interface AdminCreateEvent {
+ httpContext: echo.Context
+ admin?: models.Admin
+ }
+ interface AdminUpdateEvent {
+ httpContext: echo.Context
+ admin?: models.Admin
+ }
+ interface AdminDeleteEvent {
+ httpContext: echo.Context
+ admin?: models.Admin
+ }
+ interface AdminAuthEvent {
+ httpContext: echo.Context
+ admin?: models.Admin
+ token: string
+ }
+ interface AdminAuthWithPasswordEvent {
+ httpContext: echo.Context
+ admin?: models.Admin
+ identity: string
+ password: string
+ }
+ interface AdminAuthRefreshEvent {
+ httpContext: echo.Context
+ admin?: models.Admin
+ }
+ interface AdminRequestPasswordResetEvent {
+ httpContext: echo.Context
+ admin?: models.Admin
+ }
+ interface AdminConfirmPasswordResetEvent {
+ httpContext: echo.Context
+ admin?: models.Admin
+ }
+ interface CollectionsListEvent {
+ httpContext: echo.Context
+ collections: Array<(models.Collection | undefined)>
+ result?: search.Result
+ }
+ type _subWnnQS = BaseCollectionEvent
+ interface CollectionViewEvent extends _subWnnQS {
+ httpContext: echo.Context
+ }
+ type _subvCytE = BaseCollectionEvent
+ interface CollectionCreateEvent extends _subvCytE {
+ httpContext: echo.Context
+ }
+ type _subtotQZ = BaseCollectionEvent
+ interface CollectionUpdateEvent extends _subtotQZ {
+ httpContext: echo.Context
+ }
+ type _subeLHxu = BaseCollectionEvent
+ interface CollectionDeleteEvent extends _subeLHxu {
+ httpContext: echo.Context
+ }
+ interface CollectionsImportEvent {
+ httpContext: echo.Context
+ collections: Array<(models.Collection | undefined)>
+ }
+ type _submEUaq = BaseModelEvent
+ interface FileTokenEvent extends _submEUaq {
+ httpContext: echo.Context
+ token: string
+ }
+ type _subXTQwU = BaseCollectionEvent
+ interface FileDownloadEvent extends _subXTQwU {
+ httpContext: echo.Context
+ record?: models.Record
+ fileField?: schema.SchemaField
+ servedPath: string
+ servedName: string
+ }
+}
+
+/**
+ * Package tokens implements various user and admin tokens generation methods.
+ */
+namespace tokens {
+ interface newAdminAuthToken {
+ /**
+ * NewAdminAuthToken generates and returns a new admin authentication token.
+ */
+ (app: core.App, admin: models.Admin): string
+ }
+ interface newAdminResetPasswordToken {
+ /**
+ * NewAdminResetPasswordToken generates and returns a new admin password reset request token.
+ */
+ (app: core.App, admin: models.Admin): string
+ }
+ interface newAdminFileToken {
+ /**
+ * NewAdminFileToken generates and returns a new admin private file access token.
+ */
+ (app: core.App, admin: models.Admin): string
+ }
+ interface newRecordAuthToken {
+ /**
+ * NewRecordAuthToken generates and returns a new auth record authentication token.
+ */
+ (app: core.App, record: models.Record): string
+ }
+ interface newRecordVerifyToken {
+ /**
+ * NewRecordVerifyToken generates and returns a new record verification token.
+ */
+ (app: core.App, record: models.Record): string
+ }
+ interface newRecordResetPasswordToken {
+ /**
+ * NewRecordResetPasswordToken generates and returns a new auth record password reset request token.
+ */
+ (app: core.App, record: models.Record): string
+ }
+ interface newRecordChangeEmailToken {
+ /**
+ * NewRecordChangeEmailToken generates and returns a new auth record change email request token.
+ */
+ (app: core.App, record: models.Record, newEmail: string): string
+ }
+ interface newRecordFileToken {
+ /**
+ * NewRecordFileToken generates and returns a new record private file access token.
+ */
+ (app: core.App, record: models.Record): string
+ }
+}
+
+/**
+ * Package forms implements various services used for request data
+ * validation and applying changes to existing DB models through the app Dao.
+ */
+namespace forms {
+ // @ts-ignore
+ import validation = ozzo_validation
+ /**
+ * AdminLogin is an admin email/pass login form.
+ */
+ interface AdminLogin {
+ identity: string
+ password: string
+ }
+ interface newAdminLogin {
+ /**
+ * NewAdminLogin creates a new [AdminLogin] form initialized with
+ * the provided [core.App] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App): (AdminLogin | undefined)
+ }
+ interface AdminLogin {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface AdminLogin {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface AdminLogin {
+ /**
+ * Submit validates and submits the admin form.
+ * On success returns the authorized admin model.
+ *
+ * You can optionally provide a list of InterceptorFunc to
+ * further modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): (models.Admin | undefined)
+ }
+ /**
+ * AdminPasswordResetConfirm is an admin password reset confirmation form.
+ */
+ interface AdminPasswordResetConfirm {
+ token: string
+ password: string
+ passwordConfirm: string
+ }
+ interface newAdminPasswordResetConfirm {
+ /**
+ * NewAdminPasswordResetConfirm creates a new [AdminPasswordResetConfirm]
+ * form initialized with from the provided [core.App] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App): (AdminPasswordResetConfirm | undefined)
+ }
+ interface AdminPasswordResetConfirm {
+ /**
+ * SetDao replaces the form Dao instance with the provided one.
+ *
+ * This is useful if you want to use a specific transaction Dao instance
+ * instead of the default app.Dao().
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface AdminPasswordResetConfirm {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface AdminPasswordResetConfirm {
+ /**
+ * Submit validates and submits the admin password reset confirmation form.
+ * On success returns the updated admin model associated to `form.Token`.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): (models.Admin | undefined)
+ }
+ /**
+ * AdminPasswordResetRequest is an admin password reset request form.
+ */
+ interface AdminPasswordResetRequest {
+ email: string
+ }
+ interface newAdminPasswordResetRequest {
+ /**
+ * NewAdminPasswordResetRequest creates a new [AdminPasswordResetRequest]
+ * form initialized with from the provided [core.App] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App): (AdminPasswordResetRequest | undefined)
+ }
+ interface AdminPasswordResetRequest {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface AdminPasswordResetRequest {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ *
+ * This method doesn't verify that admin with `form.Email` exists (this is done on Submit).
+ */
+ validate(): void
+ }
+ interface AdminPasswordResetRequest {
+ /**
+ * Submit validates and submits the form.
+ * On success sends a password reset email to the `form.Email` admin.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): void
+ }
+ /**
+ * AdminUpsert is a [models.Admin] upsert (create/update) form.
+ */
+ interface AdminUpsert {
+ id: string
+ avatar: number
+ email: string
+ password: string
+ passwordConfirm: string
+ }
+ interface newAdminUpsert {
+ /**
+ * NewAdminUpsert creates a new [AdminUpsert] form with initializer
+ * config created from the provided [core.App] and [models.Admin] instances
+ * (for create you could pass a pointer to an empty Admin - `&models.Admin{}`).
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, admin: models.Admin): (AdminUpsert | undefined)
+ }
+ interface AdminUpsert {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface AdminUpsert {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface AdminUpsert {
+ /**
+ * Submit validates the form and upserts the form admin model.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): void
+ }
+ /**
+ * AppleClientSecretCreate is a [models.Admin] upsert (create/update) form.
+ *
+ * Reference: https://developer.apple.com/documentation/sign_in_with_apple/generate_and_validate_tokens
+ */
+ interface AppleClientSecretCreate {
+ /**
+ * ClientId is the identifier of your app (aka. Service ID).
+ */
+ clientId: string
+ /**
+ * TeamId is a 10-character string associated with your developer account
+ * (usually could be found next to your name in the Apple Developer site).
+ */
+ teamId: string
+ /**
+ * KeyId is a 10-character key identifier generated for the "Sign in with Apple"
+ * private key associated with your developer account.
+ */
+ keyId: string
+ /**
+ * PrivateKey is the private key associated to your app.
+ * Usually wrapped within -----BEGIN PRIVATE KEY----- X -----END PRIVATE KEY-----.
+ */
+ privateKey: string
+ /**
+ * Duration specifies how long the generated JWT token should be considered valid.
+ * The specified value must be in seconds and max 15777000 (~6months).
+ */
+ duration: number
+ }
+ interface newAppleClientSecretCreate {
+ /**
+ * NewAppleClientSecretCreate creates a new [AppleClientSecretCreate] form with initializer
+ * config created from the provided [core.App] instances.
+ */
+ (app: core.App): (AppleClientSecretCreate | undefined)
+ }
+ interface AppleClientSecretCreate {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface AppleClientSecretCreate {
+ /**
+ * Submit validates the form and returns a new Apple Client Secret JWT.
+ */
+ submit(): string
+ }
+ /**
+ * BackupCreate is a request form for creating a new app backup.
+ */
+ interface BackupCreate {
+ name: string
+ }
+ interface newBackupCreate {
+ /**
+ * NewBackupCreate creates new BackupCreate request form.
+ */
+ (app: core.App): (BackupCreate | undefined)
+ }
+ interface BackupCreate {
+ /**
+ * SetContext replaces the default form context with the provided one.
+ */
+ setContext(ctx: context.Context): void
+ }
+ interface BackupCreate {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface BackupCreate {
+ /**
+ * Submit validates the form and creates the app backup.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before creating the backup.
+ */
+ submit(...interceptors: InterceptorFunc[]): void
+ }
+ /**
+ * InterceptorNextFunc is a interceptor handler function.
+ * Usually used in combination with InterceptorFunc.
+ */
+ interface InterceptorNextFunc {(t: T): void }
+ /**
+ * InterceptorFunc defines a single interceptor function that
+ * will execute the provided next func handler.
+ */
+ interface InterceptorFunc {(next: InterceptorNextFunc): InterceptorNextFunc }
+ /**
+ * CollectionUpsert is a [models.Collection] upsert (create/update) form.
+ */
+ interface CollectionUpsert {
+ id: string
+ type: string
+ name: string
+ system: boolean
+ schema: schema.Schema
+ indexes: types.JsonArray
+ listRule?: string
+ viewRule?: string
+ createRule?: string
+ updateRule?: string
+ deleteRule?: string
+ options: types.JsonMap
+ }
+ interface newCollectionUpsert {
+ /**
+ * NewCollectionUpsert creates a new [CollectionUpsert] form with initializer
+ * config created from the provided [core.App] and [models.Collection] instances
+ * (for create you could pass a pointer to an empty Collection - `&models.Collection{}`).
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, collection: models.Collection): (CollectionUpsert | undefined)
+ }
+ interface CollectionUpsert {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface CollectionUpsert {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface CollectionUpsert {
+ /**
+ * Submit validates the form and upserts the form's Collection model.
+ *
+ * On success the related record table schema will be auto updated.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): void
+ }
+ /**
+ * CollectionsImport is a form model to bulk import
+ * (create, replace and delete) collections from a user provided list.
+ */
+ interface CollectionsImport {
+ collections: Array<(models.Collection | undefined)>
+ deleteMissing: boolean
+ }
+ interface newCollectionsImport {
+ /**
+ * NewCollectionsImport creates a new [CollectionsImport] form with
+ * initialized with from the provided [core.App] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App): (CollectionsImport | undefined)
+ }
+ interface CollectionsImport {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface CollectionsImport {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface CollectionsImport {
+ /**
+ * Submit applies the import, aka.:
+ * - imports the form collections (create or replace)
+ * - sync the collection changes with their related records table
+ * - ensures the integrity of the imported structure (aka. run validations for each collection)
+ * - if [form.DeleteMissing] is set, deletes all local collections that are not found in the imports list
+ *
+ * All operations are wrapped in a single transaction that are
+ * rollbacked on the first encountered error.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc>[]): void
+ }
+ /**
+  * RealtimeSubscribe is a realtime subscriptions request form.
+  */
+ interface RealtimeSubscribe {
+  clientId: string
+  subscriptions: Array<string>
+ }
+ interface newRealtimeSubscribe {
+ /**
+ * NewRealtimeSubscribe creates new RealtimeSubscribe request form.
+ */
+ (): (RealtimeSubscribe | undefined)
+ }
+ interface RealtimeSubscribe {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ /**
+ * RecordEmailChangeConfirm is an auth record email change confirmation form.
+ */
+ interface RecordEmailChangeConfirm {
+ token: string
+ password: string
+ }
+ interface newRecordEmailChangeConfirm {
+ /**
+ * NewRecordEmailChangeConfirm creates a new [RecordEmailChangeConfirm] form
+ * initialized with from the provided [core.App] and [models.Collection] instances.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, collection: models.Collection): (RecordEmailChangeConfirm | undefined)
+ }
+ interface RecordEmailChangeConfirm {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface RecordEmailChangeConfirm {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface RecordEmailChangeConfirm {
+ /**
+ * Submit validates and submits the auth record email change confirmation form.
+ * On success returns the updated auth record associated to `form.Token`.
+ *
+ * You can optionally provide a list of InterceptorFunc to
+ * further modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): (models.Record | undefined)
+ }
+ /**
+ * RecordEmailChangeRequest is an auth record email change request form.
+ */
+ interface RecordEmailChangeRequest {
+ newEmail: string
+ }
+ interface newRecordEmailChangeRequest {
+ /**
+ * NewRecordEmailChangeRequest creates a new [RecordEmailChangeRequest] form
+ * initialized with from the provided [core.App] and [models.Record] instances.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, record: models.Record): (RecordEmailChangeRequest | undefined)
+ }
+ interface RecordEmailChangeRequest {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface RecordEmailChangeRequest {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface RecordEmailChangeRequest {
+ /**
+ * Submit validates and sends the change email request.
+ *
+ * You can optionally provide a list of InterceptorFunc to
+ * further modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): void
+ }
+ /**
+  * RecordOAuth2LoginData defines the OAuth2 login data resolved during
+  * submit (the provider client, the fetched OAuth2 user and, when available,
+  * the matched external auth relation and auth record).
+  */
+ interface RecordOAuth2LoginData {
+  externalAuth?: models.ExternalAuth
+  record?: models.Record
+  oAuth2User?: auth.AuthUser
+  providerClient: auth.Provider
+ }
+ /**
+ * BeforeOAuth2RecordCreateFunc defines a callback function that will
+ * be called before OAuth2 new Record creation.
+ */
+ interface BeforeOAuth2RecordCreateFunc {(createForm: RecordUpsert, authRecord: models.Record, authUser: auth.AuthUser): void }
+ /**
+ * RecordOAuth2Login is an auth record OAuth2 login form.
+ */
+ interface RecordOAuth2Login {
+ /**
+ * The name of the OAuth2 client provider (eg. "google")
+ */
+ provider: string
+ /**
+ * The authorization code returned from the initial request.
+ */
+ code: string
+ /**
+ * The code verifier sent with the initial request as part of the code_challenge.
+ */
+ codeVerifier: string
+ /**
+ * The redirect url sent with the initial request.
+ */
+ redirectUrl: string
+ /**
+ * Additional data that will be used for creating a new auth record
+ * if an existing OAuth2 account doesn't exist.
+ */
+ createData: _TygojaDict
+ }
+ interface newRecordOAuth2Login {
+ /**
+ * NewRecordOAuth2Login creates a new [RecordOAuth2Login] form with
+ * initialized with from the provided [core.App] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, collection: models.Collection, optAuthRecord: models.Record): (RecordOAuth2Login | undefined)
+ }
+ interface RecordOAuth2Login {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface RecordOAuth2Login {
+ /**
+ * SetBeforeNewRecordCreateFunc sets a before OAuth2 record create callback handler.
+ */
+ setBeforeNewRecordCreateFunc(f: BeforeOAuth2RecordCreateFunc): void
+ }
+ interface RecordOAuth2Login {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface RecordOAuth2Login {
+ /**
+ * Submit validates and submits the form.
+ *
+ * If an auth record doesn't exist, it will make an attempt to create it
+ * based on the fetched OAuth2 profile data via a local [RecordUpsert] form.
+ * You can intercept/modify the Record create form with [form.SetBeforeNewRecordCreateFunc()].
+ *
+ * You can also optionally provide a list of InterceptorFunc to
+ * further modify the form behavior before persisting it.
+ *
+ * On success returns the authorized record model and the fetched provider's data.
+ */
+ submit(...interceptors: InterceptorFunc[]): [(models.Record | undefined), (auth.AuthUser | undefined)]
+ }
+ /**
+ * RecordPasswordLogin is record username/email + password login form.
+ */
+ interface RecordPasswordLogin {
+ identity: string
+ password: string
+ }
+ interface newRecordPasswordLogin {
+ /**
+ * NewRecordPasswordLogin creates a new [RecordPasswordLogin] form initialized
+ * with from the provided [core.App] and [models.Collection] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, collection: models.Collection): (RecordPasswordLogin | undefined)
+ }
+ interface RecordPasswordLogin {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface RecordPasswordLogin {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface RecordPasswordLogin {
+ /**
+ * Submit validates and submits the form.
+ * On success returns the authorized record model.
+ *
+ * You can optionally provide a list of InterceptorFunc to
+ * further modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): (models.Record | undefined)
+ }
+ /**
+ * RecordPasswordResetConfirm is an auth record password reset confirmation form.
+ */
+ interface RecordPasswordResetConfirm {
+ token: string
+ password: string
+ passwordConfirm: string
+ }
+ interface newRecordPasswordResetConfirm {
+ /**
+ * NewRecordPasswordResetConfirm creates a new [RecordPasswordResetConfirm]
+ * form initialized with from the provided [core.App] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, collection: models.Collection): (RecordPasswordResetConfirm | undefined)
+ }
+ interface RecordPasswordResetConfirm {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface RecordPasswordResetConfirm {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface RecordPasswordResetConfirm {
+ /**
+ * Submit validates and submits the form.
+ * On success returns the updated auth record associated to `form.Token`.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): (models.Record | undefined)
+ }
+ /**
+ * RecordPasswordResetRequest is an auth record reset password request form.
+ */
+ interface RecordPasswordResetRequest {
+ email: string
+ }
+ interface newRecordPasswordResetRequest {
+ /**
+ * NewRecordPasswordResetRequest creates a new [RecordPasswordResetRequest]
+ * form initialized with from the provided [core.App] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, collection: models.Collection): (RecordPasswordResetRequest | undefined)
+ }
+ interface RecordPasswordResetRequest {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface RecordPasswordResetRequest {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ *
+ * This method doesn't checks whether auth record with `form.Email` exists (this is done on Submit).
+ */
+ validate(): void
+ }
+ interface RecordPasswordResetRequest {
+ /**
+ * Submit validates and submits the form.
+ * On success, sends a password reset email to the `form.Email` auth record.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): void
+ }
+ /**
+ * RecordUpsert is a [models.Record] upsert (create/update) form.
+ */
+ interface RecordUpsert {
+ /**
+ * base model fields
+ */
+ id: string
+ /**
+ * auth collection fields
+ * ---
+ */
+ username: string
+ email: string
+ emailVisibility: boolean
+ verified: boolean
+ password: string
+ passwordConfirm: string
+ oldPassword: string
+ }
+ interface newRecordUpsert {
+ /**
+ * NewRecordUpsert creates a new [RecordUpsert] form with initializer
+ * config created from the provided [core.App] and [models.Record] instances
+ * (for create you could pass a pointer to an empty Record - models.NewRecord(collection)).
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, record: models.Record): (RecordUpsert | undefined)
+ }
+ interface RecordUpsert {
+ /**
+ * Data returns the loaded form's data.
+ */
+ data(): _TygojaDict
+ }
+ interface RecordUpsert {
+ /**
+ * SetFullManageAccess sets the manageAccess bool flag of the current
+ * form to enable/disable directly changing some system record fields
+ * (often used with auth collection records).
+ */
+ setFullManageAccess(fullManageAccess: boolean): void
+ }
+ interface RecordUpsert {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface RecordUpsert {
+  /**
+  * LoadRequest extracts the json or multipart/form-data request data
+  * and loads it into the form.
+  *
+  * File upload is supported only via multipart/form-data.
+  */
+  loadRequest(r: http.Request, keyPrefix: string): void
+ }
+ interface RecordUpsert {
+ /**
+ * FilesToUpload returns the parsed request files ready for upload.
+ */
+ filesToUpload(): _TygojaDict
+ }
+ interface RecordUpsert {
+  /**
+  * FilesToDelete returns the parsed request filenames ready to be deleted.
+  */
+  filesToDelete(): Array<string>
+ }
+ interface RecordUpsert {
+ /**
+ * AddFiles adds the provided file(s) to the specified file field.
+ *
+ * If the file field is a SINGLE-value file field (aka. "Max Select = 1"),
+ * then the newly added file will REPLACE the existing one.
+ * In this case if you pass more than 1 files only the first one will be assigned.
+ *
+ * If the file field is a MULTI-value file field (aka. "Max Select > 1"),
+ * then the newly added file(s) will be APPENDED to the existing one(s).
+ *
+ * Example
+ *
+ * ```
+ * f1, _ := filesystem.NewFileFromPath("/path/to/file1.txt")
+ * f2, _ := filesystem.NewFileFromPath("/path/to/file2.txt")
+ * form.AddFiles("documents", f1, f2)
+ * ```
+ */
+ addFiles(key: string, ...files: (filesystem.File | undefined)[]): void
+ }
+ interface RecordUpsert {
+  /**
+  * RemoveFiles removes a single or multiple file from the specified file field.
+  *
+  * NB! If filesToDelete is not set it will remove all existing files
+  * assigned to the file field (including those assigned with AddFiles)!
+  *
+  * Example
+  *
+  * ```
+  * // mark only 2 files for removal
+  * form.RemoveFiles("documents", "file1_aw4bdrvws6.txt", "file2_xwbs36bafv.txt")
+  *
+  * // mark all "documents" files for removal
+  * form.RemoveFiles("documents")
+  * ```
+  */
+  removeFiles(key: string, ...toDelete: string[]): void
+ }
+ interface RecordUpsert {
+ /**
+ * LoadData loads and normalizes the provided regular record data fields into the form.
+ */
+ loadData(requestData: _TygojaDict): void
+ }
+ interface RecordUpsert {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface RecordUpsert {
+ validateAndFill(): void
+ }
+ interface RecordUpsert {
+ /**
+ * DrySubmit performs a form submit within a transaction and reverts it.
+ * For actual record persistence, check the `form.Submit()` method.
+ *
+ * This method doesn't handle file uploads/deletes or trigger any app events!
+ */
+ drySubmit(callback: (txDao: daos.Dao) => void): void
+ }
+ interface RecordUpsert {
+ /**
+ * Submit validates the form and upserts the form Record model.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): void
+ }
+ /**
+ * RecordVerificationConfirm is an auth record email verification confirmation form.
+ */
+ interface RecordVerificationConfirm {
+ token: string
+ }
+ interface newRecordVerificationConfirm {
+ /**
+ * NewRecordVerificationConfirm creates a new [RecordVerificationConfirm]
+ * form initialized with from the provided [core.App] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, collection: models.Collection): (RecordVerificationConfirm | undefined)
+ }
+ interface RecordVerificationConfirm {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface RecordVerificationConfirm {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface RecordVerificationConfirm {
+ /**
+ * Submit validates and submits the form.
+ * On success returns the verified auth record associated to `form.Token`.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): (models.Record | undefined)
+ }
+ /**
+ * RecordVerificationRequest is an auth record email verification request form.
+ */
+ interface RecordVerificationRequest {
+ email: string
+ }
+ interface newRecordVerificationRequest {
+ /**
+ * NewRecordVerificationRequest creates a new [RecordVerificationRequest]
+ * form initialized with from the provided [core.App] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App, collection: models.Collection): (RecordVerificationRequest | undefined)
+ }
+ interface RecordVerificationRequest {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface RecordVerificationRequest {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ *
+ * // This method doesn't verify that auth record with `form.Email` exists (this is done on Submit).
+ */
+ validate(): void
+ }
+ interface RecordVerificationRequest {
+ /**
+ * Submit validates and sends a verification request email
+ * to the `form.Email` auth record.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): void
+ }
+ /**
+ * SettingsUpsert is a [settings.Settings] upsert (create/update) form.
+ */
+ type _subZPQBc = settings.Settings
+ interface SettingsUpsert extends _subZPQBc {
+ }
+ interface newSettingsUpsert {
+ /**
+ * NewSettingsUpsert creates a new [SettingsUpsert] form with initializer
+ * config created from the provided [core.App] instance.
+ *
+ * If you want to submit the form as part of a transaction,
+ * you can change the default Dao via [SetDao()].
+ */
+ (app: core.App): (SettingsUpsert | undefined)
+ }
+ interface SettingsUpsert {
+ /**
+ * SetDao replaces the default form Dao instance with the provided one.
+ */
+ setDao(dao: daos.Dao): void
+ }
+ interface SettingsUpsert {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface SettingsUpsert {
+ /**
+ * Submit validates the form and upserts the loaded settings.
+ *
+ * On success the app settings will be refreshed with the form ones.
+ *
+ * You can optionally provide a list of InterceptorFunc to further
+ * modify the form behavior before persisting it.
+ */
+ submit(...interceptors: InterceptorFunc[]): void
+ }
+ /**
+ * TestEmailSend is a email template test request form.
+ */
+ interface TestEmailSend {
+ template: string
+ email: string
+ }
+ interface newTestEmailSend {
+ /**
+ * NewTestEmailSend creates and initializes new TestEmailSend form.
+ */
+ (app: core.App): (TestEmailSend | undefined)
+ }
+ interface TestEmailSend {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface TestEmailSend {
+ /**
+ * Submit validates and sends a test email to the form.Email address.
+ */
+ submit(): void
+ }
+ /**
+ * TestS3Filesystem defines a S3 filesystem connection test.
+ */
+ interface TestS3Filesystem {
+ /**
+ * The name of the filesystem - storage or backups
+ */
+ filesystem: string
+ }
+ interface newTestS3Filesystem {
+ /**
+ * NewTestS3Filesystem creates and initializes new TestS3Filesystem form.
+ */
+ (app: core.App): (TestS3Filesystem | undefined)
+ }
+ interface TestS3Filesystem {
+ /**
+ * Validate makes the form validatable by implementing [validation.Validatable] interface.
+ */
+ validate(): void
+ }
+ interface TestS3Filesystem {
+ /**
+ * Submit validates and performs a S3 filesystem connection test.
+ */
+ submit(): void
+ }
+}
+
+/**
+ * Package apis implements the default PocketBase api services and middlewares.
+ */
+namespace apis {
+ interface adminApi {
+ }
+ // @ts-ignore
+ import validation = ozzo_validation
+ /**
+ * ApiError defines the struct for a basic api error response.
+ */
+ interface ApiError {
+ code: number
+ message: string
+ data: _TygojaDict
+ }
+ interface ApiError {
+ /**
+ * Error makes it compatible with the `error` interface.
+ */
+ error(): string
+ }
+ interface ApiError {
+ /**
+ * RawData returns the unformatted error data (could be an internal error, text, etc.)
+ */
+ rawData(): any
+ }
+ interface newNotFoundError {
+ /**
+ * NewNotFoundError creates and returns 404 `ApiError`.
+ */
+ (message: string, data: any): (ApiError | undefined)
+ }
+ interface newBadRequestError {
+ /**
+ * NewBadRequestError creates and returns 400 `ApiError`.
+ */
+ (message: string, data: any): (ApiError | undefined)
+ }
+ interface newForbiddenError {
+ /**
+ * NewForbiddenError creates and returns 403 `ApiError`.
+ */
+ (message: string, data: any): (ApiError | undefined)
+ }
+ interface newUnauthorizedError {
+ /**
+ * NewUnauthorizedError creates and returns 401 `ApiError`.
+ */
+ (message: string, data: any): (ApiError | undefined)
+ }
+ interface newApiError {
+ /**
+ * NewApiError creates and returns new normalized `ApiError` instance.
+ */
+ (status: number, message: string, data: any): (ApiError | undefined)
+ }
+ interface backupApi {
+ }
+ interface initApi {
+ /**
+ * InitApi creates a configured echo instance with registered
+ * system and app specific routes and middlewares.
+ */
+ (app: core.App): (echo.Echo | undefined)
+ }
+ interface staticDirectoryHandler {
+ /**
+ * StaticDirectoryHandler is similar to `echo.StaticDirectoryHandler`
+ * but without the directory redirect which conflicts with RemoveTrailingSlash middleware.
+ *
+ * If a file resource is missing and indexFallback is set, the request
+ * will be forwarded to the base index.html (useful also for SPA).
+ *
+ * @see https://github.com/labstack/echo/issues/2211
+ */
+ (fileSystem: fs.FS, indexFallback: boolean): echo.HandlerFunc
+ }
+ interface collectionApi {
+ }
+ interface fileApi {
+ }
+ interface healthApi {
+ }
+ interface healthCheckResponse {
+ code: number
+ message: string
+ data: {
+ canBackup: boolean
+ }
+ }
+ interface logsApi {
+ }
+ interface requireGuestOnly {
+ /**
+ * RequireGuestOnly middleware requires a request to NOT have a valid
+ * Authorization header.
+ *
+ * This middleware is the opposite of [apis.RequireAdminOrRecordAuth()].
+ */
+ (): echo.MiddlewareFunc
+ }
+ interface requireRecordAuth {
+ /**
+ * RequireRecordAuth middleware requires a request to have
+ * a valid record auth Authorization header.
+ *
+ * The auth record could be from any collection.
+ *
+ * You can further filter the allowed record auth collections by
+ * specifying their names.
+ *
+ * Example:
+ *
+ * ```
+ * apis.RequireRecordAuth()
+ * ```
+ *
+ * Or:
+ *
+ * ```
+ * apis.RequireRecordAuth("users", "supervisors")
+ * ```
+ *
+ * To restrict the auth record only to the loaded context collection,
+ * use [apis.RequireSameContextRecordAuth()] instead.
+ */
+ (...optCollectionNames: string[]): echo.MiddlewareFunc
+ }
+ interface requireSameContextRecordAuth {
+ /**
+ * RequireSameContextRecordAuth middleware requires a request to have
+ * a valid record Authorization header.
+ *
+ * The auth record must be from the same collection already loaded in the context.
+ */
+ (): echo.MiddlewareFunc
+ }
+ interface requireAdminAuth {
+ /**
+ * RequireAdminAuth middleware requires a request to have
+ * a valid admin Authorization header.
+ */
+ (): echo.MiddlewareFunc
+ }
+ interface requireAdminAuthOnlyIfAny {
+ /**
+ * RequireAdminAuthOnlyIfAny middleware requires a request to have
+ * a valid admin Authorization header ONLY if the application has
+ * at least 1 existing Admin model.
+ */
+ (app: core.App): echo.MiddlewareFunc
+ }
+ interface requireAdminOrRecordAuth {
+ /**
+ * RequireAdminOrRecordAuth middleware requires a request to have
+ * a valid admin or record Authorization header set.
+ *
+ * You can further filter the allowed auth record collections by providing their names.
+ *
+ * This middleware is the opposite of [apis.RequireGuestOnly()].
+ */
+ (...optCollectionNames: string[]): echo.MiddlewareFunc
+ }
+ interface requireAdminOrOwnerAuth {
+ /**
+ * RequireAdminOrOwnerAuth middleware requires a request to have
+ * a valid admin or auth record owner Authorization header set.
+ *
+ * This middleware is similar to [apis.RequireAdminOrRecordAuth()] but
+ * for the auth record token expects to have the same id as the path
+ * parameter ownerIdParam (default to "id" if empty).
+ */
+ (ownerIdParam: string): echo.MiddlewareFunc
+ }
+ interface loadAuthContext {
+ /**
+ * LoadAuthContext middleware reads the Authorization request header
+ * and loads the token related record or admin instance into the
+ * request's context.
+ *
+ * This middleware is expected to be already registered by default for all routes.
+ */
+ (app: core.App): echo.MiddlewareFunc
+ }
+ interface loadCollectionContext {
+ /**
+ * LoadCollectionContext middleware finds the collection with related
+ * path identifier and loads it into the request context.
+ *
+ * Set optCollectionTypes to further filter the found collection by its type.
+ */
+ (app: core.App, ...optCollectionTypes: string[]): echo.MiddlewareFunc
+ }
+ interface activityLogger {
+ /**
+ * ActivityLogger middleware takes care to save the request information
+ * into the logs database.
+ *
+ * The middleware does nothing if the app logs retention period is zero
+ * (aka. app.Settings().Logs.MaxDays = 0).
+ */
+ (app: core.App): echo.MiddlewareFunc
+ }
+ interface realtimeApi {
+ }
+ interface recordData {
+ action: string
+ record?: models.Record
+ }
+ interface getter {
+ get(_arg0: string): any
+ }
+ interface recordAuthApi {
+ }
+ interface providerInfo {
+ name: string
+ state: string
+ codeVerifier: string
+ codeChallenge: string
+ codeChallengeMethod: string
+ authUrl: string
+ }
+ interface recordApi {
+ }
+ interface requestData {
+ /**
+ * RequestData exports cached common request data fields
+ * (query, body, logged auth state, etc.) from the provided context.
+ */
+ (c: echo.Context): (models.RequestData | undefined)
+ }
+ interface recordAuthResponse {
+ /**
+ * RecordAuthResponse generates and writes a properly formatted record
+ * auth response into the specified request context.
+ */
+ (app: core.App, c: echo.Context, authRecord: models.Record, meta: any, ...finalizers: ((token: string) => void)[]): void
+ }
+ interface enrichRecord {
+ /**
+  * EnrichRecord parses the request context and enriches the provided record:
+  * ```
+  * - expands relations (if defaultExpands and/or ?expand query param is set)
+  * - ensures that the emails of the auth record and its expanded auth relations
+  * are visible only for the current logged admin, record owner or record with manage access
+  * ```
+  */
+ (c: echo.Context, dao: daos.Dao, record: models.Record, ...defaultExpands: string[]): void
+ }
+ interface enrichRecords {
+ /**
+  * EnrichRecords parses the request context and enriches the provided records:
+  * ```
+  * - expands relations (if defaultExpands and/or ?expand query param is set)
+  * - ensures that the emails of the auth records and their expanded auth relations
+  * are visible only for the current logged admin, record owner or record with manage access
+  * ```
+  */
+ (c: echo.Context, dao: daos.Dao, records: Array<(models.Record | undefined)>, ...defaultExpands: string[]): void
+ }
+ /**
+  * ServeConfig defines a configuration struct for apis.Serve().
+  */
+ interface ServeConfig {
+ /**
+  * ShowStartBanner indicates whether to show or hide the server start console message.
+  */
+ showStartBanner: boolean
+ /**
+  * HttpAddr is the HTTP server address to bind (eg. `127.0.0.1:80`).
+  */
+ httpAddr: string
+ /**
+  * HttpsAddr is the HTTPS server address to bind (eg. `127.0.0.1:443`).
+  */
+ httpsAddr: string
+ /**
+  * AllowedOrigins is an optional list of CORS origins (default to "*").
+  */
+ allowedOrigins: Array<string>
+ }
+ interface serve {
+ /**
+ * Serve starts a new app web server.
+ */
+ (app: core.App, config: ServeConfig): void
+ }
+ interface migrationsConnection {
+ db?: dbx.DB
+ migrationsList: migrate.MigrationsList
+ }
+ interface settingsApi {
+ }
+}
+
+/**
+ * Package io provides basic interfaces to I/O primitives.
+ * Its primary job is to wrap existing implementations of such primitives,
+ * such as those in package os, into shared public interfaces that
+ * abstract the functionality, plus some other related primitives.
+ *
+ * Because these interfaces and primitives wrap lower-level operations with
+ * various implementations, unless otherwise informed clients should not
+ * assume they are safe for parallel execution.
+ */
+namespace io {
+ /**
+  * ReadSeekCloser is the interface that groups the basic Read, Seek and Close
+  * methods.
+  */
+ interface ReadSeekCloser {
+  // NOTE(review): the generated stub omits the Read/Seek/Close member
+  // signatures named in the doc comment — confirm against the generator output.
+ }
+}
+
+/**
+ * Package time provides functionality for measuring and displaying time.
+ *
+ * The calendrical calculations always assume a Gregorian calendar, with
+ * no leap seconds.
+ *
+ * # Monotonic Clocks
+ *
+ * Operating systems provide both a “wall clock,” which is subject to
+ * changes for clock synchronization, and a “monotonic clock,” which is
+ * not. The general rule is that the wall clock is for telling time and
+ * the monotonic clock is for measuring time. Rather than split the API,
+ * in this package the Time returned by time.Now contains both a wall
+ * clock reading and a monotonic clock reading; later time-telling
+ * operations use the wall clock reading, but later time-measuring
+ * operations, specifically comparisons and subtractions, use the
+ * monotonic clock reading.
+ *
+ * For example, this code always computes a positive elapsed time of
+ * approximately 20 milliseconds, even if the wall clock is changed during
+ * the operation being timed:
+ *
+ * ```
+ * start := time.Now()
+ * ... operation that takes 20 milliseconds ...
+ * t := time.Now()
+ * elapsed := t.Sub(start)
+ * ```
+ *
+ * Other idioms, such as time.Since(start), time.Until(deadline), and
+ * time.Now().Before(deadline), are similarly robust against wall clock
+ * resets.
+ *
+ * The rest of this section gives the precise details of how operations
+ * use monotonic clocks, but understanding those details is not required
+ * to use this package.
+ *
+ * The Time returned by time.Now contains a monotonic clock reading.
+ * If Time t has a monotonic clock reading, t.Add adds the same duration to
+ * both the wall clock and monotonic clock readings to compute the result.
+ * Because t.AddDate(y, m, d), t.Round(d), and t.Truncate(d) are wall time
+ * computations, they always strip any monotonic clock reading from their results.
+ * Because t.In, t.Local, and t.UTC are used for their effect on the interpretation
+ * of the wall time, they also strip any monotonic clock reading from their results.
+ * The canonical way to strip a monotonic clock reading is to use t = t.Round(0).
+ *
+ * If Times t and u both contain monotonic clock readings, the operations
+ * t.After(u), t.Before(u), t.Equal(u), and t.Sub(u) are carried out
+ * using the monotonic clock readings alone, ignoring the wall clock
+ * readings. If either t or u contains no monotonic clock reading, these
+ * operations fall back to using the wall clock readings.
+ *
+ * On some systems the monotonic clock will stop if the computer goes to sleep.
+ * On such a system, t.Sub(u) may not accurately reflect the actual
+ * time that passed between t and u.
+ *
+ * Because the monotonic clock reading has no meaning outside
+ * the current process, the serialized forms generated by t.GobEncode,
+ * t.MarshalBinary, t.MarshalJSON, and t.MarshalText omit the monotonic
+ * clock reading, and t.Format provides no format for it. Similarly, the
+ * constructors time.Date, time.Parse, time.ParseInLocation, and time.Unix,
+ * as well as the unmarshalers t.GobDecode, t.UnmarshalBinary,
+ * t.UnmarshalJSON, and t.UnmarshalText always create times with
+ * no monotonic clock reading.
+ *
+ * The monotonic clock reading exists only in Time values. It is not
+ * a part of Duration values or the Unix times returned by t.Unix and
+ * friends.
+ *
+ * Note that the Go == operator compares not just the time instant but
+ * also the Location and the monotonic clock reading. See the
+ * documentation for the Time type for a discussion of equality
+ * testing for Time values.
+ *
+ * For debugging, the result of t.String does include the monotonic
+ * clock reading if present. If t != u because of different monotonic clock readings,
+ * that difference will be visible when printing t.String() and u.String().
+ */
+namespace time {
+ /**
+  * A Duration holds the elapsed time between two instants as an
+  * int64 nanosecond count, which caps the representable span at
+  * roughly 290 years.
+  */
+ interface Duration extends Number {
+  /**
+   * String renders the duration in the form "72h3m0.5s", dropping
+   * leading zero units. Durations under one second use a smaller
+   * unit (milli-, micro-, or nanoseconds) so the leading digit is
+   * non-zero; the zero duration formats as 0s.
+   */
+  string(): string
+  /**
+   * Nanoseconds reports the duration as an integer nanosecond count.
+   */
+  nanoseconds(): number
+  /**
+   * Microseconds reports the duration as an integer microsecond count.
+   */
+  microseconds(): number
+  /**
+   * Milliseconds reports the duration as an integer millisecond count.
+   */
+  milliseconds(): number
+  /**
+   * Seconds reports the duration as a floating point number of seconds.
+   */
+  seconds(): number
+  /**
+   * Minutes reports the duration as a floating point number of minutes.
+   */
+  minutes(): number
+  /**
+   * Hours reports the duration as a floating point number of hours.
+   */
+  hours(): number
+  /**
+   * Truncate rounds d toward zero to a multiple of m;
+   * when m <= 0 it returns d unchanged.
+   */
+  truncate(m: Duration): Duration
+  /**
+   * Round rounds d to the nearest multiple of m, with halfway values
+   * rounded away from zero. Results that would overflow a Duration
+   * clamp to the maximum (or minimum) value; when m <= 0 it returns
+   * d unchanged.
+   */
+  round(m: Duration): Duration
+  /**
+   * Abs returns the absolute value of d, mapping the special case
+   * math.MinInt64 to math.MaxInt64.
+   */
+  abs(): Duration
+ }
+}
+
+/**
+ * Package fs defines basic interfaces to a file system.
+ * A file system can be provided by the host operating system
+ * but also by other packages.
+ */
+namespace fs {
+ /**
+  * An FS provides access to a hierarchical file system.
+  *
+  * The FS interface is the minimum implementation required of the file system.
+  * A file system may implement additional interfaces,
+  * such as ReadFileFS, to provide additional or optimized functionality.
+  */
+ interface FS {
+ /**
+  * Open opens the named file.
+  *
+  * When Open returns an error, it should be of type *PathError
+  * with the Op field set to "open", the Path field set to name,
+  * and the Err field describing the problem.
+  *
+  * Open should reject attempts to open names that do not satisfy
+  * ValidPath(name), returning a *PathError with Err set to
+  * ErrInvalid or ErrNotExist.
+  */
+ // NOTE(review): File is declared in another chunk of this generated file —
+ // TODO confirm the reference resolves after regeneration.
+ open(name: string): File
+ }
+}
+
+/**
+ * Package bytes implements functions for the manipulation of byte slices.
+ * It is analogous to the facilities of the strings package.
+ */
+namespace bytes {
+ /**
+  * A Reader serves a byte slice through the io.Reader, io.ReaderAt,
+  * io.WriterTo, io.Seeker, io.ByteScanner, and io.RuneScanner
+  * interfaces. Unlike a Buffer, a Reader is read-only and supports
+  * seeking, and its zero value behaves like a Reader over an empty
+  * slice.
+  */
+ interface Reader {
+  /**
+   * Len reports the number of bytes of the unread portion of the
+   * slice.
+   */
+  len(): number
+  /**
+   * Size reports the original length of the underlying byte slice,
+   * i.e. the number of bytes available for reading via ReadAt.
+   * Only Reset affects the result.
+   */
+  size(): number
+  /**
+   * Read implements the io.Reader interface.
+   */
+  read(b: string): number
+  /**
+   * ReadAt implements the io.ReaderAt interface.
+   */
+  readAt(b: string, off: number): number
+  /**
+   * ReadByte implements the io.ByteReader interface.
+   */
+  readByte(): string
+  /**
+   * UnreadByte complements ReadByte in implementing the io.ByteScanner interface.
+   */
+  unreadByte(): void
+  /**
+   * ReadRune implements the io.RuneReader interface.
+   */
+  readRune(): [string, number]
+  /**
+   * UnreadRune complements ReadRune in implementing the io.RuneScanner interface.
+   */
+  unreadRune(): void
+  /**
+   * Seek implements the io.Seeker interface.
+   */
+  seek(offset: number, whence: number): number
+  /**
+   * WriteTo implements the io.WriterTo interface.
+   */
+  writeTo(w: io.Writer): number
+  /**
+   * Reset resets the Reader to be reading from b.
+   */
+  reset(b: string): void
+ }
+}
+
+/**
+ * Package context defines the Context type, which carries deadlines,
+ * cancellation signals, and other request-scoped values across API boundaries
+ * and between processes.
+ *
+ * Incoming requests to a server should create a Context, and outgoing
+ * calls to servers should accept a Context. The chain of function
+ * calls between them must propagate the Context, optionally replacing
+ * it with a derived Context created using WithCancel, WithDeadline,
+ * WithTimeout, or WithValue. When a Context is canceled, all
+ * Contexts derived from it are also canceled.
+ *
+ * The WithCancel, WithDeadline, and WithTimeout functions take a
+ * Context (the parent) and return a derived Context (the child) and a
+ * CancelFunc. Calling the CancelFunc cancels the child and its
+ * children, removes the parent's reference to the child, and stops
+ * any associated timers. Failing to call the CancelFunc leaks the
+ * child and its children until the parent is canceled or the timer
+ * fires. The go vet tool checks that CancelFuncs are used on all
+ * control-flow paths.
+ *
+ * Programs that use Contexts should follow these rules to keep interfaces
+ * consistent across packages and enable static analysis tools to check context
+ * propagation:
+ *
+ * Do not store Contexts inside a struct type; instead, pass a Context
+ * explicitly to each function that needs it. The Context should be the first
+ * parameter, typically named ctx:
+ *
+ * ```
+ * func DoSomething(ctx context.Context, arg Arg) error {
+ * // ... use ctx ...
+ * }
+ * ```
+ *
+ * Do not pass a nil Context, even if a function permits it. Pass context.TODO
+ * if you are unsure about which Context to use.
+ *
+ * Use context Values only for request-scoped data that transits processes and
+ * APIs, not for passing optional parameters to functions.
+ *
+ * The same Context may be passed to functions running in different goroutines;
+ * Contexts are safe for simultaneous use by multiple goroutines.
+ *
+ * See https://blog.golang.org/context for example code for a server that uses
+ * Contexts.
+ */
+namespace context {
+ /**
+  * A Context carries a deadline, a cancellation signal, and other values across
+  * API boundaries.
+  *
+  * Context's methods may be called by multiple goroutines simultaneously.
+  */
+ interface Context {
+ // NOTE(review): generated from Go's context.Context; return types such as
+ // done(): undefined and err(): void reflect the Go-to-TS mapping of channels
+ // and errors — confirm against the tygoja generator before relying on them.
+ /**
+  * Deadline returns the time when work done on behalf of this context
+  * should be canceled. Deadline returns ok==false when no deadline is
+  * set. Successive calls to Deadline return the same results.
+  */
+ deadline(): [time.Time, boolean]
+ /**
+  * Done returns a channel that's closed when work done on behalf of this
+  * context should be canceled. Done may return nil if this context can
+  * never be canceled. Successive calls to Done return the same value.
+  * The close of the Done channel may happen asynchronously,
+  * after the cancel function returns.
+  *
+  * WithCancel arranges for Done to be closed when cancel is called;
+  * WithDeadline arranges for Done to be closed when the deadline
+  * expires; WithTimeout arranges for Done to be closed when the timeout
+  * elapses.
+  *
+  * Done is provided for use in select statements:
+  *
+  * // Stream generates values with DoSomething and sends them to out
+  * // until DoSomething returns an error or ctx.Done is closed.
+  * func Stream(ctx context.Context, out chan<- Value) error {
+  * for {
+  * v, err := DoSomething(ctx)
+  * if err != nil {
+  * return err
+  * }
+  * select {
+  * case <-ctx.Done():
+  * return ctx.Err()
+  * case out <- v:
+  * }
+  * }
+  * }
+  *
+  * See https://blog.golang.org/pipelines for more examples of how to use
+  * a Done channel for cancellation.
+  */
+ done(): undefined
+ /**
+  * If Done is not yet closed, Err returns nil.
+  * If Done is closed, Err returns a non-nil error explaining why:
+  * Canceled if the context was canceled
+  * or DeadlineExceeded if the context's deadline passed.
+  * After Err returns a non-nil error, successive calls to Err return the same error.
+  */
+ err(): void
+ /**
+  * Value returns the value associated with this context for key, or nil
+  * if no value is associated with key. Successive calls to Value with
+  * the same key returns the same result.
+  *
+  * Use context values only for request-scoped data that transits
+  * processes and API boundaries, not for passing optional parameters to
+  * functions.
+  *
+  * A key identifies a specific value in a Context. Functions that wish
+  * to store values in Context typically allocate a key in a global
+  * variable then use that key as the argument to context.WithValue and
+  * Context.Value. A key can be any type that supports equality;
+  * packages should define keys as an unexported type to avoid
+  * collisions.
+  *
+  * Packages that define a Context key should provide type-safe accessors
+  * for the values stored using that key:
+  *
+  * ```
+  * // Package user defines a User type that's stored in Contexts.
+  * package user
+  *
+  * import "context"
+  *
+  * // User is the type of value stored in the Contexts.
+  * type User struct {...}
+  *
+  * // key is an unexported type for keys defined in this package.
+  * // This prevents collisions with keys defined in other packages.
+  * type key int
+  *
+  * // userKey is the key for user.User values in Contexts. It is
+  * // unexported; clients use user.NewContext and user.FromContext
+  * // instead of using this key directly.
+  * var userKey key
+  *
+  * // NewContext returns a new Context that carries value u.
+  * func NewContext(ctx context.Context, u *User) context.Context {
+  * return context.WithValue(ctx, userKey, u)
+  * }
+  *
+  * // FromContext returns the User value stored in ctx, if any.
+  * func FromContext(ctx context.Context) (*User, bool) {
+  * u, ok := ctx.Value(userKey).(*User)
+  * return u, ok
+  * }
+  * ```
+  */
+ value(key: any): any
+ }
+}
+
+/**
+ * Package sql provides a generic interface around SQL (or SQL-like)
+ * databases.
+ *
+ * The sql package must be used in conjunction with a database driver.
+ * See https://golang.org/s/sqldrivers for a list of drivers.
+ *
+ * Drivers that do not support context cancellation will not return until
+ * after the query is completed.
+ *
+ * For usage examples, see the wiki page at
+ * https://golang.org/s/sqlwiki.
+ */
+namespace sql {
+ /**
+ * TxOptions holds the transaction options to be used in DB.BeginTx.
+ */
+ interface TxOptions {
+ /**
+ * Isolation is the transaction isolation level.
+ * If zero, the driver or database's default level is used.
+ */
+ isolation: IsolationLevel
+ readOnly: boolean
+ }
+ /**
+ * DB is a database handle representing a pool of zero or more
+ * underlying connections. It's safe for concurrent use by multiple
+ * goroutines.
+ *
+ * The sql package creates and frees connections automatically; it
+ * also maintains a free pool of idle connections. If the database has
+ * a concept of per-connection state, such state can be reliably observed
+ * within a transaction (Tx) or connection (Conn). Once DB.Begin is called, the
+ * returned Tx is bound to a single connection. Once Commit or
+ * Rollback is called on the transaction, that transaction's
+ * connection is returned to DB's idle connection pool. The pool size
+ * can be controlled with SetMaxIdleConns.
+ */
+ interface DB {
+ }
+ interface DB {
+ /**
+ * PingContext verifies a connection to the database is still alive,
+ * establishing a connection if necessary.
+ */
+ pingContext(ctx: context.Context): void
+ }
+ interface DB {
+ /**
+ * Ping verifies a connection to the database is still alive,
+ * establishing a connection if necessary.
+ *
+ * Ping uses context.Background internally; to specify the context, use
+ * PingContext.
+ */
+ ping(): void
+ }
+ interface DB {
+ /**
+ * Close closes the database and prevents new queries from starting.
+ * Close then waits for all queries that have started processing on the server
+ * to finish.
+ *
+ * It is rare to Close a DB, as the DB handle is meant to be
+ * long-lived and shared between many goroutines.
+ */
+ close(): void
+ }
+ interface DB {
+ /**
+ * SetMaxIdleConns sets the maximum number of connections in the idle
+ * connection pool.
+ *
+ * If MaxOpenConns is greater than 0 but less than the new MaxIdleConns,
+ * then the new MaxIdleConns will be reduced to match the MaxOpenConns limit.
+ *
+ * If n <= 0, no idle connections are retained.
+ *
+ * The default max idle connections is currently 2. This may change in
+ * a future release.
+ */
+ setMaxIdleConns(n: number): void
+ }
+ interface DB {
+ /**
+ * SetMaxOpenConns sets the maximum number of open connections to the database.
+ *
+ * If MaxIdleConns is greater than 0 and the new MaxOpenConns is less than
+ * MaxIdleConns, then MaxIdleConns will be reduced to match the new
+ * MaxOpenConns limit.
+ *
+ * If n <= 0, then there is no limit on the number of open connections.
+ * The default is 0 (unlimited).
+ */
+ setMaxOpenConns(n: number): void
+ }
+ interface DB {
+ /**
+ * SetConnMaxLifetime sets the maximum amount of time a connection may be reused.
+ *
+ * Expired connections may be closed lazily before reuse.
+ *
+ * If d <= 0, connections are not closed due to a connection's age.
+ */
+ setConnMaxLifetime(d: time.Duration): void
+ }
+ interface DB {
+ /**
+ * SetConnMaxIdleTime sets the maximum amount of time a connection may be idle.
+ *
+ * Expired connections may be closed lazily before reuse.
+ *
+ * If d <= 0, connections are not closed due to a connection's idle time.
+ */
+ setConnMaxIdleTime(d: time.Duration): void
+ }
+ interface DB {
+ /**
+ * Stats returns database statistics.
+ */
+ stats(): DBStats
+ }
+ interface DB {
+ /**
+ * PrepareContext creates a prepared statement for later queries or executions.
+ * Multiple queries or executions may be run concurrently from the
+ * returned statement.
+ * The caller must call the statement's Close method
+ * when the statement is no longer needed.
+ *
+ * The provided context is used for the preparation of the statement, not for the
+ * execution of the statement.
+ */
+ prepareContext(ctx: context.Context, query: string): (Stmt | undefined)
+ }
+ interface DB {
+ /**
+ * Prepare creates a prepared statement for later queries or executions.
+ * Multiple queries or executions may be run concurrently from the
+ * returned statement.
+ * The caller must call the statement's Close method
+ * when the statement is no longer needed.
+ *
+ * Prepare uses context.Background internally; to specify the context, use
+ * PrepareContext.
+ */
+ prepare(query: string): (Stmt | undefined)
+ }
+ interface DB {
+ /**
+ * ExecContext executes a query without returning any rows.
+ * The args are for any placeholder parameters in the query.
+ */
+ execContext(ctx: context.Context, query: string, ...args: any[]): Result
+ }
+ interface DB {
+ /**
+ * Exec executes a query without returning any rows.
+ * The args are for any placeholder parameters in the query.
+ *
+ * Exec uses context.Background internally; to specify the context, use
+ * ExecContext.
+ */
+ exec(query: string, ...args: any[]): Result
+ }
+ interface DB {
+ /**
+ * QueryContext executes a query that returns rows, typically a SELECT.
+ * The args are for any placeholder parameters in the query.
+ */
+ queryContext(ctx: context.Context, query: string, ...args: any[]): (Rows | undefined)
+ }
+ interface DB {
+ /**
+ * Query executes a query that returns rows, typically a SELECT.
+ * The args are for any placeholder parameters in the query.
+ *
+ * Query uses context.Background internally; to specify the context, use
+ * QueryContext.
+ */
+ query(query: string, ...args: any[]): (Rows | undefined)
+ }
+ interface DB {
+ /**
+ * QueryRowContext executes a query that is expected to return at most one row.
+ * QueryRowContext always returns a non-nil value. Errors are deferred until
+ * Row's Scan method is called.
+ * If the query selects no rows, the *Row's Scan will return ErrNoRows.
+ * Otherwise, the *Row's Scan scans the first selected row and discards
+ * the rest.
+ */
+ queryRowContext(ctx: context.Context, query: string, ...args: any[]): (Row | undefined)
+ }
+ interface DB {
+ /**
+ * QueryRow executes a query that is expected to return at most one row.
+ * QueryRow always returns a non-nil value. Errors are deferred until
+ * Row's Scan method is called.
+ * If the query selects no rows, the *Row's Scan will return ErrNoRows.
+ * Otherwise, the *Row's Scan scans the first selected row and discards
+ * the rest.
+ *
+ * QueryRow uses context.Background internally; to specify the context, use
+ * QueryRowContext.
+ */
+ queryRow(query: string, ...args: any[]): (Row | undefined)
+ }
+ interface DB {
+ /**
+ * BeginTx starts a transaction.
+ *
+ * The provided context is used until the transaction is committed or rolled back.
+ * If the context is canceled, the sql package will roll back
+ * the transaction. Tx.Commit will return an error if the context provided to
+ * BeginTx is canceled.
+ *
+ * The provided TxOptions is optional and may be nil if defaults should be used.
+ * If a non-default isolation level is used that the driver doesn't support,
+ * an error will be returned.
+ */
+ beginTx(ctx: context.Context, opts: TxOptions): (Tx | undefined)
+ }
+ interface DB {
+ /**
+ * Begin starts a transaction. The default isolation level is dependent on
+ * the driver.
+ *
+ * Begin uses context.Background internally; to specify the context, use
+ * BeginTx.
+ */
+ begin(): (Tx | undefined)
+ }
+ interface DB {
+ /**
+ * Driver returns the database's underlying driver.
+ */
+ driver(): driver.Driver
+ }
+ interface DB {
+ /**
+ * Conn returns a single connection by either opening a new connection
+ * or returning an existing connection from the connection pool. Conn will
+ * block until either a connection is returned or ctx is canceled.
+ * Queries run on the same Conn will be run in the same database session.
+ *
+ * Every Conn must be returned to the database pool after use by
+ * calling Conn.Close.
+ */
+ conn(ctx: context.Context): (Conn | undefined)
+ }
+ /**
+ * Tx is an in-progress database transaction.
+ *
+ * A transaction must end with a call to Commit or Rollback.
+ *
+ * After a call to Commit or Rollback, all operations on the
+ * transaction fail with ErrTxDone.
+ *
+ * The statements prepared for a transaction by calling
+ * the transaction's Prepare or Stmt methods are closed
+ * by the call to Commit or Rollback.
+ */
+ interface Tx {
+ }
+ interface Tx {
+ /**
+ * Commit commits the transaction.
+ */
+ commit(): void
+ }
+ interface Tx {
+ /**
+ * Rollback aborts the transaction.
+ */
+ rollback(): void
+ }
+ interface Tx {
+ /**
+ * PrepareContext creates a prepared statement for use within a transaction.
+ *
+ * The returned statement operates within the transaction and will be closed
+ * when the transaction has been committed or rolled back.
+ *
+ * To use an existing prepared statement on this transaction, see Tx.Stmt.
+ *
+ * The provided context will be used for the preparation of the context, not
+ * for the execution of the returned statement. The returned statement
+ * will run in the transaction context.
+ */
+ prepareContext(ctx: context.Context, query: string): (Stmt | undefined)
+ }
+ interface Tx {
+ /**
+ * Prepare creates a prepared statement for use within a transaction.
+ *
+ * The returned statement operates within the transaction and will be closed
+ * when the transaction has been committed or rolled back.
+ *
+ * To use an existing prepared statement on this transaction, see Tx.Stmt.
+ *
+ * Prepare uses context.Background internally; to specify the context, use
+ * PrepareContext.
+ */
+ prepare(query: string): (Stmt | undefined)
+ }
+ interface Tx {
+ /**
+ * StmtContext returns a transaction-specific prepared statement from
+ * an existing statement.
+ *
+ * Example:
+ *
+ * ```
+ * updateMoney, err := db.Prepare("UPDATE balance SET money=money+? WHERE id=?")
+ * ...
+ * tx, err := db.Begin()
+ * ...
+ * res, err := tx.StmtContext(ctx, updateMoney).Exec(123.45, 98293203)
+ * ```
+ *
+ * The provided context is used for the preparation of the statement, not for the
+ * execution of the statement.
+ *
+ * The returned statement operates within the transaction and will be closed
+ * when the transaction has been committed or rolled back.
+ */
+ stmtContext(ctx: context.Context, stmt: Stmt): (Stmt | undefined)
+ }
+ interface Tx {
+ /**
+ * Stmt returns a transaction-specific prepared statement from
+ * an existing statement.
+ *
+ * Example:
+ *
+ * ```
+ * updateMoney, err := db.Prepare("UPDATE balance SET money=money+? WHERE id=?")
+ * ...
+ * tx, err := db.Begin()
+ * ...
+ * res, err := tx.Stmt(updateMoney).Exec(123.45, 98293203)
+ * ```
+ *
+ * The returned statement operates within the transaction and will be closed
+ * when the transaction has been committed or rolled back.
+ *
+ * Stmt uses context.Background internally; to specify the context, use
+ * StmtContext.
+ */
+ stmt(stmt: Stmt): (Stmt | undefined)
+ }
+ interface Tx {
+ /**
+ * ExecContext executes a query that doesn't return rows.
+ * For example: an INSERT and UPDATE.
+ */
+ execContext(ctx: context.Context, query: string, ...args: any[]): Result
+ }
+ interface Tx {
+ /**
+ * Exec executes a query that doesn't return rows.
+ * For example: an INSERT and UPDATE.
+ *
+ * Exec uses context.Background internally; to specify the context, use
+ * ExecContext.
+ */
+ exec(query: string, ...args: any[]): Result
+ }
+ interface Tx {
+ /**
+ * QueryContext executes a query that returns rows, typically a SELECT.
+ */
+ queryContext(ctx: context.Context, query: string, ...args: any[]): (Rows | undefined)
+ }
+ interface Tx {
+ /**
+ * Query executes a query that returns rows, typically a SELECT.
+ *
+ * Query uses context.Background internally; to specify the context, use
+ * QueryContext.
+ */
+ query(query: string, ...args: any[]): (Rows | undefined)
+ }
+ interface Tx {
+ /**
+ * QueryRowContext executes a query that is expected to return at most one row.
+ * QueryRowContext always returns a non-nil value. Errors are deferred until
+ * Row's Scan method is called.
+ * If the query selects no rows, the *Row's Scan will return ErrNoRows.
+ * Otherwise, the *Row's Scan scans the first selected row and discards
+ * the rest.
+ */
+ queryRowContext(ctx: context.Context, query: string, ...args: any[]): (Row | undefined)
+ }
+ interface Tx {
+ /**
+ * QueryRow executes a query that is expected to return at most one row.
+ * QueryRow always returns a non-nil value. Errors are deferred until
+ * Row's Scan method is called.
+ * If the query selects no rows, the *Row's Scan will return ErrNoRows.
+ * Otherwise, the *Row's Scan scans the first selected row and discards
+ * the rest.
+ *
+ * QueryRow uses context.Background internally; to specify the context, use
+ * QueryRowContext.
+ */
+ queryRow(query: string, ...args: any[]): (Row | undefined)
+ }
+ /**
+ * Stmt is a prepared statement.
+ * A Stmt is safe for concurrent use by multiple goroutines.
+ *
+ * If a Stmt is prepared on a Tx or Conn, it will be bound to a single
+ * underlying connection forever. If the Tx or Conn closes, the Stmt will
+ * become unusable and all operations will return an error.
+ * If a Stmt is prepared on a DB, it will remain usable for the lifetime of the
+ * DB. When the Stmt needs to execute on a new underlying connection, it will
+ * prepare itself on the new connection automatically.
+ */
+ interface Stmt {
+ }
+ interface Stmt {
+ /**
+ * ExecContext executes a prepared statement with the given arguments and
+ * returns a Result summarizing the effect of the statement.
+ */
+ execContext(ctx: context.Context, ...args: any[]): Result
+ }
+ interface Stmt {
+ /**
+ * Exec executes a prepared statement with the given arguments and
+ * returns a Result summarizing the effect of the statement.
+ *
+ * Exec uses context.Background internally; to specify the context, use
+ * ExecContext.
+ */
+ exec(...args: any[]): Result
+ }
+ interface Stmt {
+ /**
+ * QueryContext executes a prepared query statement with the given arguments
+ * and returns the query results as a *Rows.
+ */
+ queryContext(ctx: context.Context, ...args: any[]): (Rows | undefined)
+ }
+ interface Stmt {
+ /**
+ * Query executes a prepared query statement with the given arguments
+ * and returns the query results as a *Rows.
+ *
+ * Query uses context.Background internally; to specify the context, use
+ * QueryContext.
+ */
+ query(...args: any[]): (Rows | undefined)
+ }
+ interface Stmt {
+ /**
+ * QueryRowContext executes a prepared query statement with the given arguments.
+ * If an error occurs during the execution of the statement, that error will
+ * be returned by a call to Scan on the returned *Row, which is always non-nil.
+ * If the query selects no rows, the *Row's Scan will return ErrNoRows.
+ * Otherwise, the *Row's Scan scans the first selected row and discards
+ * the rest.
+ */
+ queryRowContext(ctx: context.Context, ...args: any[]): (Row | undefined)
+ }
+ interface Stmt {
+ /**
+ * QueryRow executes a prepared query statement with the given arguments.
+ * If an error occurs during the execution of the statement, that error will
+ * be returned by a call to Scan on the returned *Row, which is always non-nil.
+ * If the query selects no rows, the *Row's Scan will return ErrNoRows.
+ * Otherwise, the *Row's Scan scans the first selected row and discards
+ * the rest.
+ *
+ * Example usage:
+ *
+ * ```
+ * var name string
+ * err := nameByUseridStmt.QueryRow(id).Scan(&name)
+ * ```
+ *
+ * QueryRow uses context.Background internally; to specify the context, use
+ * QueryRowContext.
+ */
+ queryRow(...args: any[]): (Row | undefined)
+ }
+ interface Stmt {
+ /**
+ * Close closes the statement.
+ */
+ close(): void
+ }
+ /**
+ * Rows is the result of a query. Its cursor starts before the first row
+ * of the result set. Use Next to advance from row to row.
+ */
+ interface Rows {
+ }
+ interface Rows {
+ /**
+ * Next prepares the next result row for reading with the Scan method. It
+ * returns true on success, or false if there is no next result row or an error
+ * happened while preparing it. Err should be consulted to distinguish between
+ * the two cases.
+ *
+ * Every call to Scan, even the first one, must be preceded by a call to Next.
+ */
+ next(): boolean
+ }
+ interface Rows {
+ /**
+ * NextResultSet prepares the next result set for reading. It reports whether
+ 	 * there are further result sets, or false if there is no further result set
+ * or if there is an error advancing to it. The Err method should be consulted
+ * to distinguish between the two cases.
+ *
+ * After calling NextResultSet, the Next method should always be called before
+ * scanning. If there are further result sets they may not have rows in the result
+ * set.
+ */
+ nextResultSet(): boolean
+ }
+ interface Rows {
+ /**
+ * Err returns the error, if any, that was encountered during iteration.
+ * Err may be called after an explicit or implicit Close.
+ */
+ err(): void
+ }
+ interface Rows {
+ /**
+ * Columns returns the column names.
+ * Columns returns an error if the rows are closed.
+ */
+ 		columns(): Array<string>
+ }
+ interface Rows {
+ /**
+ * ColumnTypes returns column information such as column type, length,
+ * and nullable. Some information may not be available from some drivers.
+ */
+ columnTypes(): Array<(ColumnType | undefined)>
+ }
+ interface Rows {
+ /**
+ * Scan copies the columns in the current row into the values pointed
+ * at by dest. The number of values in dest must be the same as the
+ * number of columns in Rows.
+ *
+ * Scan converts columns read from the database into the following
+ * common Go types and special types provided by the sql package:
+ *
+ * ```
+ * *string
+ * *[]byte
+ * *int, *int8, *int16, *int32, *int64
+ * *uint, *uint8, *uint16, *uint32, *uint64
+ * *bool
+ * *float32, *float64
+ * *interface{}
+ * *RawBytes
+ * *Rows (cursor value)
+ * any type implementing Scanner (see Scanner docs)
+ * ```
+ *
+ * In the most simple case, if the type of the value from the source
+ * column is an integer, bool or string type T and dest is of type *T,
+ * Scan simply assigns the value through the pointer.
+ *
+ * Scan also converts between string and numeric types, as long as no
+ * information would be lost. While Scan stringifies all numbers
+ * scanned from numeric database columns into *string, scans into
+ * numeric types are checked for overflow. For example, a float64 with
+ * value 300 or a string with value "300" can scan into a uint16, but
+ * not into a uint8, though float64(255) or "255" can scan into a
+ * uint8. One exception is that scans of some float64 numbers to
+ * strings may lose information when stringifying. In general, scan
+ * floating point columns into *float64.
+ *
+ * If a dest argument has type *[]byte, Scan saves in that argument a
+ * copy of the corresponding data. The copy is owned by the caller and
+ * can be modified and held indefinitely. The copy can be avoided by
+ * using an argument of type *RawBytes instead; see the documentation
+ * for RawBytes for restrictions on its use.
+ *
+ * If an argument has type *interface{}, Scan copies the value
+ * provided by the underlying driver without conversion. When scanning
+ * from a source value of type []byte to *interface{}, a copy of the
+ * slice is made and the caller owns the result.
+ *
+ * Source values of type time.Time may be scanned into values of type
+ * *time.Time, *interface{}, *string, or *[]byte. When converting to
+ * the latter two, time.RFC3339Nano is used.
+ *
+ * Source values of type bool may be scanned into types *bool,
+ * *interface{}, *string, *[]byte, or *RawBytes.
+ *
+ * For scanning into *bool, the source may be true, false, 1, 0, or
+ * string inputs parseable by strconv.ParseBool.
+ *
+ * Scan can also convert a cursor returned from a query, such as
+ * "select cursor(select * from my_table) from dual", into a
+ * *Rows value that can itself be scanned from. The parent
+ * select query will close any cursor *Rows if the parent *Rows is closed.
+ *
+ * If any of the first arguments implementing Scanner returns an error,
+ * that error will be wrapped in the returned error
+ */
+ scan(...dest: any[]): void
+ }
+ interface Rows {
+ /**
+ * Close closes the Rows, preventing further enumeration. If Next is called
+ * and returns false and there are no further result sets,
+ * the Rows are closed automatically and it will suffice to check the
+ * result of Err. Close is idempotent and does not affect the result of Err.
+ */
+ close(): void
+ }
+ /**
+ * A Result summarizes an executed SQL command.
+ */
+ interface Result {
+ /**
+ * LastInsertId returns the integer generated by the database
+ * in response to a command. Typically this will be from an
+ * "auto increment" column when inserting a new row. Not all
+ * databases support this feature, and the syntax of such
+ * statements varies.
+ */
+ lastInsertId(): number
+ /**
+ * RowsAffected returns the number of rows affected by an
+ * update, insert, or delete. Not every database or database
+ * driver may support this.
+ */
+ rowsAffected(): number
+ }
+}
+
+namespace migrate {
+ /**
+ * MigrationsList defines a list with migration definitions
+ */
+ interface MigrationsList {
+ }
+ interface MigrationsList {
+ /**
+ * Item returns a single migration from the list by its index.
+ */
+ item(index: number): (Migration | undefined)
+ }
+ interface MigrationsList {
+ /**
+ * Items returns the internal migrations list slice.
+ */
+ items(): Array<(Migration | undefined)>
+ }
+ interface MigrationsList {
+ /**
+ * Register adds new migration definition to the list.
+ *
+ * If `optFilename` is not provided, it will try to get the name from its .go file.
+ *
+ * The list will be sorted automatically based on the migrations file name.
+ */
+ register(up: (db: dbx.Builder) => void, down: (db: dbx.Builder) => void, ...optFilename: string[]): void
+ }
+}
+
+/**
+ * Package multipart implements MIME multipart parsing, as defined in RFC
+ * 2046.
+ *
+ * The implementation is sufficient for HTTP (RFC 2388) and the multipart
+ * bodies generated by popular browsers.
+ */
+namespace multipart {
+ /**
+ * A FileHeader describes a file part of a multipart request.
+ */
+ interface FileHeader {
+ filename: string
+ header: textproto.MIMEHeader
+ size: number
+ }
+ interface FileHeader {
+ /**
+ * Open opens and returns the FileHeader's associated File.
+ */
+ open(): File
+ }
+}
+
+/**
+ * Package http provides HTTP client and server implementations.
+ *
+ * Get, Head, Post, and PostForm make HTTP (or HTTPS) requests:
+ *
+ * ```
+ * resp, err := http.Get("http://example.com/")
+ * ...
+ * resp, err := http.Post("http://example.com/upload", "image/jpeg", &buf)
+ * ...
+ * resp, err := http.PostForm("http://example.com/form",
+ * url.Values{"key": {"Value"}, "id": {"123"}})
+ * ```
+ *
+ * The client must close the response body when finished with it:
+ *
+ * ```
+ * resp, err := http.Get("http://example.com/")
+ * if err != nil {
+ * // handle error
+ * }
+ * defer resp.Body.Close()
+ * body, err := io.ReadAll(resp.Body)
+ * // ...
+ * ```
+ *
+ * For control over HTTP client headers, redirect policy, and other
+ * settings, create a Client:
+ *
+ * ```
+ * client := &http.Client{
+ * CheckRedirect: redirectPolicyFunc,
+ * }
+ *
+ * resp, err := client.Get("http://example.com")
+ * // ...
+ *
+ * req, err := http.NewRequest("GET", "http://example.com", nil)
+ * // ...
+ * req.Header.Add("If-None-Match", `W/"wyzzy"`)
+ * resp, err := client.Do(req)
+ * // ...
+ * ```
+ *
+ * For control over proxies, TLS configuration, keep-alives,
+ * compression, and other settings, create a Transport:
+ *
+ * ```
+ * tr := &http.Transport{
+ * MaxIdleConns: 10,
+ * IdleConnTimeout: 30 * time.Second,
+ * DisableCompression: true,
+ * }
+ * client := &http.Client{Transport: tr}
+ * resp, err := client.Get("https://example.com")
+ * ```
+ *
+ * Clients and Transports are safe for concurrent use by multiple
+ * goroutines and for efficiency should only be created once and re-used.
+ *
+ * ListenAndServe starts an HTTP server with a given address and handler.
+ * The handler is usually nil, which means to use DefaultServeMux.
+ * Handle and HandleFunc add handlers to DefaultServeMux:
+ *
+ * ```
+ * http.Handle("/foo", fooHandler)
+ *
+ * http.HandleFunc("/bar", func(w http.ResponseWriter, r *http.Request) {
+ * fmt.Fprintf(w, "Hello, %q", html.EscapeString(r.URL.Path))
+ * })
+ *
+ * log.Fatal(http.ListenAndServe(":8080", nil))
+ * ```
+ *
+ * More control over the server's behavior is available by creating a
+ * custom Server:
+ *
+ * ```
+ * s := &http.Server{
+ * Addr: ":8080",
+ * Handler: myHandler,
+ * ReadTimeout: 10 * time.Second,
+ * WriteTimeout: 10 * time.Second,
+ * MaxHeaderBytes: 1 << 20,
+ * }
+ * log.Fatal(s.ListenAndServe())
+ * ```
+ *
+ * Starting with Go 1.6, the http package has transparent support for the
+ * HTTP/2 protocol when using HTTPS. Programs that must disable HTTP/2
+ * can do so by setting Transport.TLSNextProto (for clients) or
+ * Server.TLSNextProto (for servers) to a non-nil, empty
+ * map. Alternatively, the following GODEBUG environment variables are
+ * currently supported:
+ *
+ * ```
+ * GODEBUG=http2client=0 # disable HTTP/2 client support
+ * GODEBUG=http2server=0 # disable HTTP/2 server support
+ * GODEBUG=http2debug=1 # enable verbose HTTP/2 debug logs
+ * GODEBUG=http2debug=2 # ... even more verbose, with frame dumps
+ * ```
+ *
+ * The GODEBUG variables are not covered by Go's API compatibility
+ * promise. Please report any issues before disabling HTTP/2
+ * support: https://golang.org/s/http2bug
+ *
+ * The http package's Transport and Server both automatically enable
+ * HTTP/2 support for simple configurations. To enable HTTP/2 for more
+ * complex configurations, to use lower-level HTTP/2 features, or to use
+ * a newer version of Go's http2 package, import "golang.org/x/net/http2"
+ * directly and use its ConfigureTransport and/or ConfigureServer
+ * functions. Manually configuring HTTP/2 via the golang.org/x/net/http2
+ * package takes precedence over the net/http package's built-in HTTP/2
+ * support.
+ */
+namespace http {
+ // @ts-ignore
+ import mathrand = rand
+ // @ts-ignore
+ import urlpkg = url
+ /**
+ * A Request represents an HTTP request received by a server
+ * or to be sent by a client.
+ *
+ * The field semantics differ slightly between client and server
+ * usage. In addition to the notes on the fields below, see the
+ * documentation for Request.Write and RoundTripper.
+ */
+ interface Request {
+ /**
+ * Method specifies the HTTP method (GET, POST, PUT, etc.).
+ * For client requests, an empty string means GET.
+ *
+ * Go's HTTP client does not support sending a request with
+ * the CONNECT method. See the documentation on Transport for
+ * details.
+ */
+ method: string
+ /**
+ * URL specifies either the URI being requested (for server
+ * requests) or the URL to access (for client requests).
+ *
+ * For server requests, the URL is parsed from the URI
+ * supplied on the Request-Line as stored in RequestURI. For
+ * most requests, fields other than Path and RawQuery will be
+ * empty. (See RFC 7230, Section 5.3)
+ *
+ * For client requests, the URL's Host specifies the server to
+ * connect to, while the Request's Host field optionally
+ * specifies the Host header value to send in the HTTP
+ * request.
+ */
+ url?: url.URL
+ /**
+ * The protocol version for incoming server requests.
+ *
+ * For client requests, these fields are ignored. The HTTP
+ * client code always uses either HTTP/1.1 or HTTP/2.
+ * See the docs on Transport for details.
+ */
+ proto: string // "HTTP/1.0"
+ protoMajor: number // 1
+ protoMinor: number // 0
+ /**
+ * Header contains the request header fields either received
+ * by the server or to be sent by the client.
+ *
+ * If a server received a request with header lines,
+ *
+ * ```
+ * Host: example.com
+ * accept-encoding: gzip, deflate
+ * Accept-Language: en-us
+ * fOO: Bar
+ * foo: two
+ * ```
+ *
+ * then
+ *
+ * ```
+ * Header = map[string][]string{
+ * "Accept-Encoding": {"gzip, deflate"},
+ * "Accept-Language": {"en-us"},
+ * "Foo": {"Bar", "two"},
+ * }
+ * ```
+ *
+ * For incoming requests, the Host header is promoted to the
+ * Request.Host field and removed from the Header map.
+ *
+ * HTTP defines that header names are case-insensitive. The
+ * request parser implements this by using CanonicalHeaderKey,
+ * making the first character and any characters following a
+ * hyphen uppercase and the rest lowercase.
+ *
+ * For client requests, certain headers such as Content-Length
+ * and Connection are automatically written when needed and
+ * values in Header may be ignored. See the documentation
+ * for the Request.Write method.
+ */
+ header: Header
+ /**
+ * Body is the request's body.
+ *
+ * For client requests, a nil body means the request has no
+ * body, such as a GET request. The HTTP Client's Transport
+ * is responsible for calling the Close method.
+ *
+ * For server requests, the Request Body is always non-nil
+ * but will return EOF immediately when no body is present.
+ * The Server will close the request body. The ServeHTTP
+ * Handler does not need to.
+ *
+ * Body must allow Read to be called concurrently with Close.
+ * In particular, calling Close should unblock a Read waiting
+ * for input.
+ */
+ body: io.ReadCloser
+ /**
+ * GetBody defines an optional func to return a new copy of
+ * Body. It is used for client requests when a redirect requires
+ * reading the body more than once. Use of GetBody still
+ * requires setting Body.
+ *
+ * For server requests, it is unused.
+ */
+ getBody: () => io.ReadCloser
+ /**
+ * ContentLength records the length of the associated content.
+ * The value -1 indicates that the length is unknown.
+ * Values >= 0 indicate that the given number of bytes may
+ * be read from Body.
+ *
+ * For client requests, a value of 0 with a non-nil Body is
+ * also treated as unknown.
+ */
+ contentLength: number
+ /**
+ * TransferEncoding lists the transfer encodings from outermost to
+ * innermost. An empty list denotes the "identity" encoding.
+ * TransferEncoding can usually be ignored; chunked encoding is
+ * automatically added and removed as necessary when sending and
+ * receiving requests.
+ */
+ 		transferEncoding: Array<string>
+ /**
+ * Close indicates whether to close the connection after
+ * replying to this request (for servers) or after sending this
+ * request and reading its response (for clients).
+ *
+ * For server requests, the HTTP server handles this automatically
+ * and this field is not needed by Handlers.
+ *
+ * For client requests, setting this field prevents re-use of
+ * TCP connections between requests to the same hosts, as if
+ * Transport.DisableKeepAlives were set.
+ */
+ close: boolean
+ /**
+ * For server requests, Host specifies the host on which the
+ * URL is sought. For HTTP/1 (per RFC 7230, section 5.4), this
+ * is either the value of the "Host" header or the host name
+ * given in the URL itself. For HTTP/2, it is the value of the
+ * ":authority" pseudo-header field.
+ * It may be of the form "host:port". For international domain
+ * names, Host may be in Punycode or Unicode form. Use
+ * golang.org/x/net/idna to convert it to either format if
+ * needed.
+ * To prevent DNS rebinding attacks, server Handlers should
+ * validate that the Host header has a value for which the
+ * Handler considers itself authoritative. The included
+ * ServeMux supports patterns registered to particular host
+ * names and thus protects its registered Handlers.
+ *
+ * For client requests, Host optionally overrides the Host
+ * header to send. If empty, the Request.Write method uses
+ * the value of URL.Host. Host may contain an international
+ * domain name.
+ */
+ host: string
+ /**
+ * Form contains the parsed form data, including both the URL
+ * field's query parameters and the PATCH, POST, or PUT form data.
+ * This field is only available after ParseForm is called.
+ * The HTTP client ignores Form and uses Body instead.
+ */
+ form: url.Values
+ /**
+ * PostForm contains the parsed form data from PATCH, POST
+ * or PUT body parameters.
+ *
+ * This field is only available after ParseForm is called.
+ * The HTTP client ignores PostForm and uses Body instead.
+ */
+ postForm: url.Values
+ /**
+ * MultipartForm is the parsed multipart form, including file uploads.
+ * This field is only available after ParseMultipartForm is called.
+ * The HTTP client ignores MultipartForm and uses Body instead.
+ */
+ multipartForm?: multipart.Form
+ /**
+ * Trailer specifies additional headers that are sent after the request
+ * body.
+ *
+ * For server requests, the Trailer map initially contains only the
+ * trailer keys, with nil values. (The client declares which trailers it
+ * will later send.) While the handler is reading from Body, it must
+ * not reference Trailer. After reading from Body returns EOF, Trailer
+ * can be read again and will contain non-nil values, if they were sent
+ * by the client.
+ *
+ * For client requests, Trailer must be initialized to a map containing
+ * the trailer keys to later send. The values may be nil or their final
+ * values. The ContentLength must be 0 or -1, to send a chunked request.
+ * After the HTTP request is sent the map values can be updated while
+ * the request body is read. Once the body returns EOF, the caller must
+ * not mutate Trailer.
+ *
+ * Few HTTP clients, servers, or proxies support HTTP trailers.
+ */
+ trailer: Header
+ /**
+ * RemoteAddr allows HTTP servers and other software to record
+ * the network address that sent the request, usually for
+ * logging. This field is not filled in by ReadRequest and
+ * has no defined format. The HTTP server in this package
+ * sets RemoteAddr to an "IP:port" address before invoking a
+ * handler.
+ * This field is ignored by the HTTP client.
+ */
+ remoteAddr: string
+ /**
+ * RequestURI is the unmodified request-target of the
+ * Request-Line (RFC 7230, Section 3.1.1) as sent by the client
+ * to a server. Usually the URL field should be used instead.
+ * It is an error to set this field in an HTTP client request.
+ */
+ requestURI: string
+ /**
+ * TLS allows HTTP servers and other software to record
+ * information about the TLS connection on which the request
+ * was received. This field is not filled in by ReadRequest.
+ * The HTTP server in this package sets the field for
+ * TLS-enabled connections before invoking a handler;
+ * otherwise it leaves the field nil.
+ * This field is ignored by the HTTP client.
+ */
+ tls?: tls.ConnectionState
+ /**
+ * Cancel is an optional channel whose closure indicates that the client
+ * request should be regarded as canceled. Not all implementations of
+ * RoundTripper may support Cancel.
+ *
+ * For server requests, this field is not applicable.
+ *
+ * Deprecated: Set the Request's context with NewRequestWithContext
+ * instead. If a Request's Cancel field and context are both
+ * set, it is undefined whether Cancel is respected.
+ */
+ cancel: undefined
+ /**
+ * Response is the redirect response which caused this request
+ * to be created. This field is only populated during client
+ * redirects.
+ */
+ response?: Response
+ }
+ interface Request {
+ /**
+ * Context returns the request's context. To change the context, use
+ * WithContext.
+ *
+ * The returned context is always non-nil; it defaults to the
+ * background context.
+ *
+ * For outgoing client requests, the context controls cancellation.
+ *
+ * For incoming server requests, the context is canceled when the
+ * client's connection closes, the request is canceled (with HTTP/2),
+ * or when the ServeHTTP method returns.
+ */
+ context(): context.Context
+ }
+ interface Request {
+ /**
+ * WithContext returns a shallow copy of r with its context changed
+ * to ctx. The provided ctx must be non-nil.
+ *
+ * For outgoing client request, the context controls the entire
+ * lifetime of a request and its response: obtaining a connection,
+ * sending the request, and reading the response headers and body.
+ *
+ * To create a new request with a context, use NewRequestWithContext.
+ * To change the context of a request, such as an incoming request you
+ * want to modify before sending back out, use Request.Clone. Between
+ * those two uses, it's rare to need WithContext.
+ */
+ withContext(ctx: context.Context): (Request | undefined)
+ }
+ interface Request {
+ /**
+ * Clone returns a deep copy of r with its context changed to ctx.
+ * The provided ctx must be non-nil.
+ *
+ * For an outgoing client request, the context controls the entire
+ * lifetime of a request and its response: obtaining a connection,
+ * sending the request, and reading the response headers and body.
+ */
+ clone(ctx: context.Context): (Request | undefined)
+ }
+ interface Request {
+ /**
+ * ProtoAtLeast reports whether the HTTP protocol used
+ * in the request is at least major.minor.
+ */
+ 		protoAtLeast(major: number, minor: number): boolean
+ }
+ interface Request {
+ /**
+ * UserAgent returns the client's User-Agent, if sent in the request.
+ */
+ userAgent(): string
+ }
+ interface Request {
+ /**
+ * Cookies parses and returns the HTTP cookies sent with the request.
+ */
+ cookies(): Array<(Cookie | undefined)>
+ }
+ interface Request {
+ /**
+ * Cookie returns the named cookie provided in the request or
+ * ErrNoCookie if not found.
+ * If multiple cookies match the given name, only one cookie will
+ * be returned.
+ */
+ cookie(name: string): (Cookie | undefined)
+ }
+ interface Request {
+ /**
+ * AddCookie adds a cookie to the request. Per RFC 6265 section 5.4,
+ * AddCookie does not attach more than one Cookie header field. That
+ * means all cookies, if any, are written into the same line,
+ * separated by semicolon.
+ * AddCookie only sanitizes c's name and value, and does not sanitize
+ * a Cookie header already present in the request.
+ */
+ addCookie(c: Cookie): void
+ }
+ interface Request {
+ /**
+ * Referer returns the referring URL, if sent in the request.
+ *
+ * Referer is misspelled as in the request itself, a mistake from the
+ * earliest days of HTTP. This value can also be fetched from the
+ * Header map as Header["Referer"]; the benefit of making it available
+ * as a method is that the compiler can diagnose programs that use the
+ * alternate (correct English) spelling req.Referrer() but cannot
+ * diagnose programs that use Header["Referrer"].
+ */
+ referer(): string
+ }
+ interface Request {
+ /**
+ * MultipartReader returns a MIME multipart reader if this is a
+ * multipart/form-data or a multipart/mixed POST request, else returns nil and an error.
+ * Use this function instead of ParseMultipartForm to
+ * process the request body as a stream.
+ */
+ multipartReader(): (multipart.Reader | undefined)
+ }
+ interface Request {
+ /**
+ * Write writes an HTTP/1.1 request, which is the header and body, in wire format.
+ * This method consults the following fields of the request:
+ *
+ * ```
+ * Host
+ * URL
+ * Method (defaults to "GET")
+ * Header
+ * ContentLength
+ * TransferEncoding
+ * Body
+ * ```
+ *
+ * If Body is present, Content-Length is <= 0 and TransferEncoding
+ * hasn't been set to "identity", Write adds "Transfer-Encoding:
+ * chunked" to the header. Body is closed after it is sent.
+ */
+ write(w: io.Writer): void
+ }
+ interface Request {
+ /**
+ * WriteProxy is like Write but writes the request in the form
+ * expected by an HTTP proxy. In particular, WriteProxy writes the
+ * initial Request-URI line of the request with an absolute URI, per
+ * section 5.3 of RFC 7230, including the scheme and host.
+ * In either case, WriteProxy also writes a Host header, using
+ * either r.Host or r.URL.Host.
+ */
+ writeProxy(w: io.Writer): void
+ }
+ interface Request {
+ /**
+ * BasicAuth returns the username and password provided in the request's
+ * Authorization header, if the request uses HTTP Basic Authentication.
+ * See RFC 2617, Section 2.
+ */
+ 		basicAuth(): [string, string, boolean]
+ }
+ interface Request {
+ /**
+ * SetBasicAuth sets the request's Authorization header to use HTTP
+ * Basic Authentication with the provided username and password.
+ *
+ * With HTTP Basic Authentication the provided username and password
+ * are not encrypted. It should generally only be used in an HTTPS
+ * request.
+ *
+ * The username may not contain a colon. Some protocols may impose
+ * additional requirements on pre-escaping the username and
+ * password. For instance, when used with OAuth2, both arguments must
+ * be URL encoded first with url.QueryEscape.
+ */
+ 		setBasicAuth(username: string, password: string): void
+ }
+ interface Request {
+ /**
+ * ParseForm populates r.Form and r.PostForm.
+ *
+ * For all requests, ParseForm parses the raw query from the URL and updates
+ * r.Form.
+ *
+ * For POST, PUT, and PATCH requests, it also reads the request body, parses it
+ * as a form and puts the results into both r.PostForm and r.Form. Request body
+ * parameters take precedence over URL query string values in r.Form.
+ *
+ * If the request Body's size has not already been limited by MaxBytesReader,
+ * the size is capped at 10MB.
+ *
+ * For other HTTP methods, or when the Content-Type is not
+ * application/x-www-form-urlencoded, the request Body is not read, and
+ * r.PostForm is initialized to a non-nil, empty value.
+ *
+ * ParseMultipartForm calls ParseForm automatically.
+ * ParseForm is idempotent.
+ */
+ parseForm(): void
+ }
+ interface Request {
+ /**
+ * ParseMultipartForm parses a request body as multipart/form-data.
+ * The whole request body is parsed and up to a total of maxMemory bytes of
+ * its file parts are stored in memory, with the remainder stored on
+ * disk in temporary files.
+ * ParseMultipartForm calls ParseForm if necessary.
+ * If ParseForm returns an error, ParseMultipartForm returns it but also
+ * continues parsing the request body.
+ * After one call to ParseMultipartForm, subsequent calls have no effect.
+ */
+ parseMultipartForm(maxMemory: number): void
+ }
+ interface Request {
+ /**
+ * FormValue returns the first value for the named component of the query.
+ * POST and PUT body parameters take precedence over URL query string values.
+ * FormValue calls ParseMultipartForm and ParseForm if necessary and ignores
+ * any errors returned by these functions.
+ * If key is not present, FormValue returns the empty string.
+ * To access multiple values of the same key, call ParseForm and
+ * then inspect Request.Form directly.
+ */
+ formValue(key: string): string
+ }
+ interface Request {
+ /**
+ * PostFormValue returns the first value for the named component of the POST,
+ * PATCH, or PUT request body. URL query parameters are ignored.
+ * PostFormValue calls ParseMultipartForm and ParseForm if necessary and ignores
+ * any errors returned by these functions.
+ * If key is not present, PostFormValue returns the empty string.
+ */
+ postFormValue(key: string): string
+ }
+ interface Request {
+ /**
+ * FormFile returns the first file for the provided form key.
+ * FormFile calls ParseMultipartForm and ParseForm if necessary.
+ *
+ * Returns a [file, header] pair; the header may be undefined.
+ * NOTE(review): Go's third (error) return value is not represented in
+ * this generated tuple — presumably a failure is thrown in the JS VM;
+ * confirm against the binding layer.
+ */
+ formFile(key: string): [multipart.File, (multipart.FileHeader | undefined)]
+ }
+ /**
+ * A ResponseWriter interface is used by an HTTP handler to
+ * construct an HTTP response.
+ *
+ * A ResponseWriter may not be used after the Handler.ServeHTTP method
+ * has returned.
+ */
+ interface ResponseWriter {
+ /**
+ * Header returns the header map that will be sent by
+ * WriteHeader. The Header map also is the mechanism with which
+ * Handlers can set HTTP trailers.
+ *
+ * Changing the header map after a call to WriteHeader (or
+ * Write) has no effect unless the HTTP status code was of the
+ * 1xx class or the modified headers are trailers.
+ *
+ * There are two ways to set Trailers. The preferred way is to
+ * predeclare in the headers which trailers you will later
+ * send by setting the "Trailer" header to the names of the
+ * trailer keys which will come later. In this case, those
+ * keys of the Header map are treated as if they were
+ * trailers. See the example. The second way, for trailer
+ * keys not known to the Handler until after the first Write,
+ * is to prefix the Header map keys with the TrailerPrefix
+ * constant value. See TrailerPrefix.
+ *
+ * To suppress automatic response headers (such as "Date"), set
+ * their value to nil.
+ */
+ header(): Header
+ /**
+ * Write writes the data to the connection as part of an HTTP reply.
+ *
+ * If WriteHeader has not yet been called, Write calls
+ * WriteHeader(http.StatusOK) before writing the data. If the Header
+ * does not contain a Content-Type line, Write adds a Content-Type set
+ * to the result of passing the initial 512 bytes of written data to
+ * DetectContentType. Additionally, if the total size of all written
+ * data is under a few KB and there are no Flush calls, the
+ * Content-Length header is added automatically.
+ *
+ * Depending on the HTTP protocol version and the client, calling
+ * Write or WriteHeader may prevent future reads on the
+ * Request.Body. For HTTP/1.x requests, handlers should read any
+ * needed request body data before writing the response. Once the
+ * headers have been flushed (due to either an explicit Flusher.Flush
+ * call or writing enough data to trigger a flush), the request body
+ * may be unavailable. For HTTP/2 requests, the Go HTTP server permits
+ * handlers to continue to read the request body while concurrently
+ * writing the response. However, such behavior may not be supported
+ * by all HTTP/2 clients. Handlers should read before writing if
+ * possible to maximize compatibility.
+ *
+ * NOTE(review): Go's []byte parameter is generated here as a string —
+ * presumably the JS VM marshals the string's bytes; confirm.
+ */
+ write(_arg0: string): number
+ /**
+ * WriteHeader sends an HTTP response header with the provided
+ * status code.
+ *
+ * If WriteHeader is not called explicitly, the first call to Write
+ * will trigger an implicit WriteHeader(http.StatusOK).
+ * Thus explicit calls to WriteHeader are mainly used to
+ * send error codes or 1xx informational responses.
+ *
+ * The provided code must be a valid HTTP 1xx-5xx status code.
+ * Any number of 1xx headers may be written, followed by at most
+ * one 2xx-5xx header. 1xx headers are sent immediately, but 2xx-5xx
+ * headers may be buffered. Use the Flusher interface to send
+ * buffered data. The header map is cleared when 2xx-5xx headers are
+ * sent, but not with 1xx headers.
+ *
+ * The server will automatically send a 100 (Continue) header
+ * on the first read from the request body if the request has
+ * an "Expect: 100-continue" header.
+ */
+ writeHeader(statusCode: number): void
+ }
+ /**
+ * A Server defines parameters for running an HTTP server.
+ * The zero value for Server is a valid configuration.
+ *
+ * Fields marked optional (`?`) correspond to Go fields that may be nil.
+ */
+ interface Server {
+ /**
+ * Addr optionally specifies the TCP address for the server to listen on,
+ * in the form "host:port". If empty, ":http" (port 80) is used.
+ * The service names are defined in RFC 6335 and assigned by IANA.
+ * See net.Dial for details of the address format.
+ */
+ addr: string
+ handler: Handler // handler to invoke, http.DefaultServeMux if nil
+ /**
+ * TLSConfig optionally provides a TLS configuration for use
+ * by ServeTLS and ListenAndServeTLS. Note that this value is
+ * cloned by ServeTLS and ListenAndServeTLS, so it's not
+ * possible to modify the configuration with methods like
+ * tls.Config.SetSessionTicketKeys. To use
+ * SetSessionTicketKeys, use Server.Serve with a TLS Listener
+ * instead.
+ */
+ tlsConfig?: tls.Config
+ /**
+ * ReadTimeout is the maximum duration for reading the entire
+ * request, including the body. A zero or negative value means
+ * there will be no timeout.
+ *
+ * Because ReadTimeout does not let Handlers make per-request
+ * decisions on each request body's acceptable deadline or
+ * upload rate, most users will prefer to use
+ * ReadHeaderTimeout. It is valid to use them both.
+ */
+ readTimeout: time.Duration
+ /**
+ * ReadHeaderTimeout is the amount of time allowed to read
+ * request headers. The connection's read deadline is reset
+ * after reading the headers and the Handler can decide what
+ * is considered too slow for the body. If ReadHeaderTimeout
+ * is zero, the value of ReadTimeout is used. If both are
+ * zero, there is no timeout.
+ */
+ readHeaderTimeout: time.Duration
+ /**
+ * WriteTimeout is the maximum duration before timing out
+ * writes of the response. It is reset whenever a new
+ * request's header is read. Like ReadTimeout, it does not
+ * let Handlers make decisions on a per-request basis.
+ * A zero or negative value means there will be no timeout.
+ */
+ writeTimeout: time.Duration
+ /**
+ * IdleTimeout is the maximum amount of time to wait for the
+ * next request when keep-alives are enabled. If IdleTimeout
+ * is zero, the value of ReadTimeout is used. If both are
+ * zero, there is no timeout.
+ */
+ idleTimeout: time.Duration
+ /**
+ * MaxHeaderBytes controls the maximum number of bytes the
+ * server will read parsing the request header's keys and
+ * values, including the request line. It does not limit the
+ * size of the request body.
+ * If zero, DefaultMaxHeaderBytes is used.
+ */
+ maxHeaderBytes: number
+ /**
+ * TLSNextProto optionally specifies a function to take over
+ * ownership of the provided TLS connection when an ALPN
+ * protocol upgrade has occurred. The map key is the protocol
+ * name negotiated. The Handler argument should be used to
+ * handle HTTP requests and will initialize the Request's TLS
+ * and RemoteAddr if not already set. The connection is
+ * automatically closed when the function returns.
+ * If TLSNextProto is not nil, HTTP/2 support is not enabled
+ * automatically.
+ */
+ tlsNextProto: _TygojaDict
+ /**
+ * ConnState specifies an optional callback function that is
+ * called when a client connection changes state. See the
+ * ConnState type and associated constants for details.
+ */
+ connState: (_arg0: net.Conn, _arg1: ConnState) => void
+ /**
+ * ErrorLog specifies an optional logger for errors accepting
+ * connections, unexpected behavior from handlers, and
+ * underlying FileSystem errors.
+ * If nil, logging is done via the log package's standard logger.
+ */
+ errorLog?: log.Logger
+ /**
+ * BaseContext optionally specifies a function that returns
+ * the base context for incoming requests on this server.
+ * The provided Listener is the specific Listener that's
+ * about to start accepting requests.
+ * If BaseContext is nil, the default is context.Background().
+ * If non-nil, it must return a non-nil context.
+ */
+ baseContext: (_arg0: net.Listener) => context.Context
+ /**
+ * ConnContext optionally specifies a function that modifies
+ * the context used for a new connection c. The provided ctx
+ * is derived from the base context and has a ServerContextKey
+ * value.
+ */
+ connContext: (ctx: context.Context, c: net.Conn) => context.Context
+ }
+ interface Server {
+ /**
+ * Close immediately closes all active net.Listeners and any
+ * connections in state StateNew, StateActive, or StateIdle. For a
+ * graceful shutdown, use Shutdown.
+ *
+ * Close does not attempt to close (and does not even know about)
+ * any hijacked connections, such as WebSockets.
+ *
+ * Close returns any error returned from closing the Server's
+ * underlying Listener(s).
+ *
+ * NOTE(review): the Go error return is generated as void — presumably
+ * it surfaces as a thrown exception in the JS VM; confirm.
+ */
+ close(): void
+ }
+ interface Server {
+ /**
+ * Shutdown gracefully shuts down the server without interrupting any
+ * active connections. Shutdown works by first closing all open
+ * listeners, then closing all idle connections, and then waiting
+ * indefinitely for connections to return to idle and then shut down.
+ * If the provided context expires before the shutdown is complete,
+ * Shutdown returns the context's error, otherwise it returns any
+ * error returned from closing the Server's underlying Listener(s).
+ *
+ * When Shutdown is called, Serve, ListenAndServe, and
+ * ListenAndServeTLS immediately return ErrServerClosed. Make sure the
+ * program doesn't exit and waits instead for Shutdown to return.
+ *
+ * Shutdown does not attempt to close nor wait for hijacked
+ * connections such as WebSockets. The caller of Shutdown should
+ * separately notify such long-lived connections of shutdown and wait
+ * for them to close, if desired. See RegisterOnShutdown for a way to
+ * register shutdown notification functions.
+ *
+ * Once Shutdown has been called on a server, it may not be reused;
+ * future calls to methods such as Serve will return ErrServerClosed.
+ *
+ * NOTE(review): the Go error return is generated as void — presumably
+ * it surfaces as a thrown exception in the JS VM; confirm.
+ */
+ shutdown(ctx: context.Context): void
+ }
+ interface Server {
+ /**
+ * RegisterOnShutdown registers a function to call on Shutdown.
+ * This can be used to gracefully shutdown connections that have
+ * undergone ALPN protocol upgrade or that have been hijacked.
+ * This function should start protocol-specific graceful shutdown,
+ * but should not wait for shutdown to complete.
+ */
+ registerOnShutdown(f: () => void): void
+ }
+ interface Server {
+ /**
+ * ListenAndServe listens on the TCP network address srv.Addr and then
+ * calls Serve to handle requests on incoming connections.
+ * Accepted connections are configured to enable TCP keep-alives.
+ *
+ * If srv.Addr is blank, ":http" is used.
+ *
+ * ListenAndServe always returns a non-nil error. After Shutdown or Close,
+ * the returned error is ErrServerClosed.
+ *
+ * NOTE(review): the always-non-nil Go error is generated as void —
+ * presumably it surfaces as a thrown exception in the JS VM; confirm.
+ */
+ listenAndServe(): void
+ }
+ interface Server {
+ /**
+ * Serve accepts incoming connections on the Listener l, creating a
+ * new service goroutine for each. The service goroutines read requests and
+ * then call srv.Handler to reply to them.
+ *
+ * HTTP/2 support is only enabled if the Listener returns *tls.Conn
+ * connections and they were configured with "h2" in the TLS
+ * Config.NextProtos.
+ *
+ * Serve always returns a non-nil error and closes l.
+ * After Shutdown or Close, the returned error is ErrServerClosed.
+ *
+ * NOTE(review): the always-non-nil Go error is generated as void —
+ * presumably it surfaces as a thrown exception in the JS VM; confirm.
+ */
+ serve(l: net.Listener): void
+ }
+ interface Server {
+ /**
+ * ServeTLS accepts incoming connections on the Listener l, creating a
+ * new service goroutine for each. The service goroutines perform TLS
+ * setup and then read requests, calling srv.Handler to reply to them.
+ *
+ * Files containing a certificate and matching private key for the
+ * server must be provided if neither the Server's
+ * TLSConfig.Certificates nor TLSConfig.GetCertificate are populated.
+ * If the certificate is signed by a certificate authority, the
+ * certFile should be the concatenation of the server's certificate,
+ * any intermediates, and the CA's certificate.
+ *
+ * ServeTLS always returns a non-nil error. After Shutdown or Close, the
+ * returned error is ErrServerClosed.
+ *
+ * NOTE(review): added the keyFile parameter that the Go method
+ * (*Server).ServeTLS(l, certFile, keyFile string) requires but the
+ * generator had collapsed out of the signature.
+ */
+ serveTLS(l: net.Listener, certFile: string, keyFile: string): void
+ }
+ interface Server {
+ /**
+ * SetKeepAlivesEnabled controls whether HTTP keep-alives are enabled.
+ * By default, keep-alives are always enabled. Only very
+ * resource-constrained environments or servers in the process of
+ * shutting down should disable them.
+ */
+ setKeepAlivesEnabled(v: boolean): void
+ }
+ interface Server {
+ /**
+ * ListenAndServeTLS listens on the TCP network address srv.Addr and
+ * then calls ServeTLS to handle requests on incoming TLS connections.
+ * Accepted connections are configured to enable TCP keep-alives.
+ *
+ * Filenames containing a certificate and matching private key for the
+ * server must be provided if neither the Server's TLSConfig.Certificates
+ * nor TLSConfig.GetCertificate are populated. If the certificate is
+ * signed by a certificate authority, the certFile should be the
+ * concatenation of the server's certificate, any intermediates, and
+ * the CA's certificate.
+ *
+ * If srv.Addr is blank, ":https" is used.
+ *
+ * ListenAndServeTLS always returns a non-nil error. After Shutdown or
+ * Close, the returned error is ErrServerClosed.
+ *
+ * NOTE(review): added the keyFile parameter that the Go method
+ * (*Server).ListenAndServeTLS(certFile, keyFile string) requires but
+ * the generator had collapsed out of the signature.
+ */
+ listenAndServeTLS(certFile: string, keyFile: string): void
+ }
+}
+
+namespace store {
+ /**
+ * Store defines a concurrent safe in memory key-value data store.
+ *
+ * The concrete fields are unexported in Go, hence the empty interface;
+ * all functionality is exposed via the method declarations below.
+ */
+ interface Store {
+ }
+ interface Store {
+ /**
+ * Reset clears the store and replaces the store data with a
+ * shallow copy of the provided newData.
+ */
+ reset(newData: _TygojaDict): void
+ }
+ interface Store {
+ /**
+ * Length returns the current number of elements in the store.
+ */
+ length(): number
+ }
+ interface Store {
+ /**
+ * RemoveAll removes all the existing store entries.
+ */
+ removeAll(): void
+ }
+ interface Store {
+ /**
+ * Remove removes a single entry from the store.
+ *
+ * Remove does nothing if key doesn't exist in the store.
+ */
+ remove(key: string): void
+ }
+ interface Store {
+ /**
+ * Has checks if element with the specified key exist or not.
+ */
+ has(key: string): boolean
+ }
+ interface Store