[#114] simplified some code by returning early and added cap for slices

Valley 2022-07-15 00:26:08 +08:00 committed by GitHub
parent 03a7bafa66
commit a16b0c9004
11 changed files with 124 additions and 147 deletions
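
The commit applies two recurring patterns across the files below: giving slices a capacity hint instead of starting from a zero-cap literal, and returning (or continuing) early instead of nesting the main logic in an else branch. A minimal standalone sketch of both, in Go — the helpers and sample data are illustrative, not from the PocketBase codebase:

    package main

    import "fmt"

    // withCap gives append a capacity hint when the result size is bounded
    // by the input, so the backing array is allocated once instead of being
    // grown repeatedly. This replaces the zero-cap `out := []string{}` form.
    func withCap(items []string) []string {
        out := make([]string, 0, len(items))
        for _, it := range items {
            if it != "" {
                out = append(out, it)
            }
        }
        return out
    }

    // earlyReturn handles the exceptional case first and exits, keeping the
    // common path at one indentation level instead of inside an else.
    func earlyReturn(items []string) (string, error) {
        if len(items) == 0 {
            return "", fmt.Errorf("empty input") // guard clause
        }
        return items[0], nil
    }

    func main() {
        fmt.Println(withCap([]string{"a", "", "b"})) // [a b]
        fmt.Println(earlyReturn([]string{"x"}))      // x <nil>
    }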

View File

@@ -64,7 +64,7 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
 	}
 
 	// extract the id of the relations to expand
-	relIds := []string{}
+	relIds := make([]string, 0, len(records))
 	for _, record := range records {
 		relIds = append(relIds, record.GetStringSliceDataValue(relField.Name)...)
 	}
@@ -92,7 +92,7 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
 
 	for _, model := range records {
 		relIds := model.GetStringSliceDataValue(relField.Name)
-		validRels := []*models.Record{}
+		validRels := make([]*models.Record, 0, len(relIds))
 		for _, id := range relIds {
 			if rel, ok := indexedRels[id]; ok {
 				validRels = append(validRels, rel)
@@ -120,20 +120,18 @@ func (dao *Dao) expandRecords(records []*models.Record, expandPath string, fetch
 // normalizeExpands normalizes expand strings and merges self containing paths
 // (eg. ["a.b.c", "a.b", " test ", " ", "test"] -> ["a.b.c", "test"]).
 func normalizeExpands(paths []string) []string {
-	result := []string{}
-
 	// normalize paths
-	normalized := []string{}
+	normalized := make([]string, 0, len(paths))
 	for _, p := range paths {
-		p := strings.ReplaceAll(p, " ", "") // replace spaces
+		p = strings.ReplaceAll(p, " ", "") // replace spaces
 		p = strings.Trim(p, ".") // trim incomplete paths
-		if p == "" {
-			continue
+		if p != "" {
+			normalized = append(normalized, p)
 		}
-		normalized = append(normalized, p)
 	}
 
 	// merge containing paths
+	result := make([]string, 0, len(normalized))
 	for i, p1 := range normalized {
 		var skip bool
 		for j, p2 := range normalized {

View File

@@ -43,10 +43,10 @@ func (dao *Dao) LoadProfiles(users []*models.User) error {
 	}
 
 	// extract user ids
-	ids := []string{}
+	ids := make([]string, len(users))
 	usersMap := map[string]*models.User{}
-	for _, user := range users {
-		ids = append(ids, user.Id)
+	for i, user := range users {
+		ids[i] = user.Id
 		usersMap[user.Id] = user
 	}
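
Note the two different `make` forms in this commit: here the exact result length is known up front, so the slice is sized with `make([]string, len(users))` and filled by index, while the other files use `make([]T, 0, cap)` and fill with `append`. A small illustrative sketch (not from the repo) of the distinction — mixing the two forms is a classic bug:

    package main

    import "fmt"

    func main() {
        ids := []string{"u1", "u2"}

        // Length form: len zero values already exist; fill by index.
        byIndex := make([]string, len(ids))
        for i, id := range ids {
            byIndex[i] = id
        }

        // Capacity form: empty slice with room reserved; fill with append.
        byAppend := make([]string, 0, len(ids))
        for _, id := range ids {
            byAppend = append(byAppend, id)
        }

        // Bug: append after the length form keeps the leading zero values.
        mixed := make([]string, len(ids))
        for _, id := range ids {
            mixed = append(mixed, id)
        }

        fmt.Println(byIndex)  // [u1 u2]
        fmt.Println(byAppend) // [u1 u2]
        fmt.Println(mixed)    // [  u1 u2] (two empty strings first)
    }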

View File

@@ -150,76 +150,77 @@ func (form *RecordUpsert) LoadData(r *http.Request) error {
 		value := extendedData[key]
 		value = field.PrepareValue(value)
 
-		if field.Type == schema.FieldTypeFile {
-			options, _ := field.Options.(*schema.FileOptions)
-			oldNames := list.ToUniqueStringSlice(form.Data[key])
-
-			// delete previously uploaded file(s)
-			if options.MaxSelect == 1 {
-				// search for unset zero indexed key as a fallback
-				indexedKeyValue, hasIndexedKey := extendedData[key+".0"]
-
-				if cast.ToString(value) == "" || (hasIndexedKey && cast.ToString(indexedKeyValue) == "") {
-					if len(oldNames) > 0 {
-						form.filesToDelete = append(form.filesToDelete, oldNames...)
-					}
-					form.Data[key] = nil
-				}
-			} else if options.MaxSelect > 1 {
-				// search for individual file index to delete (eg. "file.0")
-				keyExp, _ := regexp.Compile(`^` + regexp.QuoteMeta(key) + `\.\d+$`)
-				indexesToDelete := []int{}
-				for indexedKey := range extendedData {
-					if keyExp.MatchString(indexedKey) && cast.ToString(extendedData[indexedKey]) == "" {
-						index, indexErr := strconv.Atoi(indexedKey[len(key)+1:])
-						if indexErr != nil || index >= len(oldNames) {
-							continue
-						}
-						indexesToDelete = append(indexesToDelete, index)
-					}
-				}
-
-				// slice to fill only with the non-deleted indexes
-				nonDeleted := []string{}
-				for i, name := range oldNames {
-					// not marked for deletion
-					if !list.ExistInSlice(i, indexesToDelete) {
-						nonDeleted = append(nonDeleted, name)
-						continue
-					}
-
-					// store the id to actually delete the file later
-					form.filesToDelete = append(form.filesToDelete, name)
-				}
-				form.Data[key] = nonDeleted
-			}
-
-			// check if there are any new uploaded form files
-			files, err := rest.FindUploadedFiles(r, key)
-			if err != nil {
-				continue // skip invalid or missing file(s)
-			}
-
-			// refresh oldNames list
-			oldNames = list.ToUniqueStringSlice(form.Data[key])
-
-			if options.MaxSelect == 1 {
-				// delete previous file(s) before replacing
-				if len(oldNames) > 0 {
-					form.filesToDelete = list.ToUniqueStringSlice(append(form.filesToDelete, oldNames...))
-				}
-				form.filesToUpload = append(form.filesToUpload, files[0])
-				form.Data[key] = files[0].Name()
-			} else if options.MaxSelect > 1 {
-				// append the id of each uploaded file instance
-				form.filesToUpload = append(form.filesToUpload, files...)
-				for _, file := range files {
-					oldNames = append(oldNames, file.Name())
-				}
-				form.Data[key] = oldNames
-			}
-		} else {
-			form.Data[key] = value
-		}
+		if field.Type != schema.FieldTypeFile {
+			form.Data[key] = value
+			continue
+		}
+
+		options, _ := field.Options.(*schema.FileOptions)
+		oldNames := list.ToUniqueStringSlice(form.Data[key])
+
+		// delete previously uploaded file(s)
+		if options.MaxSelect == 1 {
+			// search for unset zero indexed key as a fallback
+			indexedKeyValue, hasIndexedKey := extendedData[key+".0"]
+
+			if cast.ToString(value) == "" || (hasIndexedKey && cast.ToString(indexedKeyValue) == "") {
+				if len(oldNames) > 0 {
+					form.filesToDelete = append(form.filesToDelete, oldNames...)
+				}
+				form.Data[key] = nil
+			}
+		} else if options.MaxSelect > 1 {
+			// search for individual file index to delete (eg. "file.0")
+			keyExp, _ := regexp.Compile(`^` + regexp.QuoteMeta(key) + `\.\d+$`)
+			indexesToDelete := make([]int, 0, len(extendedData))
+			for indexedKey := range extendedData {
+				if keyExp.MatchString(indexedKey) && cast.ToString(extendedData[indexedKey]) == "" {
+					index, indexErr := strconv.Atoi(indexedKey[len(key)+1:])
+					if indexErr != nil || index >= len(oldNames) {
+						continue
+					}
+					indexesToDelete = append(indexesToDelete, index)
+				}
+			}
+
+			// slice to fill only with the non-deleted indexes
+			nonDeleted := make([]string, 0, len(oldNames))
+			for i, name := range oldNames {
+				// not marked for deletion
+				if !list.ExistInSlice(i, indexesToDelete) {
+					nonDeleted = append(nonDeleted, name)
+					continue
+				}
+
+				// store the id to actually delete the file later
+				form.filesToDelete = append(form.filesToDelete, name)
+			}
+			form.Data[key] = nonDeleted
+		}
+
+		// check if there are any new uploaded form files
+		files, err := rest.FindUploadedFiles(r, key)
+		if err != nil {
+			continue // skip invalid or missing file(s)
+		}
+
+		// refresh oldNames list
+		oldNames = list.ToUniqueStringSlice(form.Data[key])
+
+		if options.MaxSelect == 1 {
+			// delete previous file(s) before replacing
+			if len(oldNames) > 0 {
+				form.filesToDelete = list.ToUniqueStringSlice(append(form.filesToDelete, oldNames...))
+			}
+			form.filesToUpload = append(form.filesToUpload, files[0])
+			form.Data[key] = files[0].Name()
+		} else if options.MaxSelect > 1 {
+			// append the id of each uploaded file instance
+			form.filesToUpload = append(form.filesToUpload, files...)
+			for _, file := range files {
+				oldNames = append(oldNames, file.Name())
+			}
+			form.Data[key] = oldNames
+		}
 	}
 }

View File

@@ -98,35 +98,35 @@ func (form *UserOauth2Login) Submit() (*models.User, *auth.AuthUser, error) {
 				return nil, authData, err
 			}
 		}
-	} else {
-		if !config.AllowRegistrations {
-			// registration of new users is not allowed via the Oauth2 provider
-			return nil, authData, errors.New("Cannot find user with the authorized email.")
-		}
 
-		// create new user
-		user = &models.User{Verified: true}
-		upsertForm := NewUserUpsert(form.app, user)
-		upsertForm.Email = authData.Email
-		upsertForm.Password = security.RandomString(30)
-		upsertForm.PasswordConfirm = upsertForm.Password
+		return user, authData, nil
+	}
 
-		event := &core.UserOauth2RegisterEvent{
-			User:     user,
-			AuthData: authData,
-		}
+	if !config.AllowRegistrations {
+		// registration of new users is not allowed via the Oauth2 provider
+		return nil, authData, errors.New("Cannot find user with the authorized email.")
+	}
 
-		if err := form.app.OnUserBeforeOauth2Register().Trigger(event); err != nil {
-			return nil, authData, err
-		}
+	// create new user
+	user = &models.User{Verified: true}
+	upsertForm := NewUserUpsert(form.app, user)
+	upsertForm.Email = authData.Email
+	upsertForm.Password = security.RandomString(30)
+	upsertForm.PasswordConfirm = upsertForm.Password
 
-		if err := upsertForm.Submit(); err != nil {
-			return nil, authData, err
-		}
+	event := &core.UserOauth2RegisterEvent{
+		User:     user,
+		AuthData: authData,
+	}
 
-		if err := form.app.OnUserAfterOauth2Register().Trigger(event); err != nil {
-			return nil, authData, err
-		}
+	if err := form.app.OnUserBeforeOauth2Register().Trigger(event); err != nil {
+		return nil, authData, err
+	}
+
+	if err := upsertForm.Submit(); err != nil {
+		return nil, authData, err
+	}
+
+	if err := form.app.OnUserAfterOauth2Register().Trigger(event); err != nil {
+		return nil, authData, err
 	}
 
 	return user, authData, nil
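
The shape of this change in isolation, with hypothetical names (a sketch, not the form's actual API): the found-user branch returns immediately, which lets the registration path drop out of the else and lose a level of indentation.

    package main

    import (
        "errors"
        "fmt"
    )

    // submit is a hypothetical stand-in for the OAuth2 submit flow: the
    // existing user is returned early, and everything after the guard
    // clauses is the registration path, written at top level with no else.
    func submit(existing string, allowRegistrations bool) (string, error) {
        if existing != "" {
            return existing, nil // early return replaces the else wrapper
        }
        if !allowRegistrations {
            return "", errors.New("registrations are not allowed")
        }
        return "registered-user", nil
    }

    func main() {
        fmt.Println(submit("alice", false)) // alice <nil>
        fmt.Println(submit("", true))       // registered-user <nil>
        fmt.Println(submit("", false))      // "" registrations are not allowed
    }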

View File

@@ -318,12 +318,10 @@ func (validator *RecordDataValidator) checkFileValue(field *schema.SchemaField,
 	}
 
 	// extract the uploaded files
-	files := []*rest.UploadedFile{}
-	if len(validator.uploadedFiles) > 0 {
-		for _, file := range validator.uploadedFiles {
-			if list.ExistInSlice(file.Name(), names) {
-				files = append(files, file)
-			}
-		}
-	}
+	files := make([]*rest.UploadedFile, 0, len(validator.uploadedFiles))
+	for _, file := range validator.uploadedFiles {
+		if list.ExistInSlice(file.Name(), names) {
+			files = append(files, file)
+		}
+	}

View File

@@ -68,10 +68,9 @@ func NewRecordFromNullStringMap(collection *Collection, data dbx.NullStringMap)
 
 // NewRecordsFromNullStringMaps initializes a new Record model for
 // each row in the provided NullStringMap slice.
 func NewRecordsFromNullStringMaps(collection *Collection, rows []dbx.NullStringMap) []*Record {
-	result := []*Record{}
-
-	for _, row := range rows {
-		result = append(result, NewRecordFromNullStringMap(collection, row))
+	result := make([]*Record, len(rows))
+	for i, row := range rows {
+		result[i] = NewRecordFromNullStringMap(collection, row)
 	}
 	return result

View File

@@ -9,10 +9,10 @@ import (
 
 var _ Model = (*User)(nil)
 
 const (
-	// The name of the system user profiles collection.
+	// ProfileCollectionName is the name of the system user profiles collection.
 	ProfileCollectionName = "profiles"
 
-	// The name of the user field from the system user profiles collection.
+	// ProfileCollectionUserFieldName is the name of the user field from the system user profiles collection.
 	ProfileCollectionUserFieldName = "userId"
 )

View File

@@ -168,7 +168,7 @@ func (r *RecordFieldResolver) Resolve(fieldName string) (resultName string, plac
 		return "", nil, fmt.Errorf("Failed to find field %q collection.", prop)
 	}
 
 	newCollectionName := relCollection.Name
-	newTableAlias := (currentTableAlias + "_" + field.Name)
+	newTableAlias := currentTableAlias + "_" + field.Name
 
 	r.addJoin(
 		newCollectionName,

View File

@@ -48,7 +48,7 @@ func (scenario *ApiScenario) Test(t *testing.T) {
 	recorder := httptest.NewRecorder()
 	req := httptest.NewRequest(scenario.Method, scenario.Url, scenario.Body)
 
-	// add middeware to timeout long running requests (eg. keep-alive routes)
+	// add middleware to timeout long-running requests (eg. keep-alive routes)
 	e.Pre(func(next echo.HandlerFunc) echo.HandlerFunc {
 		return func(c echo.Context) error {
 			ctx, cancelFunc := context.WithTimeout(c.Request().Context(), 100*time.Millisecond)

View File

@@ -89,15 +89,11 @@ func (f FilterData) resolveTokenizedExpr(expr fexpr.Expr, fieldResolver FieldRes
 
 	// merge both operands parameters (if any)
 	params := dbx.Params{}
-	if len(lParams) > 0 {
-		for k, v := range lParams {
-			params[k] = v
-		}
+	for k, v := range lParams {
+		params[k] = v
 	}
-	if len(rParams) > 0 {
-		for k, v := range rParams {
-			params[k] = v
-		}
+	for k, v := range rParams {
+		params[k] = v
 	}
 
 	switch expr.Op {
@@ -139,32 +135,26 @@ func (f FilterData) resolveTokenizedExpr(expr fexpr.Expr, fieldResolver FieldRes
 }
 
 func (f FilterData) resolveToken(token fexpr.Token, fieldResolver FieldResolver) (name string, params dbx.Params, err error) {
-	if token.Type == fexpr.TokenIdentifier {
+	switch token.Type {
+	case fexpr.TokenIdentifier:
 		name, params, err := fieldResolver.Resolve(token.Literal)
 		if name == "" || err != nil {
-			// if `null` field is missing, treat `null` identifier as NULL token
-			if strings.ToLower(token.Literal) == "null" {
-				return "NULL", nil, nil
-			}
-
-			// if `true` field is missing, treat `true` identifier as TRUE token
-			if strings.ToLower(token.Literal) == "true" {
-				return "1", nil, nil
-			}
-
-			// if `false` field is missing, treat `false` identifier as FALSE token
-			if strings.ToLower(token.Literal) == "false" {
-				return "0", nil, nil
-			}
-
+			m := map[string]string{
+				// if `null` field is missing, treat `null` identifier as NULL token
+				"null": "NULL",
+				// if `true` field is missing, treat `true` identifier as TRUE token
+				"true": "1",
+				// if `false` field is missing, treat `false` identifier as FALSE token
+				"false": "0",
+			}
+			if v, ok := m[strings.ToLower(token.Literal)]; ok {
+				return v, nil, nil
+			}
 			return "", nil, err
 		}
 		return name, params, err
-	}
-
-	if token.Type == fexpr.TokenNumber || token.Type == fexpr.TokenText {
+	case fexpr.TokenNumber, fexpr.TokenText:
 		placeholder := "t" + security.RandomString(7)
 		name := fmt.Sprintf("{:%s}", placeholder)
 		params := dbx.Params{placeholder: token.Literal}
@@ -177,10 +167,6 @@ func (f FilterData) resolveToken(token fexpr.Token, fieldResolver FieldResolver)
 func (f FilterData) normalizeLikeParams(params dbx.Params) dbx.Params {
 	result := dbx.Params{}
 
-	if len(params) == 0 {
-		return result
-	}
-
 	for k, v := range params {
 		vStr := cast.ToString(v)
 		if !strings.Contains(vStr, "%") {
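
The resolveToken change above swaps three sequential if blocks for a lookup table. A minimal sketch of the same idea — the names are illustrative, not the actual fexpr API:

    package main

    import (
        "fmt"
        "strings"
    )

    // fallbackLiterals maps unresolved identifiers to SQL literals,
    // replacing a chain of if statements with a single map lookup.
    var fallbackLiterals = map[string]string{
        "null":  "NULL",
        "true":  "1",
        "false": "0",
    }

    func resolveFallback(literal string) (string, bool) {
        v, ok := fallbackLiterals[strings.ToLower(literal)]
        return v, ok
    }

    func main() {
        fmt.Println(resolveFallback("TRUE"))  // 1 true
        fmt.Println(resolveFallback("title")) // "" false -> caller falls through to the resolver error
    }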

View File

@@ -66,10 +66,5 @@ func Decrypt(cipherText string, key string) ([]byte, error) {
 	}
 
 	nonce, cipherByteClean := cipherByte[:nonceSize], cipherByte[nonceSize:]
-	plainData, err := gcm.Open(nil, nonce, cipherByteClean, nil)
-	if err != nil {
-		return nil, err
-	}
-
-	return plainData, nil
+	return gcm.Open(nil, nonce, cipherByteClean, nil)
 }
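
Returning gcm.Open(...) directly is equivalent to the removed error check because cipher.AEAD's Open already returns ([]byte, error), which matches Decrypt's own signature.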