diff --git a/tools/list/list.go b/tools/list/list.go
index 39994a07..aaace6d4 100644
--- a/tools/list/list.go
+++ b/tools/list/list.go
@@ -38,21 +38,23 @@ func ExistInSliceWithRegex(str string, list []string) bool {
 			if str == field {
 				return true
 			}
-		} else {
-			// check for regex match
-			pattern, ok := cachedPatterns[field]
-			if !ok {
-				var patternErr error
-				pattern, patternErr = regexp.Compile(field)
-				if patternErr != nil {
-					continue
-				}
-				// "cache" the pattern to avoid compiling it every time
-				cachedPatterns[field] = pattern
-			}
-			if pattern != nil && pattern.MatchString(str) {
-				return true
+			continue
+		}
+
+		// check for regex match
+		pattern, ok := cachedPatterns[field]
+		if !ok {
+			var err error
+			pattern, err = regexp.Compile(field)
+			if err != nil {
+				continue
 			}
+			// "cache" the pattern to avoid compiling it every time
+			cachedPatterns[field] = pattern
+		}
+
+		if pattern != nil && pattern.MatchString(str) {
+			return true
 		}
 	}
 
@@ -72,46 +74,45 @@ func ToInterfaceSlice[T any](list []T) []any {
 
 // NonzeroUniques returns only the nonzero unique values from a slice.
 func NonzeroUniques[T comparable](list []T) []T {
-	result := []T{}
-	existMap := map[T]bool{}
+	result := make([]T, 0, len(list))
+	existMap := make(map[T]struct{}, len(list))
 
 	var zeroVal T
 
 	for _, val := range list {
-		if !existMap[val] && val != zeroVal {
-			existMap[val] = true
-			result = append(result, val)
+		if _, ok := existMap[val]; ok || val == zeroVal {
+			continue
 		}
+		existMap[val] = struct{}{}
+		result = append(result, val)
 	}
 
 	return result
 }
 
 // ToUniqueStringSlice casts `value` to a slice of non-zero unique strings.
-func ToUniqueStringSlice(value any) []string {
-	strings := []string{}
-
+func ToUniqueStringSlice(value any) (result []string) {
 	switch val := value.(type) {
 	case nil:
 		// nothing to cast
 	case []string:
-		strings = val
+		result = val
 	case string:
 		if val == "" {
 			break
 		}
 
 		// check if it is a json encoded array of strings
-		if err := json.Unmarshal([]byte(val), &strings); err != nil {
+		if err := json.Unmarshal([]byte(val), &result); err != nil {
 			// not a json array, just add the string as single array element
-			strings = append(strings, val)
+			result = append(result, val)
 		}
 	case json.Marshaler: // eg. JsonArray
 		raw, _ := val.MarshalJSON()
-		json.Unmarshal(raw, &strings)
+		_ = json.Unmarshal(raw, &result)
 	default:
-		strings = cast.ToStringSlice(value)
+		result = cast.ToStringSlice(value)
 	}
 
-	return NonzeroUniques(strings)
+	return NonzeroUniques(result)
 }
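For reference, a minimal usage sketch of the refactored list helpers (assuming the standard github.com/pocketbase/pocketbase/tools/list import path; the sample values are illustrative, and the ^...$ list item is assumed to hit the cached-regex branch shown above):

package main

import (
	"fmt"

	"github.com/pocketbase/pocketbase/tools/list"
)

func main() {
	// direct match, or regex match for items that compile as patterns
	fmt.Println(list.ExistInSliceWithRegex("test", []string{"demo", `^t\w+$`})) // true

	// zero values and duplicates are dropped, first-seen order is kept
	fmt.Println(list.NonzeroUniques([]string{"a", "", "a", "b"})) // [a b]

	// a JSON encoded array string is decoded before deduplication
	fmt.Println(list.ToUniqueStringSlice(`["a","a","b"]`)) // [a b]
}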
diff --git a/tools/search/filter.go b/tools/search/filter.go
index f2e76d1f..0d221c94 100644
--- a/tools/search/filter.go
+++ b/tools/search/filter.go
@@ -27,21 +27,16 @@ var parsedFilterData = store.New(make(map[string][]fexpr.ExprGroup, 50))
 
 // BuildExpr parses the current filter data and returns a new db WHERE expression.
 func (f FilterData) BuildExpr(fieldResolver FieldResolver) (dbx.Expression, error) {
 	raw := string(f)
-	var data []fexpr.ExprGroup
 	if parsedFilterData.Has(raw) {
-		data = parsedFilterData.Get(raw)
-	} else {
-		var err error
-		data, err = fexpr.Parse(raw)
-		if err != nil {
-			return nil, err
-		}
-		// store in cache
-		// (the limit size is arbitrary and it is there to prevent the cache growing too big)
-		parsedFilterData.SetIfLessThanLimit(raw, data, 500)
+		return f.build(parsedFilterData.Get(raw), fieldResolver)
 	}
-
+	data, err := fexpr.Parse(raw)
+	if err != nil {
+		return nil, err
+	}
+	// store in cache
+	// (the limit size is arbitrary and it is there to prevent the cache growing too big)
+	parsedFilterData.SetIfLessThanLimit(raw, data, 500)
 	return f.build(data, fieldResolver)
 }
diff --git a/tools/search/provider.go b/tools/search/provider.go
index 97f8ebd2..5318095c 100644
--- a/tools/search/provider.go
+++ b/tools/search/provider.go
@@ -121,8 +121,7 @@ func (s *Provider) Parse(urlQuery string) error {
 		return err
 	}
 
-	rawPage := params.Get(PageQueryParam)
-	if rawPage != "" {
+	if rawPage := params.Get(PageQueryParam); rawPage != "" {
 		page, err := strconv.Atoi(rawPage)
 		if err != nil {
 			return err
@@ -130,8 +129,7 @@
 		s.Page(page)
 	}
 
-	rawPerPage := params.Get(PerPageQueryParam)
-	if rawPerPage != "" {
+	if rawPerPage := params.Get(PerPageQueryParam); rawPerPage != "" {
 		perPage, err := strconv.Atoi(rawPerPage)
 		if err != nil {
 			return err
@@ -139,15 +137,13 @@
 		s.PerPage(perPage)
 	}
 
-	rawSort := params.Get(SortQueryParam)
-	if rawSort != "" {
+	if rawSort := params.Get(SortQueryParam); rawSort != "" {
 		for _, sortField := range ParseSortFromString(rawSort) {
 			s.AddSort(sortField)
 		}
 	}
 
-	rawFilter := params.Get(FilterQueryParam)
-	if rawFilter != "" {
+	if rawFilter := params.Get(FilterQueryParam); rawFilter != "" {
 		s.AddFilter(FilterData(rawFilter))
 	}
 
@@ -165,35 +161,30 @@ func (s *Provider) Exec(items any) (*Result, error) {
 	modelsQuery := *s.query
 
 	// apply filters
-	if len(s.filter) > 0 {
-		for _, f := range s.filter {
-			expr, err := f.BuildExpr(s.fieldResolver)
-			if err != nil {
-				return nil, err
-			}
-			if expr != nil {
-				modelsQuery.AndWhere(expr)
-			}
+	for _, f := range s.filter {
+		expr, err := f.BuildExpr(s.fieldResolver)
+		if err != nil {
+			return nil, err
+		}
+		if expr != nil {
+			modelsQuery.AndWhere(expr)
 		}
 	}
 
 	// apply sorting
-	if len(s.sort) > 0 {
-		for _, sortField := range s.sort {
-			expr, err := sortField.BuildExpr(s.fieldResolver)
-			if err != nil {
-				return nil, err
-			}
-			if expr != "" {
-				modelsQuery.AndOrderBy(expr)
-			}
+	for _, sortField := range s.sort {
+		expr, err := sortField.BuildExpr(s.fieldResolver)
+		if err != nil {
+			return nil, err
+		}
+		if expr != "" {
+			modelsQuery.AndOrderBy(expr)
 		}
 	}
 
 	// apply field resolver query modifications (if any)
-	updateQueryErr := s.fieldResolver.UpdateQuery(&modelsQuery)
-	if updateQueryErr != nil {
-		return nil, updateQueryErr
+	if err := s.fieldResolver.UpdateQuery(&modelsQuery); err != nil {
+		return nil, err
 	}
 
 	// count
diff --git a/tools/search/sort.go b/tools/search/sort.go
index cc48b8f9..f3e46524 100644
--- a/tools/search/sort.go
+++ b/tools/search/sort.go
@@ -34,26 +34,18 @@ func (s *SortField) BuildExpr(fieldResolver FieldResolver) (string, error) {
 //
 // Example:
 //	fields := search.ParseSortFromString("-name,+created")
-func ParseSortFromString(str string) []SortField {
-	result := []SortField{}
-
+func ParseSortFromString(str string) (fields []SortField) {
 	data := strings.Split(str, ",")
 
 	for _, field := range data {
 		// trim whitespaces
 		field = strings.TrimSpace(field)
-
-		var dir string
 		if strings.HasPrefix(field, "-") {
-			dir = SortDesc
-			field = strings.TrimPrefix(field, "-")
+			fields = append(fields, SortField{strings.TrimPrefix(field, "-"), SortDesc})
 		} else {
-			dir = SortAsc
-			field = strings.TrimPrefix(field, "+")
+			fields = append(fields, SortField{strings.TrimPrefix(field, "+"), SortAsc})
 		}
-
-		result = append(result, SortField{field, dir})
 	}
 
-	return result
+	return
 }
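A short sketch of the rewritten sort parsing, mirroring the Example comment above (the github.com/pocketbase/pocketbase/tools/search import path is assumed; %+v is used so no SortField field names have to be assumed):

package main

import (
	"fmt"

	"github.com/pocketbase/pocketbase/tools/search"
)

func main() {
	// a "-" prefix selects SortDesc, anything else (including a "+" prefix) SortAsc
	for _, field := range search.ParseSortFromString("-name,+created") {
		fmt.Printf("%+v\n", field)
	}
}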
diff --git a/tools/security/jwt.go b/tools/security/jwt.go
index f8e5a72e..c7bce553 100644
--- a/tools/security/jwt.go
+++ b/tools/security/jwt.go
@@ -24,9 +24,7 @@ func ParseUnverifiedJWT(token string) (jwt.MapClaims, error) {
 
 // ParseJWT verifies and parses JWT token and returns its claims.
 func ParseJWT(token string, verificationKey string) (jwt.MapClaims, error) {
-	parser := &jwt.Parser{
-		ValidMethods: []string{"HS256"},
-	}
+	parser := jwt.NewParser(jwt.WithValidMethods([]string{"HS256"}))
 
 	parsedToken, err := parser.Parse(token, func(t *jwt.Token) (any, error) {
 		return []byte(verificationKey), nil
@@ -50,10 +48,8 @@ func NewToken(payload jwt.MapClaims, signingKey string, secondsDuration int64) (
 		"exp": time.Now().Add(seconds).Unix(),
 	}
 
-	if len(payload) > 0 {
-		for k, v := range payload {
-			claims[k] = v
-		}
+	for k, v := range payload {
+		claims[k] = v
 	}
 
 	return jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString([]byte(signingKey))
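Finally, a round-trip sketch for the token helpers (the golang-jwt/jwt/v4 import is inferred from the jwt.NewParser call above; the claim name and signing key are illustrative):

package main

import (
	"fmt"

	"github.com/golang-jwt/jwt/v4"
	"github.com/pocketbase/pocketbase/tools/security"
)

func main() {
	// issue an HS256 token that expires in 60 seconds and carries one custom claim
	token, err := security.NewToken(jwt.MapClaims{"id": "123"}, "secret", 60)
	if err != nil {
		panic(err)
	}

	// verification rejects wrong keys and non-HS256 signing methods
	claims, err := security.ParseJWT(token, "secret")
	if err != nil {
		panic(err)
	}

	fmt.Println(claims["id"]) // 123
}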