added more geoPoint field access tests

Gani Georgiev 2025-04-02 23:25:42 +03:00
parent 4c5abd5bd9
commit d135b1e686
5 changed files with 25 additions and 17 deletions


@@ -170,7 +170,7 @@ func TestImportCollections(t *testing.T) {
 	expectedCollectionFields := map[string]int{
 		core.CollectionNameAuthOrigins: 6,
 		"nologin":                      10,
-		"demo1":                        18,
+		"demo1":                        19,
 		"demo2":                        5,
 		"demo3":                        5,
 		"demo4":                        16,


@@ -419,8 +419,8 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
 			return nil, fmt.Errorf("non-filterable field %q", prop)
 		}
 
-		// json field -> treat the rest of the props as json path
-		// @todo consider converting to "JSONExtractable" interface
+		// json or geoPoint field -> treat the rest of the props as json path
+		// @todo consider converting to "JSONExtractable" interface with optional extra validation for the remaining props?
 		if field != nil && (field.Type() == FieldTypeJSON || field.Type() == FieldTypeGeoPoint) {
 			var jsonPath strings.Builder
 			for j, p := range r.activeProps[i+1:] {
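For reference, the effect of this branch is that any props following a geoPoint (or json) field are joined into a JSON path instead of being resolved as relation fields. Below is a minimal, standalone sketch of that idea; it is not the actual runner code, and the helper name, the `point` field name, and the JSON_EXTRACT output shape are illustrative assumptions:

```go
package main

import (
	"fmt"
	"strings"
)

// buildGeoPointExpr is a hypothetical helper showing the idea above:
// the props that follow a json/geoPoint field ("lon", "lat", ...) are
// concatenated into a JSON path and extracted from the stored column value.
func buildGeoPointExpr(column string, props []string) string {
	var jsonPath strings.Builder
	for j, p := range props {
		if j > 0 {
			jsonPath.WriteString(".")
		}
		jsonPath.WriteString(p)
	}
	return fmt.Sprintf("JSON_EXTRACT([[%s]], '$.%s')", column, jsonPath.String())
}

func main() {
	// e.g. a filter like `point.lon > 20` on a hypothetical geoPoint field
	fmt.Println(buildGeoPointExpr("demo1.point", []string{"lon"}))
	// Output: JSON_EXTRACT([[demo1.point]], '$.lon')
}
```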

File diff suppressed because one or more lines are too long

Binary file not shown.


@@ -291,6 +291,15 @@ var filterFunctions = map[string]func(
 		},
 	},
 }
 
+var normalizedIdentifiers = map[string]string{
+	// if `null` field is missing, treat `null` identifier as NULL token
+	"null": "NULL",
+	// if `true` field is missing, treat `true` identifier as TRUE token
+	"true": "1",
+	// if `false` field is missing, treat `false` identifier as FALSE token
+	"false": "0",
+}
+
 func resolveToken(token fexpr.Token, fieldResolver FieldResolver) (*ResolverResult, error) {
 	switch token.Type {
 	case fexpr.TokenIdentifier:
@@ -313,19 +322,11 @@ func resolveToken(token fexpr.Token, fieldResolver FieldResolver) (*ResolverResult, error) {
 		// custom resolver
 		// ---
 		result, err := fieldResolver.Resolve(token.Literal)
-		// @todo replace with strings.EqualFold
 		if err != nil || result.Identifier == "" {
-			m := map[string]string{
-				// if `null` field is missing, treat `null` identifier as NULL token
-				"null": "NULL",
-				// if `true` field is missing, treat `true` identifier as TRUE token
-				"true": "1",
-				// if `false` field is missing, treat `false` identifier as FALSE token
-				"false": "0",
-			}
-			if v, ok := m[strings.ToLower(token.Literal)]; ok {
-				return &ResolverResult{Identifier: v}, nil
-			}
+			for k, v := range normalizedIdentifiers {
+				if strings.EqualFold(k, token.Literal) {
+					return &ResolverResult{Identifier: v}, nil
+				}
+			}
 			return nil, err
 		}
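The refactor keeps the old behavior (unresolvable `null`/`true`/`false` identifiers fall back to SQL tokens) but moves the map to a package-level variable and switches the lookup from `strings.ToLower` to `strings.EqualFold`. A small standalone sketch of just that fallback, with an illustrative `normalizeIdentifier` helper name (the real code inlines the loop inside the resolver):

```go
package main

import (
	"fmt"
	"strings"
)

// Shared fallback map, mirroring the one added in the diff above.
var normalizedIdentifiers = map[string]string{
	"null":  "NULL",
	"true":  "1",
	"false": "0",
}

// normalizeIdentifier returns the SQL token for `null`/`true`/`false`
// identifiers that could not be resolved as collection fields.
func normalizeIdentifier(literal string) (string, bool) {
	for k, v := range normalizedIdentifiers {
		if strings.EqualFold(k, literal) {
			return v, true
		}
	}
	return "", false
}

func main() {
	for _, lit := range []string{"null", "NULL", "True", "missingField"} {
		v, ok := normalizeIdentifier(lit)
		fmt.Printf("%-12s -> %q (matched: %v)\n", lit, v, ok)
	}
}
```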
@@ -346,13 +347,13 @@ func resolveToken(token fexpr.Token, fieldResolver FieldResolver) (*ResolverResult, error) {
 			Params:     dbx.Params{placeholder: cast.ToFloat64(token.Literal)},
 		}, nil
 	case fexpr.TokenFunction:
-		f, ok := filterFunctions[token.Literal]
+		fn, ok := filterFunctions[token.Literal]
 		if !ok {
 			return nil, fmt.Errorf("unknown function %q", token.Literal)
 		}
 
 		args, _ := token.Meta.([]fexpr.Token)
-		return f(func(argToken fexpr.Token) (*ResolverResult, error) {
+		return fn(func(argToken fexpr.Token) (*ResolverResult, error) {
 			return resolveToken(argToken, fieldResolver)
 		}, args...)
 	}
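The `f` to `fn` rename touches the function-token dispatch: each registered filter function receives a callback for resolving its argument tokens plus the raw arguments themselves. A simplified standalone sketch of that callback shape, using stand-in types instead of the real `fexpr.Token`/`ResolverResult` and an invented `upper` function purely for illustration:

```go
package main

import "fmt"

// Stand-in types; the real code uses fexpr.Token and *ResolverResult.
type token struct{ literal string }
type result struct{ identifier string }

type argResolver func(token) (*result, error)

// filterFunctions maps a function name to a handler that resolves its
// arguments through the provided callback (same shape as in the diff).
var filterFunctions = map[string]func(argResolver, ...token) (*result, error){
	"upper": func(resolve argResolver, args ...token) (*result, error) {
		if len(args) != 1 {
			return nil, fmt.Errorf("upper expects exactly 1 argument")
		}
		arg, err := resolve(args[0])
		if err != nil {
			return nil, err
		}
		return &result{identifier: "UPPER(" + arg.identifier + ")"}, nil
	},
}

func main() {
	fn, ok := filterFunctions["upper"]
	if !ok {
		panic("unknown function")
	}
	res, _ := fn(func(t token) (*result, error) {
		return &result{identifier: t.literal}, nil // trivial argument resolver
	}, token{literal: "title"})
	fmt.Println(res.identifier) // UPPER(title)
}
```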