optimized single relation lookups

Gani Georgiev 2023-03-07 23:28:35 +02:00
parent 4768e07c0b
commit d046811df7
6 changed files with 83 additions and 55 deletions
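For context, the optimization boils down to this: a single relation field stores the referenced record id as a plain column value, so it can be matched with a direct equality, while a multiple relation field stores a JSON array and still needs the json_each() join. Below is a minimal, illustrative Go sketch of the two query shapes; the string fragments are taken from the test expectations further down in this diff, and the placeholder values and elided clauses are assumptions, not the exact generated SQL.

package main

import "fmt"

func main() {
    // Old lookup shape, used for every relation field regardless of arity:
    // the column is fed through json_each() so both plain ids and JSON arrays match.
    before := "SELECT DISTINCT `demo1`.* FROM `demo1` " +
        "INNER JOIN json_each(CASE WHEN json_valid([[demo1.rel_one]]) THEN [[demo1.rel_one]] " +
        "ELSE json_array([[demo1.rel_one]]) END) ... WHERE ..."

    // New lookup shape for single relation fields: a plain equality on the column.
    after := "SELECT DISTINCT `demo1`.* FROM `demo1` WHERE (`demo1`.`rel_one` = ...)"

    fmt.Println("before:", before)
    fmt.Println("after: ", after)
}

Multiple relation fields keep the json_each() join, since their values remain JSON arrays of ids.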

View File

@@ -2,6 +2,8 @@
 - Added _experimental_ Apple OAuth2 integration.
 
+- Optimized single relation lookups (@todo doc).
+
 - Normalized record values on `maxSelect` field option change (`select`, `file`, `relation`).
   When changing **from single to multiple** all already inserted single values are converted to an array.
   When changing **from multiple to single** only the last item of the already inserted array items is kept.
 
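The `maxSelect` normalization rule reads well as a tiny sketch. The helpers below are hypothetical and only mirror the two sentences above; the actual conversion happens internally when the field option changes.

package main

import "fmt"

// toMultiple illustrates the single -> multiple direction:
// an already inserted single value is wrapped into an array.
func toMultiple(value any) []any {
    if arr, ok := value.([]any); ok {
        return arr // already an array
    }
    if value == nil || value == "" {
        return []any{}
    }
    return []any{value}
}

// toSingle illustrates the multiple -> single direction:
// only the last item of an already inserted array value is kept.
func toSingle(value any) any {
    arr, ok := value.([]any)
    if !ok {
        return value // already a single value
    }
    if len(arr) == 0 {
        return nil
    }
    return arr[len(arr)-1]
}

func main() {
    fmt.Println(toMultiple("rel_id_1"))         // [rel_id_1]
    fmt.Println(toSingle([]any{"a", "b", "c"})) // c
}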

View File

@@ -472,14 +472,16 @@ func (dao *Dao) cascadeRecordDelete(mainRecord *models.Record, refs map[*models.
     recordTableName := inflector.Columnify(refCollection.Name)
     prefixedFieldName := recordTableName + "." + inflector.Columnify(field.Name)
 
-    // @todo optimize single relation lookup
-    query := dao.RecordQuery(refCollection).
-        Distinct(true).
-        InnerJoin(fmt.Sprintf(
-            // note: the case is used to normalize the value access
+    query := dao.RecordQuery(refCollection).Distinct(true)
+
+    if opt, ok := field.Options.(schema.MultiValuer); !ok || !opt.IsMultiple() {
+        query.AndWhere(dbx.HashExp{prefixedFieldName: mainRecord.Id})
+    } else {
+        query.InnerJoin(fmt.Sprintf(
             `json_each(CASE WHEN json_valid([[%s]]) THEN [[%s]] ELSE json_array([[%s]]) END) as {{%s}}`,
             prefixedFieldName, prefixedFieldName, prefixedFieldName, uniqueJsonEachAlias,
         ), dbx.HashExp{uniqueJsonEachAlias + ".value": mainRecord.Id})
+    }
 
     if refCollection.Id == mainRecord.Collection().Id {
         query.AndWhere(dbx.Not(dbx.HashExp{recordTableName + ".id": mainRecord.Id}))

View File

@@ -748,13 +748,13 @@ func TestDeleteRecord(t *testing.T) {
     }
 
     // ensure that the json rel fields were prefixed
     joinedQueries := strings.Join(calledQueries, " ")
-    expectedRelManyJoin := "`demo1` INNER JOIN json_each(CASE WHEN json_valid([[demo1.rel_many]]) THEN [[demo1.rel_many]] ELSE json_array([[demo1.rel_many]]) END)"
-    if !strings.Contains(joinedQueries, expectedRelManyJoin) {
-        t.Fatalf("(rec3) Expected the cascade delete to call the query \n%v, got \n%v", expectedRelManyJoin, calledQueries)
+    expectedRelManyPart := "`demo1` INNER JOIN json_each(CASE WHEN json_valid([[demo1.rel_many]]) THEN [[demo1.rel_many]] ELSE json_array([[demo1.rel_many]]) END)"
+    if !strings.Contains(joinedQueries, expectedRelManyPart) {
+        t.Fatalf("(rec3) Expected the cascade delete to call the query \n%v, got \n%v", expectedRelManyPart, calledQueries)
     }
-    expectedRelOneJoin := "`demo1` INNER JOIN json_each(CASE WHEN json_valid([[demo1.rel_one]]) THEN [[demo1.rel_one]] ELSE json_array([[demo1.rel_one]]) END)"
-    if !strings.Contains(joinedQueries, expectedRelOneJoin) {
-        t.Fatalf("(rec3) Expected the cascade delete to call the query \n%v, got \n%v", expectedRelOneJoin, calledQueries)
+    expectedRelOnePart := "SELECT DISTINCT `demo1`.* FROM `demo1` WHERE (`demo1`.`rel_one`="
+    if !strings.Contains(joinedQueries, expectedRelOnePart) {
+        t.Fatalf("(rec3) Expected the cascade delete to call the query \n%v, got \n%v", expectedRelOnePart, calledQueries)
     }
 }

View File

@@ -189,15 +189,18 @@ func (dao *Dao) FindRecordByViewFile(
     record := &models.Record{}
 
-    err = dao.RecordQuery(qf.collection).
-        InnerJoin(fmt.Sprintf(
-            // note: the case is used to normalize the value access
+    query := dao.RecordQuery(qf.collection).Limit(1)
+
+    if opt, ok := qf.original.Options.(schema.MultiValuer); !ok || !opt.IsMultiple() {
+        query.AndWhere(dbx.HashExp{cleanFieldName: filename})
+    } else {
+        query.InnerJoin(fmt.Sprintf(
             `json_each(CASE WHEN json_valid([[%s]]) THEN [[%s]] ELSE json_array([[%s]]) END) as {{_je_file}}`,
             cleanFieldName, cleanFieldName, cleanFieldName,
-        ), dbx.HashExp{"_je_file.value": filename}).
-        Limit(1).
-        One(record)
-    if err != nil {
+        ), dbx.HashExp{"_je_file.value": filename})
+    }
+
+    if err := query.One(record); err != nil {
         return nil, err
     }

View File

@@ -512,44 +512,65 @@ func (r *runner) processActiveProps() (*search.ResolverResult, error) {
     }
 
     cleanFieldName := inflector.Columnify(field.Name)
-    newCollectionName := relCollection.Name
+    prefixedFieldName := r.activeTableAlias + "." + cleanFieldName
     newTableAlias := r.activeTableAlias + "_" + cleanFieldName
-
-    jeAlias := r.activeTableAlias + "_" + cleanFieldName + "_je"
-    jePair := r.activeTableAlias + "." + cleanFieldName
-
-    r.resolver.registerJoin(jsonEach(jePair), jeAlias, nil)
-    r.resolver.registerJoin(
-        inflector.Columnify(newCollectionName),
-        newTableAlias,
-        dbx.NewExp(fmt.Sprintf("[[%s.id]] = [[%s.value]]", newTableAlias, jeAlias)),
-    )
+    newCollectionName := relCollection.Name
+
+    if !options.IsMultiple() {
+        r.resolver.registerJoin(
+            inflector.Columnify(newCollectionName),
+            newTableAlias,
+            dbx.NewExp(fmt.Sprintf("[[%s.id]] = [[%s]]", newTableAlias, prefixedFieldName)),
+        )
+    } else {
+        jeAlias := r.activeTableAlias + "_" + cleanFieldName + "_je"
+        r.resolver.registerJoin(jsonEach(prefixedFieldName), jeAlias, nil)
+        r.resolver.registerJoin(
+            inflector.Columnify(newCollectionName),
+            newTableAlias,
+            dbx.NewExp(fmt.Sprintf("[[%s.id]] = [[%s.value]]", newTableAlias, jeAlias)),
+        )
+    }
 
     r.activeCollectionName = newCollectionName
     r.activeTableAlias = newTableAlias
 
     // ---
     // join the relation to the multi-match subquery
     // ---
-    if options.MaxSelect == nil || *options.MaxSelect != 1 {
+    if options.IsMultiple() {
         r.withMultiMatch = true
     }
 
     newTableAlias2 := r.multiMatchActiveTableAlias + "_" + cleanFieldName
-    jeAlias2 := r.multiMatchActiveTableAlias + "_" + cleanFieldName + "_je"
-    jePair2 := r.multiMatchActiveTableAlias + "." + cleanFieldName
-
-    r.multiMatchActiveTableAlias = newTableAlias2
-
-    r.multiMatch.joins = append(
-        r.multiMatch.joins,
-        &join{
-            tableName:  jsonEach(jePair2),
-            tableAlias: jeAlias2,
-        },
-        &join{
-            tableName:  inflector.Columnify(newCollectionName),
-            tableAlias: newTableAlias2,
-            on:         dbx.NewExp(fmt.Sprintf("[[%s.id]] = [[%s.value]]", newTableAlias2, jeAlias2)),
-        },
-    )
+    prefixedFieldName2 := r.multiMatchActiveTableAlias + "." + cleanFieldName
+
+    if !options.IsMultiple() {
+        r.multiMatch.joins = append(
+            r.multiMatch.joins,
+            &join{
+                tableName:  inflector.Columnify(newCollectionName),
+                tableAlias: newTableAlias2,
+                on:         dbx.NewExp(fmt.Sprintf("[[%s.id]] = [[%s]]", newTableAlias2, prefixedFieldName2)),
+            },
+        )
+    } else {
+        jeAlias2 := r.multiMatchActiveTableAlias + "_" + cleanFieldName + "_je"
+        r.multiMatch.joins = append(
+            r.multiMatch.joins,
+            &join{
+                tableName:  jsonEach(prefixedFieldName2),
+                tableAlias: jeAlias2,
+            },
+            &join{
+                tableName:  inflector.Columnify(newCollectionName),
+                tableAlias: newTableAlias2,
+                on:         dbx.NewExp(fmt.Sprintf("[[%s.id]] = [[%s.value]]", newTableAlias2, jeAlias2)),
+            },
+        )
+    }
+
+    r.multiMatchActiveTableAlias = newTableAlias2
     // ---
 }

File diff suppressed because one or more lines are too long