2021-09-08 05:30:15 +00:00
|
|
|
//go:build integration
|
2021-01-18 01:23:20 +00:00
|
|
|
// +build integration
|
|
|
|
|
|
|
|
package sqlite_test
|
|
|
|
|
|
|
|
import (
|
2022-05-19 07:49:32 +00:00
|
|
|
"context"
|
2021-01-18 01:23:20 +00:00
|
|
|
"database/sql"
|
|
|
|
"fmt"
|
2022-03-28 19:45:46 +00:00
|
|
|
"math"
|
2022-07-13 06:30:54 +00:00
|
|
|
"path/filepath"
|
|
|
|
"reflect"
|
2021-02-01 20:57:56 +00:00
|
|
|
"regexp"
|
2021-01-18 01:23:20 +00:00
|
|
|
"strconv"
|
|
|
|
"testing"
|
2022-07-13 06:30:54 +00:00
|
|
|
"time"
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
"github.com/stashapp/stash/pkg/file"
|
2021-01-18 01:23:20 +00:00
|
|
|
"github.com/stashapp/stash/pkg/models"
|
2022-07-13 06:30:54 +00:00
|
|
|
"github.com/stashapp/stash/pkg/sliceutil/intslice"
|
|
|
|
"github.com/stretchr/testify/assert"
|
2021-01-18 01:23:20 +00:00
|
|
|
)
|
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
func loadSceneRelationships(ctx context.Context, expected models.Scene, actual *models.Scene) error {
|
|
|
|
if expected.GalleryIDs.Loaded() {
|
|
|
|
if err := actual.LoadGalleryIDs(ctx, db.Scene); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if expected.TagIDs.Loaded() {
|
|
|
|
if err := actual.LoadTagIDs(ctx, db.Scene); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if expected.PerformerIDs.Loaded() {
|
|
|
|
if err := actual.LoadPerformerIDs(ctx, db.Scene); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if expected.Movies.Loaded() {
|
|
|
|
if err := actual.LoadMovies(ctx, db.Scene); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if expected.StashIDs.Loaded() {
|
|
|
|
if err := actual.LoadStashIDs(ctx, db.Scene); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func Test_sceneQueryBuilder_Create(t *testing.T) {
|
|
|
|
var (
|
|
|
|
title = "title"
|
|
|
|
details = "details"
|
|
|
|
url = "url"
|
|
|
|
rating = 3
|
|
|
|
ocounter = 5
|
|
|
|
createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
|
|
|
|
updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
|
|
|
|
sceneIndex = 123
|
|
|
|
sceneIndex2 = 234
|
|
|
|
endpoint1 = "endpoint1"
|
|
|
|
endpoint2 = "endpoint2"
|
|
|
|
stashID1 = "stashid1"
|
|
|
|
stashID2 = "stashid2"
|
|
|
|
|
|
|
|
date = models.NewDate("2003-02-01")
|
|
|
|
|
|
|
|
videoFile = makeFileWithID(fileIdxStartVideoFiles)
|
|
|
|
)
|
|
|
|
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
newObject models.Scene
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"full",
|
|
|
|
models.Scene{
|
|
|
|
Title: title,
|
|
|
|
Details: details,
|
|
|
|
URL: url,
|
|
|
|
Date: &date,
|
|
|
|
Rating: &rating,
|
|
|
|
Organized: true,
|
|
|
|
OCounter: ocounter,
|
|
|
|
StudioID: &studioIDs[studioIdxWithScene],
|
|
|
|
CreatedAt: createdAt,
|
|
|
|
UpdatedAt: updatedAt,
|
2022-08-12 02:21:46 +00:00
|
|
|
GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
|
|
|
|
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
|
|
|
|
PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
|
|
|
|
Movies: models.NewRelatedMovies([]models.MoviesScenes{
|
2022-07-13 06:30:54 +00:00
|
|
|
{
|
|
|
|
MovieID: movieIDs[movieIdxWithScene],
|
|
|
|
SceneIndex: &sceneIndex,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
MovieID: movieIDs[movieIdxWithStudio],
|
|
|
|
SceneIndex: &sceneIndex2,
|
|
|
|
},
|
2022-08-12 02:21:46 +00:00
|
|
|
}),
|
|
|
|
StashIDs: models.NewRelatedStashIDs([]models.StashID{
|
2022-07-13 06:30:54 +00:00
|
|
|
{
|
|
|
|
StashID: stashID1,
|
|
|
|
Endpoint: endpoint1,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
StashID: stashID2,
|
|
|
|
Endpoint: endpoint2,
|
|
|
|
},
|
2022-08-12 02:21:46 +00:00
|
|
|
}),
|
2022-08-11 06:14:57 +00:00
|
|
|
Files: []*file.VideoFile{},
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with file",
|
|
|
|
models.Scene{
|
|
|
|
Title: title,
|
|
|
|
Details: details,
|
|
|
|
URL: url,
|
|
|
|
Date: &date,
|
|
|
|
Rating: &rating,
|
|
|
|
Organized: true,
|
|
|
|
OCounter: ocounter,
|
|
|
|
StudioID: &studioIDs[studioIdxWithScene],
|
|
|
|
Files: []*file.VideoFile{
|
|
|
|
videoFile.(*file.VideoFile),
|
|
|
|
},
|
|
|
|
CreatedAt: createdAt,
|
|
|
|
UpdatedAt: updatedAt,
|
2022-08-12 02:21:46 +00:00
|
|
|
GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
|
|
|
|
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
|
|
|
|
PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
|
|
|
|
Movies: models.NewRelatedMovies([]models.MoviesScenes{
|
2022-07-13 06:30:54 +00:00
|
|
|
{
|
|
|
|
MovieID: movieIDs[movieIdxWithScene],
|
|
|
|
SceneIndex: &sceneIndex,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
MovieID: movieIDs[movieIdxWithStudio],
|
|
|
|
SceneIndex: &sceneIndex2,
|
|
|
|
},
|
2022-08-12 02:21:46 +00:00
|
|
|
}),
|
|
|
|
StashIDs: models.NewRelatedStashIDs([]models.StashID{
|
2022-07-13 06:30:54 +00:00
|
|
|
{
|
|
|
|
StashID: stashID1,
|
|
|
|
Endpoint: endpoint1,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
StashID: stashID2,
|
|
|
|
Endpoint: endpoint2,
|
|
|
|
},
|
2022-08-12 02:21:46 +00:00
|
|
|
}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid studio id",
|
|
|
|
models.Scene{
|
|
|
|
StudioID: &invalidID,
|
|
|
|
},
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid gallery id",
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
GalleryIDs: models.NewRelatedIDs([]int{invalidID}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid tag id",
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
TagIDs: models.NewRelatedIDs([]int{invalidID}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid performer id",
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid movie id",
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
Movies: models.NewRelatedMovies([]models.MoviesScenes{
|
2022-07-13 06:30:54 +00:00
|
|
|
{
|
|
|
|
MovieID: invalidID,
|
|
|
|
SceneIndex: &sceneIndex,
|
|
|
|
},
|
2022-08-12 02:21:46 +00:00
|
|
|
}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
qb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
|
|
|
|
var fileIDs []file.ID
|
|
|
|
for _, f := range tt.newObject.Files {
|
|
|
|
fileIDs = append(fileIDs, f.ID)
|
|
|
|
}
|
|
|
|
|
|
|
|
s := tt.newObject
|
|
|
|
if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.Create() error = %v, wantErr = %v", err, tt.wantErr)
|
|
|
|
}
|
|
|
|
|
|
|
|
if tt.wantErr {
|
|
|
|
assert.Zero(s.ID)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.NotZero(s.ID)
|
|
|
|
|
|
|
|
copy := tt.newObject
|
|
|
|
copy.ID = s.ID
|
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
// load relationships
|
|
|
|
if err := loadSceneRelationships(ctx, copy, &s); err != nil {
|
|
|
|
t.Errorf("loadSceneRelationships() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.Equal(copy, s)
|
|
|
|
|
|
|
|
// ensure can find the scene
|
|
|
|
found, err := qb.Find(ctx, s.ID)
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("sceneQueryBuilder.Find() error = %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
if !assert.NotNil(found) {
|
|
|
|
return
|
|
|
|
}
|
2022-08-12 02:21:46 +00:00
|
|
|
|
|
|
|
// load relationships
|
|
|
|
if err := loadSceneRelationships(ctx, copy, found); err != nil {
|
|
|
|
t.Errorf("loadSceneRelationships() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.Equal(copy, *found)
|
|
|
|
|
|
|
|
return
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func clearSceneFileIDs(scene *models.Scene) {
|
|
|
|
for _, f := range scene.Files {
|
|
|
|
f.Base().ID = 0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func makeSceneFileWithID(i int) *file.VideoFile {
|
|
|
|
ret := makeSceneFile(i)
|
|
|
|
ret.ID = sceneFileIDs[i]
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
|
|
|
// Test_sceneQueryBuilder_Update exercises full-object update: setting every
// field, clearing nullable/relationship fields, and rejection of invalid
// foreign keys. Each case runs inside a rolled-back transaction.
func Test_sceneQueryBuilder_Update(t *testing.T) {
	var (
		title       = "title"
		details     = "details"
		url         = "url"
		rating      = 3
		ocounter    = 5
		createdAt   = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
		updatedAt   = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
		sceneIndex  = 123
		sceneIndex2 = 234
		endpoint1   = "endpoint1"
		endpoint2   = "endpoint2"
		stashID1    = "stashid1"
		stashID2    = "stashid2"

		date = models.NewDate("2003-02-01")
	)

	tests := []struct {
		name          string
		updatedObject *models.Scene
		wantErr       bool
	}{
		{
			// every updatable field populated
			"full",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithGallery],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithGallery),
				},
				Title:        title,
				Details:      details,
				URL:          url,
				Date:         &date,
				Rating:       &rating,
				Organized:    true,
				OCounter:     ocounter,
				StudioID:     &studioIDs[studioIdxWithScene],
				CreatedAt:    createdAt,
				UpdatedAt:    updatedAt,
				GalleryIDs:   models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
				TagIDs:       models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
				Movies: models.NewRelatedMovies([]models.MoviesScenes{
					{
						MovieID:    movieIDs[movieIdxWithScene],
						SceneIndex: &sceneIndex,
					},
					{
						MovieID:    movieIDs[movieIdxWithStudio],
						SceneIndex: &sceneIndex2,
					},
				}),
				StashIDs: models.NewRelatedStashIDs([]models.StashID{
					{
						StashID:  stashID1,
						Endpoint: endpoint1,
					},
					{
						StashID:  stashID2,
						Endpoint: endpoint2,
					},
				}),
			},
			false,
		},
		{
			// update with all nullable fields unset clears them
			"clear nullables",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithSpacedName],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithSpacedName),
				},
				GalleryIDs:   models.NewRelatedIDs([]int{}),
				TagIDs:       models.NewRelatedIDs([]int{}),
				PerformerIDs: models.NewRelatedIDs([]int{}),
				Movies:       models.NewRelatedMovies([]models.MoviesScenes{}),
				StashIDs:     models.NewRelatedStashIDs([]models.StashID{}),
			},
			false,
		},
		{
			"clear gallery ids",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithGallery],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithGallery),
				},
				GalleryIDs: models.NewRelatedIDs([]int{}),
			},
			false,
		},
		{
			"clear tag ids",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithTag],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithTag),
				},
				TagIDs: models.NewRelatedIDs([]int{}),
			},
			false,
		},
		{
			"clear performer ids",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithPerformer],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithPerformer),
				},
				PerformerIDs: models.NewRelatedIDs([]int{}),
			},
			false,
		},
		{
			"clear movies",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithMovie],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithMovie),
				},
				Movies: models.NewRelatedMovies([]models.MoviesScenes{}),
			},
			false,
		},
		{
			// foreign-key violations must surface as errors
			"invalid studio id",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithGallery],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithGallery),
				},
				StudioID: &invalidID,
			},
			true,
		},
		{
			"invalid gallery id",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithGallery],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithGallery),
				},
				GalleryIDs: models.NewRelatedIDs([]int{invalidID}),
			},
			true,
		},
		{
			"invalid tag id",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithGallery],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithGallery),
				},
				TagIDs: models.NewRelatedIDs([]int{invalidID}),
			},
			true,
		},
		{
			"invalid performer id",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithGallery],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithGallery),
				},
				PerformerIDs: models.NewRelatedIDs([]int{invalidID}),
			},
			true,
		},
		{
			"invalid movie id",
			&models.Scene{
				ID: sceneIDs[sceneIdxWithSpacedName],
				Files: []*file.VideoFile{
					makeSceneFileWithID(sceneIdxWithSpacedName),
				},
				Movies: models.NewRelatedMovies([]models.MoviesScenes{
					{
						MovieID:    invalidID,
						SceneIndex: &sceneIndex,
					},
				}),
			},
			true,
		},
	}

	qb := db.Scene
	for _, tt := range tests {
		runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
			assert := assert.New(t)

			// snapshot the expected state before Update mutates anything
			copy := *tt.updatedObject

			if err := qb.Update(ctx, tt.updatedObject); (err != nil) != tt.wantErr {
				t.Errorf("sceneQueryBuilder.Update() error = %v, wantErr %v", err, tt.wantErr)
			}

			if tt.wantErr {
				return
			}

			// re-read from the database and verify it matches the input
			s, err := qb.Find(ctx, tt.updatedObject.ID)
			if err != nil {
				t.Errorf("sceneQueryBuilder.Find() error = %v", err)
			}

			// load relationships
			if err := loadSceneRelationships(ctx, copy, s); err != nil {
				t.Errorf("loadSceneRelationships() error = %v", err)
				return
			}

			assert.Equal(copy, *s)
		})
	}
}
|
|
|
|
|
|
|
|
func clearScenePartial() models.ScenePartial {
|
|
|
|
// leave mandatory fields
|
|
|
|
return models.ScenePartial{
|
|
|
|
Title: models.OptionalString{Set: true, Null: true},
|
|
|
|
Details: models.OptionalString{Set: true, Null: true},
|
|
|
|
URL: models.OptionalString{Set: true, Null: true},
|
|
|
|
Date: models.OptionalDate{Set: true, Null: true},
|
|
|
|
Rating: models.OptionalInt{Set: true, Null: true},
|
|
|
|
StudioID: models.OptionalInt{Set: true, Null: true},
|
|
|
|
GalleryIDs: &models.UpdateIDs{Mode: models.RelationshipUpdateModeSet},
|
|
|
|
TagIDs: &models.UpdateIDs{Mode: models.RelationshipUpdateModeSet},
|
|
|
|
PerformerIDs: &models.UpdateIDs{Mode: models.RelationshipUpdateModeSet},
|
|
|
|
StashIDs: &models.UpdateStashIDs{Mode: models.RelationshipUpdateModeSet},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Test_sceneQueryBuilder_UpdatePartial exercises partial updates: setting
// every optional field at once, clearing everything via clearScenePartial,
// and failure on an unknown scene ID. File IDs are zeroed before comparison
// because they are database-generated.
func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
	var (
		title       = "title"
		details     = "details"
		url         = "url"
		rating      = 3
		ocounter    = 5
		createdAt   = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
		updatedAt   = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC)
		sceneIndex  = 123
		sceneIndex2 = 234
		endpoint1   = "endpoint1"
		endpoint2   = "endpoint2"
		stashID1    = "stashid1"
		stashID2    = "stashid2"

		date = models.NewDate("2003-02-01")
	)

	tests := []struct {
		name    string
		id      int
		partial models.ScenePartial
		want    models.Scene
		wantErr bool
	}{
		{
			// set every optional field and relationship in one partial update
			"full",
			sceneIDs[sceneIdxWithSpacedName],
			models.ScenePartial{
				Title:     models.NewOptionalString(title),
				Details:   models.NewOptionalString(details),
				URL:       models.NewOptionalString(url),
				Date:      models.NewOptionalDate(date),
				Rating:    models.NewOptionalInt(rating),
				Organized: models.NewOptionalBool(true),
				OCounter:  models.NewOptionalInt(ocounter),
				StudioID:  models.NewOptionalInt(studioIDs[studioIdxWithScene]),
				CreatedAt: models.NewOptionalTime(createdAt),
				UpdatedAt: models.NewOptionalTime(updatedAt),
				GalleryIDs: &models.UpdateIDs{
					IDs:  []int{galleryIDs[galleryIdxWithScene]},
					Mode: models.RelationshipUpdateModeSet,
				},
				TagIDs: &models.UpdateIDs{
					IDs:  []int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]},
					Mode: models.RelationshipUpdateModeSet,
				},
				PerformerIDs: &models.UpdateIDs{
					IDs:  []int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]},
					Mode: models.RelationshipUpdateModeSet,
				},
				MovieIDs: &models.UpdateMovieIDs{
					Movies: []models.MoviesScenes{
						{
							MovieID:    movieIDs[movieIdxWithScene],
							SceneIndex: &sceneIndex,
						},
						{
							MovieID:    movieIDs[movieIdxWithStudio],
							SceneIndex: &sceneIndex2,
						},
					},
					Mode: models.RelationshipUpdateModeSet,
				},
				StashIDs: &models.UpdateStashIDs{
					StashIDs: []models.StashID{
						{
							StashID:  stashID1,
							Endpoint: endpoint1,
						},
						{
							StashID:  stashID2,
							Endpoint: endpoint2,
						},
					},
					Mode: models.RelationshipUpdateModeSet,
				},
			},
			models.Scene{
				ID: sceneIDs[sceneIdxWithSpacedName],
				Files: []*file.VideoFile{
					makeSceneFile(sceneIdxWithSpacedName),
				},
				Title:        title,
				Details:      details,
				URL:          url,
				Date:         &date,
				Rating:       &rating,
				Organized:    true,
				OCounter:     ocounter,
				StudioID:     &studioIDs[studioIdxWithScene],
				CreatedAt:    createdAt,
				UpdatedAt:    updatedAt,
				GalleryIDs:   models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
				TagIDs:       models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
				PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
				Movies: models.NewRelatedMovies([]models.MoviesScenes{
					{
						MovieID:    movieIDs[movieIdxWithScene],
						SceneIndex: &sceneIndex,
					},
					{
						MovieID:    movieIDs[movieIdxWithStudio],
						SceneIndex: &sceneIndex2,
					},
				}),
				StashIDs: models.NewRelatedStashIDs([]models.StashID{
					{
						StashID:  stashID1,
						Endpoint: endpoint1,
					},
					{
						StashID:  stashID2,
						Endpoint: endpoint2,
					},
				}),
			},
			false,
		},
		{
			// null every nullable field and empty every relationship
			"clear all",
			sceneIDs[sceneIdxWithSpacedName],
			clearScenePartial(),
			models.Scene{
				ID: sceneIDs[sceneIdxWithSpacedName],
				Files: []*file.VideoFile{
					makeSceneFile(sceneIdxWithSpacedName),
				},
				GalleryIDs:   models.NewRelatedIDs([]int{}),
				TagIDs:       models.NewRelatedIDs([]int{}),
				PerformerIDs: models.NewRelatedIDs([]int{}),
				Movies:       models.NewRelatedMovies([]models.MoviesScenes{}),
				StashIDs:     models.NewRelatedStashIDs([]models.StashID{}),
			},
			false,
		},
		{
			// updating a non-existent scene must error
			"invalid id",
			invalidID,
			models.ScenePartial{},
			models.Scene{},
			true,
		},
	}
	for _, tt := range tests {
		qb := db.Scene

		runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
			assert := assert.New(t)

			got, err := qb.UpdatePartial(ctx, tt.id, tt.partial)
			if (err != nil) != tt.wantErr {
				t.Errorf("sceneQueryBuilder.UpdatePartial() error = %v, wantErr %v", err, tt.wantErr)
				return
			}

			if tt.wantErr {
				return
			}

			// ignore file ids
			clearSceneFileIDs(got)

			// load relationships
			if err := loadSceneRelationships(ctx, tt.want, got); err != nil {
				t.Errorf("loadSceneRelationships() error = %v", err)
				return
			}

			assert.Equal(tt.want, *got)

			// re-read from the database and verify the update persisted
			s, err := qb.Find(ctx, tt.id)
			if err != nil {
				t.Errorf("sceneQueryBuilder.Find() error = %v", err)
			}

			// ignore file ids
			clearSceneFileIDs(s)

			// load relationships
			if err := loadSceneRelationships(ctx, tt.want, s); err != nil {
				t.Errorf("loadSceneRelationships() error = %v", err)
				return
			}

			assert.Equal(tt.want, *s)
		})
	}
}
|
|
|
|
|
|
|
|
func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
|
|
|
|
var (
|
|
|
|
sceneIndex = 123
|
|
|
|
sceneIndex2 = 234
|
|
|
|
endpoint1 = "endpoint1"
|
|
|
|
endpoint2 = "endpoint2"
|
|
|
|
stashID1 = "stashid1"
|
|
|
|
stashID2 = "stashid2"
|
|
|
|
|
|
|
|
movieScenes = []models.MoviesScenes{
|
|
|
|
{
|
|
|
|
MovieID: movieIDs[movieIdxWithDupName],
|
|
|
|
SceneIndex: &sceneIndex,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
MovieID: movieIDs[movieIdxWithStudio],
|
|
|
|
SceneIndex: &sceneIndex2,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
stashIDs = []models.StashID{
|
|
|
|
{
|
|
|
|
StashID: stashID1,
|
|
|
|
Endpoint: endpoint1,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
StashID: stashID2,
|
|
|
|
Endpoint: endpoint2,
|
|
|
|
},
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
2022-07-13 06:30:54 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
id int
|
|
|
|
partial models.ScenePartial
|
|
|
|
want models.Scene
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"add galleries",
|
|
|
|
sceneIDs[sceneIdxWithGallery],
|
|
|
|
models.ScenePartial{
|
|
|
|
GalleryIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{galleryIDs[galleryIdx1WithImage], galleryIDs[galleryIdx1WithPerformer]},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
GalleryIDs: models.NewRelatedIDs(append(indexesToIDs(galleryIDs, sceneGalleries[sceneIdxWithGallery]),
|
2022-07-13 06:30:54 +00:00
|
|
|
galleryIDs[galleryIdx1WithImage],
|
|
|
|
galleryIDs[galleryIdx1WithPerformer],
|
2022-08-12 02:21:46 +00:00
|
|
|
)),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add tags",
|
|
|
|
sceneIDs[sceneIdxWithTwoTags],
|
|
|
|
models.ScenePartial{
|
|
|
|
TagIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithGallery]},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
|
2022-07-13 06:30:54 +00:00
|
|
|
tagIDs[tagIdx1WithDupName],
|
|
|
|
tagIDs[tagIdx1WithGallery],
|
2022-08-12 02:21:46 +00:00
|
|
|
)),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add performers",
|
|
|
|
sceneIDs[sceneIdxWithTwoPerformers],
|
|
|
|
models.ScenePartial{
|
|
|
|
PerformerIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{performerIDs[performerIdx1WithDupName], performerIDs[performerIdx1WithGallery]},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
PerformerIDs: models.NewRelatedIDs(append(indexesToIDs(performerIDs, scenePerformers[sceneIdxWithTwoPerformers]),
|
2022-07-13 06:30:54 +00:00
|
|
|
performerIDs[performerIdx1WithDupName],
|
|
|
|
performerIDs[performerIdx1WithGallery],
|
2022-08-12 02:21:46 +00:00
|
|
|
)),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add movies",
|
|
|
|
sceneIDs[sceneIdxWithMovie],
|
|
|
|
models.ScenePartial{
|
|
|
|
MovieIDs: &models.UpdateMovieIDs{
|
|
|
|
Movies: movieScenes,
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
Movies: models.NewRelatedMovies(append([]models.MoviesScenes{
|
2022-07-13 06:30:54 +00:00
|
|
|
{
|
|
|
|
MovieID: indexesToIDs(movieIDs, sceneMovies[sceneIdxWithMovie])[0],
|
|
|
|
},
|
2022-08-12 02:21:46 +00:00
|
|
|
}, movieScenes...)),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add stash ids",
|
|
|
|
sceneIDs[sceneIdxWithSpacedName],
|
|
|
|
models.ScenePartial{
|
|
|
|
StashIDs: &models.UpdateStashIDs{
|
|
|
|
StashIDs: stashIDs,
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
StashIDs: models.NewRelatedStashIDs(append([]models.StashID{sceneStashID(sceneIdxWithSpacedName)}, stashIDs...)),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add duplicate galleries",
|
|
|
|
sceneIDs[sceneIdxWithGallery],
|
|
|
|
models.ScenePartial{
|
|
|
|
GalleryIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{galleryIDs[galleryIdxWithScene], galleryIDs[galleryIdx1WithPerformer]},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
GalleryIDs: models.NewRelatedIDs(append(indexesToIDs(galleryIDs, sceneGalleries[sceneIdxWithGallery]),
|
2022-07-13 06:30:54 +00:00
|
|
|
galleryIDs[galleryIdx1WithPerformer],
|
2022-08-12 02:21:46 +00:00
|
|
|
)),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add duplicate tags",
|
|
|
|
sceneIDs[sceneIdxWithTwoTags],
|
|
|
|
models.ScenePartial{
|
|
|
|
TagIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithGallery]},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
|
2022-07-13 06:30:54 +00:00
|
|
|
tagIDs[tagIdx1WithGallery],
|
2022-08-12 02:21:46 +00:00
|
|
|
)),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add duplicate performers",
|
|
|
|
sceneIDs[sceneIdxWithTwoPerformers],
|
|
|
|
models.ScenePartial{
|
|
|
|
PerformerIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithGallery]},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
PerformerIDs: models.NewRelatedIDs(append(indexesToIDs(performerIDs, scenePerformers[sceneIdxWithTwoPerformers]),
|
2022-07-13 06:30:54 +00:00
|
|
|
performerIDs[performerIdx1WithGallery],
|
2022-08-12 02:21:46 +00:00
|
|
|
)),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add duplicate movies",
|
|
|
|
sceneIDs[sceneIdxWithMovie],
|
|
|
|
models.ScenePartial{
|
|
|
|
MovieIDs: &models.UpdateMovieIDs{
|
|
|
|
Movies: append([]models.MoviesScenes{
|
|
|
|
{
|
|
|
|
MovieID: movieIDs[movieIdxWithScene],
|
|
|
|
SceneIndex: &sceneIndex,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
movieScenes...,
|
|
|
|
),
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
Movies: models.NewRelatedMovies(append([]models.MoviesScenes{
|
2022-07-13 06:30:54 +00:00
|
|
|
{
|
|
|
|
MovieID: indexesToIDs(movieIDs, sceneMovies[sceneIdxWithMovie])[0],
|
|
|
|
},
|
2022-08-12 02:21:46 +00:00
|
|
|
}, movieScenes...)),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add duplicate stash ids",
|
|
|
|
sceneIDs[sceneIdxWithSpacedName],
|
|
|
|
models.ScenePartial{
|
|
|
|
StashIDs: &models.UpdateStashIDs{
|
|
|
|
StashIDs: []models.StashID{
|
|
|
|
sceneStashID(sceneIdxWithSpacedName),
|
|
|
|
},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
StashIDs: models.NewRelatedStashIDs([]models.StashID{sceneStashID(sceneIdxWithSpacedName)}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add invalid galleries",
|
|
|
|
sceneIDs[sceneIdxWithGallery],
|
|
|
|
models.ScenePartial{
|
|
|
|
GalleryIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{invalidID},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{},
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add invalid tags",
|
|
|
|
sceneIDs[sceneIdxWithTwoTags],
|
|
|
|
models.ScenePartial{
|
|
|
|
TagIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{invalidID},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{},
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add invalid performers",
|
|
|
|
sceneIDs[sceneIdxWithTwoPerformers],
|
|
|
|
models.ScenePartial{
|
|
|
|
PerformerIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{invalidID},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{},
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"add invalid movies",
|
|
|
|
sceneIDs[sceneIdxWithMovie],
|
|
|
|
models.ScenePartial{
|
|
|
|
MovieIDs: &models.UpdateMovieIDs{
|
|
|
|
Movies: []models.MoviesScenes{
|
|
|
|
{
|
|
|
|
MovieID: invalidID,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
Mode: models.RelationshipUpdateModeAdd,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{},
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"remove galleries",
|
|
|
|
sceneIDs[sceneIdxWithGallery],
|
|
|
|
models.ScenePartial{
|
|
|
|
GalleryIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{galleryIDs[galleryIdxWithScene]},
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
},
|
2022-08-11 06:14:57 +00:00
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
GalleryIDs: models.NewRelatedIDs([]int{}),
|
2022-08-11 06:14:57 +00:00
|
|
|
},
|
2022-07-13 06:30:54 +00:00
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"remove tags",
|
|
|
|
sceneIDs[sceneIdxWithTwoTags],
|
|
|
|
models.ScenePartial{
|
|
|
|
TagIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{tagIDs[tagIdx1WithScene]},
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx2WithScene]}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"remove performers",
|
|
|
|
sceneIDs[sceneIdxWithTwoPerformers],
|
|
|
|
models.ScenePartial{
|
|
|
|
PerformerIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{performerIDs[performerIdx1WithScene]},
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx2WithScene]}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"remove movies",
|
|
|
|
sceneIDs[sceneIdxWithMovie],
|
|
|
|
models.ScenePartial{
|
|
|
|
MovieIDs: &models.UpdateMovieIDs{
|
|
|
|
Movies: []models.MoviesScenes{
|
|
|
|
{
|
|
|
|
MovieID: movieIDs[movieIdxWithScene],
|
|
|
|
},
|
|
|
|
},
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
},
|
2022-08-11 06:14:57 +00:00
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
Movies: models.NewRelatedMovies([]models.MoviesScenes{}),
|
2022-08-11 06:14:57 +00:00
|
|
|
},
|
2022-07-13 06:30:54 +00:00
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"remove stash ids",
|
|
|
|
sceneIDs[sceneIdxWithSpacedName],
|
|
|
|
models.ScenePartial{
|
|
|
|
StashIDs: &models.UpdateStashIDs{
|
|
|
|
StashIDs: []models.StashID{sceneStashID(sceneIdxWithSpacedName)},
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
},
|
2022-08-11 06:14:57 +00:00
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
StashIDs: models.NewRelatedStashIDs([]models.StashID{}),
|
2022-08-11 06:14:57 +00:00
|
|
|
},
|
2022-07-13 06:30:54 +00:00
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"remove unrelated galleries",
|
|
|
|
sceneIDs[sceneIdxWithGallery],
|
|
|
|
models.ScenePartial{
|
|
|
|
GalleryIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{galleryIDs[galleryIdx1WithImage]},
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"remove unrelated tags",
|
|
|
|
sceneIDs[sceneIdxWithTwoTags],
|
|
|
|
models.ScenePartial{
|
|
|
|
TagIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{tagIDs[tagIdx1WithPerformer]},
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
TagIDs: models.NewRelatedIDs(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags])),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"remove unrelated performers",
|
|
|
|
sceneIDs[sceneIdxWithTwoPerformers],
|
|
|
|
models.ScenePartial{
|
|
|
|
PerformerIDs: &models.UpdateIDs{
|
|
|
|
IDs: []int{performerIDs[performerIdx1WithDupName]},
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
PerformerIDs: models.NewRelatedIDs(indexesToIDs(performerIDs, scenePerformers[sceneIdxWithTwoPerformers])),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"remove unrelated movies",
|
|
|
|
sceneIDs[sceneIdxWithMovie],
|
|
|
|
models.ScenePartial{
|
|
|
|
MovieIDs: &models.UpdateMovieIDs{
|
|
|
|
Movies: []models.MoviesScenes{
|
|
|
|
{
|
|
|
|
MovieID: movieIDs[movieIdxWithDupName],
|
|
|
|
},
|
|
|
|
},
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
Movies: models.NewRelatedMovies([]models.MoviesScenes{
|
2022-07-13 06:30:54 +00:00
|
|
|
{
|
|
|
|
MovieID: indexesToIDs(movieIDs, sceneMovies[sceneIdxWithMovie])[0],
|
|
|
|
},
|
2022-08-12 02:21:46 +00:00
|
|
|
}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"remove unrelated stash ids",
|
|
|
|
sceneIDs[sceneIdxWithGallery],
|
|
|
|
models.ScenePartial{
|
|
|
|
StashIDs: &models.UpdateStashIDs{
|
|
|
|
StashIDs: stashIDs,
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
models.Scene{
|
2022-08-12 02:21:46 +00:00
|
|
|
StashIDs: models.NewRelatedStashIDs([]models.StashID{sceneStashID(sceneIdxWithGallery)}),
|
2022-07-13 06:30:54 +00:00
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
|
|
|
|
got, err := qb.UpdatePartial(ctx, tt.id, tt.partial)
|
|
|
|
if (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.UpdatePartial() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if tt.wantErr {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
s, err := qb.Find(ctx, tt.id)
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("sceneQueryBuilder.Find() error = %v", err)
|
|
|
|
}
|
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
// load relationships
|
|
|
|
if err := loadSceneRelationships(ctx, tt.want, got); err != nil {
|
|
|
|
t.Errorf("loadSceneRelationships() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if err := loadSceneRelationships(ctx, tt.want, s); err != nil {
|
|
|
|
t.Errorf("loadSceneRelationships() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
// only compare fields that were in the partial
|
|
|
|
if tt.partial.PerformerIDs != nil {
|
|
|
|
assert.Equal(tt.want.PerformerIDs, got.PerformerIDs)
|
|
|
|
assert.Equal(tt.want.PerformerIDs, s.PerformerIDs)
|
|
|
|
}
|
|
|
|
if tt.partial.TagIDs != nil {
|
|
|
|
assert.Equal(tt.want.TagIDs, got.TagIDs)
|
|
|
|
assert.Equal(tt.want.TagIDs, s.TagIDs)
|
|
|
|
}
|
|
|
|
if tt.partial.GalleryIDs != nil {
|
|
|
|
assert.Equal(tt.want.GalleryIDs, got.GalleryIDs)
|
|
|
|
assert.Equal(tt.want.GalleryIDs, s.GalleryIDs)
|
|
|
|
}
|
|
|
|
if tt.partial.MovieIDs != nil {
|
|
|
|
assert.Equal(tt.want.Movies, got.Movies)
|
|
|
|
assert.Equal(tt.want.Movies, s.Movies)
|
|
|
|
}
|
|
|
|
if tt.partial.StashIDs != nil {
|
|
|
|
assert.Equal(tt.want.StashIDs, got.StashIDs)
|
|
|
|
assert.Equal(tt.want.StashIDs, s.StashIDs)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneQueryBuilder_IncrementOCounter(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
id int
|
|
|
|
want int
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"increment",
|
|
|
|
sceneIDs[1],
|
|
|
|
2,
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
invalidID,
|
|
|
|
0,
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
got, err := qb.IncrementOCounter(ctx, tt.id)
|
|
|
|
if (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.IncrementOCounter() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if got != tt.want {
|
|
|
|
t.Errorf("sceneQueryBuilder.IncrementOCounter() = %v, want %v", got, tt.want)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneQueryBuilder_DecrementOCounter(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
id int
|
|
|
|
want int
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"decrement",
|
|
|
|
sceneIDs[2],
|
|
|
|
1,
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"zero",
|
|
|
|
sceneIDs[0],
|
|
|
|
0,
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
invalidID,
|
|
|
|
0,
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
got, err := qb.DecrementOCounter(ctx, tt.id)
|
|
|
|
if (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.DecrementOCounter() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if got != tt.want {
|
|
|
|
t.Errorf("sceneQueryBuilder.DecrementOCounter() = %v, want %v", got, tt.want)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneQueryBuilder_ResetOCounter(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
id int
|
|
|
|
want int
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"decrement",
|
|
|
|
sceneIDs[2],
|
|
|
|
0,
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"zero",
|
|
|
|
sceneIDs[0],
|
|
|
|
0,
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
invalidID,
|
|
|
|
0,
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
got, err := qb.ResetOCounter(ctx, tt.id)
|
|
|
|
if (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.ResetOCounter() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if got != tt.want {
|
|
|
|
t.Errorf("sceneQueryBuilder.ResetOCounter() = %v, want %v", got, tt.want)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneQueryBuilder_Destroy(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
id int
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid",
|
|
|
|
sceneIDs[sceneIdxWithGallery],
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
invalidID,
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
withRollbackTxn(func(ctx context.Context) error {
|
|
|
|
if err := qb.Destroy(ctx, tt.id); (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.Destroy() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
}
|
|
|
|
|
|
|
|
// ensure cannot be found
|
|
|
|
i, err := qb.Find(ctx, tt.id)
|
|
|
|
|
|
|
|
assert.NotNil(err)
|
|
|
|
assert.Nil(i)
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func makeSceneWithID(index int) *models.Scene {
|
|
|
|
ret := makeScene(index)
|
|
|
|
ret.ID = sceneIDs[index]
|
|
|
|
|
|
|
|
if ret.Date != nil && ret.Date.IsZero() {
|
|
|
|
ret.Date = nil
|
|
|
|
}
|
|
|
|
|
|
|
|
ret.Files = []*file.VideoFile{makeSceneFile(index)}
|
|
|
|
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneQueryBuilder_Find(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
id int
|
|
|
|
want *models.Scene
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid",
|
|
|
|
sceneIDs[sceneIdxWithSpacedName],
|
|
|
|
makeSceneWithID(sceneIdxWithSpacedName),
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
invalidID,
|
|
|
|
nil,
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with galleries",
|
|
|
|
sceneIDs[sceneIdxWithGallery],
|
|
|
|
makeSceneWithID(sceneIdxWithGallery),
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with performers",
|
|
|
|
sceneIDs[sceneIdxWithTwoPerformers],
|
|
|
|
makeSceneWithID(sceneIdxWithTwoPerformers),
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with tags",
|
|
|
|
sceneIDs[sceneIdxWithTwoTags],
|
|
|
|
makeSceneWithID(sceneIdxWithTwoTags),
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with movies",
|
|
|
|
sceneIDs[sceneIdxWithMovie],
|
|
|
|
makeSceneWithID(sceneIdxWithMovie),
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
withTxn(func(ctx context.Context) error {
|
|
|
|
got, err := qb.Find(ctx, tt.id)
|
|
|
|
if (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.Find() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
if got != nil {
|
|
|
|
clearSceneFileIDs(got)
|
2022-08-12 02:21:46 +00:00
|
|
|
|
|
|
|
// load relationships
|
|
|
|
if err := loadSceneRelationships(ctx, *tt.want, got); err != nil {
|
|
|
|
t.Errorf("loadSceneRelationships() error = %v", err)
|
|
|
|
return nil
|
|
|
|
}
|
2022-07-13 06:30:54 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(tt.want, got)
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
func postFindScenes(ctx context.Context, want []*models.Scene, got []*models.Scene) error {
|
|
|
|
for i, s := range got {
|
|
|
|
clearSceneFileIDs(s)
|
|
|
|
|
|
|
|
// load relationships
|
|
|
|
if i < len(want) {
|
|
|
|
if err := loadSceneRelationships(ctx, *want[i], s); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func Test_sceneQueryBuilder_FindMany(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
ids []int
|
|
|
|
want []*models.Scene
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid with relationships",
|
|
|
|
[]int{
|
|
|
|
sceneIDs[sceneIdxWithGallery],
|
|
|
|
sceneIDs[sceneIdxWithTwoPerformers],
|
|
|
|
sceneIDs[sceneIdxWithTwoTags],
|
|
|
|
sceneIDs[sceneIdxWithMovie],
|
|
|
|
},
|
|
|
|
[]*models.Scene{
|
|
|
|
makeSceneWithID(sceneIdxWithGallery),
|
|
|
|
makeSceneWithID(sceneIdxWithTwoPerformers),
|
|
|
|
makeSceneWithID(sceneIdxWithTwoTags),
|
|
|
|
makeSceneWithID(sceneIdxWithMovie),
|
|
|
|
},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
[]int{sceneIDs[sceneIdxWithGallery], sceneIDs[sceneIdxWithTwoPerformers], invalidID},
|
|
|
|
nil,
|
|
|
|
true,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
got, err := qb.FindMany(ctx, tt.ids)
|
|
|
|
if (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.FindMany() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
if err := postFindScenes(ctx, tt.want, got); err != nil {
|
|
|
|
t.Errorf("loadSceneRelationships() error = %v", err)
|
|
|
|
return
|
2022-07-13 06:30:54 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(tt.want, got)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneQueryBuilder_FindByChecksum(t *testing.T) {
|
|
|
|
getChecksum := func(index int) string {
|
|
|
|
return getSceneStringValue(index, checksumField)
|
|
|
|
}
|
|
|
|
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
checksum string
|
|
|
|
want []*models.Scene
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid",
|
|
|
|
getChecksum(sceneIdxWithSpacedName),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithSpacedName)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
"invalid checksum",
|
|
|
|
nil,
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with galleries",
|
|
|
|
getChecksum(sceneIdxWithGallery),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithGallery)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with performers",
|
|
|
|
getChecksum(sceneIdxWithTwoPerformers),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithTwoPerformers)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with tags",
|
|
|
|
getChecksum(sceneIdxWithTwoTags),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithTwoTags)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with movies",
|
|
|
|
getChecksum(sceneIdxWithMovie),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithMovie)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
withTxn(func(ctx context.Context) error {
|
|
|
|
assert := assert.New(t)
|
|
|
|
got, err := qb.FindByChecksum(ctx, tt.checksum)
|
|
|
|
if (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.FindByChecksum() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
if err := postFindScenes(ctx, tt.want, got); err != nil {
|
|
|
|
t.Errorf("loadSceneRelationships() error = %v", err)
|
|
|
|
return nil
|
2022-07-13 06:30:54 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(tt.want, got)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneQueryBuilder_FindByOSHash(t *testing.T) {
|
|
|
|
getOSHash := func(index int) string {
|
|
|
|
return getSceneStringValue(index, "oshash")
|
|
|
|
}
|
|
|
|
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
oshash string
|
|
|
|
want []*models.Scene
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid",
|
|
|
|
getOSHash(sceneIdxWithSpacedName),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithSpacedName)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
"invalid oshash",
|
|
|
|
nil,
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with galleries",
|
|
|
|
getOSHash(sceneIdxWithGallery),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithGallery)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with performers",
|
|
|
|
getOSHash(sceneIdxWithTwoPerformers),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithTwoPerformers)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with tags",
|
|
|
|
getOSHash(sceneIdxWithTwoTags),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithTwoTags)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with movies",
|
|
|
|
getOSHash(sceneIdxWithMovie),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithMovie)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
withTxn(func(ctx context.Context) error {
|
|
|
|
got, err := qb.FindByOSHash(ctx, tt.oshash)
|
|
|
|
if (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.FindByOSHash() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
if err := postFindScenes(ctx, tt.want, got); err != nil {
|
|
|
|
t.Errorf("loadSceneRelationships() error = %v", err)
|
|
|
|
return nil
|
2022-07-13 06:30:54 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if !reflect.DeepEqual(got, tt.want) {
|
|
|
|
t.Errorf("sceneQueryBuilder.FindByOSHash() = %v, want %v", got, tt.want)
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func Test_sceneQueryBuilder_FindByPath(t *testing.T) {
|
|
|
|
getPath := func(index int) string {
|
|
|
|
return getFilePath(folderIdxWithSceneFiles, getSceneBasename(index))
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
path string
|
|
|
|
want []*models.Scene
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid",
|
|
|
|
getPath(sceneIdxWithSpacedName),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithSpacedName)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
"invalid path",
|
|
|
|
nil,
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with galleries",
|
|
|
|
getPath(sceneIdxWithGallery),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithGallery)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with performers",
|
|
|
|
getPath(sceneIdxWithTwoPerformers),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithTwoPerformers)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with tags",
|
|
|
|
getPath(sceneIdxWithTwoTags),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithTwoTags)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"with movies",
|
|
|
|
getPath(sceneIdxWithMovie),
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithMovie)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
qb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
withTxn(func(ctx context.Context) error {
|
|
|
|
assert := assert.New(t)
|
|
|
|
got, err := qb.FindByPath(ctx, tt.path)
|
|
|
|
if (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.FindByPath() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
return nil
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
if err := postFindScenes(ctx, tt.want, got); err != nil {
|
|
|
|
t.Errorf("loadSceneRelationships() error = %v", err)
|
|
|
|
return nil
|
2022-07-13 06:30:54 +00:00
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.Equal(tt.want, got)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
return nil
|
|
|
|
})
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func Test_sceneQueryBuilder_FindByGalleryID(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
galleryID int
|
|
|
|
want []*models.Scene
|
|
|
|
wantErr bool
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid",
|
|
|
|
galleryIDs[galleryIdxWithScene],
|
|
|
|
[]*models.Scene{makeSceneWithID(sceneIdxWithGallery)},
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"none",
|
|
|
|
galleryIDs[galleryIdx1WithPerformer],
|
|
|
|
nil,
|
|
|
|
false,
|
|
|
|
},
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
qb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
got, err := qb.FindByGalleryID(ctx, tt.galleryID)
|
|
|
|
if (err != nil) != tt.wantErr {
|
|
|
|
t.Errorf("sceneQueryBuilder.FindByGalleryID() error = %v, wantErr %v", err, tt.wantErr)
|
|
|
|
return
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
if err := postFindScenes(ctx, tt.want, got); err != nil {
|
|
|
|
t.Errorf("loadSceneRelationships() error = %v", err)
|
|
|
|
return
|
2022-07-13 06:30:54 +00:00
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.Equal(tt.want, got)
|
|
|
|
return
|
|
|
|
})
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneCountByPerformerID(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2022-05-19 07:49:32 +00:00
|
|
|
count, err := sqb.CountByPerformerID(ctx, performerIDs[performerIdxWithScene])
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("Error counting scenes: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(t, 1, count)
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
count, err = sqb.CountByPerformerID(ctx, 0)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("Error counting scenes: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(t, 0, count)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2022-08-01 01:01:29 +00:00
|
|
|
func scenesToIDs(i []*models.Scene) []int {
|
|
|
|
var ret []int
|
|
|
|
for _, ii := range i {
|
|
|
|
ret = append(ret, ii.ID)
|
|
|
|
}
|
|
|
|
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneStore_FindByFileID(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
fileID file.ID
|
|
|
|
include []int
|
|
|
|
exclude []int
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid",
|
|
|
|
sceneFileIDs[sceneIdx1WithPerformer],
|
|
|
|
[]int{sceneIdx1WithPerformer},
|
|
|
|
nil,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
invalidFileID,
|
|
|
|
nil,
|
|
|
|
[]int{sceneIdx1WithPerformer},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
got, err := qb.FindByFileID(ctx, tt.fileID)
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("SceneStore.FindByFileID() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
for _, f := range got {
|
|
|
|
clearSceneFileIDs(f)
|
|
|
|
}
|
|
|
|
|
|
|
|
ids := scenesToIDs(got)
|
|
|
|
include := indexesToIDs(galleryIDs, tt.include)
|
|
|
|
exclude := indexesToIDs(galleryIDs, tt.exclude)
|
|
|
|
|
|
|
|
for _, i := range include {
|
|
|
|
assert.Contains(ids, i)
|
|
|
|
}
|
|
|
|
for _, e := range exclude {
|
|
|
|
assert.NotContains(ids, e)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneStore_CountByFileID(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
fileID file.ID
|
|
|
|
want int
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid",
|
|
|
|
sceneFileIDs[sceneIdxWithTwoPerformers],
|
|
|
|
1,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"invalid",
|
|
|
|
invalidFileID,
|
|
|
|
0,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
got, err := qb.CountByFileID(ctx, tt.fileID)
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("SceneStore.CountByFileID() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(tt.want, got)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneStore_CountMissingChecksum(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
want int
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid",
|
|
|
|
0,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
got, err := qb.CountMissingChecksum(ctx)
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("SceneStore.CountMissingChecksum() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(tt.want, got)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func Test_sceneStore_CountMissingOshash(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
want int
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"valid",
|
|
|
|
0,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
got, err := qb.CountMissingOSHash(ctx)
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("SceneStore.CountMissingOSHash() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(tt.want, got)
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
func TestSceneWall(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
const sceneIdx = 2
|
|
|
|
wallQuery := getSceneStringValue(sceneIdx, "Details")
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes, err := sqb.Wall(ctx, &wallQuery)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("Error finding scenes: %s", err.Error())
|
2022-07-13 06:30:54 +00:00
|
|
|
return nil
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
scene := scenes[0]
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdx], scene.ID)
|
2022-07-13 06:30:54 +00:00
|
|
|
scenePath := getFilePath(folderIdxWithSceneFiles, getSceneBasename(sceneIdx))
|
|
|
|
assert.Equal(t, scenePath, scene.Path())
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
wallQuery = "not exist"
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes, err = sqb.Wall(ctx, &wallQuery)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("Error finding scene: %s", err.Error())
|
2022-07-13 06:30:54 +00:00
|
|
|
return nil
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryQ(t *testing.T) {
|
|
|
|
const sceneIdx = 2
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdx, titleField)
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
sceneQueryQ(ctx, t, sqb, q, sceneIdx)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
func queryScene(ctx context.Context, t *testing.T, sqb models.SceneReader, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) []*models.Scene {
|
2021-03-02 00:27:36 +00:00
|
|
|
t.Helper()
|
2022-05-19 07:49:32 +00:00
|
|
|
result, err := sqb.Query(ctx, models.SceneQueryOptions{
|
2021-10-25 00:40:13 +00:00
|
|
|
QueryOptions: models.QueryOptions{
|
|
|
|
FindFilter: findFilter,
|
2022-08-08 04:24:08 +00:00
|
|
|
Count: true,
|
2021-10-25 00:40:13 +00:00
|
|
|
},
|
2022-08-08 04:24:08 +00:00
|
|
|
SceneFilter: sceneFilter,
|
|
|
|
TotalDuration: true,
|
|
|
|
TotalSize: true,
|
2021-10-25 00:40:13 +00:00
|
|
|
})
|
2021-01-18 01:23:20 +00:00
|
|
|
if err != nil {
|
2021-10-25 00:40:13 +00:00
|
|
|
t.Errorf("Error querying scene: %v", err)
|
2022-07-13 06:30:54 +00:00
|
|
|
return nil
|
2021-10-25 00:40:13 +00:00
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes, err := result.Resolve(ctx)
|
2021-10-25 00:40:13 +00:00
|
|
|
if err != nil {
|
|
|
|
t.Errorf("Error resolving scenes: %v", err)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return scenes
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
func sceneQueryQ(ctx context.Context, t *testing.T, sqb models.SceneReader, q string, expectedSceneIdx int) {
|
2021-01-18 01:23:20 +00:00
|
|
|
filter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, nil, &filter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-08-08 04:24:08 +00:00
|
|
|
if !assert.Len(t, scenes, 1) {
|
|
|
|
return
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
scene := scenes[0]
|
|
|
|
assert.Equal(t, sceneIDs[expectedSceneIdx], scene.ID)
|
|
|
|
|
|
|
|
// no Q should return all results
|
|
|
|
filter.Q = nil
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, nil, &filter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, totalScenes)
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryPath(t *testing.T) {
|
2022-07-13 06:30:54 +00:00
|
|
|
const (
|
|
|
|
sceneIdx = 1
|
|
|
|
otherSceneIdx = 2
|
|
|
|
)
|
|
|
|
folder := folderPaths[folderIdxWithSceneFiles]
|
|
|
|
basename := getSceneBasename(sceneIdx)
|
|
|
|
scenePath := getFilePath(folderIdxWithSceneFiles, getSceneBasename(sceneIdx))
|
|
|
|
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
input models.StringCriterionInput
|
|
|
|
mustInclude []int
|
|
|
|
mustExclude []int
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"equals full path",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: scenePath,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
[]int{otherSceneIdx},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"equals folder name",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: folder,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
nil,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"equals folder name trailing slash",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: folder + string(filepath.Separator),
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
nil,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"equals base name",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: basename,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
nil,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"equals base name leading slash",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: string(filepath.Separator) + basename,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
nil,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"equals full path wildcard",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: filepath.Join(folder, "scene_0001_%"),
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
[]int{otherSceneIdx},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"not equals full path",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: scenePath,
|
|
|
|
Modifier: models.CriterionModifierNotEquals,
|
|
|
|
},
|
|
|
|
[]int{otherSceneIdx},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"not equals folder name",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: folder,
|
|
|
|
Modifier: models.CriterionModifierNotEquals,
|
|
|
|
},
|
|
|
|
nil,
|
|
|
|
[]int{sceneIdx},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"not equals basename",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: basename,
|
|
|
|
Modifier: models.CriterionModifierNotEquals,
|
|
|
|
},
|
|
|
|
nil,
|
|
|
|
[]int{sceneIdx},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"includes folder name",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: folder,
|
|
|
|
Modifier: models.CriterionModifierIncludes,
|
|
|
|
},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
nil,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"includes base name",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: basename,
|
|
|
|
Modifier: models.CriterionModifierIncludes,
|
|
|
|
},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
nil,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"includes full path",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: scenePath,
|
|
|
|
Modifier: models.CriterionModifierIncludes,
|
|
|
|
},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
[]int{otherSceneIdx},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"matches regex",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: "scene_.*1_Path",
|
|
|
|
Modifier: models.CriterionModifierMatchesRegex,
|
|
|
|
},
|
|
|
|
[]int{sceneIdx},
|
|
|
|
nil,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"not matches regex",
|
|
|
|
models.StringCriterionInput{
|
|
|
|
Value: "scene_.*1_Path",
|
|
|
|
Modifier: models.CriterionModifierNotMatchesRegex,
|
|
|
|
},
|
|
|
|
nil,
|
|
|
|
[]int{sceneIdx},
|
|
|
|
},
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
got, err := qb.Query(ctx, models.SceneQueryOptions{
|
|
|
|
SceneFilter: &models.SceneFilterType{
|
|
|
|
Path: &tt.input,
|
|
|
|
},
|
|
|
|
})
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("sceneQueryBuilder.TestSceneQueryPath() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
mustInclude := indexesToIDs(sceneIDs, tt.mustInclude)
|
|
|
|
mustExclude := indexesToIDs(sceneIDs, tt.mustExclude)
|
2021-02-01 20:57:56 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
missing := intslice.IntExclude(mustInclude, got.IDs)
|
|
|
|
if len(missing) > 0 {
|
|
|
|
t.Errorf("SceneStore.TestSceneQueryPath() missing expected IDs: %v", missing)
|
|
|
|
}
|
2021-02-01 20:57:56 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
notExcluded := intslice.IntIntercect(mustExclude, got.IDs)
|
|
|
|
if len(notExcluded) > 0 {
|
|
|
|
t.Errorf("SceneStore.TestSceneQueryPath() expected IDs to be excluded: %v", notExcluded)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
2021-04-09 05:05:11 +00:00
|
|
|
func TestSceneQueryURL(t *testing.T) {
|
|
|
|
const sceneIdx = 1
|
2022-07-13 06:30:54 +00:00
|
|
|
sceneURL := getSceneStringValue(sceneIdx, urlField)
|
2021-04-09 05:05:11 +00:00
|
|
|
|
|
|
|
urlCriterion := models.StringCriterionInput{
|
2022-07-13 06:30:54 +00:00
|
|
|
Value: sceneURL,
|
2021-04-09 05:05:11 +00:00
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
}
|
|
|
|
|
|
|
|
filter := models.SceneFilterType{
|
|
|
|
URL: &urlCriterion,
|
|
|
|
}
|
|
|
|
|
|
|
|
verifyFn := func(s *models.Scene) {
|
|
|
|
t.Helper()
|
2022-07-13 06:30:54 +00:00
|
|
|
verifyString(t, s.URL, urlCriterion)
|
2021-04-09 05:05:11 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
verifySceneQuery(t, filter, verifyFn)
|
|
|
|
|
|
|
|
urlCriterion.Modifier = models.CriterionModifierNotEquals
|
|
|
|
verifySceneQuery(t, filter, verifyFn)
|
|
|
|
|
|
|
|
urlCriterion.Modifier = models.CriterionModifierMatchesRegex
|
|
|
|
urlCriterion.Value = "scene_.*1_URL"
|
|
|
|
verifySceneQuery(t, filter, verifyFn)
|
|
|
|
|
|
|
|
urlCriterion.Modifier = models.CriterionModifierNotMatchesRegex
|
|
|
|
verifySceneQuery(t, filter, verifyFn)
|
|
|
|
|
|
|
|
urlCriterion.Modifier = models.CriterionModifierIsNull
|
|
|
|
urlCriterion.Value = ""
|
|
|
|
verifySceneQuery(t, filter, verifyFn)
|
|
|
|
|
|
|
|
urlCriterion.Modifier = models.CriterionModifierNotNull
|
|
|
|
verifySceneQuery(t, filter, verifyFn)
|
|
|
|
}
|
|
|
|
|
2021-03-02 00:27:36 +00:00
|
|
|
func TestSceneQueryPathOr(t *testing.T) {
|
|
|
|
const scene1Idx = 1
|
|
|
|
const scene2Idx = 2
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
scene1Path := getFilePath(folderIdxWithSceneFiles, getSceneBasename(scene1Idx))
|
|
|
|
scene2Path := getFilePath(folderIdxWithSceneFiles, getSceneBasename(scene2Idx))
|
2021-03-02 00:27:36 +00:00
|
|
|
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Path: &models.StringCriterionInput{
|
|
|
|
Value: scene1Path,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
Or: &models.SceneFilterType{
|
|
|
|
Path: &models.StringCriterionInput{
|
|
|
|
Value: scene2Path,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-03-02 00:27:36 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-03-02 00:27:36 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
if !assert.Len(t, scenes, 2) {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
assert.Equal(t, scene1Path, scenes[0].Path())
|
|
|
|
assert.Equal(t, scene2Path, scenes[1].Path())
|
2021-03-02 00:27:36 +00:00
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryPathAndRating(t *testing.T) {
|
|
|
|
const sceneIdx = 1
|
2022-07-13 06:30:54 +00:00
|
|
|
scenePath := getFilePath(folderIdxWithSceneFiles, getSceneBasename(sceneIdx))
|
|
|
|
sceneRating := int(getRating(sceneIdx).Int64)
|
2021-03-02 00:27:36 +00:00
|
|
|
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Path: &models.StringCriterionInput{
|
|
|
|
Value: scenePath,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
And: &models.SceneFilterType{
|
|
|
|
Rating: &models.IntCriterionInput{
|
2022-07-13 06:30:54 +00:00
|
|
|
Value: sceneRating,
|
2021-03-02 00:27:36 +00:00
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-03-02 00:27:36 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-03-02 00:27:36 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
if !assert.Len(t, scenes, 1) {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
assert.Equal(t, scenePath, scenes[0].Path())
|
|
|
|
assert.Equal(t, sceneRating, *scenes[0].Rating)
|
2021-03-02 00:27:36 +00:00
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryPathNotRating(t *testing.T) {
|
|
|
|
const sceneIdx = 1
|
|
|
|
|
|
|
|
sceneRating := getRating(sceneIdx)
|
|
|
|
|
|
|
|
pathCriterion := models.StringCriterionInput{
|
|
|
|
Value: "scene_.*1_Path",
|
|
|
|
Modifier: models.CriterionModifierMatchesRegex,
|
|
|
|
}
|
|
|
|
|
|
|
|
ratingCriterion := models.IntCriterionInput{
|
|
|
|
Value: int(sceneRating.Int64),
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
}
|
|
|
|
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Path: &pathCriterion,
|
|
|
|
Not: &models.SceneFilterType{
|
|
|
|
Rating: &ratingCriterion,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-03-02 00:27:36 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-03-02 00:27:36 +00:00
|
|
|
|
|
|
|
for _, scene := range scenes {
|
2022-07-13 06:30:54 +00:00
|
|
|
verifyString(t, scene.Path(), pathCriterion)
|
2021-03-02 00:27:36 +00:00
|
|
|
ratingCriterion.Modifier = models.CriterionModifierNotEquals
|
2022-07-13 06:30:54 +00:00
|
|
|
verifyIntPtr(t, scene.Rating, ratingCriterion)
|
2021-03-02 00:27:36 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneIllegalQuery(t *testing.T) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
|
|
|
|
const sceneIdx = 1
|
|
|
|
subFilter := models.SceneFilterType{
|
|
|
|
Path: &models.StringCriterionInput{
|
|
|
|
Value: getSceneStringValue(sceneIdx, "Path"),
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
sceneFilter := &models.SceneFilterType{
|
|
|
|
And: &subFilter,
|
|
|
|
Or: &subFilter,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-03-02 00:27:36 +00:00
|
|
|
|
2021-10-25 00:40:13 +00:00
|
|
|
queryOptions := models.SceneQueryOptions{
|
|
|
|
SceneFilter: sceneFilter,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
_, err := sqb.Query(ctx, queryOptions)
|
2021-03-02 00:27:36 +00:00
|
|
|
assert.NotNil(err)
|
|
|
|
|
|
|
|
sceneFilter.Or = nil
|
|
|
|
sceneFilter.Not = &subFilter
|
2022-05-19 07:49:32 +00:00
|
|
|
_, err = sqb.Query(ctx, queryOptions)
|
2021-03-02 00:27:36 +00:00
|
|
|
assert.NotNil(err)
|
|
|
|
|
|
|
|
sceneFilter.And = nil
|
|
|
|
sceneFilter.Or = &subFilter
|
2022-05-19 07:49:32 +00:00
|
|
|
_, err = sqb.Query(ctx, queryOptions)
|
2021-03-02 00:27:36 +00:00
|
|
|
assert.NotNil(err)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-04-09 05:05:11 +00:00
|
|
|
func verifySceneQuery(t *testing.T, filter models.SceneFilterType, verifyFn func(s *models.Scene)) {
|
2022-07-13 06:30:54 +00:00
|
|
|
t.Helper()
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2021-04-09 05:05:11 +00:00
|
|
|
t.Helper()
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-04-09 05:05:11 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &filter, nil)
|
2021-04-09 05:05:11 +00:00
|
|
|
|
|
|
|
// assume it should find at least one
|
|
|
|
assert.Greater(t, len(scenes), 0)
|
|
|
|
|
|
|
|
for _, scene := range scenes {
|
|
|
|
verifyFn(scene)
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
func verifyScenesPath(t *testing.T, pathCriterion models.StringCriterionInput) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Path: &pathCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
for _, scene := range scenes {
|
2022-07-13 06:30:54 +00:00
|
|
|
verifyString(t, scene.Path(), pathCriterion)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyNullString(t *testing.T, value sql.NullString, criterion models.StringCriterionInput) {
|
|
|
|
t.Helper()
|
|
|
|
assert := assert.New(t)
|
|
|
|
if criterion.Modifier == models.CriterionModifierIsNull {
|
2021-04-09 05:05:11 +00:00
|
|
|
if value.Valid && value.String == "" {
|
|
|
|
// correct
|
|
|
|
return
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
assert.False(value.Valid, "expect is null values to be null")
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotNull {
|
|
|
|
assert.True(value.Valid, "expect is null values to be null")
|
2021-04-09 05:05:11 +00:00
|
|
|
assert.Greater(len(value.String), 0)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierEquals {
|
|
|
|
assert.Equal(criterion.Value, value.String)
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotEquals {
|
|
|
|
assert.NotEqual(criterion.Value, value.String)
|
|
|
|
}
|
2021-03-16 00:13:14 +00:00
|
|
|
if criterion.Modifier == models.CriterionModifierMatchesRegex {
|
|
|
|
assert.True(value.Valid)
|
|
|
|
assert.Regexp(regexp.MustCompile(criterion.Value), value)
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotMatchesRegex {
|
|
|
|
if !value.Valid {
|
|
|
|
// correct
|
|
|
|
return
|
|
|
|
}
|
|
|
|
assert.NotRegexp(regexp.MustCompile(criterion.Value), value)
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func verifyStringPtr(t *testing.T, value *string, criterion models.StringCriterionInput) {
|
2021-01-18 01:23:20 +00:00
|
|
|
t.Helper()
|
|
|
|
assert := assert.New(t)
|
2022-07-13 06:30:54 +00:00
|
|
|
if criterion.Modifier == models.CriterionModifierIsNull {
|
|
|
|
if value != nil && *value == "" {
|
|
|
|
// correct
|
|
|
|
return
|
|
|
|
}
|
|
|
|
assert.Nil(value, "expect is null values to be null")
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotNull {
|
|
|
|
assert.NotNil(value, "expect is null values to be null")
|
|
|
|
assert.Greater(len(*value), 0)
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
if criterion.Modifier == models.CriterionModifierEquals {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.Equal(criterion.Value, *value)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotEquals {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.NotEqual(criterion.Value, *value)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
2021-02-01 20:57:56 +00:00
|
|
|
if criterion.Modifier == models.CriterionModifierMatchesRegex {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.NotNil(value)
|
|
|
|
assert.Regexp(regexp.MustCompile(criterion.Value), *value)
|
2021-02-01 20:57:56 +00:00
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotMatchesRegex {
|
2022-07-13 06:30:54 +00:00
|
|
|
if value == nil {
|
|
|
|
// correct
|
|
|
|
return
|
|
|
|
}
|
|
|
|
assert.NotRegexp(regexp.MustCompile(criterion.Value), value)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyString(t *testing.T, value string, criterion models.StringCriterionInput) {
|
|
|
|
t.Helper()
|
|
|
|
assert := assert.New(t)
|
|
|
|
switch criterion.Modifier {
|
|
|
|
case models.CriterionModifierEquals:
|
|
|
|
assert.Equal(criterion.Value, value)
|
|
|
|
case models.CriterionModifierNotEquals:
|
|
|
|
assert.NotEqual(criterion.Value, value)
|
|
|
|
case models.CriterionModifierMatchesRegex:
|
|
|
|
assert.Regexp(regexp.MustCompile(criterion.Value), value)
|
|
|
|
case models.CriterionModifierNotMatchesRegex:
|
2021-02-01 20:57:56 +00:00
|
|
|
assert.NotRegexp(regexp.MustCompile(criterion.Value), value)
|
2022-07-13 06:30:54 +00:00
|
|
|
case models.CriterionModifierIsNull:
|
|
|
|
assert.Equal("", value)
|
|
|
|
case models.CriterionModifierNotNull:
|
|
|
|
assert.NotEqual("", value)
|
2021-02-01 20:57:56 +00:00
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryRating(t *testing.T) {
|
|
|
|
const rating = 3
|
|
|
|
ratingCriterion := models.IntCriterionInput{
|
|
|
|
Value: rating,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
}
|
|
|
|
|
|
|
|
verifyScenesRating(t, ratingCriterion)
|
|
|
|
|
|
|
|
ratingCriterion.Modifier = models.CriterionModifierNotEquals
|
|
|
|
verifyScenesRating(t, ratingCriterion)
|
|
|
|
|
|
|
|
ratingCriterion.Modifier = models.CriterionModifierGreaterThan
|
|
|
|
verifyScenesRating(t, ratingCriterion)
|
|
|
|
|
|
|
|
ratingCriterion.Modifier = models.CriterionModifierLessThan
|
|
|
|
verifyScenesRating(t, ratingCriterion)
|
|
|
|
|
|
|
|
ratingCriterion.Modifier = models.CriterionModifierIsNull
|
|
|
|
verifyScenesRating(t, ratingCriterion)
|
|
|
|
|
|
|
|
ratingCriterion.Modifier = models.CriterionModifierNotNull
|
|
|
|
verifyScenesRating(t, ratingCriterion)
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyScenesRating(t *testing.T, ratingCriterion models.IntCriterionInput) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Rating: &ratingCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
for _, scene := range scenes {
|
2022-07-13 06:30:54 +00:00
|
|
|
verifyIntPtr(t, scene.Rating, ratingCriterion)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyInt64(t *testing.T, value sql.NullInt64, criterion models.IntCriterionInput) {
|
|
|
|
t.Helper()
|
|
|
|
assert := assert.New(t)
|
|
|
|
if criterion.Modifier == models.CriterionModifierIsNull {
|
|
|
|
assert.False(value.Valid, "expect is null values to be null")
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotNull {
|
|
|
|
assert.True(value.Valid, "expect is null values to be null")
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierEquals {
|
|
|
|
assert.Equal(int64(criterion.Value), value.Int64)
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotEquals {
|
|
|
|
assert.NotEqual(int64(criterion.Value), value.Int64)
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierGreaterThan {
|
|
|
|
assert.True(value.Int64 > int64(criterion.Value))
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierLessThan {
|
|
|
|
assert.True(value.Int64 < int64(criterion.Value))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func verifyIntPtr(t *testing.T, value *int, criterion models.IntCriterionInput) {
|
|
|
|
t.Helper()
|
|
|
|
assert := assert.New(t)
|
|
|
|
if criterion.Modifier == models.CriterionModifierIsNull {
|
|
|
|
assert.Nil(value, "expect is null values to be null")
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotNull {
|
|
|
|
assert.NotNil(value, "expect is null values to be null")
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierEquals {
|
|
|
|
assert.Equal(criterion.Value, *value)
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotEquals {
|
|
|
|
assert.NotEqual(criterion.Value, *value)
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierGreaterThan {
|
|
|
|
assert.True(*value > criterion.Value)
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierLessThan {
|
|
|
|
assert.True(*value < criterion.Value)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
func TestSceneQueryOCounter(t *testing.T) {
|
|
|
|
const oCounter = 1
|
|
|
|
oCounterCriterion := models.IntCriterionInput{
|
|
|
|
Value: oCounter,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
}
|
|
|
|
|
|
|
|
verifyScenesOCounter(t, oCounterCriterion)
|
|
|
|
|
|
|
|
oCounterCriterion.Modifier = models.CriterionModifierNotEquals
|
|
|
|
verifyScenesOCounter(t, oCounterCriterion)
|
|
|
|
|
|
|
|
oCounterCriterion.Modifier = models.CriterionModifierGreaterThan
|
|
|
|
verifyScenesOCounter(t, oCounterCriterion)
|
|
|
|
|
|
|
|
oCounterCriterion.Modifier = models.CriterionModifierLessThan
|
|
|
|
verifyScenesOCounter(t, oCounterCriterion)
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyScenesOCounter(t *testing.T, oCounterCriterion models.IntCriterionInput) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
OCounter: &oCounterCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
for _, scene := range scenes {
|
|
|
|
verifyInt(t, scene.OCounter, oCounterCriterion)
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyInt(t *testing.T, value int, criterion models.IntCriterionInput) {
|
|
|
|
t.Helper()
|
|
|
|
assert := assert.New(t)
|
|
|
|
if criterion.Modifier == models.CriterionModifierEquals {
|
|
|
|
assert.Equal(criterion.Value, value)
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotEquals {
|
|
|
|
assert.NotEqual(criterion.Value, value)
|
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierGreaterThan {
|
2021-04-16 06:06:35 +00:00
|
|
|
assert.Greater(value, criterion.Value)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierLessThan {
|
2021-04-16 06:06:35 +00:00
|
|
|
assert.Less(value, criterion.Value)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryDuration(t *testing.T) {
|
|
|
|
duration := 200.432
|
|
|
|
|
|
|
|
durationCriterion := models.IntCriterionInput{
|
|
|
|
Value: int(duration),
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
}
|
|
|
|
verifyScenesDuration(t, durationCriterion)
|
|
|
|
|
|
|
|
durationCriterion.Modifier = models.CriterionModifierNotEquals
|
|
|
|
verifyScenesDuration(t, durationCriterion)
|
|
|
|
|
|
|
|
durationCriterion.Modifier = models.CriterionModifierGreaterThan
|
|
|
|
verifyScenesDuration(t, durationCriterion)
|
|
|
|
|
|
|
|
durationCriterion.Modifier = models.CriterionModifierLessThan
|
|
|
|
verifyScenesDuration(t, durationCriterion)
|
|
|
|
|
|
|
|
durationCriterion.Modifier = models.CriterionModifierIsNull
|
|
|
|
verifyScenesDuration(t, durationCriterion)
|
|
|
|
|
|
|
|
durationCriterion.Modifier = models.CriterionModifierNotNull
|
|
|
|
verifyScenesDuration(t, durationCriterion)
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyScenesDuration(t *testing.T, durationCriterion models.IntCriterionInput) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Duration: &durationCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
for _, scene := range scenes {
|
2022-07-13 06:30:54 +00:00
|
|
|
duration := scene.Duration()
|
2021-01-18 01:23:20 +00:00
|
|
|
if durationCriterion.Modifier == models.CriterionModifierEquals {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.True(t, duration >= float64(durationCriterion.Value) && duration < float64(durationCriterion.Value+1))
|
2021-01-18 01:23:20 +00:00
|
|
|
} else if durationCriterion.Modifier == models.CriterionModifierNotEquals {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.True(t, duration < float64(durationCriterion.Value) || duration >= float64(durationCriterion.Value+1))
|
2021-01-18 01:23:20 +00:00
|
|
|
} else {
|
2022-07-13 06:30:54 +00:00
|
|
|
verifyFloat64(t, duration, durationCriterion)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func verifyFloat64(t *testing.T, value float64, criterion models.IntCriterionInput) {
|
2021-01-18 01:23:20 +00:00
|
|
|
assert := assert.New(t)
|
|
|
|
if criterion.Modifier == models.CriterionModifierEquals {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.Equal(float64(criterion.Value), value)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierNotEquals {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.NotEqual(float64(criterion.Value), value)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierGreaterThan {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.True(value > float64(criterion.Value))
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
if criterion.Modifier == models.CriterionModifierLessThan {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.True(value < float64(criterion.Value))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyFloat64Ptr(t *testing.T, value *float64, criterion models.IntCriterionInput) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
switch criterion.Modifier {
|
|
|
|
case models.CriterionModifierIsNull:
|
|
|
|
assert.Nil(value, "expect is null values to be null")
|
|
|
|
case models.CriterionModifierNotNull:
|
|
|
|
assert.NotNil(value, "expect is not null values to not be null")
|
|
|
|
case models.CriterionModifierEquals:
|
|
|
|
assert.EqualValues(float64(criterion.Value), value)
|
|
|
|
case models.CriterionModifierNotEquals:
|
|
|
|
assert.NotEqualValues(float64(criterion.Value), value)
|
|
|
|
case models.CriterionModifierGreaterThan:
|
|
|
|
assert.True(value != nil && *value > float64(criterion.Value))
|
|
|
|
case models.CriterionModifierLessThan:
|
|
|
|
assert.True(value != nil && *value < float64(criterion.Value))
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryResolution(t *testing.T) {
|
|
|
|
verifyScenesResolution(t, models.ResolutionEnumLow)
|
|
|
|
verifyScenesResolution(t, models.ResolutionEnumStandard)
|
|
|
|
verifyScenesResolution(t, models.ResolutionEnumStandardHd)
|
|
|
|
verifyScenesResolution(t, models.ResolutionEnumFullHd)
|
|
|
|
verifyScenesResolution(t, models.ResolutionEnumFourK)
|
|
|
|
verifyScenesResolution(t, models.ResolutionEnum("unknown"))
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyScenesResolution(t *testing.T, resolution models.ResolutionEnum) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
sceneFilter := models.SceneFilterType{
|
2021-08-02 03:22:39 +00:00
|
|
|
Resolution: &models.ResolutionCriterionInput{
|
|
|
|
Value: resolution,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
},
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
for _, scene := range scenes {
|
2022-07-13 06:30:54 +00:00
|
|
|
f := scene.PrimaryFile()
|
|
|
|
height := 0
|
|
|
|
if f != nil {
|
|
|
|
height = f.Height
|
|
|
|
}
|
|
|
|
verifySceneResolution(t, &height, resolution)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func verifySceneResolution(t *testing.T, height *int, resolution models.ResolutionEnum) {
|
|
|
|
if !resolution.IsValid() {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
assert := assert.New(t)
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.NotNil(height)
|
|
|
|
if t.Failed() {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
h := *height
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
switch resolution {
|
|
|
|
case models.ResolutionEnumLow:
|
|
|
|
assert.True(h < 480)
|
|
|
|
case models.ResolutionEnumStandard:
|
|
|
|
assert.True(h >= 480 && h < 720)
|
|
|
|
case models.ResolutionEnumStandardHd:
|
|
|
|
assert.True(h >= 720 && h < 1080)
|
|
|
|
case models.ResolutionEnumFullHd:
|
|
|
|
assert.True(h >= 1080 && h < 2160)
|
|
|
|
case models.ResolutionEnumFourK:
|
|
|
|
assert.True(h >= 2160)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-08-02 03:22:39 +00:00
|
|
|
func TestAllResolutionsHaveResolutionRange(t *testing.T) {
|
|
|
|
for _, resolution := range models.AllResolutionEnum {
|
|
|
|
assert.NotZero(t, resolution.GetMinResolution(), "Define resolution range for %s in extension_resolution.go", resolution)
|
|
|
|
assert.NotZero(t, resolution.GetMaxResolution(), "Define resolution range for %s in extension_resolution.go", resolution)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryResolutionModifiers(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
if err := withRollbackTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
qb := db.Scene
|
|
|
|
sceneNoResolution, _ := createScene(ctx, 0, 0)
|
|
|
|
firstScene540P, _ := createScene(ctx, 960, 540)
|
|
|
|
secondScene540P, _ := createScene(ctx, 1280, 719)
|
|
|
|
firstScene720P, _ := createScene(ctx, 1280, 720)
|
|
|
|
secondScene720P, _ := createScene(ctx, 1280, 721)
|
|
|
|
thirdScene720P, _ := createScene(ctx, 1920, 1079)
|
|
|
|
scene1080P, _ := createScene(ctx, 1920, 1080)
|
2022-05-19 07:49:32 +00:00
|
|
|
|
|
|
|
scenesEqualTo720P := queryScenes(ctx, t, qb, models.ResolutionEnumStandardHd, models.CriterionModifierEquals)
|
|
|
|
scenesNotEqualTo720P := queryScenes(ctx, t, qb, models.ResolutionEnumStandardHd, models.CriterionModifierNotEquals)
|
|
|
|
scenesGreaterThan720P := queryScenes(ctx, t, qb, models.ResolutionEnumStandardHd, models.CriterionModifierGreaterThan)
|
|
|
|
scenesLessThan720P := queryScenes(ctx, t, qb, models.ResolutionEnumStandardHd, models.CriterionModifierLessThan)
|
2021-08-02 03:22:39 +00:00
|
|
|
|
|
|
|
assert.Subset(t, scenesEqualTo720P, []*models.Scene{firstScene720P, secondScene720P, thirdScene720P})
|
|
|
|
assert.NotSubset(t, scenesEqualTo720P, []*models.Scene{sceneNoResolution, firstScene540P, secondScene540P, scene1080P})
|
|
|
|
|
|
|
|
assert.Subset(t, scenesNotEqualTo720P, []*models.Scene{sceneNoResolution, firstScene540P, secondScene540P, scene1080P})
|
|
|
|
assert.NotSubset(t, scenesNotEqualTo720P, []*models.Scene{firstScene720P, secondScene720P, thirdScene720P})
|
|
|
|
|
|
|
|
assert.Subset(t, scenesGreaterThan720P, []*models.Scene{scene1080P})
|
|
|
|
assert.NotSubset(t, scenesGreaterThan720P, []*models.Scene{sceneNoResolution, firstScene540P, secondScene540P, firstScene720P, secondScene720P, thirdScene720P})
|
|
|
|
|
|
|
|
assert.Subset(t, scenesLessThan720P, []*models.Scene{sceneNoResolution, firstScene540P, secondScene540P})
|
|
|
|
assert.NotSubset(t, scenesLessThan720P, []*models.Scene{scene1080P, firstScene720P, secondScene720P, thirdScene720P})
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}); err != nil {
|
|
|
|
t.Error(err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
func queryScenes(ctx context.Context, t *testing.T, queryBuilder models.SceneReaderWriter, resolution models.ResolutionEnum, modifier models.CriterionModifier) []*models.Scene {
|
2021-08-02 03:22:39 +00:00
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Resolution: &models.ResolutionCriterionInput{
|
|
|
|
Value: resolution,
|
|
|
|
Modifier: modifier,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
return queryScene(ctx, t, queryBuilder, &sceneFilter, nil)
|
2021-08-02 03:22:39 +00:00
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func createScene(ctx context.Context, width int, height int) (*models.Scene, error) {
|
2021-08-02 03:22:39 +00:00
|
|
|
name := fmt.Sprintf("TestSceneQueryResolutionModifiers %d %d", width, height)
|
2022-07-13 06:30:54 +00:00
|
|
|
|
|
|
|
sceneFile := &file.VideoFile{
|
|
|
|
BaseFile: &file.BaseFile{
|
|
|
|
Basename: name,
|
|
|
|
ParentFolderID: folderIDs[folderIdxWithSceneFiles],
|
2021-08-02 03:22:39 +00:00
|
|
|
},
|
2022-07-13 06:30:54 +00:00
|
|
|
Width: width,
|
|
|
|
Height: height,
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := db.File.Create(ctx, sceneFile); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
scene := &models.Scene{}
|
|
|
|
|
|
|
|
if err := db.Scene.Create(ctx, scene, []file.ID{sceneFile.ID}); err != nil {
|
|
|
|
return nil, err
|
2021-08-02 03:22:39 +00:00
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
return scene, nil
|
2021-08-02 03:22:39 +00:00
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
func TestSceneQueryHasMarkers(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
hasMarkers := "true"
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
HasMarkers: &hasMarkers,
|
|
|
|
}
|
|
|
|
|
2021-11-06 22:34:33 +00:00
|
|
|
q := getSceneStringValue(sceneIdxWithMarkers, titleField)
|
2021-01-18 01:23:20 +00:00
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
2021-11-06 22:34:33 +00:00
|
|
|
assert.Equal(t, sceneIDs[sceneIdxWithMarkers], scenes[0].ID)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
hasMarkers = "false"
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
findFilter.Q = nil
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.NotEqual(t, 0, len(scenes))
|
|
|
|
|
|
|
|
// ensure non of the ids equal the one with gallery
|
|
|
|
for _, scene := range scenes {
|
2021-11-06 22:34:33 +00:00
|
|
|
assert.NotEqual(t, sceneIDs[sceneIdxWithMarkers], scene.ID)
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryIsMissingGallery(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-02-01 20:56:54 +00:00
|
|
|
isMissing := "galleries"
|
2021-01-18 01:23:20 +00:00
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
IsMissing: &isMissing,
|
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithGallery, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
findFilter.Q = nil
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
// ensure non of the ids equal the one with gallery
|
|
|
|
for _, scene := range scenes {
|
|
|
|
assert.NotEqual(t, sceneIDs[sceneIdxWithGallery], scene.ID)
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryIsMissingStudio(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
isMissing := "studio"
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
IsMissing: &isMissing,
|
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithStudio, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
findFilter.Q = nil
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
// ensure non of the ids equal the one with studio
|
|
|
|
for _, scene := range scenes {
|
|
|
|
assert.NotEqual(t, sceneIDs[sceneIdxWithStudio], scene.ID)
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryIsMissingMovies(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
isMissing := "movie"
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
IsMissing: &isMissing,
|
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithMovie, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
findFilter.Q = nil
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
// ensure non of the ids equal the one with movies
|
|
|
|
for _, scene := range scenes {
|
|
|
|
assert.NotEqual(t, sceneIDs[sceneIdxWithMovie], scene.ID)
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryIsMissingPerformers(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
isMissing := "performers"
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
IsMissing: &isMissing,
|
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithPerformer, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
findFilter.Q = nil
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.True(t, len(scenes) > 0)
|
|
|
|
|
|
|
|
// ensure non of the ids equal the one with movies
|
|
|
|
for _, scene := range scenes {
|
|
|
|
assert.NotEqual(t, sceneIDs[sceneIdxWithPerformer], scene.ID)
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryIsMissingDate(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
isMissing := "date"
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
IsMissing: &isMissing,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-03-28 19:45:46 +00:00
|
|
|
// three in four scenes have no date
|
|
|
|
assert.Len(t, scenes, int(math.Ceil(float64(totalScenes)/4*3)))
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
// ensure date is null, empty or "0001-01-01"
|
|
|
|
for _, scene := range scenes {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.True(t, scene.Date == nil || scene.Date.Time == time.Time{})
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryIsMissingTags(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
isMissing := "tags"
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
IsMissing: &isMissing,
|
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithTwoTags, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
findFilter.Q = nil
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.True(t, len(scenes) > 0)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryIsMissingRating(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
isMissing := "rating"
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
IsMissing: &isMissing,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.True(t, len(scenes) > 0)
|
|
|
|
|
|
|
|
// ensure date is null, empty or "0001-01-01"
|
|
|
|
for _, scene := range scenes {
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.Nil(t, scene.Rating)
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryIsMissingPhash(t *testing.T) {
|
|
|
|
withTxn(func(ctx context.Context) error {
|
|
|
|
sqb := db.Scene
|
|
|
|
isMissing := "phash"
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
IsMissing: &isMissing,
|
|
|
|
}
|
|
|
|
|
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
|
|
|
|
|
|
|
if !assert.Len(t, scenes, 1) {
|
|
|
|
return nil
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.Equal(t, sceneIDs[sceneIdxMissingPhash], scenes[0].ID)
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryPerformers(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
performerCriterion := models.MultiCriterionInput{
|
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(performerIDs[performerIdxWithScene]),
|
|
|
|
strconv.Itoa(performerIDs[performerIdx1WithScene]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierIncludes,
|
|
|
|
}
|
|
|
|
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Performers: &performerCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 2)
|
|
|
|
|
|
|
|
// ensure ids are correct
|
|
|
|
for _, scene := range scenes {
|
|
|
|
assert.True(t, scene.ID == sceneIDs[sceneIdxWithPerformer] || scene.ID == sceneIDs[sceneIdxWithTwoPerformers])
|
|
|
|
}
|
|
|
|
|
|
|
|
performerCriterion = models.MultiCriterionInput{
|
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(performerIDs[performerIdx1WithScene]),
|
|
|
|
strconv.Itoa(performerIDs[performerIdx2WithScene]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierIncludesAll,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdxWithTwoPerformers], scenes[0].ID)
|
|
|
|
|
|
|
|
performerCriterion = models.MultiCriterionInput{
|
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(performerIDs[performerIdx1WithScene]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierExcludes,
|
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithTwoPerformers, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryTags(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
Tag hierarchy (#1519)
* Add migration script for tag relations table
* Expand hierarchical filter features
Expand the features of the hierarchical multi input filter with support
for using a relations table, which only has parent_id and child_id
columns, and support adding an additional intermediate table to join on,
for example for scenes and tags which are linked by the scenes_tags
table as well.
* Add hierarchical filtering for tags
* Add hierarchical tags support to scene markers
Refactor filtering of scene markers to filterBuilder and in the process
add support for hierarchical tags as well.
* List parent and child tags on tag details page
* Support setting parent and child tags
Add support for setting parent and child tags during tag creation and
tag updates.
* Validate no loops are created in tags hierarchy
* Update tag merging to support tag hierarcy
* Add unit tests for tags.EnsureUniqueHierarchy
* Fix applying recursive to with clause
The SQL `RECURSIVE` of a `WITH` clause only needs to be applied once,
imediately after the `WITH`. So this fixes the query building to do just
that, automatically applying the `RECURSIVE` keyword when any added with
clause is added as recursive.
* Rename hierarchical root id column
* Rewrite hierarchical filtering for performance
Completely rewrite the hierarchical filtering to optimize for
performance. Doing the recursive query in combination with a complex
query seems to break SQLite optimizing some things which means that the
recursive part might be 2,5 second slower than adding a static
`VALUES()` list. This is mostly noticable in case of the tag hierarchy
where setting an exclusion with any depth (or depth: all) being applied
has this performance impact of 2,5 second. "Include" also suffered this
issue, but some rewritten query by joining in the *_tags table in one
pass and applying a `WHERE x IS NOT NULL` filter did seem to optimize
that case. But that optimization isn't applied to the `IS NULL` filter
of "exclude". Running a simple query beforehand to get all (recursive)
items and then applying them to the query doesn't have this performance
penalty.
* Remove UI references to child studios and tags
* Add parents to tag export
* Support importing of parent relationship for tags
* Assign stable ids to parent / child badges
* Silence Apollo warning on parents/children fields on tags
Silence warning triggered by Apollo GraphQL by explicitly instructing it
to use the incoming parents/children values. By default it already does
this, but it triggers a warning as it might be unintended that it uses
the incoming values (instead of for example merging both arrays).
Setting merge to false still applies the same behaviour (use only
incoming values) but silences the warning as it's explicitly configured
to work like this.
* Rework detecting unique tag hierarchy
Completely rework the unique tag hierarchy to detect invalid hierarchies
for which a tag is "added in the middle". So when there are tags A <- B
and A <- C, you could previously edit tag B and add tag C as a sub tag
without it being noticed as parent A being applied twice (to tag C).
While afterwards saving tag C would fail as tag A was applied as parent
twice. The updated code correctly detects this scenario as well.
Furthermore the error messaging has been reworked a bit and the message
now mentions both the direct parent / sub tag as well as the tag which
would results in the error. So in aboves example it would now show the
message that tag C can't be applied because tag A already is a parent.
* Update relations on cached tags when needed
Update the relations on cached tags when a tag is created / updated /
deleted so these always reflect the correct state. Otherwise (re)opening
a tag might still show the old relations untill the page is fully
reloaded or the list is navigated. But this obviously is strange when
you for example have tag A, create or update tag B to have a relation to
tag A, and from tags B page click through to tag A and it doesn't show
that it is linked to tag B.
2021-09-09 04:58:43 +00:00
|
|
|
tagCriterion := models.HierarchicalMultiCriterionInput{
|
2021-01-18 01:23:20 +00:00
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(tagIDs[tagIdxWithScene]),
|
|
|
|
strconv.Itoa(tagIDs[tagIdx1WithScene]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierIncludes,
|
|
|
|
}
|
|
|
|
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Tags: &tagCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
assert.Len(t, scenes, 2)
|
|
|
|
|
|
|
|
// ensure ids are correct
|
|
|
|
for _, scene := range scenes {
|
|
|
|
assert.True(t, scene.ID == sceneIDs[sceneIdxWithTag] || scene.ID == sceneIDs[sceneIdxWithTwoTags])
|
|
|
|
}
|
|
|
|
|
Tag hierarchy (#1519)
* Add migration script for tag relations table
* Expand hierarchical filter features
Expand the features of the hierarchical multi input filter with support
for using a relations table, which only has parent_id and child_id
columns, and support adding an additional intermediate table to join on,
for example for scenes and tags which are linked by the scenes_tags
table as well.
* Add hierarchical filtering for tags
* Add hierarchical tags support to scene markers
Refactor filtering of scene markers to filterBuilder and in the process
add support for hierarchical tags as well.
* List parent and child tags on tag details page
* Support setting parent and child tags
Add support for setting parent and child tags during tag creation and
tag updates.
* Validate no loops are created in tags hierarchy
* Update tag merging to support tag hierarcy
* Add unit tests for tags.EnsureUniqueHierarchy
* Fix applying recursive to with clause
The SQL `RECURSIVE` of a `WITH` clause only needs to be applied once,
imediately after the `WITH`. So this fixes the query building to do just
that, automatically applying the `RECURSIVE` keyword when any added with
clause is added as recursive.
* Rename hierarchical root id column
* Rewrite hierarchical filtering for performance
Completely rewrite the hierarchical filtering to optimize for
performance. Doing the recursive query in combination with a complex
query seems to break SQLite optimizing some things which means that the
recursive part might be 2,5 second slower than adding a static
`VALUES()` list. This is mostly noticable in case of the tag hierarchy
where setting an exclusion with any depth (or depth: all) being applied
has this performance impact of 2,5 second. "Include" also suffered this
issue, but some rewritten query by joining in the *_tags table in one
pass and applying a `WHERE x IS NOT NULL` filter did seem to optimize
that case. But that optimization isn't applied to the `IS NULL` filter
of "exclude". Running a simple query beforehand to get all (recursive)
items and then applying them to the query doesn't have this performance
penalty.
* Remove UI references to child studios and tags
* Add parents to tag export
* Support importing of parent relationship for tags
* Assign stable ids to parent / child badges
* Silence Apollo warning on parents/children fields on tags
Silence warning triggered by Apollo GraphQL by explicitly instructing it
to use the incoming parents/children values. By default it already does
this, but it triggers a warning as it might be unintended that it uses
the incoming values (instead of for example merging both arrays).
Setting merge to false still applies the same behaviour (use only
incoming values) but silences the warning as it's explicitly configured
to work like this.
* Rework detecting unique tag hierarchy
Completely rework the unique tag hierarchy to detect invalid hierarchies
for which a tag is "added in the middle". So when there are tags A <- B
and A <- C, you could previously edit tag B and add tag C as a sub tag
without it being noticed as parent A being applied twice (to tag C).
While afterwards saving tag C would fail as tag A was applied as parent
twice. The updated code correctly detects this scenario as well.
Furthermore the error messaging has been reworked a bit and the message
now mentions both the direct parent / sub tag as well as the tag which
would results in the error. So in aboves example it would now show the
message that tag C can't be applied because tag A already is a parent.
* Update relations on cached tags when needed
Update the relations on cached tags when a tag is created / updated /
deleted so these always reflect the correct state. Otherwise (re)opening
a tag might still show the old relations untill the page is fully
reloaded or the list is navigated. But this obviously is strange when
you for example have tag A, create or update tag B to have a relation to
tag A, and from tags B page click through to tag A and it doesn't show
that it is linked to tag B.
2021-09-09 04:58:43 +00:00
|
|
|
tagCriterion = models.HierarchicalMultiCriterionInput{
|
2021-01-18 01:23:20 +00:00
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(tagIDs[tagIdx1WithScene]),
|
|
|
|
strconv.Itoa(tagIDs[tagIdx2WithScene]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierIncludesAll,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdxWithTwoTags], scenes[0].ID)
|
|
|
|
|
Tag hierarchy (#1519)
* Add migration script for tag relations table
* Expand hierarchical filter features
Expand the features of the hierarchical multi input filter with support
for using a relations table, which only has parent_id and child_id
columns, and support adding an additional intermediate table to join on,
for example for scenes and tags which are linked by the scenes_tags
table as well.
* Add hierarchical filtering for tags
* Add hierarchical tags support to scene markers
Refactor filtering of scene markers to filterBuilder and in the process
add support for hierarchical tags as well.
* List parent and child tags on tag details page
* Support setting parent and child tags
Add support for setting parent and child tags during tag creation and
tag updates.
* Validate no loops are created in tags hierarchy
* Update tag merging to support tag hierarcy
* Add unit tests for tags.EnsureUniqueHierarchy
* Fix applying recursive to with clause
The SQL `RECURSIVE` of a `WITH` clause only needs to be applied once,
imediately after the `WITH`. So this fixes the query building to do just
that, automatically applying the `RECURSIVE` keyword when any added with
clause is added as recursive.
* Rename hierarchical root id column
* Rewrite hierarchical filtering for performance
Completely rewrite the hierarchical filtering to optimize for
performance. Doing the recursive query in combination with a complex
query seems to break SQLite optimizing some things which means that the
recursive part might be 2,5 second slower than adding a static
`VALUES()` list. This is mostly noticable in case of the tag hierarchy
where setting an exclusion with any depth (or depth: all) being applied
has this performance impact of 2,5 second. "Include" also suffered this
issue, but some rewritten query by joining in the *_tags table in one
pass and applying a `WHERE x IS NOT NULL` filter did seem to optimize
that case. But that optimization isn't applied to the `IS NULL` filter
of "exclude". Running a simple query beforehand to get all (recursive)
items and then applying them to the query doesn't have this performance
penalty.
* Remove UI references to child studios and tags
* Add parents to tag export
* Support importing of parent relationship for tags
* Assign stable ids to parent / child badges
* Silence Apollo warning on parents/children fields on tags
Silence warning triggered by Apollo GraphQL by explicitly instructing it
to use the incoming parents/children values. By default it already does
this, but it triggers a warning as it might be unintended that it uses
the incoming values (instead of for example merging both arrays).
Setting merge to false still applies the same behaviour (use only
incoming values) but silences the warning as it's explicitly configured
to work like this.
* Rework detecting unique tag hierarchy
Completely rework the unique tag hierarchy to detect invalid hierarchies
for which a tag is "added in the middle". So when there are tags A <- B
and A <- C, you could previously edit tag B and add tag C as a sub tag
without it being noticed as parent A being applied twice (to tag C).
While afterwards saving tag C would fail as tag A was applied as parent
twice. The updated code correctly detects this scenario as well.
Furthermore the error messaging has been reworked a bit and the message
now mentions both the direct parent / sub tag as well as the tag which
would results in the error. So in aboves example it would now show the
message that tag C can't be applied because tag A already is a parent.
* Update relations on cached tags when needed
Update the relations on cached tags when a tag is created / updated /
deleted so these always reflect the correct state. Otherwise (re)opening
a tag might still show the old relations untill the page is fully
reloaded or the list is navigated. But this obviously is strange when
you for example have tag A, create or update tag B to have a relation to
tag A, and from tags B page click through to tag A and it doesn't show
that it is linked to tag B.
2021-09-09 04:58:43 +00:00
|
|
|
tagCriterion = models.HierarchicalMultiCriterionInput{
|
2021-01-18 01:23:20 +00:00
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(tagIDs[tagIdx1WithScene]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierExcludes,
|
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithTwoTags, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-03-10 01:25:51 +00:00
|
|
|
func TestSceneQueryPerformerTags(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
Tag hierarchy (#1519)
* Add migration script for tag relations table
* Expand hierarchical filter features
Expand the features of the hierarchical multi input filter with support
for using a relations table, which only has parent_id and child_id
columns, and support adding an additional intermediate table to join on,
for example for scenes and tags which are linked by the scenes_tags
table as well.
* Add hierarchical filtering for tags
* Add hierarchical tags support to scene markers
Refactor filtering of scene markers to filterBuilder and in the process
add support for hierarchical tags as well.
* List parent and child tags on tag details page
* Support setting parent and child tags
Add support for setting parent and child tags during tag creation and
tag updates.
* Validate no loops are created in tags hierarchy
* Update tag merging to support tag hierarcy
* Add unit tests for tags.EnsureUniqueHierarchy
* Fix applying recursive to with clause
The SQL `RECURSIVE` of a `WITH` clause only needs to be applied once,
imediately after the `WITH`. So this fixes the query building to do just
that, automatically applying the `RECURSIVE` keyword when any added with
clause is added as recursive.
* Rename hierarchical root id column
* Rewrite hierarchical filtering for performance
Completely rewrite the hierarchical filtering to optimize for
performance. Doing the recursive query in combination with a complex
query seems to break SQLite optimizing some things which means that the
recursive part might be 2,5 second slower than adding a static
`VALUES()` list. This is mostly noticable in case of the tag hierarchy
where setting an exclusion with any depth (or depth: all) being applied
has this performance impact of 2,5 second. "Include" also suffered this
issue, but some rewritten query by joining in the *_tags table in one
pass and applying a `WHERE x IS NOT NULL` filter did seem to optimize
that case. But that optimization isn't applied to the `IS NULL` filter
of "exclude". Running a simple query beforehand to get all (recursive)
items and then applying them to the query doesn't have this performance
penalty.
* Remove UI references to child studios and tags
* Add parents to tag export
* Support importing of parent relationship for tags
* Assign stable ids to parent / child badges
* Silence Apollo warning on parents/children fields on tags
Silence warning triggered by Apollo GraphQL by explicitly instructing it
to use the incoming parents/children values. By default it already does
this, but it triggers a warning as it might be unintended that it uses
the incoming values (instead of for example merging both arrays).
Setting merge to false still applies the same behaviour (use only
incoming values) but silences the warning as it's explicitly configured
to work like this.
* Rework detecting unique tag hierarchy
Completely rework the unique tag hierarchy to detect invalid hierarchies
for which a tag is "added in the middle". So when there are tags A <- B
and A <- C, you could previously edit tag B and add tag C as a sub tag
without it being noticed as parent A being applied twice (to tag C).
While afterwards saving tag C would fail as tag A was applied as parent
twice. The updated code correctly detects this scenario as well.
Furthermore the error messaging has been reworked a bit and the message
now mentions both the direct parent / sub tag as well as the tag which
would results in the error. So in aboves example it would now show the
message that tag C can't be applied because tag A already is a parent.
* Update relations on cached tags when needed
Update the relations on cached tags when a tag is created / updated /
deleted so these always reflect the correct state. Otherwise (re)opening
a tag might still show the old relations untill the page is fully
reloaded or the list is navigated. But this obviously is strange when
you for example have tag A, create or update tag B to have a relation to
tag A, and from tags B page click through to tag A and it doesn't show
that it is linked to tag B.
2021-09-09 04:58:43 +00:00
|
|
|
tagCriterion := models.HierarchicalMultiCriterionInput{
|
2021-03-10 01:25:51 +00:00
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(tagIDs[tagIdxWithPerformer]),
|
|
|
|
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierIncludes,
|
|
|
|
}
|
|
|
|
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
PerformerTags: &tagCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-03-10 01:25:51 +00:00
|
|
|
assert.Len(t, scenes, 2)
|
|
|
|
|
|
|
|
// ensure ids are correct
|
|
|
|
for _, scene := range scenes {
|
|
|
|
assert.True(t, scene.ID == sceneIDs[sceneIdxWithPerformerTag] || scene.ID == sceneIDs[sceneIdxWithPerformerTwoTags])
|
|
|
|
}
|
|
|
|
|
Tag hierarchy (#1519)
* Add migration script for tag relations table
* Expand hierarchical filter features
Expand the features of the hierarchical multi input filter with support
for using a relations table, which only has parent_id and child_id
columns, and support adding an additional intermediate table to join on,
for example for scenes and tags which are linked by the scenes_tags
table as well.
* Add hierarchical filtering for tags
* Add hierarchical tags support to scene markers
Refactor filtering of scene markers to filterBuilder and in the process
add support for hierarchical tags as well.
* List parent and child tags on tag details page
* Support setting parent and child tags
Add support for setting parent and child tags during tag creation and
tag updates.
* Validate no loops are created in tags hierarchy
* Update tag merging to support tag hierarcy
* Add unit tests for tags.EnsureUniqueHierarchy
* Fix applying recursive to with clause
The SQL `RECURSIVE` of a `WITH` clause only needs to be applied once,
imediately after the `WITH`. So this fixes the query building to do just
that, automatically applying the `RECURSIVE` keyword when any added with
clause is added as recursive.
* Rename hierarchical root id column
* Rewrite hierarchical filtering for performance
Completely rewrite the hierarchical filtering to optimize for
performance. Doing the recursive query in combination with a complex
query seems to break SQLite optimizing some things which means that the
recursive part might be 2,5 second slower than adding a static
`VALUES()` list. This is mostly noticable in case of the tag hierarchy
where setting an exclusion with any depth (or depth: all) being applied
has this performance impact of 2,5 second. "Include" also suffered this
issue, but some rewritten query by joining in the *_tags table in one
pass and applying a `WHERE x IS NOT NULL` filter did seem to optimize
that case. But that optimization isn't applied to the `IS NULL` filter
of "exclude". Running a simple query beforehand to get all (recursive)
items and then applying them to the query doesn't have this performance
penalty.
* Remove UI references to child studios and tags
* Add parents to tag export
* Support importing of parent relationship for tags
* Assign stable ids to parent / child badges
* Silence Apollo warning on parents/children fields on tags
Silence warning triggered by Apollo GraphQL by explicitly instructing it
to use the incoming parents/children values. By default it already does
this, but it triggers a warning as it might be unintended that it uses
the incoming values (instead of for example merging both arrays).
Setting merge to false still applies the same behaviour (use only
incoming values) but silences the warning as it's explicitly configured
to work like this.
* Rework detecting unique tag hierarchy
Completely rework the unique tag hierarchy to detect invalid hierarchies
for which a tag is "added in the middle". So when there are tags A <- B
and A <- C, you could previously edit tag B and add tag C as a sub tag
without it being noticed as parent A being applied twice (to tag C).
While afterwards saving tag C would fail as tag A was applied as parent
twice. The updated code correctly detects this scenario as well.
Furthermore the error messaging has been reworked a bit and the message
now mentions both the direct parent / sub tag as well as the tag which
would results in the error. So in aboves example it would now show the
message that tag C can't be applied because tag A already is a parent.
* Update relations on cached tags when needed
Update the relations on cached tags when a tag is created / updated /
deleted so these always reflect the correct state. Otherwise (re)opening
a tag might still show the old relations untill the page is fully
reloaded or the list is navigated. But this obviously is strange when
you for example have tag A, create or update tag B to have a relation to
tag A, and from tags B page click through to tag A and it doesn't show
that it is linked to tag B.
2021-09-09 04:58:43 +00:00
|
|
|
tagCriterion = models.HierarchicalMultiCriterionInput{
|
2021-03-10 01:25:51 +00:00
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
|
|
|
|
strconv.Itoa(tagIDs[tagIdx2WithPerformer]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierIncludesAll,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-03-10 01:25:51 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdxWithPerformerTwoTags], scenes[0].ID)
|
|
|
|
|
Tag hierarchy (#1519)
* Add migration script for tag relations table
* Expand hierarchical filter features
Expand the features of the hierarchical multi input filter with support
for using a relations table, which only has parent_id and child_id
columns, and support adding an additional intermediate table to join on,
for example for scenes and tags which are linked by the scenes_tags
table as well.
* Add hierarchical filtering for tags
* Add hierarchical tags support to scene markers
Refactor filtering of scene markers to filterBuilder and in the process
add support for hierarchical tags as well.
* List parent and child tags on tag details page
* Support setting parent and child tags
Add support for setting parent and child tags during tag creation and
tag updates.
* Validate no loops are created in tags hierarchy
* Update tag merging to support tag hierarcy
* Add unit tests for tags.EnsureUniqueHierarchy
* Fix applying recursive to with clause
The SQL `RECURSIVE` of a `WITH` clause only needs to be applied once,
imediately after the `WITH`. So this fixes the query building to do just
that, automatically applying the `RECURSIVE` keyword when any added with
clause is added as recursive.
* Rename hierarchical root id column
* Rewrite hierarchical filtering for performance
Completely rewrite the hierarchical filtering to optimize for
performance. Doing the recursive query in combination with a complex
query seems to break SQLite optimizing some things which means that the
recursive part might be 2,5 second slower than adding a static
`VALUES()` list. This is mostly noticable in case of the tag hierarchy
where setting an exclusion with any depth (or depth: all) being applied
has this performance impact of 2,5 second. "Include" also suffered this
issue, but some rewritten query by joining in the *_tags table in one
pass and applying a `WHERE x IS NOT NULL` filter did seem to optimize
that case. But that optimization isn't applied to the `IS NULL` filter
of "exclude". Running a simple query beforehand to get all (recursive)
items and then applying them to the query doesn't have this performance
penalty.
* Remove UI references to child studios and tags
* Add parents to tag export
* Support importing of parent relationship for tags
* Assign stable ids to parent / child badges
* Silence Apollo warning on parents/children fields on tags
Silence warning triggered by Apollo GraphQL by explicitly instructing it
to use the incoming parents/children values. By default it already does
this, but it triggers a warning as it might be unintended that it uses
the incoming values (instead of for example merging both arrays).
Setting merge to false still applies the same behaviour (use only
incoming values) but silences the warning as it's explicitly configured
to work like this.
* Rework detecting unique tag hierarchy
Completely rework the unique tag hierarchy to detect invalid hierarchies
for which a tag is "added in the middle". So when there are tags A <- B
and A <- C, you could previously edit tag B and add tag C as a sub tag
without it being noticed as parent A being applied twice (to tag C).
While afterwards saving tag C would fail as tag A was applied as parent
twice. The updated code correctly detects this scenario as well.
Furthermore the error messaging has been reworked a bit and the message
now mentions both the direct parent / sub tag as well as the tag which
would results in the error. So in aboves example it would now show the
message that tag C can't be applied because tag A already is a parent.
* Update relations on cached tags when needed
Update the relations on cached tags when a tag is created / updated /
deleted so these always reflect the correct state. Otherwise (re)opening
a tag might still show the old relations untill the page is fully
reloaded or the list is navigated. But this obviously is strange when
you for example have tag A, create or update tag B to have a relation to
tag A, and from tags B page click through to tag A and it doesn't show
that it is linked to tag B.
2021-09-09 04:58:43 +00:00
|
|
|
tagCriterion = models.HierarchicalMultiCriterionInput{
|
2021-03-10 01:25:51 +00:00
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(tagIDs[tagIdx1WithPerformer]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierExcludes,
|
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithPerformerTwoTags, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-03-10 01:25:51 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
2021-11-06 22:34:33 +00:00
|
|
|
tagCriterion = models.HierarchicalMultiCriterionInput{
|
|
|
|
Modifier: models.CriterionModifierIsNull,
|
|
|
|
}
|
|
|
|
q = getSceneStringValue(sceneIdx1WithPerformer, titleField)
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-11-06 22:34:33 +00:00
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdx1WithPerformer], scenes[0].ID)
|
|
|
|
|
|
|
|
q = getSceneStringValue(sceneIdxWithPerformerTag, titleField)
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-11-06 22:34:33 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
tagCriterion.Modifier = models.CriterionModifierNotNull
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-11-06 22:34:33 +00:00
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdxWithPerformerTag], scenes[0].ID)
|
|
|
|
|
|
|
|
q = getSceneStringValue(sceneIdx1WithPerformer, titleField)
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-11-06 22:34:33 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
2021-03-10 01:25:51 +00:00
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
func TestSceneQueryStudio(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-06-03 10:52:19 +00:00
|
|
|
studioCriterion := models.HierarchicalMultiCriterionInput{
|
2021-01-18 01:23:20 +00:00
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(studioIDs[studioIdxWithScene]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierIncludes,
|
|
|
|
}
|
|
|
|
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Studios: &studioCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
|
|
|
|
// ensure id is correct
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdxWithStudio], scenes[0].ID)
|
|
|
|
|
2021-06-03 10:52:19 +00:00
|
|
|
studioCriterion = models.HierarchicalMultiCriterionInput{
|
2021-01-18 01:23:20 +00:00
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(studioIDs[studioIdxWithScene]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierExcludes,
|
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithStudio, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-06-03 10:52:19 +00:00
|
|
|
func TestSceneQueryStudioDepth(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-09-16 10:41:07 +00:00
|
|
|
depth := 2
|
2021-06-03 10:52:19 +00:00
|
|
|
studioCriterion := models.HierarchicalMultiCriterionInput{
|
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(studioIDs[studioIdxWithGrandChild]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierIncludes,
|
2021-09-16 10:41:07 +00:00
|
|
|
Depth: &depth,
|
2021-06-03 10:52:19 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Studios: &studioCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-06-03 10:52:19 +00:00
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
|
2021-09-16 10:41:07 +00:00
|
|
|
depth = 1
|
2021-06-03 10:52:19 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-06-03 10:52:19 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
studioCriterion.Value = []string{strconv.Itoa(studioIDs[studioIdxWithParentAndChild])}
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-06-03 10:52:19 +00:00
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
|
|
|
|
// ensure id is correct
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdxWithGrandChildStudio], scenes[0].ID)
|
2021-09-16 10:41:07 +00:00
|
|
|
depth = 2
|
2021-06-03 10:52:19 +00:00
|
|
|
|
|
|
|
studioCriterion = models.HierarchicalMultiCriterionInput{
|
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(studioIDs[studioIdxWithGrandChild]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierExcludes,
|
2021-09-16 10:41:07 +00:00
|
|
|
Depth: &depth,
|
2021-06-03 10:52:19 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithGrandChildStudio, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-06-03 10:52:19 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
2021-09-16 10:41:07 +00:00
|
|
|
depth = 1
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-06-03 10:52:19 +00:00
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
|
|
|
|
studioCriterion.Value = []string{strconv.Itoa(studioIDs[studioIdxWithParentAndChild])}
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-06-03 10:52:19 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
func TestSceneQueryMovies(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
movieCriterion := models.MultiCriterionInput{
|
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(movieIDs[movieIdxWithScene]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierIncludes,
|
|
|
|
}
|
|
|
|
|
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Movies: &movieCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
|
|
|
|
// ensure id is correct
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdxWithMovie], scenes[0].ID)
|
|
|
|
|
|
|
|
movieCriterion = models.MultiCriterionInput{
|
|
|
|
Value: []string{
|
|
|
|
strconv.Itoa(movieIDs[movieIdxWithScene]),
|
|
|
|
},
|
|
|
|
Modifier: models.CriterionModifierExcludes,
|
|
|
|
}
|
|
|
|
|
|
|
|
q := getSceneStringValue(sceneIdxWithMovie, titleField)
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
Q: &q,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func TestSceneQueryPhashDuplicated(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
|
|
|
duplicated := true
|
|
|
|
phashCriterion := models.PHashDuplicationCriterionInput{
|
|
|
|
Duplicated: &duplicated,
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
Duplicated: &phashCriterion,
|
|
|
|
}
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
assert.Len(t, scenes, dupeScenePhashes*2)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
duplicated = false
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, &sceneFilter, nil)
|
|
|
|
// -1 for missing phash
|
|
|
|
assert.Len(t, scenes, totalScenes-(dupeScenePhashes*2)-1)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func TestSceneQuerySorting(t *testing.T) {
|
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
sortBy string
|
|
|
|
dir models.SortDirectionEnum
|
|
|
|
firstSceneIdx int // -1 to ignore
|
|
|
|
lastSceneIdx int
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
"bitrate",
|
|
|
|
"bitrate",
|
|
|
|
models.SortDirectionEnumAsc,
|
|
|
|
-1,
|
|
|
|
-1,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"duration",
|
|
|
|
"duration",
|
|
|
|
models.SortDirectionEnumDesc,
|
|
|
|
-1,
|
|
|
|
-1,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"file mod time",
|
|
|
|
"file_mod_time",
|
|
|
|
models.SortDirectionEnumDesc,
|
|
|
|
-1,
|
|
|
|
-1,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"file size",
|
|
|
|
"size",
|
|
|
|
models.SortDirectionEnumDesc,
|
|
|
|
-1,
|
|
|
|
-1,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"frame rate",
|
|
|
|
"framerate",
|
|
|
|
models.SortDirectionEnumDesc,
|
|
|
|
-1,
|
|
|
|
-1,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"path",
|
|
|
|
"path",
|
|
|
|
models.SortDirectionEnumDesc,
|
|
|
|
-1,
|
|
|
|
-1,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"perceptual_similarity",
|
|
|
|
"perceptual_similarity",
|
|
|
|
models.SortDirectionEnumDesc,
|
|
|
|
-1,
|
|
|
|
-1,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) {
|
|
|
|
assert := assert.New(t)
|
|
|
|
got, err := qb.Query(ctx, models.SceneQueryOptions{
|
|
|
|
QueryOptions: models.QueryOptions{
|
|
|
|
FindFilter: &models.FindFilterType{
|
|
|
|
Sort: &tt.sortBy,
|
|
|
|
Direction: &tt.dir,
|
|
|
|
},
|
|
|
|
},
|
|
|
|
})
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("sceneQueryBuilder.TestSceneQuerySorting() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
scenes, err := got.Resolve(ctx)
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("sceneQueryBuilder.TestSceneQuerySorting() error = %v", err)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if !assert.Greater(len(scenes), 0) {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// scenes should be in same order as indexes
|
|
|
|
firstScene := scenes[0]
|
|
|
|
lastScene := scenes[len(scenes)-1]
|
|
|
|
|
|
|
|
if tt.firstSceneIdx != -1 {
|
|
|
|
firstSceneID := sceneIDs[tt.firstSceneIdx]
|
|
|
|
assert.Equal(firstSceneID, firstScene.ID)
|
|
|
|
}
|
|
|
|
if tt.lastSceneIdx != -1 {
|
|
|
|
lastSceneID := sceneIDs[tt.lastSceneIdx]
|
|
|
|
assert.Equal(lastSceneID, lastScene.ID)
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
func TestSceneQueryPagination(t *testing.T) {
|
|
|
|
perPage := 1
|
|
|
|
findFilter := models.FindFilterType{
|
|
|
|
PerPage: &perPage,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, nil, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
|
|
|
|
firstID := scenes[0].ID
|
|
|
|
|
|
|
|
page := 2
|
|
|
|
findFilter.Page = &page
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, nil, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
secondID := scenes[0].ID
|
|
|
|
assert.NotEqual(t, firstID, secondID)
|
|
|
|
|
|
|
|
perPage = 2
|
|
|
|
page = 1
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes = queryScene(ctx, t, sqb, nil, &findFilter)
|
2021-01-18 01:23:20 +00:00
|
|
|
assert.Len(t, scenes, 2)
|
|
|
|
assert.Equal(t, firstID, scenes[0].ID)
|
|
|
|
assert.Equal(t, secondID, scenes[1].ID)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-04-09 08:46:00 +00:00
|
|
|
func TestSceneQueryTagCount(t *testing.T) {
|
|
|
|
const tagCount = 1
|
|
|
|
tagCountCriterion := models.IntCriterionInput{
|
|
|
|
Value: tagCount,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
}
|
|
|
|
|
|
|
|
verifyScenesTagCount(t, tagCountCriterion)
|
|
|
|
|
|
|
|
tagCountCriterion.Modifier = models.CriterionModifierNotEquals
|
|
|
|
verifyScenesTagCount(t, tagCountCriterion)
|
|
|
|
|
|
|
|
tagCountCriterion.Modifier = models.CriterionModifierGreaterThan
|
|
|
|
verifyScenesTagCount(t, tagCountCriterion)
|
|
|
|
|
|
|
|
tagCountCriterion.Modifier = models.CriterionModifierLessThan
|
|
|
|
verifyScenesTagCount(t, tagCountCriterion)
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyScenesTagCount(t *testing.T, tagCountCriterion models.IntCriterionInput) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-04-09 08:46:00 +00:00
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
TagCount: &tagCountCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-04-09 08:46:00 +00:00
|
|
|
assert.Greater(t, len(scenes), 0)
|
|
|
|
|
|
|
|
for _, scene := range scenes {
|
2022-08-12 02:21:46 +00:00
|
|
|
if err := scene.LoadTagIDs(ctx, sqb); err != nil {
|
|
|
|
t.Errorf("scene.LoadTagIDs() error = %v", err)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
verifyInt(t, len(scene.TagIDs.List()), tagCountCriterion)
|
2021-04-09 08:46:00 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneQueryPerformerCount(t *testing.T) {
|
|
|
|
const performerCount = 1
|
|
|
|
performerCountCriterion := models.IntCriterionInput{
|
|
|
|
Value: performerCount,
|
|
|
|
Modifier: models.CriterionModifierEquals,
|
|
|
|
}
|
|
|
|
|
|
|
|
verifyScenesPerformerCount(t, performerCountCriterion)
|
|
|
|
|
|
|
|
performerCountCriterion.Modifier = models.CriterionModifierNotEquals
|
|
|
|
verifyScenesPerformerCount(t, performerCountCriterion)
|
|
|
|
|
|
|
|
performerCountCriterion.Modifier = models.CriterionModifierGreaterThan
|
|
|
|
verifyScenesPerformerCount(t, performerCountCriterion)
|
|
|
|
|
|
|
|
performerCountCriterion.Modifier = models.CriterionModifierLessThan
|
|
|
|
verifyScenesPerformerCount(t, performerCountCriterion)
|
|
|
|
}
|
|
|
|
|
|
|
|
func verifyScenesPerformerCount(t *testing.T, performerCountCriterion models.IntCriterionInput) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-04-09 08:46:00 +00:00
|
|
|
sceneFilter := models.SceneFilterType{
|
|
|
|
PerformerCount: &performerCountCriterion,
|
|
|
|
}
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, sqb, &sceneFilter, nil)
|
2021-04-09 08:46:00 +00:00
|
|
|
assert.Greater(t, len(scenes), 0)
|
|
|
|
|
|
|
|
for _, scene := range scenes {
|
2022-08-12 02:21:46 +00:00
|
|
|
if err := scene.LoadPerformerIDs(ctx, sqb); err != nil {
|
|
|
|
t.Errorf("scene.LoadPerformerIDs() error = %v", err)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
verifyInt(t, len(scene.PerformerIDs.List()), performerCountCriterion)
|
2021-04-09 08:46:00 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
func TestSceneCountByTagID(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
sceneCount, err := sqb.CountByTagID(ctx, tagIDs[tagIdxWithScene])
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("error calling CountByTagID: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(t, 1, sceneCount)
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
sceneCount, err = sqb.CountByTagID(ctx, 0)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("error calling CountByTagID: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(t, 0, sceneCount)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneCountByMovieID(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
sceneCount, err := sqb.CountByMovieID(ctx, movieIDs[movieIdxWithScene])
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("error calling CountByMovieID: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(t, 1, sceneCount)
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
sceneCount, err = sqb.CountByMovieID(ctx, 0)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("error calling CountByMovieID: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(t, 0, sceneCount)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneCountByStudioID(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
sceneCount, err := sqb.CountByStudioID(ctx, studioIDs[studioIdxWithScene])
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("error calling CountByStudioID: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(t, 1, sceneCount)
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
sceneCount, err = sqb.CountByStudioID(ctx, 0)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("error calling CountByStudioID: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(t, 0, sceneCount)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestFindByMovieID(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes, err := sqb.FindByMovieID(ctx, movieIDs[movieIdxWithScene])
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("error calling FindByMovieID: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdxWithMovie], scenes[0].ID)
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes, err = sqb.FindByMovieID(ctx, 0)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("error calling FindByMovieID: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestFindByPerformerID(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
sqb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes, err := sqb.FindByPerformerID(ctx, performerIDs[performerIdxWithScene])
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("error calling FindByPerformerID: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Len(t, scenes, 1)
|
|
|
|
assert.Equal(t, sceneIDs[sceneIdxWithPerformer], scenes[0].ID)
|
|
|
|
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes, err = sqb.FindByPerformerID(ctx, 0)
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("error calling FindByPerformerID: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Len(t, scenes, 0)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneUpdateSceneCover(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
if err := withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
qb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
sceneID := sceneIDs[sceneIdxWithGallery]
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
image := []byte("image")
|
2022-07-13 06:30:54 +00:00
|
|
|
if err := qb.UpdateCover(ctx, sceneID, image); err != nil {
|
2021-01-18 01:23:20 +00:00
|
|
|
return fmt.Errorf("Error updating scene cover: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
// ensure image set
|
2022-07-13 06:30:54 +00:00
|
|
|
storedImage, err := qb.GetCover(ctx, sceneID)
|
2021-01-18 01:23:20 +00:00
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("Error getting image: %s", err.Error())
|
|
|
|
}
|
|
|
|
assert.Equal(t, storedImage, image)
|
|
|
|
|
|
|
|
// set nil image
|
2022-07-13 06:30:54 +00:00
|
|
|
err = qb.UpdateCover(ctx, sceneID, nil)
|
2021-01-18 01:23:20 +00:00
|
|
|
if err == nil {
|
|
|
|
return fmt.Errorf("Expected error setting nil image")
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}); err != nil {
|
|
|
|
t.Error(err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneDestroySceneCover(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
if err := withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
qb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
sceneID := sceneIDs[sceneIdxWithGallery]
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
image := []byte("image")
|
2022-07-13 06:30:54 +00:00
|
|
|
if err := qb.UpdateCover(ctx, sceneID, image); err != nil {
|
2021-01-18 01:23:20 +00:00
|
|
|
return fmt.Errorf("Error updating scene image: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
if err := qb.DestroyCover(ctx, sceneID); err != nil {
|
2021-01-18 01:23:20 +00:00
|
|
|
return fmt.Errorf("Error destroying scene cover: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
// image should be nil
|
2022-07-13 06:30:54 +00:00
|
|
|
storedImage, err := qb.GetCover(ctx, sceneID)
|
2021-01-18 01:23:20 +00:00
|
|
|
if err != nil {
|
|
|
|
return fmt.Errorf("Error getting image: %s", err.Error())
|
|
|
|
}
|
|
|
|
assert.Nil(t, storedImage)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}); err != nil {
|
|
|
|
t.Error(err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneStashIDs(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
if err := withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
qb := db.Scene
|
2021-01-18 01:23:20 +00:00
|
|
|
|
|
|
|
// create scene to test against
|
|
|
|
const name = "TestSceneStashIDs"
|
2022-07-13 06:30:54 +00:00
|
|
|
scene := &models.Scene{
|
|
|
|
Title: name,
|
2021-01-18 01:23:20 +00:00
|
|
|
}
|
2022-07-13 06:30:54 +00:00
|
|
|
if err := qb.Create(ctx, scene, nil); err != nil {
|
2021-01-18 01:23:20 +00:00
|
|
|
return fmt.Errorf("Error creating scene: %s", err.Error())
|
|
|
|
}
|
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
if err := scene.LoadStashIDs(ctx, qb); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
testSceneStashIDs(ctx, t, scene)
|
2021-01-18 01:23:20 +00:00
|
|
|
return nil
|
|
|
|
}); err != nil {
|
|
|
|
t.Error(err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func testSceneStashIDs(ctx context.Context, t *testing.T, s *models.Scene) {
|
|
|
|
// ensure no stash IDs to begin with
|
2022-08-12 02:21:46 +00:00
|
|
|
assert.Len(t, s.StashIDs.List(), 0)
|
2022-07-13 06:30:54 +00:00
|
|
|
|
|
|
|
// add stash ids
|
|
|
|
const stashIDStr = "stashID"
|
|
|
|
const endpoint = "endpoint"
|
|
|
|
stashID := models.StashID{
|
|
|
|
StashID: stashIDStr,
|
|
|
|
Endpoint: endpoint,
|
|
|
|
}
|
|
|
|
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
// update stash ids and ensure was updated
|
|
|
|
var err error
|
|
|
|
s, err = qb.UpdatePartial(ctx, s.ID, models.ScenePartial{
|
|
|
|
StashIDs: &models.UpdateStashIDs{
|
|
|
|
StashIDs: []models.StashID{stashID},
|
|
|
|
Mode: models.RelationshipUpdateModeSet,
|
|
|
|
},
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
t.Error(err.Error())
|
|
|
|
}
|
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
if err := s.LoadStashIDs(ctx, qb); err != nil {
|
|
|
|
t.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Equal(t, []models.StashID{stashID}, s.StashIDs.List())
|
2022-07-13 06:30:54 +00:00
|
|
|
|
|
|
|
// remove stash ids and ensure was updated
|
|
|
|
s, err = qb.UpdatePartial(ctx, s.ID, models.ScenePartial{
|
|
|
|
StashIDs: &models.UpdateStashIDs{
|
|
|
|
StashIDs: []models.StashID{stashID},
|
|
|
|
Mode: models.RelationshipUpdateModeRemove,
|
|
|
|
},
|
|
|
|
})
|
|
|
|
if err != nil {
|
|
|
|
t.Error(err.Error())
|
|
|
|
}
|
|
|
|
|
2022-08-12 02:21:46 +00:00
|
|
|
if err := s.LoadStashIDs(ctx, qb); err != nil {
|
|
|
|
t.Error(err.Error())
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Len(t, s.StashIDs.List(), 0)
|
2022-07-13 06:30:54 +00:00
|
|
|
}
|
|
|
|
|
2021-04-13 00:32:52 +00:00
|
|
|
func TestSceneQueryQTrim(t *testing.T) {
|
2022-05-19 07:49:32 +00:00
|
|
|
if err := withTxn(func(ctx context.Context) error {
|
2022-07-13 06:30:54 +00:00
|
|
|
qb := db.Scene
|
2021-04-13 00:32:52 +00:00
|
|
|
|
|
|
|
expectedID := sceneIDs[sceneIdxWithSpacedName]
|
|
|
|
|
|
|
|
type test struct {
|
|
|
|
query string
|
|
|
|
id int
|
|
|
|
count int
|
|
|
|
}
|
|
|
|
tests := []test{
|
|
|
|
{query: " zzz yyy ", id: expectedID, count: 1},
|
|
|
|
{query: " \"zzz yyy xxx\" ", id: expectedID, count: 1},
|
|
|
|
{query: "zzz", id: expectedID, count: 1},
|
|
|
|
{query: "\" zzz yyy \"", count: 0},
|
|
|
|
{query: "\"zzz yyy\"", count: 0},
|
|
|
|
{query: "\" zzz yyy\"", count: 0},
|
|
|
|
{query: "\"zzz yyy \"", count: 0},
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, tst := range tests {
|
|
|
|
f := models.FindFilterType{
|
|
|
|
Q: &tst.query,
|
|
|
|
}
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, qb, nil, &f)
|
2021-04-13 00:32:52 +00:00
|
|
|
|
|
|
|
assert.Len(t, scenes, tst.count)
|
|
|
|
if len(scenes) > 0 {
|
|
|
|
assert.Equal(t, tst.id, scenes[0].ID)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
findFilter := models.FindFilterType{}
|
2022-05-19 07:49:32 +00:00
|
|
|
scenes := queryScene(ctx, t, qb, nil, &findFilter)
|
2021-04-13 00:32:52 +00:00
|
|
|
assert.NotEqual(t, 0, len(scenes))
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}); err != nil {
|
|
|
|
t.Error(err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-07-13 06:30:54 +00:00
|
|
|
func TestSceneStore_All(t *testing.T) {
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
withRollbackTxn(func(ctx context.Context) error {
|
|
|
|
got, err := qb.All(ctx)
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("SceneStore.All() error = %v", err)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// it's possible that other tests have created scenes
|
|
|
|
assert.GreaterOrEqual(t, len(got), len(sceneIDs))
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func TestSceneStore_FindDuplicates(t *testing.T) {
|
|
|
|
qb := db.Scene
|
|
|
|
|
|
|
|
withRollbackTxn(func(ctx context.Context) error {
|
|
|
|
distance := 0
|
|
|
|
got, err := qb.FindDuplicates(ctx, distance)
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("SceneStore.FindDuplicates() error = %v", err)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Len(t, got, dupeScenePhashes)
|
|
|
|
|
|
|
|
distance = 1
|
|
|
|
got, err = qb.FindDuplicates(ctx, distance)
|
|
|
|
if err != nil {
|
|
|
|
t.Errorf("SceneStore.FindDuplicates() error = %v", err)
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
assert.Len(t, got, dupeScenePhashes)
|
|
|
|
|
|
|
|
return nil
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-01-18 01:23:20 +00:00
|
|
|
// TODO Count
|
|
|
|
// TODO SizeCount
|