mirror of https://github.com/stashapp/stash.git
Various bug fixes (#2935)
* Sort scene/image/gallery tags by name
* Calculate md5 if missing
* Prevent multiple folder create logs
This commit is contained in:
parent 3fa7b470e7
commit cffcd9f4b8
@@ -9,6 +9,7 @@ import (
     "github.com/stashapp/stash/pkg/file"
     "github.com/stashapp/stash/pkg/hash/md5"
     "github.com/stashapp/stash/pkg/hash/oshash"
+    "github.com/stashapp/stash/pkg/logger"
 )

 type fingerprintCalculator struct {
@@ -58,27 +59,55 @@ func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*file.Fingerprint,
     }, nil
 }

-func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.Opener) ([]file.Fingerprint, error) {
+func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.Opener, useExisting bool) ([]file.Fingerprint, error) {
     var ret []file.Fingerprint
     calculateMD5 := true

     if isVideo(f.Basename) {
-        // calculate oshash first
-        fp, err := c.calculateOshash(f, o)
-        if err != nil {
-            return nil, err
+        var (
+            fp  *file.Fingerprint
+            err error
+        )
+
+        if useExisting {
+            fp = f.Fingerprints.For(file.FingerprintTypeOshash)
+        }
+
+        if fp == nil {
+            // calculate oshash first
+            fp, err = c.calculateOshash(f, o)
+            if err != nil {
+                return nil, err
+            }
         }

         ret = append(ret, *fp)

         // only calculate MD5 if enabled in config
-        calculateMD5 = c.Config.IsCalculateMD5()
+        // always re-calculate MD5 if the file already has it
+        calculateMD5 = c.Config.IsCalculateMD5() || f.Fingerprints.For(file.FingerprintTypeMD5) != nil
     }

     if calculateMD5 {
-        fp, err := c.calculateMD5(o)
-        if err != nil {
-            return nil, err
+        var (
+            fp  *file.Fingerprint
+            err error
+        )
+
+        if useExisting {
+            fp = f.Fingerprints.For(file.FingerprintTypeMD5)
+        }
+
+        if fp == nil {
+            if useExisting {
+                // log to indicate missing fingerprint is being calculated
+                logger.Infof("Calculating checksum for %s ...", f.Path)
+            }
+
+            fp, err = c.calculateMD5(o)
+            if err != nil {
+                return nil, err
+            }
         }

         ret = append(ret, *fp)
@@ -14,6 +14,39 @@ type Fingerprint struct {

 type Fingerprints []Fingerprint

+func (f Fingerprints) Equals(other Fingerprints) bool {
+    if len(f) != len(other) {
+        return false
+    }
+
+    for _, ff := range f {
+        found := false
+        for _, oo := range other {
+            if ff == oo {
+                found = true
+                break
+            }
+        }
+
+        if !found {
+            return false
+        }
+    }
+
+    return true
+}
+
+// For returns a pointer to the first Fingerprint element matching the provided type.
+func (f Fingerprints) For(type_ string) *Fingerprint {
+    for _, fp := range f {
+        if fp.Type == type_ {
+            return &fp
+        }
+    }
+
+    return nil
+}
+
 func (f Fingerprints) Get(type_ string) interface{} {
     for _, fp := range f {
         if fp.Type == type_ {
@@ -59,5 +92,5 @@ func (f Fingerprints) AppendUnique(o Fingerprint) Fingerprints {

 // FingerprintCalculator calculates a fingerprint for the provided file.
 type FingerprintCalculator interface {
-    CalculateFingerprints(f *BaseFile, o Opener) ([]Fingerprint, error)
+    CalculateFingerprints(f *BaseFile, o Opener, useExisting bool) ([]Fingerprint, error)
 }
@@ -0,0 +1,86 @@
+package file
+
+import "testing"
+
+func TestFingerprints_Equals(t *testing.T) {
+    var (
+        value1 = 1
+        value2 = "2"
+        value3 = 1.23
+
+        fingerprint1 = Fingerprint{
+            Type:        FingerprintTypeMD5,
+            Fingerprint: value1,
+        }
+        fingerprint2 = Fingerprint{
+            Type:        FingerprintTypeOshash,
+            Fingerprint: value2,
+        }
+        fingerprint3 = Fingerprint{
+            Type:        FingerprintTypePhash,
+            Fingerprint: value3,
+        }
+    )
+
+    tests := []struct {
+        name  string
+        f     Fingerprints
+        other Fingerprints
+        want  bool
+    }{
+        {
+            "identical",
+            Fingerprints{
+                fingerprint1,
+                fingerprint2,
+            },
+            Fingerprints{
+                fingerprint1,
+                fingerprint2,
+            },
+            true,
+        },
+        {
+            "different order",
+            Fingerprints{
+                fingerprint1,
+                fingerprint2,
+            },
+            Fingerprints{
+                fingerprint2,
+                fingerprint1,
+            },
+            true,
+        },
+        {
+            "different length",
+            Fingerprints{
+                fingerprint1,
+                fingerprint2,
+            },
+            Fingerprints{
+                fingerprint1,
+            },
+            false,
+        },
+        {
+            "different",
+            Fingerprints{
+                fingerprint1,
+                fingerprint2,
+            },
+            Fingerprints{
+                fingerprint1,
+                fingerprint3,
+            },
+            false,
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            if got := tt.f.Equals(tt.other); got != tt.want {
+                t.Errorf("Fingerprints.Equals() = %v, want %v", got, tt.want)
+            }
+        })
+    }
+}
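An illustrative sketch, not part of the commit: the new Fingerprints.For helper is what lets the calculator and scanner reuse an existing fingerprint and fall back to recalculating only when the type is missing. A minimal test-style example in the same package (the hash value is made up):

package file

import "testing"

// Example only (not in this commit): For returns a pointer to the first
// fingerprint of the requested type, or nil when none is stored.
func TestFingerprints_For_Example(t *testing.T) {
    fps := Fingerprints{
        {Type: FingerprintTypeOshash, Fingerprint: "abcd1234"},
    }

    if fp := fps.For(FingerprintTypeOshash); fp == nil || fp.Fingerprint != "abcd1234" {
        t.Errorf("expected the stored oshash fingerprint, got %v", fp)
    }

    // a nil result is the signal to go ahead and calculate the missing MD5
    if fp := fps.For(FingerprintTypeMD5); fp != nil {
        t.Errorf("expected nil for the missing md5 fingerprint, got %v", fp)
    }
}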
@@ -482,7 +482,14 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, erro
         }
     }

-    logger.Infof("%s doesn't exist. Creating new folder entry...", file.Path)
+    txn.AddPostCommitHook(ctx, func(ctx context.Context) error {
+        // log at the end so that if anything fails above due to a locked database
+        // error and the transaction must be retried, then we shouldn't get multiple
+        // logs of the same thing.
+        logger.Infof("%s doesn't exist. Creating new folder entry...", file.Path)
+        return nil
+    })
+
     if err := s.Repository.FolderStore.Create(ctx, toCreate); err != nil {
         return nil, fmt.Errorf("creating folder %q: %w", file.Path, err)
     }
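A self-contained sketch, not part of the commit and not the real stash txn package, only to illustrate why the folder-creation log is deferred to a post-commit hook: if the transaction body fails (for example with a locked-database error) and is retried, hooks registered by the failed attempt are discarded with the failed state, so the message is logged exactly once.

package main

import "fmt"

// hypothetical stand-ins for the real transaction machinery
type txnState struct {
    postCommitHooks []func()
}

func (t *txnState) addPostCommitHook(h func()) {
    t.postCommitHooks = append(t.postCommitHooks, h)
}

// withTxnRetry runs body up to attempts times; post-commit hooks fire only
// after the attempt that succeeds.
func withTxnRetry(attempts int, body func(t *txnState) error) error {
    var err error
    for i := 0; i < attempts; i++ {
        t := &txnState{}
        if err = body(t); err != nil {
            continue // e.g. "database is locked": retry with fresh state
        }
        for _, h := range t.postCommitHooks {
            h()
        }
        return nil
    }
    return fmt.Errorf("transaction failed: %w", err)
}

func main() {
    attempt := 0
    _ = withTxnRetry(3, func(t *txnState) error {
        attempt++
        t.addPostCommitHook(func() { fmt.Println("folder entry created") })
        if attempt == 1 {
            return fmt.Errorf("database is locked")
        }
        return nil
    })
    // prints "folder entry created" once, even though the body ran twice
}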
@@ -513,6 +520,8 @@ func modTime(info fs.FileInfo) time.Time {
 }

 func (s *scanJob) handleFile(ctx context.Context, f scanFile) error {
+    defer s.incrementProgress(f)
+
     var ff File
     // don't use a transaction to check if new or existing
     if err := s.withDB(ctx, func(ctx context.Context) error {
@@ -581,7 +590,6 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
     // add this file to the queue to be created later
     if s.retrying {
         // if we're retrying and the folder still doesn't exist, then it's a problem
-        s.incrementProgress(f)
         return nil, fmt.Errorf("parent folder for %q doesn't exist", path)
     }

@@ -593,7 +601,6 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {

     zipFileID, err := s.getZipFileID(ctx, f.zipFile)
     if err != nil {
-        s.incrementProgress(f)
         return nil, err
     }

@@ -601,9 +608,9 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
         baseFile.ZipFileID = zipFileID
     }

-    fp, err := s.calculateFingerprints(f.fs, baseFile, path)
+    const useExisting = false
+    fp, err := s.calculateFingerprints(f.fs, baseFile, path, useExisting)
     if err != nil {
-        s.incrementProgress(f)
         return nil, err
     }

@@ -611,7 +618,6 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {

     file, err := s.fireDecorators(ctx, f.fs, baseFile)
     if err != nil {
-        s.incrementProgress(f)
         return nil, err
     }

@@ -619,7 +625,6 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
     // do this after decoration so that missing fields can be populated
     renamed, err := s.handleRename(ctx, file, fp)
     if err != nil {
-        s.incrementProgress(f)
         return nil, err
     }

@@ -667,14 +672,17 @@ func (s *scanJob) fireHandlers(ctx context.Context, f File) error {
     return nil
 }

-func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string) ([]Fingerprint, error) {
-    logger.Infof("Calculating fingerprints for %s ...", path)
+func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string, useExisting bool) (Fingerprints, error) {
+    // only log if we're (re)calculating fingerprints
+    if !useExisting {
+        logger.Infof("Calculating fingerprints for %s ...", path)
+    }

     // calculate primary fingerprint for the file
     fp, err := s.FingerprintCalculator.CalculateFingerprints(f, &fsOpener{
         fs:   fs,
         name: path,
-    })
+    }, useExisting)
     if err != nil {
         return nil, fmt.Errorf("calculating fingerprint for file %q: %w", path, err)
     }
@@ -844,6 +852,30 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing F
     return existing, nil
 }

+func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing File) (File, error) {
+    const useExisting = true
+    fp, err := s.calculateFingerprints(f.fs, existing.Base(), f.Path, useExisting)
+    if err != nil {
+        return nil, err
+    }
+
+    if !fp.Equals(existing.Base().Fingerprints) {
+        existing.SetFingerprints(fp)
+
+        if err := s.withTxn(ctx, func(ctx context.Context) error {
+            if err := s.Repository.Update(ctx, existing); err != nil {
+                return fmt.Errorf("updating file %q: %w", f.Path, err)
+            }
+
+            return nil
+        }); err != nil {
+            return nil, err
+        }
+    }
+
+    return existing, nil
+}
+
 // returns a file only if it was updated
 func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) (File, error) {
     base := existing.Base()
@@ -853,16 +885,23 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)
     updated := !fileModTime.Equal(base.ModTime)

     if !updated {
+        var err error
+
         isMissingMetdata := s.isMissingMetadata(existing)
         // set missing information
         if isMissingMetdata {
-            var err error
             existing, err = s.setMissingMetadata(ctx, f, existing)
             if err != nil {
                 return nil, err
             }
         }

+        // calculate missing fingerprints
+        existing, err = s.setMissingFingerprints(ctx, f, existing)
+        if err != nil {
+            return nil, err
+        }
+
         handlerRequired := false
         if err := s.withDB(ctx, func(ctx context.Context) error {
             // check if the handler needs to be run
@@ -873,8 +912,6 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)
         }

         if !handlerRequired {
-            s.incrementProgress(f)
-
             // if this file is a zip file, then we need to rescan the contents
             // as well. We do this by returning the file, instead of nil.
             if isMissingMetdata {
@@ -889,7 +926,6 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)
                 return err
             }

-            s.incrementProgress(f)
             return nil
         }); err != nil {
             return nil, err
@@ -910,9 +946,9 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)
     base.UpdatedAt = time.Now()

     // calculate and update fingerprints for the file
-    fp, err := s.calculateFingerprints(f.fs, base, path)
+    const useExisting = false
+    fp, err := s.calculateFingerprints(f.fs, base, path, useExisting)
     if err != nil {
-        s.incrementProgress(f)
         return nil, err
     }

@@ -920,7 +956,6 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File)

     existing, err = s.fireDecorators(ctx, f.fs, existing)
     if err != nil {
-        s.incrementProgress(f)
         return nil, err
     }

@@ -1135,7 +1135,9 @@ func (qb *GalleryStore) tagsRepository() *joinRepository {
             tableName: galleriesTagsTable,
             idColumn:  galleryIDColumn,
         },
-        fkColumn: "tag_id",
+        fkColumn:     "tag_id",
+        foreignTable: tagTable,
+        orderBy:      "tags.name ASC",
     }
 }

@@ -82,7 +82,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
         },
         false,
@@ -103,7 +103,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
         },
         false,
@@ -235,7 +235,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             SceneIDs: models.NewRelatedIDs([]int{sceneIDs[sceneIdx1WithPerformer], sceneIDs[sceneIdx1WithStudio]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
         },
         false,
@@ -1076,7 +1076,9 @@ func (qb *ImageStore) tagsRepository() *joinRepository {
             tableName: imagesTagsTable,
             idColumn:  imageIDColumn,
         },
-        fkColumn: tagIDColumn,
+        fkColumn:     tagIDColumn,
+        foreignTable: tagTable,
+        orderBy:      "tags.name ASC",
     }
 }

@@ -78,7 +78,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
         },
         false,
@@ -99,7 +99,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
         },
         false,
@@ -231,7 +231,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
         },
         false,
@@ -432,7 +432,7 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithImage]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithImage], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithImage]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithImage], performerIDs[performerIdx1WithDupName]}),
         },
         false,
@@ -537,9 +537,12 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
             },
         },
         models.Image{
-            TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags]),
-                tagIDs[tagIdx1WithDupName],
-                tagIDs[tagIdx1WithGallery],
+            TagIDs: models.NewRelatedIDs(append(
+                []int{
+                    tagIDs[tagIdx1WithGallery],
+                    tagIDs[tagIdx1WithDupName],
+                },
+                indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags])...,
             )),
         },
         false,
@@ -587,8 +590,9 @@ func Test_imageQueryBuilder_UpdatePartialRelationships(t *testing.T) {
             },
         },
         models.Image{
-            TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags]),
-                tagIDs[tagIdx1WithGallery],
+            TagIDs: models.NewRelatedIDs(append(
+                []int{tagIDs[tagIdx1WithGallery]},
+                indexesToIDs(tagIDs, imageTags[imageIdxWithTwoTags])...,
             )),
         },
         false,
@@ -303,10 +303,24 @@ type joiner interface {
 type joinRepository struct {
     repository
     fkColumn string
+
+    // fields for ordering
+    foreignTable string
+    orderBy      string
 }

 func (r *joinRepository) getIDs(ctx context.Context, id int) ([]int, error) {
-    query := fmt.Sprintf(`SELECT %s as id from %s WHERE %s = ?`, r.fkColumn, r.tableName, r.idColumn)
+    var joinStr string
+    if r.foreignTable != "" {
+        joinStr = fmt.Sprintf(" INNER JOIN %s ON %[1]s.id = %s.%s", r.foreignTable, r.tableName, r.fkColumn)
+    }
+
+    query := fmt.Sprintf(`SELECT %[2]s.%[1]s as id from %s%s WHERE %s = ?`, r.fkColumn, r.tableName, joinStr, r.idColumn)
+
+    if r.orderBy != "" {
+        query += " ORDER BY " + r.orderBy
+    }
+
     return r.runIdsQuery(ctx, query, []interface{}{id})
 }

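Not part of the diff: a small runnable sketch of the query getIDs now builds for the scene-tags repository. The concrete table and column names (scenes_tags, scene_id, tag_id, tags) are assumed values for the constants referenced elsewhere in this diff, not something the commit itself states.

package main

import "fmt"

func main() {
    // assumed values of scenesTagsTable, sceneIDColumn, tagIDColumn, tagTable
    fkColumn := "tag_id"
    tableName := "scenes_tags"
    idColumn := "scene_id"
    foreignTable := "tags"
    orderBy := "tags.name ASC"

    // same formatting logic as joinRepository.getIDs above
    joinStr := fmt.Sprintf(" INNER JOIN %s ON %[1]s.id = %s.%s", foreignTable, tableName, fkColumn)
    query := fmt.Sprintf(`SELECT %[2]s.%[1]s as id from %s%s WHERE %s = ?`, fkColumn, tableName, joinStr, idColumn)
    if orderBy != "" {
        query += " ORDER BY " + orderBy
    }

    fmt.Println(query)
    // prints: SELECT scenes_tags.tag_id as id from scenes_tags INNER JOIN tags
    //         ON tags.id = scenes_tags.tag_id WHERE scene_id = ? ORDER BY tags.name ASC
}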
@@ -1469,7 +1469,9 @@ func (qb *SceneStore) tagsRepository() *joinRepository {
             tableName: scenesTagsTable,
             idColumn:  sceneIDColumn,
         },
-        fkColumn: tagIDColumn,
+        fkColumn:     tagIDColumn,
+        foreignTable: tagTable,
+        orderBy:      "tags.name ASC",
     }
 }

@@ -110,7 +110,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
             Movies: models.NewRelatedMovies([]models.MoviesScenes{
                 {
@@ -152,7 +152,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
             Movies: models.NewRelatedMovies([]models.MoviesScenes{
                 {
@@ -330,7 +330,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
             Movies: models.NewRelatedMovies([]models.MoviesScenes{
                 {
@@ -588,7 +588,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) {
             CreatedAt: createdAt,
             UpdatedAt: updatedAt,
             GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}),
-            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithScene], tagIDs[tagIdx1WithDupName]}),
+            TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}),
             PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}),
             Movies: models.NewRelatedMovies([]models.MoviesScenes{
                 {
@@ -749,9 +749,12 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
             },
         },
         models.Scene{
-            TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
-                tagIDs[tagIdx1WithDupName],
-                tagIDs[tagIdx1WithGallery],
+            TagIDs: models.NewRelatedIDs(append(
+                []int{
+                    tagIDs[tagIdx1WithGallery],
+                    tagIDs[tagIdx1WithDupName],
+                },
+                indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags])...,
             )),
         },
         false,
@@ -854,8 +857,9 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) {
             },
         },
         models.Scene{
-            TagIDs: models.NewRelatedIDs(append(indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags]),
-                tagIDs[tagIdx1WithGallery],
+            TagIDs: models.NewRelatedIDs(append(
+                []int{tagIDs[tagIdx1WithGallery]},
+                indexesToIDs(tagIDs, sceneTags[sceneIdxWithTwoTags])...,
             )),
         },
         false,